author    Georg Brandl <georg@python.org>  2014-10-08 09:21:15 +0200
committer Georg Brandl <georg@python.org>  2014-10-08 09:21:15 +0200
commit    444fb6fd9b3492040a36fcca672fee8175f8d603 (patch)
tree      2bd411ca78decf276b95dc6b1788e594b2e35287
parent    491fec23ef01687906f5d71ee718522cd2917926 (diff)
parent    c1bfe4eed3805d3556bffa3c6b9cc2d3f6976205 (diff)
Merged in leodemoura/pygments-main (pull request #399)
-rw-r--r--  .hgignore | 1
-rw-r--r--  AUTHORS | 2
-rw-r--r--  CHANGES | 90
-rw-r--r--  Makefile | 6
-rw-r--r--  README.rst | 39
-rw-r--r--  doc/conf.py | 2
-rw-r--r--  doc/docs/lexerdevelopment.rst | 406
-rw-r--r--  doc/docs/styles.rst | 2
-rw-r--r--  doc/faq.rst | 2
-rw-r--r--  doc/index.rst | 23
-rwxr-xr-x  pygmentize | 3
-rw-r--r--  pygments/cmdline.py | 118
-rw-r--r--  pygments/filters/__init__.py | 54
-rw-r--r--  pygments/formatter.py | 6
-rw-r--r--  pygments/formatters/__init__.py | 128
-rwxr-xr-x  pygments/formatters/_mapping.py | 107
-rw-r--r--  pygments/formatters/html.py | 8
-rw-r--r--  pygments/formatters/img.py | 18
-rw-r--r--  pygments/formatters/latex.py | 22
-rw-r--r--  pygments/formatters/other.py | 5
-rw-r--r--  pygments/formatters/rtf.py | 53
-rw-r--r--  pygments/formatters/svg.py | 1
-rw-r--r--  pygments/lexer.py | 120
-rw-r--r--  pygments/lexers/__init__.py | 90
-rw-r--r--  pygments/lexers/_asy_builtins.py (renamed from pygments/lexers/_asybuiltins.py) | 12
-rw-r--r--  pygments/lexers/_cl_builtins.py (renamed from pygments/lexers/_clbuiltins.py) | 32
-rw-r--r--  pygments/lexers/_cocoa_builtins.py | 73
-rw-r--r--  pygments/lexers/_cocoabuiltins.py | 73
-rw-r--r--  pygments/lexers/_lasso_builtins.py (renamed from pygments/lexers/_lassobuiltins.py) | 28
-rw-r--r--  pygments/lexers/_lua_builtins.py (renamed from pygments/lexers/_luabuiltins.py) | 57
-rw-r--r--  pygments/lexers/_mapping.py | 477
-rw-r--r--  pygments/lexers/_openedge_builtins.py | 2569
-rw-r--r--  pygments/lexers/_openedgebuiltins.py | 562
-rw-r--r--  pygments/lexers/_php_builtins.py (renamed from pygments/lexers/_phpbuiltins.py) | 698
-rw-r--r--  pygments/lexers/_postgres_builtins.py | 619
-rw-r--r--  pygments/lexers/_scilab_builtins.py | 3115
-rw-r--r--  pygments/lexers/_sourcemod_builtins.py | 1161
-rw-r--r--  pygments/lexers/_sourcemodbuiltins.py | 1077
-rw-r--r--  pygments/lexers/_stan_builtins.py | 70
-rw-r--r--  pygments/lexers/_vim_builtins.py | 1939
-rw-r--r--  pygments/lexers/_vimbuiltins.py | 13
-rw-r--r--  pygments/lexers/actionscript.py | 238
-rw-r--r--  pygments/lexers/agile.py | 2554
-rw-r--r--  pygments/lexers/algebra.py | 187
-rw-r--r--  pygments/lexers/ambient.py | 76
-rw-r--r--  pygments/lexers/apl.py | 101
-rw-r--r--  pygments/lexers/asm.py | 26
-rw-r--r--  pygments/lexers/automation.py | 373
-rw-r--r--  pygments/lexers/basic.py | 500
-rw-r--r--  pygments/lexers/business.py | 592
-rw-r--r--  pygments/lexers/c_cpp.py | 231
-rw-r--r--  pygments/lexers/c_like.py | 840
-rw-r--r--  pygments/lexers/chapel.py | 98
-rw-r--r--  pygments/lexers/compiled.py | 5203
-rw-r--r--  pygments/lexers/configs.py | 536
-rw-r--r--  pygments/lexers/console.py | 114
-rw-r--r--  pygments/lexers/css.py | 496
-rw-r--r--  pygments/lexers/d.py | 251
-rw-r--r--  pygments/lexers/data.py | 509
-rw-r--r--  pygments/lexers/diff.py | 106
-rw-r--r--  pygments/lexers/dotnet.py | 84
-rw-r--r--  pygments/lexers/dsls.py | 510
-rw-r--r--  pygments/lexers/dylan.py | 289
-rw-r--r--  pygments/lexers/ecl.py | 125
-rw-r--r--  pygments/lexers/eiffel.py | 65
-rw-r--r--  pygments/lexers/erlang.py | 508
-rw-r--r--  pygments/lexers/esoteric.py | 114
-rw-r--r--  pygments/lexers/factor.py | 344
-rw-r--r--  pygments/lexers/fantom.py | 250
-rw-r--r--  pygments/lexers/felix.py | 273
-rw-r--r--  pygments/lexers/fortran.py | 160
-rw-r--r--  pygments/lexers/functional.py | 3757
-rw-r--r--  pygments/lexers/go.py | 101
-rw-r--r--  pygments/lexers/graph.py | 10
-rw-r--r--  pygments/lexers/graphics.py | 551
-rw-r--r--  pygments/lexers/haskell.py | 839
-rw-r--r--  pygments/lexers/haxe.py | 934
-rw-r--r--  pygments/lexers/html.py | 589
-rw-r--r--  pygments/lexers/idl.py | 262
-rw-r--r--  pygments/lexers/igor.py | 280
-rw-r--r--  pygments/lexers/installers.py | 322
-rw-r--r--  pygments/lexers/int_fiction.py | 724
-rw-r--r--  pygments/lexers/iolang.py | 63
-rw-r--r--  pygments/lexers/javascript.py | 1199
-rw-r--r--  pygments/lexers/julia.py | 194
-rw-r--r--  pygments/lexers/jvm.py | 249
-rw-r--r--  pygments/lexers/lisp.py | 1480
-rw-r--r--  pygments/lexers/make.py | 199
-rw-r--r--  pygments/lexers/markup.py | 380
-rw-r--r--  pygments/lexers/math.py | 2285
-rw-r--r--  pygments/lexers/matlab.py | 663
-rw-r--r--  pygments/lexers/ml.py | 768
-rw-r--r--  pygments/lexers/modeling.py | 375
-rw-r--r--  pygments/lexers/nimrod.py | 159
-rw-r--r--  pygments/lexers/nit.py | 64
-rw-r--r--  pygments/lexers/nix.py | 140
-rw-r--r--  pygments/lexers/objective.py | 322
-rw-r--r--  pygments/lexers/ooc.py | 85
-rw-r--r--  pygments/lexers/other.py | 4508
-rw-r--r--  pygments/lexers/parsers.py | 225
-rw-r--r--  pygments/lexers/pascal.py | 833
-rw-r--r--  pygments/lexers/pawn.py | 199
-rw-r--r--  pygments/lexers/perl.py | 614
-rw-r--r--  pygments/lexers/php.py | 246
-rw-r--r--  pygments/lexers/prolog.py | 306
-rw-r--r--  pygments/lexers/python.py | 833
-rw-r--r--  pygments/lexers/qbasic.py | 157
-rw-r--r--  pygments/lexers/r.py | 453
-rw-r--r--  pygments/lexers/rebol.py | 433
-rw-r--r--  pygments/lexers/robotframework.py (renamed from pygments/lexers/_robotframeworklexer.py) | 6
-rw-r--r--  pygments/lexers/ruby.py | 516
-rw-r--r--  pygments/lexers/rust.py | 163
-rw-r--r--  pygments/lexers/scripting.py | 922
-rw-r--r--  pygments/lexers/shell.py | 28
-rw-r--r--  pygments/lexers/smalltalk.py | 195
-rw-r--r--  pygments/lexers/snobol.py | 83
-rw-r--r--  pygments/lexers/sql.py | 198
-rw-r--r--  pygments/lexers/tcl.py | 145
-rw-r--r--  pygments/lexers/templates.py | 93
-rw-r--r--  pygments/lexers/testing.py | 135
-rw-r--r--  pygments/lexers/text.py | 2058
-rw-r--r--  pygments/lexers/textedit.py | 169
-rw-r--r--  pygments/lexers/textfmts.py | 279
-rw-r--r--  pygments/lexers/theorem.py | 466
-rw-r--r--  pygments/lexers/urbi.py | 133
-rw-r--r--  pygments/lexers/web.py | 4510
-rw-r--r--  pygments/lexers/webmisc.py | 922
-rw-r--r--  pygments/regexopt.py | 92
-rw-r--r--  pygments/unistring.py | 7
-rw-r--r--  pygments/util.py | 105
-rwxr-xr-x  scripts/check_sources.py | 59
-rwxr-xr-x  scripts/debug_lexer.py | 233
-rwxr-xr-x  scripts/find_codetags.py | 14
l--------- [-rwxr-xr-x]  scripts/find_error.py | 174
-rw-r--r--  scripts/get_vimkw.py | 39
-rwxr-xr-x  setup.py | 17
-rw-r--r--  tests/examplefiles/99_bottles_of_beer.chpl | 43
-rw-r--r--  tests/examplefiles/Errors.scala | 5
-rw-r--r--  tests/examplefiles/all.nit | 1986
-rw-r--r--  tests/examplefiles/docker.docker | 5
-rw-r--r--  tests/examplefiles/example.cob | 936
-rw-r--r--  tests/examplefiles/example.golo | 113
-rw-r--r--  tests/examplefiles/example.hs | 4
-rw-r--r--  tests/examplefiles/example.hx | 7
-rw-r--r--  tests/examplefiles/example.stan | 11
-rw-r--r--  tests/examplefiles/example.thy | 751
-rw-r--r--  tests/examplefiles/example.weechatlog | 4
-rw-r--r--  tests/examplefiles/example_coq.v | 4
-rw-r--r--  tests/examplefiles/example_elixir.ex | 48
-rw-r--r--  tests/examplefiles/interp.scala | 10
-rw-r--r--  tests/examplefiles/matlab_sample | 4
-rw-r--r--  tests/examplefiles/pycon_test.pycon | 5
-rw-r--r--  tests/examplefiles/rust_example.rs | 4
-rw-r--r--  tests/examplefiles/test.agda | 7
-rw-r--r--  tests/examplefiles/test.idr | 8
-rw-r--r--  tests/examplefiles/test.php | 8
-rw-r--r--  tests/examplefiles/test.pypylog | 839
-rw-r--r--  tests/examplefiles/unicode.js | 5
-rw-r--r--  tests/examplefiles/vpath.mk | 16
-rw-r--r--  tests/run.py | 3
-rw-r--r--  tests/string_asserts.py | 4
-rw-r--r--  tests/support.py | 2
-rw-r--r--  tests/test_basic_api.py | 121
-rw-r--r--  tests/test_cmdline.py | 27
-rw-r--r--  tests/test_examplefiles.py | 58
-rw-r--r--  tests/test_html_formatter.py | 5
-rw-r--r--  tests/test_java.py | 42
-rw-r--r--  tests/test_latex_formatter.py | 5
-rw-r--r--  tests/test_lexers_other.py | 21
-rw-r--r--  tests/test_perllexer.py | 2
-rw-r--r--  tests/test_qbasiclexer.py | 4
-rw-r--r--  tests/test_regexopt.py | 52
-rw-r--r--  tests/test_ruby.py | 145
-rw-r--r--  tests/test_shell.py | 63
-rw-r--r--  tests/test_smarty.py | 40
-rw-r--r--  tests/test_string_asserts.py | 12
-rw-r--r--  tests/test_util.py | 12
177 files changed, 42109 insertions, 30528 deletions
diff --git a/.hgignore b/.hgignore
index 57aaeff5..a70d8593 100644
--- a/.hgignore
+++ b/.hgignore
@@ -9,3 +9,4 @@ Pygments.egg-info/*
.ropeproject
tests/examplefiles/output
.idea/
+.tags
diff --git a/AUTHORS b/AUTHORS
index 1bedef4b..4f699f6b 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -14,6 +14,7 @@ Other contributors, listed alphabetically, are:
* Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers
* Jeremy Ashkenas -- CoffeeScript lexer
* Stefan Matthias Aust -- Smalltalk lexer
+* Lucas Bajolet -- Nit lexer
* Ben Bangert -- Mako lexers
* Max Battcher -- Darcs patch lexer
* Thomas Baruchel -- APL lexer
@@ -78,6 +79,7 @@ Other contributors, listed alphabetically, are:
* Igor Kalnitsky -- vhdl lexer
* Alexander Kit -- MaskJS lexer
* Pekka Klärck -- Robot Framework lexer
+* Gerwin Klein -- Isabelle lexer
* Eric Knibbe -- Lasso lexer
* Stepan Koltsov -- Clay lexer
* Adam Koprowski -- Opa lexer
diff --git a/CHANGES b/CHANGES
index 8459e05a..85293b4a 100644
--- a/CHANGES
+++ b/CHANGES
@@ -17,34 +17,78 @@ Version 2.0
- Lexers added:
- * Clay (PR#184)
- * Perl 6 (PR#181)
- * Swig (PR#168)
- * nesC (PR#166)
- * BlitzBasic (PR#197)
- * EBNF (PR#193)
- * Igor Pro (PR#172)
- * Rexx (PR#199)
+ * APL (#969)
* Agda and Literate Agda (PR#203)
- * Mathematica (PR#245)
- * Nix (PR#267)
- * Pike (PR#237)
- * Hy (PR#238)
+ * Alloy (PR#355)
+ * AmbientTalk
+ * BlitzBasic (PR#197)
+ * ChaiScript (PR#24)
* Chapel (PR#256)
- * Kal (PR#233)
- * Eiffel (PR#273)
* Cirru (PR#275)
+ * Clay (PR#184)
* ColdFusion CFC (PR#283)
- * Idris (PR#210)
- * Intel objdump (PR#279)
- * MaskJS (PR#280)
+ * Cryptol and Literate Cryptol (PR#344)
+ * Cypher (PR#257)
+ * Docker config files
+ * EBNF (PR#193)
+ * Eiffel (PR#273)
+ * GAP (PR#311)
+ * Golo (PR#309)
+ * Handlebars (PR#186)
+ * Hy (PR#238)
+ * Idris and Literate Idris (PR#210)
+ * Igor Pro (PR#172)
* Inform 6/7 (PR#281)
+ * Intel objdump (PR#279)
+ * Isabelle (PR#386)
+ * Jasmin (PR#349)
+ * Kal (PR#233)
+ * Lean (PR#399)
+ * LSL (PR#296)
+ * Limbo (PR#291)
+ * Liquid (#977)
* MQL (PR#285)
- * APL (#969)
+ * MaskJS (PR#280)
+ * Mathematica (PR#245)
+ * NesC (PR#166)
+ * Nit (PR#375)
+ * Nix (PR#267)
+ * Pan
+ * Pawn (PR#211)
+ * Perl 6 (PR#181)
+ * Pig (PR#304)
+ * Pike (PR#237)
+ * QBasic (PR#182)
+ * Red (PR#341)
+ * Rexx (PR#199)
+ * Rql (PR#251)
+ * Rsl
+ * SPARQL (PR#78)
+ * Slim (PR#366)
+ * Swift (PR#371)
+ * Swig (PR#168)
+ * Todo.txt todo lists
+
+- Added a helper to "optimize" regular expressions that match one of many
+ literal words; this can save 20% and more lexing time with lexers that
+ highlight many keywords or builtins.
- New styles: "xcode" and "igor", similar to the default highlighting of
the respective IDEs.
+- The command-line "pygmentize" tool now tries a little harder to find the
+ correct encoding for files and the terminal (#979).
+
+- Added "inencoding" option for lexers to override "encoding" analogous
+ to "outencoding" (#800).
+
+- Added line-by-line "streaming" mode for pygmentize with the "-s" option.
+ (PR#165) Only fully works for lexers that have no constructs spanning
+ lines!
+
+- Added an "envname" option to the LaTeX formatter to select a replacement
+ verbatim environment (PR#235).
+
- Updated the Makefile lexer to yield a little more useful highlighting.
- Lexer aliases passed to ``get_lexer_by_name()`` are now case-insensitive.
@@ -72,7 +116,7 @@ Version 2.0
- Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols
in hashes (#873).
-- Stan lexer: update for version 1.3.0 of the language (PR#162).
+- Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377).
- JavaScript lexer: add the "yield" keyword (PR#196).
@@ -91,8 +135,6 @@ Version 2.0
- Rebol lexer: fix comment detection and analyse_text (PR#261).
-- Stan lexer: update to v2.0.1 (PR#255).
-
- LLVM lexer: update keywords to v3.4 (PR#258).
- PHP lexer: add new keywords and binary literals (PR#222).
@@ -105,7 +147,11 @@ Version 2.0
- C family lexers: fix parsing of indented preprocessor directives (#944).
-- Rust lexer: update to 0.9 language version (PR#270).
+- Rust lexer: update to 0.9 language version (PR#270, PR#388).
+
+- Elixir lexer: update to 0.15 language version (PR#392).
+
+- Fix swallowing incomplete tracebacks in Python console lexer (#874).
Version 1.6
diff --git a/Makefile b/Makefile
index e28c90c7..020f6acd 100644
--- a/Makefile
+++ b/Makefile
@@ -19,9 +19,9 @@ all: clean-pyc check test
check:
@$(PYTHON) scripts/detect_missing_analyse_text.py || true
+ @pyflakes pygments | grep -v 'but unused' || true
@$(PYTHON) scripts/check_sources.py -i build -i dist -i pygments/lexers/_mapping.py \
- -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py \
- -i pygments/lexers/_vimbuiltins.py
+ -i docs/build -i pygments/formatters/_mapping.py -i pygments/unistring.py
clean: clean-pyc
-rm -rf build
@@ -40,8 +40,8 @@ docs:
make -C doc html
mapfiles:
- (cd pygments/lexers; $(PYTHON) _mapping.py)
(cd pygments/formatters; $(PYTHON) _mapping.py)
+ (cd pygments/lexers; $(PYTHON) _mapping.py)
pylint:
@pylint --rcfile scripts/pylintrc pygments
diff --git a/README.rst b/README.rst
new file mode 100644
index 00000000..e6c03926
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,39 @@
+README for Pygments
+===================
+
+This is the source of Pygments. It is a generic syntax highlighter that
+supports over 300 languages and text formats, for use in code hosting, forums,
+wikis or other applications that need to prettify source code.
+
+Installing
+----------
+
+... works as usual, use ``python setup.py install``.
+
+Documentation
+-------------
+
+... can be found online at http://pygments.org/ or created by ::
+
+ cd doc
+ make html
+
+Development
+-----------
+
+... takes place on `Bitbucket
+<https://bitbucket.org/birkenfeld/pygments-main>`_, where the Mercurial
+repository, tickets and pull requests can be viewed.
+
+Continuous testing runs on drone.io:
+
+.. image:: https://drone.io/bitbucket.org/birkenfeld/pygments-main/status.png
+ :target: https://drone.io/bitbucket.org/birkenfeld/pygments-main/
+
+The authors
+-----------
+
+Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python.org*.
+
+Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
+the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
diff --git a/doc/conf.py b/doc/conf.py
index 864ec7a1..4ac487fa 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -117,7 +117,7 @@ html_theme_path = ['_themes']
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-html_favicon = 'favicon.ico'
+html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst
index eab1306a..6ea08dba 100644
--- a/doc/docs/lexerdevelopment.rst
+++ b/doc/docs/lexerdevelopment.rst
@@ -1,55 +1,56 @@
.. -*- mode: rst -*-
+.. highlight:: python
+
====================
Write your own lexer
====================
-If a lexer for your favorite language is missing in the Pygments package, you can
-easily write your own and extend Pygments.
+If a lexer for your favorite language is missing in the Pygments package, you
+can easily write your own and extend Pygments.
-All you need can be found inside the :mod:`pygments.lexer` module. As you can
+All you need can be found inside the :mod:`pygments.lexer` module. As you can
read in the :doc:`API documentation <api>`, a lexer is a class that is
initialized with some keyword arguments (the lexer options) and that provides a
:meth:`.get_tokens_unprocessed()` method which is given a string or unicode
-object with the data to parse.
+object with the data to lex.
The :meth:`.get_tokens_unprocessed()` method must return an iterator or iterable
-containing tuples in the form ``(index, token, value)``. Normally you don't need
-to do this since there are numerous base lexers you can subclass.
+containing tuples in the form ``(index, token, value)``. Normally you don't
+need to do this since there are base lexers that do most of the work and that
+you can subclass.
RegexLexer
==========
-A very powerful (but quite easy to use) lexer is the :class:`RegexLexer`. This
-lexer base class allows you to define lexing rules in terms of *regular
-expressions* for different *states*.
+The lexer base class used by almost all of Pygments' lexers is the
+:class:`RegexLexer`. This class allows you to define lexing rules in terms of
+*regular expressions* for different *states*.
States are groups of regular expressions that are matched against the input
-string at the *current position*. If one of these expressions matches, a
-corresponding action is performed (normally yielding a token with a specific
-type), the current position is set to where the last match ended and the
-matching process continues with the first regex of the current state.
+string at the *current position*. If one of these expressions matches, a
+corresponding action is performed (such as yielding a token with a specific
+type, or changing state), the current position is set to where the last match
+ended and the matching process continues with the first regex of the current
+state.
-Lexer states are kept in a state stack: each time a new state is entered, the
-new state is pushed onto the stack. The most basic lexers (like the
-`DiffLexer`) just need one state.
+Lexer states are kept on a stack: each time a new state is entered, the new
+state is pushed onto the stack. The most basic lexers (like the `DiffLexer`)
+just need one state.
Each state is defined as a list of tuples in the form (`regex`, `action`,
`new_state`) where the last item is optional. In the most basic form, `action`
is a token type (like `Name.Builtin`). That means: When `regex` matches, emit a
token with the match text and type `tokentype` and push `new_state` on the state
stack. If the new state is ``'#pop'``, the topmost state is popped from the
-stack instead. (To pop more than one state, use ``'#pop:2'`` and so on.)
-``'#push'`` is a synonym for pushing the current state on the
-stack.
+stack instead. To pop more than one state, use ``'#pop:2'`` and so on.
+``'#push'`` is a synonym for pushing the current state on the stack.
-The following example shows the `DiffLexer` from the builtin lexers. Note that
+The following example shows the `DiffLexer` from the builtin lexers. Note that
it contains some additional attributes `name`, `aliases` and `filenames` which
-aren't required for a lexer. They are used by the builtin lexer lookup
-functions.
-
-.. sourcecode:: python
+aren't required for a lexer. They are used by the builtin lexer lookup
+functions. ::
from pygments.lexer import RegexLexer
from pygments.token import *
@@ -72,15 +73,16 @@ functions.
}
As you can see this lexer only uses one state. When the lexer starts scanning
-the text, it first checks if the current character is a space. If this is true
-it scans everything until newline and returns the parsed data as `Text` token.
+the text, it first checks if the current character is a space. If this is true
+it scans everything until newline and returns the data as a `Text` token (which
+is the "no special highlighting" token).
If this rule doesn't match, it checks if the current char is a plus sign. And
so on.
If no rule matches at the current position, the current char is emitted as an
-`Error` token that indicates a parsing error, and the position is increased by
-1.
+`Error` token that indicates a lexing error, and the position is increased by
+one.
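
For reference, the ``DiffLexer`` example referred to above boils down to a single ``'root'`` state; a sketch along these lines (rules paraphrased from the builtin lexer rather than quoted verbatim) is::

    from pygments.lexer import RegexLexer
    from pygments.token import Text, Generic

    class DiffLexer(RegexLexer):
        name = 'Diff'
        aliases = ['diff']
        filenames = ['*.diff']

        tokens = {
            'root': [
                (r' .*\n', Text),                  # context lines
                (r'\+.*\n', Generic.Inserted),     # added lines
                (r'-.*\n', Generic.Deleted),       # removed lines
                (r'@.*\n', Generic.Subheading),    # hunk headers
                (r'Index.*\n', Generic.Heading),
                (r'=.*\n', Generic.Heading),
                (r'.*\n', Text),                   # anything else
            ]
        }

The first rule that matches at the current position wins, which is why the catch-all ``.*\n`` rule comes last.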
Adding and testing a new lexer
@@ -91,33 +93,33 @@ steps:
First, change to the current directory containing the pygments source code:
-.. sourcecode:: console
+.. code-block:: console
$ cd .../pygments-main
-Next, make sure the lexer is known from outside of the module. All modules in
-the ``pygments.lexers`` specify ``__all__``. For example, ``other.py`` sets:
+Select a matching module under ``pygments/lexers``, or create a new module for
+your lexer class.
-.. sourcecode:: python
+Next, make sure the lexer is known from outside of the module. All modules in
+the ``pygments.lexers`` specify ``__all__``. For example, ``esoteric.py`` sets::
__all__ = ['BrainfuckLexer', 'BefungeLexer', ...]
Simply add the name of your lexer class to this list.
-Finally the lexer can be made publically known by rebuilding the lexer
-mapping:
+Finally the lexer can be made publicly known by rebuilding the lexer mapping:
-.. sourcecode:: console
+.. code-block:: console
$ make mapfiles
To test the new lexer, store an example file with the proper extension in
-``tests/examplefiles``. For example, to test your ``DiffLexer``, add a
+``tests/examplefiles``. For example, to test your ``DiffLexer``, add a
``tests/examplefiles/example.diff`` containing a sample diff output.
Now you can use pygmentize to render your example to HTML:
-.. sourcecode:: console
+.. code-block:: console
$ ./pygmentize -O full -f html -o /tmp/example.html tests/examplefiles/example.diff
@@ -130,29 +132,35 @@ To view the result, open ``/tmp/example.html`` in your browser.
Once the example renders as expected, you should run the complete test suite:
-.. sourcecode:: console
+.. code-block:: console
$ make test
+It also tests that your lexer fulfills the lexer API and certain invariants,
+such as that the concatenation of all token text is the same as the input text.
+
Regex Flags
===========
-You can either define regex flags in the regex (``r'(?x)foo bar'``) or by adding
-a `flags` attribute to your lexer class. If no attribute is defined, it defaults
-to `re.MULTILINE`. For more informations about regular expression flags see the
-`regular expressions`_ help page in the python documentation.
+You can either define regex flags locally in the regex (``r'(?x)foo bar'``) or
+globally by adding a `flags` attribute to your lexer class. If no attribute is
+defined, it defaults to `re.MULTILINE`. For more information about regular
+expression flags see the page about `regular expressions`_ in the Python
+documentation.
-.. _regular expressions: http://docs.python.org/lib/re-syntax.html
+.. _regular expressions: http://docs.python.org/library/re.html#regular-expression-syntax
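
As a minimal sketch of the class-level variant (the lexer and its rules are invented for illustration)::

    import re
    from pygments.lexer import RegexLexer
    from pygments.token import Keyword, Text

    class MyLexer(RegexLexer):
        # every regex below is compiled with these flags, so 'select'
        # also matches 'SELECT' without writing (?i) into each rule
        flags = re.MULTILINE | re.IGNORECASE

        tokens = {
            'root': [
                (r'select|from|where', Keyword),
                (r'\s+', Text),
                (r'.', Text),
            ],
        }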
Scanning multiple tokens at once
================================
-Here is a more complex lexer that highlights INI files. INI files consist of
-sections, comments and key = value pairs:
+So far, the `action` element in the rule tuple of regex, action and state has
+been a single token type. Now we look at the first of several other possible
+values.
-.. sourcecode:: python
+Here is a more complex lexer that highlights INI files. INI files consist of
+sections, comments and ``key = value`` pairs::
from pygments.lexer import RegexLexer, bygroups
from pygments.token import *
@@ -172,43 +180,41 @@ sections, comments and key = value pairs:
]
}
-The lexer first looks for whitespace, comments and section names. And later it
+The lexer first looks for whitespace, comments and section names. Later it
looks for a line that looks like a key, value pair, separated by an ``'='``
sign, and optional whitespace.
-The `bygroups` helper makes sure that each group is yielded with a different
-token type. First the `Name.Attribute` token, then a `Text` token for the
+The `bygroups` helper yields each capturing group in the regex with a different
+token type. First the `Name.Attribute` token, then a `Text` token for the
optional whitespace, after that a `Operator` token for the equals sign. Then a
-`Text` token for the whitespace again. The rest of the line is returned as
+`Text` token for the whitespace again. The rest of the line is returned as
`String`.
Note that for this to work, every part of the match must be inside a capturing
group (a ``(...)``), and there must not be any nested capturing groups. If you
nevertheless need a group, use a non-capturing group defined using this syntax:
-``r'(?:some|words|here)'`` (note the ``?:`` after the beginning parenthesis).
+``(?:some|words|here)`` (note the ``?:`` after the beginning parenthesis).
-If you find yourself needing a capturing group inside the regex which
-shouldn't be part of the output but is used in the regular expressions for
-backreferencing (eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None`
-to the bygroups function and it will skip that group will be skipped in the
-output.
+If you find yourself needing a capturing group inside the regex which shouldn't
+be part of the output but is used in the regular expressions for backreferencing
+(eg: ``r'(<(foo|bar)>)(.*?)(</\2>)'``), you can pass `None` to the bygroups
+function and that group will be skipped in the output.
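
Written out in full, the INI lexer discussed above reads roughly as follows (a reconstruction of the documentation example, not a verbatim copy)::

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Text, Comment, Keyword, Name, Operator, String

    class IniLexer(RegexLexer):
        name = 'INI'
        aliases = ['ini', 'cfg']
        filenames = ['*.ini', '*.cfg']

        tokens = {
            'root': [
                (r'\s+', Text),
                (r';.*?$', Comment),
                (r'\[.*?\]$', Keyword),
                # one regex, five capturing groups, five token types
                (r'(.*?)(\s*)(=)(\s*)(.*?)$',
                 bygroups(Name.Attribute, Text, Operator, Text, String)),
            ]
        }

If one of those groups were only needed for a backreference, passing ``None`` in the corresponding ``bygroups()`` slot would drop it from the output, as described above.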
Changing states
===============
-Many lexers need multiple states to work as expected. For example, some
-languages allow multiline comments to be nested. Since this is a recursive
+Many lexers need multiple states to work as expected. For example, some
+languages allow multiline comments to be nested. Since this is a recursive
pattern it's impossible to lex just using regular expressions.
-Here is the solution:
-
-.. sourcecode:: python
+Here is a lexer that recognizes C++ style comments (multi-line with ``/* */``
+and single-line with ``//`` until end of line)::
from pygments.lexer import RegexLexer
from pygments.token import *
- class ExampleLexer(RegexLexer):
+ class CppCommentLexer(RegexLexer):
name = 'Example Lexer with states'
tokens = {
@@ -227,28 +233,29 @@ Here is the solution:
}
This lexer starts lexing in the ``'root'`` state. It tries to match as much as
-possible until it finds a slash (``'/'``). If the next character after the slash
-is a star (``'*'``) the `RegexLexer` sends those two characters to the output
-stream marked as `Comment.Multiline` and continues parsing with the rules
+possible until it finds a slash (``'/'``). If the next character after the slash
+is an asterisk (``'*'``) the `RegexLexer` sends those two characters to the
+output stream marked as `Comment.Multiline` and continues lexing with the rules
defined in the ``'comment'`` state.
-If there wasn't a star after the slash, the `RegexLexer` checks if it's a
-singleline comment (eg: followed by a second slash). If this also wasn't the
-case it must be a single slash (the separate regex for a single slash must also
-be given, else the slash would be marked as an error token).
+If there wasn't an asterisk after the slash, the `RegexLexer` checks if it's a
+singleline comment (i.e. followed by a second slash). If this also wasn't the
+case it must be a single slash, which is not a comment starter (the separate
+regex for a single slash must also be given, else the slash would be marked as
+an error token).
-Inside the ``'comment'`` state, we do the same thing again. Scan until the lexer
-finds a star or slash. If it's the opening of a multiline comment, push the
-``'comment'`` state on the stack and continue scanning, again in the
-``'comment'`` state. Else, check if it's the end of the multiline comment. If
+Inside the ``'comment'`` state, we do the same thing again. Scan until the
+lexer finds a star or slash. If it's the opening of a multiline comment, push
+the ``'comment'`` state on the stack and continue scanning, again in the
+``'comment'`` state. Else, check if it's the end of the multiline comment. If
yes, pop one state from the stack.
-Note: If you pop from an empty stack you'll get an `IndexError`. (There is an
+Note: If you pop from an empty stack you'll get an `IndexError`. (There is an
easy way to prevent this from happening: don't ``'#pop'`` in the root state).
If the `RegexLexer` encounters a newline that is flagged as an error token, the
-stack is emptied and the lexer continues scanning in the ``'root'`` state. This
-helps producing error-tolerant highlighting for erroneous input, e.g. when a
+stack is emptied and the lexer continues scanning in the ``'root'`` state. This
+can help producing error-tolerant highlighting for erroneous input, e.g. when a
single-line string is not closed.
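
The token table of the ``CppCommentLexer`` above, elided from this hunk, looks roughly like this (token names chosen from the standard set; the original may differ slightly)::

    from pygments.lexer import RegexLexer
    from pygments.token import Text, Comment

    class CppCommentLexer(RegexLexer):
        name = 'Example Lexer with states'

        tokens = {
            'root': [
                (r'[^/]+', Text),
                (r'/\*', Comment.Multiline, 'comment'),  # enter 'comment' state
                (r'//.*?$', Comment.Single),
                (r'/', Text),                            # a lone slash is not a comment
            ],
            'comment': [
                (r'[^*/]+', Comment.Multiline),
                (r'/\*', Comment.Multiline, '#push'),    # nested comment: push again
                (r'\*/', Comment.Multiline, '#pop'),     # close one nesting level
                (r'[*/]', Comment.Multiline),
            ],
        }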
@@ -258,14 +265,14 @@ Advanced state tricks
There are a few more things you can do with states:
- You can push multiple states onto the stack if you give a tuple instead of a
- simple string as the third item in a rule tuple. For example, if you want to
- match a comment containing a directive, something like::
+ simple string as the third item in a rule tuple. For example, if you want to
+ match a comment containing a directive, something like:
- /* <processing directive> rest of comment */
+ .. code-block:: text
- you can use this rule:
+ /* <processing directive> rest of comment */
- .. sourcecode:: python
+ you can use this rule::
tokens = {
'root': [
@@ -286,7 +293,7 @@ There are a few more things you can do with states:
When this encounters the above sample, first ``'comment'`` and ``'directive'``
are pushed onto the stack, then the lexer continues in the directive state
until it finds the closing ``>``, then it continues in the comment state until
- the closing ``*/``. Then, both states are popped from the stack again and
+ the closing ``*/``. Then, both states are popped from the stack again and
lexing continues in the root state.
.. versionadded:: 0.9
@@ -295,9 +302,7 @@ There are a few more things you can do with states:
- You can include the rules of a state in the definition of another. This is
- done by using `include` from `pygments.lexer`:
-
- .. sourcecode:: python
+ done by using `include` from `pygments.lexer`::
from pygments.lexer import RegexLexer, bygroups, include
from pygments.token import *
@@ -323,15 +328,14 @@ There are a few more things you can do with states:
}
This is a hypothetical lexer for a language that consist of functions and
- comments. Because comments can occur at toplevel and in functions, we need
- rules for comments in both states. As you can see, the `include` helper saves
+ comments. Because comments can occur at toplevel and in functions, we need
+ rules for comments in both states. As you can see, the `include` helper saves
repeating rules that occur more than once (in this example, the state
``'comment'`` will never be entered by the lexer, as it's only there to be
included in ``'root'`` and ``'function'``).
-
- Sometimes, you may want to "combine" a state from existing ones. This is
- possible with the `combine` helper from `pygments.lexer`.
+ possible with the `combined` helper from `pygments.lexer`.
If you, instead of a new state, write ``combined('state1', 'state2')`` as the
third item of a rule tuple, a new anonymous state will be formed from state1
@@ -340,14 +344,12 @@ There are a few more things you can do with states:
This is not used very often, but can be helpful in some cases, such as the
`PythonLexer`'s string literal processing.
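
  A sketch of the ``combined()`` idiom (state names invented for the example; the real use is in the ``PythonLexer``'s string handling)::

    from pygments.lexer import RegexLexer, combined
    from pygments.token import String, Text

    class ExampleLexer(RegexLexer):   # hypothetical
        tokens = {
            'root': [
                (r'[^"]+', Text),
                # entering a string pushes an anonymous state made of the
                # rules of 'stringescape' followed by those of 'dqs'
                (r'"', String, combined('stringescape', 'dqs')),
            ],
            'stringescape': [
                (r'\\.', String.Escape),
            ],
            'dqs': [
                (r'"', String, '#pop'),
                (r'[^"\\]+', String),
            ],
        }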
-- If you want your lexer to start lexing in a different state you can modify
- the stack by overloading the `get_tokens_unprocessed()` method:
-
- .. sourcecode:: python
+- If you want your lexer to start lexing in a different state you can modify the
+ stack by overloading the `get_tokens_unprocessed()` method::
from pygments.lexer import RegexLexer
- class MyLexer(RegexLexer):
+ class ExampleLexer(RegexLexer):
tokens = {...}
def get_tokens_unprocessed(self, text):
@@ -356,29 +358,88 @@ There are a few more things you can do with states:
yield item
Some lexers like the `PhpLexer` use this to make the leading ``<?php``
- preprocessor comments optional. Note that you can crash the lexer easily
- by putting values into the stack that don't exist in the token map. Also
+ preprocessor comments optional. Note that you can crash the lexer easily by
+ putting values into the stack that don't exist in the token map. Also
removing ``'root'`` from the stack can result in strange errors!
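
  A concrete sketch of that pattern, loosely modeled on what the `PhpLexer` does with its ``startinline`` option (the state names and option handling here are invented)::

    from pygments.lexer import RegexLexer
    from pygments.token import Name, Text

    class ExampleLexer(RegexLexer):   # hypothetical
        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\w+', Name),
                (r'.', Text),
            ],
            'inline': [
                # same input, but names highlighted as attributes instead
                (r'\s+', Text),
                (r'\w+', Name.Attribute),
                (r'.', Text),
            ],
        }

        def __init__(self, **options):
            self.startinline = bool(options.get('startinline', False))
            RegexLexer.__init__(self, **options)

        def get_tokens_unprocessed(self, text):
            # choose the initial state stack depending on a lexer option
            stack = ['inline'] if self.startinline else ['root']
            for item in RegexLexer.get_tokens_unprocessed(self, text, stack):
                yield item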
-- An empty regex at the end of a state list, combined with ``'#pop'``, can
- act as a return point from a state that doesn't have a clear end marker.
+- In some lexers, a state should be popped if anything is encountered that isn't
+ matched by a rule in the state. You could use an empty regex at the end of
+ the state list, but Pygments provides a more obvious way of spelling that:
+ ``default('#pop')`` is equivalent to ``('', Text, '#pop')``.
+
+ .. versionadded:: 2.0
+
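  For instance (names invented), a state that should be left as soon as it sees something it does not recognize can be written as::

    from pygments.lexer import RegexLexer, default
    from pygments.token import Keyword, Name, Text

    class ExampleLexer(RegexLexer):   # hypothetical
        tokens = {
            'root': [
                (r'class\b', Keyword, 'classname'),
                (r'\s+', Text),
                (r'\w+', Name),
            ],
            'classname': [
                (r'\s+', Text),
                (r'\w+', Name.Class, '#pop'),
                # anything unexpected: return to 'root' without consuming
                # input or emitting Error tokens
                default('#pop'),
            ],
        }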
+
+Subclassing lexers derived from RegexLexer
+==========================================
+
+.. versionadded:: 1.6
+
+Sometimes multiple languages are very similar, but should still be lexed by
+different lexer classes.
+
+When subclassing a lexer derived from RegexLexer, the ``tokens`` dictionaries
+defined in the parent and child class are merged. For example::
+
+ from pygments.lexer import RegexLexer, inherit
+ from pygments.token import *
+
+ class BaseLexer(RegexLexer):
+ tokens = {
+ 'root': [
+ ('[a-z]+', Name),
+ (r'/\*', Comment, 'comment'),
+ ('"', String, 'string'),
+ ('\s+', Text),
+ ],
+ 'string': [
+ ('[^"]+', String),
+ ('"', String, '#pop'),
+ ],
+ 'comment': [
+ ...
+ ],
+ }
+
+ class DerivedLexer(BaseLexer):
+ tokens = {
+ 'root': [
+ ('[0-9]+', Number),
+ inherit,
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\.', String.Escape),
+ ('"', String, '#pop'),
+ ],
+ }
+
+The `BaseLexer` defines two states, lexing names and strings. The
+`DerivedLexer` defines its own tokens dictionary, which extends the definitions
+of the base lexer:
+
+* The "root" state has an additional rule and then the special object `inherit`,
+ which tells Pygments to insert the token definitions of the parent class at
+ that point.
+
+* The "string" state is replaced entirely, since there is not `inherit` rule.
+
+* The "comment" state is inherited entirely.
Using multiple lexers
=====================
-Using multiple lexers for the same input can be tricky. One of the easiest
-combination techniques is shown here: You can replace the token type entry in a
-rule tuple (the second item) with a lexer class. The matched text will then be
-lexed with that lexer, and the resulting tokens will be yielded.
+Using multiple lexers for the same input can be tricky. One of the easiest
+combination techniques is shown here: You can replace the action entry in a rule
+tuple with a lexer class. The matched text will then be lexed with that lexer,
+and the resulting tokens will be yielded.
-For example, look at this stripped-down HTML lexer:
-
-.. sourcecode:: python
+For example, look at this stripped-down HTML lexer::
from pygments.lexer import RegexLexer, bygroups, using
from pygments.token import *
- from pygments.lexers.web import JavascriptLexer
+ from pygments.lexers.javascript import JavascriptLexer
class HtmlLexer(RegexLexer):
name = 'HTML'
@@ -402,26 +463,29 @@ For example, look at this stripped-down HTML lexer:
}
Here the content of a ``<script>`` tag is passed to a newly created instance of
-a `JavascriptLexer` and not processed by the `HtmlLexer`. This is done using the
-`using` helper that takes the other lexer class as its parameter.
-
-Note the combination of `bygroups` and `using`. This makes sure that the content
-up to the ``</script>`` end tag is processed by the `JavascriptLexer`, while the
-end tag is yielded as a normal token with the `Name.Tag` type.
+a `JavascriptLexer` and not processed by the `HtmlLexer`. This is done using
+the `using` helper that takes the other lexer class as its parameter.
-As an additional goodie, if the lexer class is replaced by `this` (imported from
-`pygments.lexer`), the "other" lexer will be the current one (because you cannot
-refer to the current class within the code that runs at class definition time).
+Note the combination of `bygroups` and `using`. This makes sure that the
+content up to the ``</script>`` end tag is processed by the `JavascriptLexer`,
+while the end tag is yielded as a normal token with the `Name.Tag` type.
Also note the ``(r'<\s*script\s*', Name.Tag, ('script-content', 'tag'))`` rule.
Here, two states are pushed onto the state stack, ``'script-content'`` and
-``'tag'``. That means that first ``'tag'`` is processed, which will parse
+``'tag'``. That means that first ``'tag'`` is processed, which will lex
attributes and the closing ``>``, then the ``'tag'`` state is popped and the
next state on top of the stack will be ``'script-content'``.
+Since you cannot refer to the class currently being defined, use `this`
+(imported from `pygments.lexer`) to refer to the current lexer class, i.e.
+``using(this)``. This construct may seem unnecessary, but this is often the
+most obvious way of lexing arbitrary syntax between fixed delimiters without
+introducing deeply nested states.
+
The `using()` helper has a special keyword argument, `state`, which works as
follows: if given, the lexer to use initially is not in the ``"root"`` state,
-but in the state given by this argument. This *only* works with a `RegexLexer`.
+but in the state given by this argument. This does not work with advanced
+`RegexLexer` subclasses such as `ExtendedRegexLexer` (see below).
Any other keywords arguments passed to `using()` are added to the keyword
arguments used to create the lexer.
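
A compact sketch of the ``using(this)`` idiom (the lexer and its ``${...}`` interpolation syntax are invented for the example)::

    from pygments.lexer import RegexLexer, bygroups, using, this
    from pygments.token import Name, Punctuation, String, Text

    class TemplateStringLexer(RegexLexer):   # hypothetical
        tokens = {
            'root': [
                (r'\s+', Text),
                (r'\w+', Name),
                # lex whatever is between ${ and } with this same lexer;
                # using(this, state='root') would pick the start state explicitly
                (r'(\$\{)(.*?)(\})',
                 bygroups(Punctuation, using(this), Punctuation)),
                (r'.', String),
            ],
        }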
@@ -430,17 +494,15 @@ arguments used to create the lexer.
Delegating Lexer
================
-Another approach for nested lexers is the `DelegatingLexer` which is for
-example used for the template engine lexers. It takes two lexers as
-arguments on initialisation: a `root_lexer` and a `language_lexer`.
+Another approach for nested lexers is the `DelegatingLexer` which is for example
+used for the template engine lexers. It takes two lexers as arguments on
+initialisation: a `root_lexer` and a `language_lexer`.
The input is processed as follows: First, the whole text is lexed with the
-`language_lexer`. All tokens yielded with a type of ``Other`` are then
-concatenated and given to the `root_lexer`. The language tokens of the
-`language_lexer` are then inserted into the `root_lexer`'s token stream
-at the appropriate positions.
-
-.. sourcecode:: python
+`language_lexer`. All tokens yielded with the special type of ``Other`` are
+then concatenated and given to the `root_lexer`. The language tokens of the
+`language_lexer` are then inserted into the `root_lexer`'s token stream at the
+appropriate positions. ::
from pygments.lexer import DelegatingLexer
from pygments.lexers.web import HtmlLexer, PhpLexer
@@ -452,10 +514,8 @@ at the appropriate positions.
This procedure ensures that e.g. HTML with template tags in it is highlighted
correctly even if the template tags are put into HTML tags or attributes.
-If you want to change the needle token ``Other`` to something else, you can
-give the lexer another token type as the third parameter:
-
-.. sourcecode:: python
+If you want to change the needle token ``Other`` to something else, you can give
+the lexer another token type as the third parameter::
DelegatingLexer.__init__(MyLexer, OtherLexer, Text, **options)
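
The class body elided from the example above amounts to little more than handing the two lexers to the base constructor; roughly::

    from pygments.lexer import DelegatingLexer
    from pygments.lexers.web import HtmlLexer, PhpLexer

    class HtmlPhpLexer(DelegatingLexer):
        def __init__(self, **options):
            # lex with PhpLexer first; everything it leaves as ``Other`` is
            # then re-lexed with HtmlLexer and merged back in
            super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)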
@@ -464,24 +524,22 @@ Callbacks
=========
Sometimes the grammar of a language is so complex that a lexer would be unable
-to parse it just by using regular expressions and stacks.
+to process it just by using regular expressions and stacks.
For this, the `RegexLexer` allows callbacks to be given in rule tuples, instead
of token types (`bygroups` and `using` are nothing else but preimplemented
-callbacks). The callback must be a function taking two arguments:
+callbacks). The callback must be a function taking two arguments:
* the lexer itself
* the match object for the last matched rule
The callback must then return an iterable of (or simply yield) ``(index,
tokentype, value)`` tuples, which are then just passed through by
-`get_tokens_unprocessed()`. The ``index`` here is the position of the token in
+`get_tokens_unprocessed()`. The ``index`` here is the position of the token in
the input string, ``tokentype`` is the normal token type (like `Name.Builtin`),
and ``value`` the associated part of the input string.
-You can see an example here:
-
-.. sourcecode:: python
+You can see an example here::
from pygments.lexer import RegexLexer
from pygments.token import Generic
@@ -499,26 +557,25 @@ You can see an example here:
]
}
-If the regex for the `headline_callback` matches, the function is called with the
-match object. Note that after the callback is done, processing continues
-normally, that is, after the end of the previous match. The callback has no
+If the regex for the `headline_callback` matches, the function is called with
+the match object. Note that after the callback is done, processing continues
+normally, that is, after the end of the previous match. The callback has no
possibility to influence the position.
There are not really any simple examples for lexer callbacks, but you can see
-them in action e.g. in the `compiled.py`_ source code in the `CLexer` and
-`JavaLexer` classes.
+them in action e.g. in the `SMLLexer` class in `ml.py`_.
-.. _compiled.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/compiled.py
+.. _ml.py: http://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ml.py
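
A hedged reconstruction of the ``headline_callback`` example referenced in this hunk (the exact token type in the original may differ)::

    from pygments.lexer import RegexLexer
    from pygments.token import Generic

    class HypotheticLexer(RegexLexer):

        def headline_callback(lexer, match):
            equal_signs = match.group(1)
            text = match.group(2)
            # emit a single token spanning the whole headline
            yield match.start(), Generic.Heading, equal_signs + text + equal_signs

        tokens = {
            'root': [
                (r'(=+)(.*?)(\1)', headline_callback),
            ],
        }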
The ExtendedRegexLexer class
============================
The `RegexLexer`, even with callbacks, unfortunately isn't powerful enough for
-the funky syntax rules of some languages that will go unnamed, such as Ruby.
+the funky syntax rules of languages such as Ruby.
But fear not; even then you don't have to abandon the regular expression
-approach. For Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
+approach: Pygments has a subclass of `RegexLexer`, the `ExtendedRegexLexer`.
All features known from RegexLexers are available here too, and the tokens are
specified in exactly the same way, *except* for one detail:
@@ -542,9 +599,7 @@ creating a new one for the string argument.
Note that because you can set the current position to anything in the callback,
it won't be automatically be set by the caller after the callback is finished.
For example, this is how the hypothetical lexer above would be written with the
-`ExtendedRegexLexer`:
-
-.. sourcecode:: python
+`ExtendedRegexLexer`::
from pygments.lexer import ExtendedRegexLexer
from pygments.token import Generic
@@ -564,31 +619,58 @@ For example, this is how the hypothetical lexer above would be written with the
}
This might sound confusing (and it can really be). But it is needed, and for an
-example look at the Ruby lexer in `agile.py`_.
+example look at the Ruby lexer in `ruby.py`_.
-.. _agile.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/agile.py
+.. _ruby.py: https://bitbucket.org/birkenfeld/pygments-main/src/tip/pygments/lexers/ruby.py
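
For comparison, the same callback written for the ``ExtendedRegexLexer`` receives a context object and must advance the position itself; a sketch::

    from pygments.lexer import ExtendedRegexLexer
    from pygments.token import Generic

    class ExHypotheticLexer(ExtendedRegexLexer):

        def headline_callback(lexer, match, ctx):
            equal_signs = match.group(1)
            text = match.group(2)
            yield match.start(), Generic.Heading, equal_signs + text + equal_signs
            # unlike plain RegexLexer callbacks, this callback moves the position on
            ctx.pos = match.end()

        tokens = {
            'root': [
                (r'(=+)(.*?)(\1)', headline_callback),
            ],
        }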
-Filtering Token Streams
+Handling Lists of Keywords
+==========================
+
+For a relatively short list (hundreds) you can construct an optimized regular
+expression directly using ``words()`` (longer lists, see next section). This
+function handles a few things for you automatically, including escaping
+metacharacters and Python's first-match rather than longest-match in
+alternations. Feel free to put the lists themselves in
+``pygments/lexers/_$lang_builtins.py`` (see examples there), and generate them
+by code if possible.
+
+An example of using ``words()`` is something like::
+
+ from pygments.lexer import RegexLexer, words, Name
+
+ class MyLexer(RegexLexer):
+
+ tokens = {
+ 'root': [
+ (words(('else', 'elseif'), suffix=r'\b'), Name.Builtin),
+ (r'\w+', Name),
+ ],
+ }
+
+As you can see, you can add ``prefix`` and ``suffix`` parts to the constructed
+regex.
+
+
+Modifying Token Streams
=======================
-Some languages ship a lot of builtin functions (for example PHP). The total
+Some languages ship a lot of builtin functions (for example PHP). The total
amount of those functions differs from system to system because not everybody
-has every extension installed. In the case of PHP there are over 3000 builtin
-functions. That's an incredible huge amount of functions, much more than you
-can put into a regular expression.
-
-But because only `Name` tokens can be function names it's solvable by overriding
-the ``get_tokens_unprocessed()`` method. The following lexer subclasses the
-`PythonLexer` so that it highlights some additional names as pseudo keywords:
+has every extension installed. In the case of PHP there are over 3000 builtin
+functions. That's an incredibly huge amount of functions, much more than you
+want to put into a regular expression.
-.. sourcecode:: python
+But because only `Name` tokens can be function names this is solvable by
+overriding the ``get_tokens_unprocessed()`` method. The following lexer
+subclasses the `PythonLexer` so that it highlights some additional names as
+pseudo keywords::
- from pygments.lexers.agile import PythonLexer
+ from pygments.lexers.python import PythonLexer
from pygments.token import Name, Keyword
class MyPythonLexer(PythonLexer):
- EXTRA_KEYWORDS = ['foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs']
+ EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'))
def get_tokens_unprocessed(self, text):
for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
@@ -598,5 +680,3 @@ the ``get_tokens_unprocessed()`` method. The following lexer subclasses the
yield index, token, value
The `PhpLexer` and `LuaLexer` use this method to resolve builtin functions.
-
-.. note:: Do not confuse this with the :doc:`filter <filters>` system.
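
Filled in completely (part of the body is cut from the hunk above), the override behaves like this::

    from pygments.lexers.python import PythonLexer
    from pygments.token import Name, Keyword

    class MyPythonLexer(PythonLexer):
        EXTRA_KEYWORDS = set(('foo', 'bar', 'foobar', 'barfoo', 'spam', 'eggs'))

        def get_tokens_unprocessed(self, text):
            for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.EXTRA_KEYWORDS:
                    # re-tag plain names that should stand out
                    yield index, Keyword.Pseudo, value
                else:
                    yield index, token, value

Wrapped this way, ordinary ``Name`` tokens whose text appears in ``EXTRA_KEYWORDS`` come out as ``Keyword.Pseudo`` instead.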
diff --git a/doc/docs/styles.rst b/doc/docs/styles.rst
index 7ef4de1b..d56db0db 100644
--- a/doc/docs/styles.rst
+++ b/doc/docs/styles.rst
@@ -21,6 +21,7 @@ option in form of a string:
.. sourcecode:: pycon
>>> from pygments.styles import get_style_by_name
+ >>> from pygments.formatters import HtmlFormatter
>>> HtmlFormatter(style='colorful').style
<class 'pygments.styles.colorful.ColorfulStyle'>
@@ -30,6 +31,7 @@ Or you can also import your own style (which must be a subclass of
.. sourcecode:: pycon
>>> from yourapp.yourmodule import YourStyle
+ >>> from pygments.formatters import HtmlFormatter
>>> HtmlFormatter(style=YourStyle).style
<class 'yourapp.yourmodule.YourStyle'>
diff --git a/doc/faq.rst b/doc/faq.rst
index 0f65b9fe..f040e053 100644
--- a/doc/faq.rst
+++ b/doc/faq.rst
@@ -58,7 +58,7 @@ Usage as a library is thoroughly demonstrated in the Documentation section.
How do I make a new style?
--------------------------
-Please see the documentation on styles.
+Please see the :doc:`documentation on styles <docs/styles>`.
How can I report a bug or suggest a feature?
--------------------------------------------
diff --git a/doc/index.rst b/doc/index.rst
index a0e41210..26114045 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,20 +1,21 @@
Welcome!
========
-This is the home of Pygments. It is a generic syntax highlighter for general use
-in all kinds of software such as forum systems, wikis or other applications that
-need to prettify source code. Highlights are:
+This is the home of Pygments. It is a generic syntax highlighter suitable for
+use in code hosting, forums, wikis or other applications that need to prettify
+source code. Highlights are:
-* a wide range of common languages and markup formats is supported
+* a wide range of over 300 languages and other text formats is supported
* special attention is paid to details that increase highlighting quality
-* support for new languages and formats are added easily; most languages use a simple regex-based lexing mechanism
-* a number of output formats is available, among them HTML, RTF, LaTeX and ANSI sequences
+* support for new languages and formats are added easily; most languages use a
+ simple regex-based lexing mechanism
+* a number of output formats is available, among them HTML, RTF, LaTeX and ANSI
+ sequences
* it is usable as a command-line tool and as a library
-* ... and it highlights even Brainf*ck!
+* ... and it highlights even Perl 6!
-Read more in the FAQ list or the documentation, or download the latest release.
-
-Though Pygments has not yet won an award, we trust that you will notice it's a top quality product <wink>.
+Read more in the :doc:`FAQ list <faq>` or the :doc:`documentation <docs/index>`,
+or `download the latest release <http://pypi.python.org/pypi/Pygments>`_.
.. _contribute:
@@ -45,7 +46,7 @@ Pygments is maintained by **Georg Brandl**, e-mail address *georg*\ *@*\ *python
Many lexers and fixes have been contributed by **Armin Ronacher**, the rest of
the `Pocoo <http://dev.pocoo.org/>`_ team and **Tim Hatch**.
-
+
.. toctree::
:maxdepth: 1
:hidden:
diff --git a/pygmentize b/pygmentize
index 8b3b2067..aea38727 100755
--- a/pygmentize
+++ b/pygmentize
@@ -1,6 +1,7 @@
#!/usr/bin/env python2
-import sys, pygments.cmdline
+import sys
+import pygments.cmdline
try:
sys.exit(pygments.cmdline.main(sys.argv))
except KeyboardInterrupt:
diff --git a/pygments/cmdline.py b/pygments/cmdline.py
index 7c23ebee..907c51f0 100644
--- a/pygments/cmdline.py
+++ b/pygments/cmdline.py
@@ -16,20 +16,21 @@ import getopt
from textwrap import dedent
from pygments import __version__, highlight
-from pygments.util import ClassNotFound, OptionError, docstring_headline
-from pygments.lexers import get_all_lexers, get_lexer_by_name, get_lexer_for_filename, \
- find_lexer_class, guess_lexer, TextLexer
+from pygments.util import ClassNotFound, OptionError, docstring_headline, \
+ guess_decode, guess_decode_from_terminal, terminal_encoding
+from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
+ get_lexer_for_filename, find_lexer_class, TextLexer
from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
from pygments.formatters import get_all_formatters, get_formatter_by_name, \
- get_formatter_for_filename, find_formatter_class, \
- TerminalFormatter # pylint:disable-msg=E0611
+ get_formatter_for_filename, find_formatter_class, \
+ TerminalFormatter # pylint:disable-msg=E0611
from pygments.filters import get_all_filters, find_filter_class
from pygments.styles import get_all_styles, get_style_by_name
USAGE = """\
Usage: %s [-l <lexer> | -g] [-F <filter>[:<options>]] [-f <formatter>]
- [-O <options>] [-P <option=value>] [-o <outfile>] [<infile>]
+ [-O <options>] [-P <option=value>] [-s] [-o <outfile>] [<infile>]
%s -S <style> -f <formatter> [-a <arg>] [-O <options>] [-P <option=value>]
%s -L [<which> ...]
@@ -41,6 +42,10 @@ Highlight the input file and write the result to <outfile>.
If no input file is given, use stdin, if -o is not given, use stdout.
+If -s is passed, lexing will be done in "streaming" mode, reading and
+highlighting one line at a time. This will only work properly with
+lexers that have no constructs spanning multiple lines!
+
<lexer> is a lexer name (query all lexer names with -L). If -l is not
given, the lexer is guessed from the extension of the input file name
(this obviously doesn't work if the input is stdin). If -g is passed,
@@ -80,6 +85,11 @@ If no specific lexer can be determined "text" is returned.
The -H option prints detailed help for the object <name> of type <type>,
where <type> is one of "lexer", "formatter" or "filter".
+The -s option processes lines one at a time until EOF, rather than
+waiting to process the entire file. This only works for stdin, and
+is intended for streaming input such as you get from 'tail -f'.
+Example usage: "tail -f sql.log | pygmentize -s -l sql"
+
The -h option prints this help.
The -V option prints the package version.
"""
@@ -205,7 +215,7 @@ def main(args=sys.argv):
pass
try:
- popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHg")
+ popts, args = getopt.getopt(args[1:], "l:f:F:o:O:P:LS:a:N:hVHgs")
except getopt.GetoptError:
print(usage, file=sys.stderr)
return 2
@@ -222,10 +232,6 @@ def main(args=sys.argv):
F_opts.append(arg)
opts[opt] = arg
- if not opts and not args:
- print(usage)
- return 0
-
if opts.pop('-h', None) is not None:
print(usage)
return 0
@@ -278,6 +284,10 @@ def main(args=sys.argv):
parsed_opts[name] = value
opts.pop('-P', None)
+ # encodings
+ inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
+ outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
+
# handle ``pygmentize -N``
infn = opts.pop('-N', None)
if infn is not None:
@@ -353,7 +363,11 @@ def main(args=sys.argv):
else:
if not fmter:
fmter = TerminalFormatter(**parsed_opts)
- outfile = sys.stdout
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ outfile = sys.stdout.buffer
+ else:
+ outfile = sys.stdout
# select lexer
lexer = opts.pop('-l', None)
@@ -364,18 +378,28 @@ def main(args=sys.argv):
print('Error:', err, file=sys.stderr)
return 1
+ # read input code
if args:
if len(args) > 1:
print(usage, file=sys.stderr)
return 2
+ if '-s' in opts:
+ print('Error: -s option not usable when input file specified',
+ file=sys.stderr)
+ return 1
+
infn = args[0]
try:
- code = open(infn, 'rb').read()
+ with open(infn, 'rb') as infp:
+ code = infp.read()
except Exception as err:
print('Error: cannot read infile:', err, file=sys.stderr)
return 1
+ if not inencoding:
+ code, inencoding = guess_decode(code)
+ # do we have to guess the lexer?
if not lexer:
try:
lexer = get_lexer_for_filename(infn, code, **parsed_opts)
@@ -392,19 +416,22 @@ def main(args=sys.argv):
print('Error:', err, file=sys.stderr)
return 1
- else:
- if '-g' in opts:
+ elif '-s' not in opts: # treat stdin as full file (-s support is later)
+ # read code from terminal, always in binary mode since we want to
+ # decode ourselves and be tolerant with it
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ code = sys.stdin.buffer.read()
+ else:
code = sys.stdin.read()
+ if not inencoding:
+ code, inencoding = guess_decode_from_terminal(code, sys.stdin)
+ # else the lexer will do the decoding
+ if not lexer:
try:
lexer = guess_lexer(code, **parsed_opts)
except ClassNotFound:
lexer = TextLexer(**parsed_opts)
- elif not lexer:
- print('Error: no lexer name given and reading ' + \
- 'from stdin (try using -g or -l <lexer>)', file=sys.stderr)
- return 2
- else:
- code = sys.stdin.read()
# When using the LaTeX formatter and the option `escapeinside` is
# specified, we need a special lexer which collects escaped text
@@ -415,30 +442,47 @@ def main(args=sys.argv):
right = escapeinside[1]
lexer = LatexEmbeddedLexer(left, right, lexer)
- # No encoding given? Use latin1 if output file given,
- # stdin/stdout encoding otherwise.
- # (This is a compromise, I'm not too happy with it...)
- if 'encoding' not in parsed_opts and 'outencoding' not in parsed_opts:
+ # determine output encoding if not explicitly selected
+ if not outencoding:
if outfn:
- # encoding pass-through
- fmter.encoding = 'latin1'
+ # output file? -> encoding pass-through
+ fmter.encoding = inencoding
else:
- if sys.version_info < (3,):
- # use terminal encoding; Python 3's terminals already do that
- lexer.encoding = getattr(sys.stdin, 'encoding',
- None) or 'ascii'
- fmter.encoding = getattr(sys.stdout, 'encoding',
- None) or 'ascii'
- elif not outfn and sys.version_info > (3,):
- # output to terminal with encoding -> use .buffer
- outfile = sys.stdout.buffer
+ # else use terminal encoding
+ fmter.encoding = terminal_encoding(sys.stdout)
# ... and do it!
try:
# process filters
for fname, fopts in F_opts:
lexer.add_filter(fname, **fopts)
- highlight(code, lexer, fmter, outfile)
+
+ if '-s' not in opts:
+ # process whole input as per normal...
+ highlight(code, lexer, fmter, outfile)
+ else:
+ if not lexer:
+ print('Error: when using -s a lexer has to be selected with -l',
+ file=sys.stderr)
+ return 1
+ # line by line processing of stdin (eg: for 'tail -f')...
+ try:
+ while 1:
+ if sys.version_info > (3,):
+ # Python 3: we have to use .buffer to get a binary stream
+ line = sys.stdin.buffer.readline()
+ else:
+ line = sys.stdin.readline()
+ if not line:
+ break
+ if not inencoding:
+ line = guess_decode_from_terminal(line, sys.stdin)[0]
+ highlight(line, lexer, fmter, outfile)
+ if hasattr(outfile, 'flush'):
+ outfile.flush()
+ except KeyboardInterrupt:
+ return 0
+
except Exception:
import traceback
info = traceback.format_exception(*sys.exc_info())
diff --git a/pygments/filters/__init__.py b/pygments/filters/__init__.py
index 2de661c7..2685c784 100644
--- a/pygments/filters/__init__.py
+++ b/pygments/filters/__init__.py
@@ -21,9 +21,7 @@ from pygments.plugin import find_plugin_filters
def find_filter_class(filtername):
- """
- Lookup a filter by name. Return None if not found.
- """
+ """Lookup a filter by name. Return None if not found."""
if filtername in FILTERS:
return FILTERS[filtername]
for name, cls in find_plugin_filters():
@@ -33,9 +31,10 @@ def find_filter_class(filtername):
def get_filter_by_name(filtername, **options):
- """
- Return an instantiated filter. Options are passed to the filter
- initializer if wanted. Raise a ClassNotFound if not found.
+ """Return an instantiated filter.
+
+ Options are passed to the filter initializer if wanted.
+ Raise a ClassNotFound if not found.
"""
cls = find_filter_class(filtername)
if cls:
@@ -45,9 +44,7 @@ def get_filter_by_name(filtername, **options):
def get_all_filters():
- """
- Return a generator of all filter names.
- """
+ """Return a generator of all filter names."""
for name in FILTERS:
yield name
for name, _ in find_plugin_filters():
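
The helpers touched above (``get_filter_by_name``, ``get_all_filters``) are the ones used when attaching filters to a lexer. A hedged usage sketch, with the filter name and option taken from the ``KeywordCaseFilter`` documented further down in this file:

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import NullFormatter
    from pygments.filters import get_all_filters

    print(list(get_all_filters()))        # names of built-in and plugin filters

    lexer = get_lexer_by_name('pascal')
    # KeywordCaseFilter: uppercase all keywords before formatting
    lexer.add_filter('keywordcase', case='upper')
    print(highlight('begin writeln(1); end.', lexer, NullFormatter()))
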
@@ -68,8 +65,7 @@ def _replace_special(ttype, value, regex, specialttype,
class CodeTagFilter(Filter):
- """
- Highlight special code tags in comments and docstrings.
+ """Highlight special code tags in comments and docstrings.
Options accepted:
@@ -100,8 +96,7 @@ class CodeTagFilter(Filter):
class KeywordCaseFilter(Filter):
- """
- Convert keywords to lowercase or uppercase or capitalize them, which
+ """Convert keywords to lowercase or uppercase or capitalize them, which
means first letter uppercase, rest lowercase.
This can be useful e.g. if you highlight Pascal code and want to adapt the
@@ -116,7 +111,8 @@ class KeywordCaseFilter(Filter):
def __init__(self, **options):
Filter.__init__(self, **options)
- case = get_choice_opt(options, 'case', ['lower', 'upper', 'capitalize'], 'lower')
+ case = get_choice_opt(options, 'case',
+ ['lower', 'upper', 'capitalize'], 'lower')
self.convert = getattr(text_type, case)
def filter(self, lexer, stream):
@@ -128,8 +124,7 @@ class KeywordCaseFilter(Filter):
class NameHighlightFilter(Filter):
- """
- Highlight a normal Name (and Name.*) token with a different token type.
+ """Highlight a normal Name (and Name.*) token with a different token type.
Example::
@@ -172,9 +167,9 @@ class NameHighlightFilter(Filter):
class ErrorToken(Exception):
pass
+
class RaiseOnErrorTokenFilter(Filter):
- """
- Raise an exception when the lexer generates an error token.
+ """Raise an exception when the lexer generates an error token.
Options accepted:
@@ -203,8 +198,7 @@ class RaiseOnErrorTokenFilter(Filter):
class VisibleWhitespaceFilter(Filter):
- """
- Convert tabs, newlines and/or spaces to visible characters.
+ """Convert tabs, newlines and/or spaces to visible characters.
Options accepted:
@@ -245,16 +239,16 @@ class VisibleWhitespaceFilter(Filter):
setattr(self, name, (opt and default or ''))
tabsize = get_int_opt(options, 'tabsize', 8)
if self.tabs:
- self.tabs += ' '*(tabsize-1)
+ self.tabs += ' ' * (tabsize - 1)
if self.newlines:
self.newlines += '\n'
self.wstt = get_bool_opt(options, 'wstokentype', True)
def filter(self, lexer, stream):
if self.wstt:
- spaces = self.spaces or ' '
- tabs = self.tabs or '\t'
- newlines = self.newlines or '\n'
+ spaces = self.spaces or u' '
+ tabs = self.tabs or u'\t'
+ newlines = self.newlines or u'\n'
regex = re.compile(r'\s')
def replacefunc(wschar):
if wschar == ' ':
@@ -283,8 +277,7 @@ class VisibleWhitespaceFilter(Filter):
class GobbleFilter(Filter):
- """
- Gobbles source code lines (eats initial characters).
+ """Gobbles source code lines (eats initial characters).
This filter drops the first ``n`` characters off every line of code. This
may be useful when the source code fed to the lexer is indented by a fixed
@@ -305,7 +298,7 @@ class GobbleFilter(Filter):
if left < len(value):
return value[left:], 0
else:
- return '', left - len(value)
+ return u'', left - len(value)
def filter(self, lexer, stream):
n = self.n
@@ -316,16 +309,15 @@ class GobbleFilter(Filter):
(parts[0], left) = self.gobble(parts[0], left)
for i in range(1, len(parts)):
(parts[i], left) = self.gobble(parts[i], n)
- value = '\n'.join(parts)
+ value = u'\n'.join(parts)
if value != '':
yield ttype, value
class TokenMergeFilter(Filter):
- """
- Merges consecutive tokens with the same token type in the output stream of a
- lexer.
+ """Merges consecutive tokens with the same token type in the output
+ stream of a lexer.
.. versionadded:: 1.2
"""
diff --git a/pygments/formatter.py b/pygments/formatter.py
index b16ffee8..86821383 100644
--- a/pygments/formatter.py
+++ b/pygments/formatter.py
@@ -68,10 +68,10 @@ class Formatter(object):
self.full = get_bool_opt(options, 'full', False)
self.title = options.get('title', '')
self.encoding = options.get('encoding', None) or None
- if self.encoding == 'guess':
- # can happen for pygmentize -O encoding=guess
+ if self.encoding in ('guess', 'chardet'):
+ # can happen for e.g. pygmentize -O encoding=guess
self.encoding = 'utf-8'
- self.encoding = options.get('outencoding', None) or self.encoding
+ self.encoding = options.get('outencoding') or self.encoding
self.options = options
def get_style_defs(self, arg=''):
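
With the change above, ``encoding='guess'`` or ``'chardet'`` on a formatter is mapped to UTF-8 (those modes only make sense for input), and ``outencoding`` still takes precedence when given. A small illustration, assuming the HTML formatter:

    from pygments.formatters import HtmlFormatter

    f = HtmlFormatter(encoding='guess')
    print(f.encoding)     # 'utf-8': guess/chardet are input-only modes

    f = HtmlFormatter(encoding='guess', outencoding='latin1')
    print(f.encoding)     # 'latin1': outencoding overrides
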
diff --git a/pygments/formatters/__init__.py b/pygments/formatters/__init__.py
index f0c5dc41..45ceaff6 100644
--- a/pygments/formatters/__init__.py
+++ b/pygments/formatters/__init__.py
@@ -8,63 +8,111 @@
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import os.path
-import fnmatch
+
import re
+import sys
+import types
+import fnmatch
+from os.path import basename
from pygments.formatters._mapping import FORMATTERS
from pygments.plugin import find_plugin_formatters
-from pygments.util import ClassNotFound
-
-ns = globals()
-for fcls in FORMATTERS:
- ns[fcls.__name__] = fcls
-del fcls
+from pygments.util import ClassNotFound, itervalues
__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
- 'get_all_formatters'] + [cls.__name__ for cls in FORMATTERS]
+ 'get_all_formatters'] + list(FORMATTERS)
+
+_formatter_cache = {} # classes by name
+_pattern_cache = {}
+
+
+def _fn_matches(fn, glob):
+    """Return whether the supplied file name fn matches the glob pattern."""
+ if glob not in _pattern_cache:
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
+
+
+def _load_formatters(module_name):
+ """Load a formatter (and all others in the module too)."""
+ mod = __import__(module_name, None, None, ['__all__'])
+ for formatter_name in mod.__all__:
+ cls = getattr(mod, formatter_name)
+ _formatter_cache[cls.name] = cls
+
+def get_all_formatters():
+ """Return a generator for all formatter classes."""
+ # NB: this returns formatter classes, not info like get_all_lexers().
+ for info in itervalues(FORMATTERS):
+ if info[1] not in _formatter_cache:
+ _load_formatters(info[0])
+ yield _formatter_cache[info[1]]
+ for _, formatter in find_plugin_formatters():
+ yield formatter
-_formatter_alias_cache = {}
-_formatter_filename_cache = []
-def _init_formatter_cache():
- if _formatter_alias_cache:
- return
- for cls in get_all_formatters():
- for alias in cls.aliases:
- _formatter_alias_cache[alias] = cls
- for fn in cls.filenames:
- _formatter_filename_cache.append((
- re.compile(fnmatch.translate(fn)), cls))
+def find_formatter_class(alias):
+    """Lookup a formatter by alias.
+
+    Returns None if not found.
+ """
+ for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
+ if alias in aliases:
+ if name not in _formatter_cache:
+ _load_formatters(module_name)
+ return _formatter_cache[name]
+ for _, cls in find_plugin_formatters():
+ if alias in cls.aliases:
+ return cls
-def find_formatter_class(name):
- _init_formatter_cache()
- cls = _formatter_alias_cache.get(name, None)
- return cls
+def get_formatter_by_name(_alias, **options):
+ """Lookup and instantiate a formatter by alias.
-def get_formatter_by_name(name, **options):
- _init_formatter_cache()
- cls = _formatter_alias_cache.get(name, None)
- if not cls:
- raise ClassNotFound("No formatter found for name %r" % name)
+ Raises ClassNotFound if not found.
+ """
+ cls = find_formatter_class(_alias)
+ if cls is None:
+ raise ClassNotFound("No formatter found for name %r" % _alias)
return cls(**options)
def get_formatter_for_filename(fn, **options):
- _init_formatter_cache()
- fn = os.path.basename(fn)
- for pattern, cls in _formatter_filename_cache:
- if pattern.match(fn):
- return cls(**options)
+ """Lookup and instantiate a formatter by filename pattern.
+
+ Raises ClassNotFound if not found.
+ """
+ fn = basename(fn)
+ for modname, name, _, filenames, _ in itervalues(FORMATTERS):
+ for filename in filenames:
+ if _fn_matches(fn, filename):
+ if name not in _formatter_cache:
+ _load_formatters(modname)
+ return _formatter_cache[name](**options)
+ for cls in find_plugin_formatters():
+ for filename in cls.filenames:
+ if _fn_matches(fn, filename):
+ return cls(**options)
raise ClassNotFound("No formatter found for file name %r" % fn)
-def get_all_formatters():
- """Return a generator for all formatters."""
- for formatter in FORMATTERS:
- yield formatter
- for _, formatter in find_plugin_formatters():
- yield formatter
+class _automodule(types.ModuleType):
+ """Automatically import formatters."""
+
+ def __getattr__(self, name):
+ info = FORMATTERS.get(name)
+ if info:
+ _load_formatters(info[0])
+ cls = _formatter_cache[info[1]]
+ setattr(self, name, cls)
+ return cls
+ raise AttributeError(name)
+
+
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
+newmod.__dict__.update(oldmod.__dict__)
+sys.modules[__name__] = newmod
+del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
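
The rewrite above makes formatter classes load lazily: attribute access on the package goes through ``_automodule.__getattr__`` and only imports the module named in ``FORMATTERS`` on first use, and the lookup helpers populate the same cache. A short sketch of the public entry points defined above:

    from pygments.formatters import (get_formatter_by_name,
                                     get_formatter_for_filename,
                                     get_all_formatters)

    fmter = get_formatter_by_name('html', linenos=True)   # lookup by alias
    fmter = get_formatter_for_filename('slides.tex')      # lookup by filename pattern
    print(len(list(get_all_formatters())))                # all formatter classes
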
diff --git a/pygments/formatters/_mapping.py b/pygments/formatters/_mapping.py
index 8b3fc977..d4aeaeb0 100755
--- a/pygments/formatters/_mapping.py
+++ b/pygments/formatters/_mapping.py
@@ -14,45 +14,22 @@
"""
from __future__ import print_function
-try:
- import pygments
-except ImportError:
- # This block makes this mapping work like the lexer one -- not requiring
- # that Pygments already be on your PYTHONPATH.
- import os.path, sys
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
-
-# start
-from pygments.formatters.bbcode import BBCodeFormatter
-from pygments.formatters.html import HtmlFormatter
-from pygments.formatters.img import BmpImageFormatter
-from pygments.formatters.img import GifImageFormatter
-from pygments.formatters.img import ImageFormatter
-from pygments.formatters.img import JpgImageFormatter
-from pygments.formatters.latex import LatexFormatter
-from pygments.formatters.other import NullFormatter
-from pygments.formatters.other import RawTokenFormatter
-from pygments.formatters.other import TestcaseFormatter
-from pygments.formatters.rtf import RtfFormatter
-from pygments.formatters.svg import SvgFormatter
-from pygments.formatters.terminal import TerminalFormatter
-from pygments.formatters.terminal256 import Terminal256Formatter
FORMATTERS = {
- BBCodeFormatter: ('BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
- BmpImageFormatter: ('img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- GifImageFormatter: ('img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- HtmlFormatter: ('HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
- ImageFormatter: ('img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- JpgImageFormatter: ('img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
- LatexFormatter: ('LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
- NullFormatter: ('Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
- RawTokenFormatter: ('Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
- RtfFormatter: ('RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft\xc2\xae Word\xc2\xae documents.'),
- SvgFormatter: ('SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
- Terminal256Formatter: ('Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
- TerminalFormatter: ('Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
- TestcaseFormatter: ('Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
+ 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'),
+ 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ``<span>`` tags within a ``<pre>`` tag, wrapped in a ``<div>`` tag. The ``<div>``'s CSS class can be set by the `cssclass` option."),
+ 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'),
+ 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'),
+ 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'),
+ 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'),
+ 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft\xc2\xae Word\xc2\xae documents.'),
+ 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. Each line of code is a ``<text>`` element with explicit ``x`` and ``y`` coordinates containing ``<tspan>`` elements with the individual token styles.'),
+ 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'),
+ 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.')
}
if __name__ == '__main__':
@@ -65,39 +42,35 @@ if __name__ == '__main__':
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
from pygments.util import docstring_headline
- for filename in os.listdir('.'):
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.formatters.%s' % filename[:-3]
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for formatter_name in module.__all__:
- imports.append((module_name, formatter_name))
- formatter = getattr(module, formatter_name)
- found_formatters.append(
- '%s: %r' % (formatter_name,
- (formatter.name,
- tuple(formatter.aliases),
- tuple(formatter.filenames),
- docstring_headline(formatter))))
- # sort them, that should make the diff files for svn smaller
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.formatters%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for formatter_name in module.__all__:
+ formatter = getattr(module, formatter_name)
+ found_formatters.append(
+ '%r: %r' % (formatter_name,
+ (module_name,
+ formatter.name,
+ tuple(formatter.aliases),
+ tuple(formatter.filenames),
+ docstring_headline(formatter))))
+ # sort them to make the diff minimal
found_formatters.sort()
- imports.sort()
# extract useful sourcecode from this file
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
- header = content[:content.find('# start')]
+ with open(__file__) as fp:
+ content = fp.read()
+ header = content[:content.find('FORMATTERS = {')]
footer = content[content.find("if __name__ == '__main__':"):]
# write new file
- f = open(__file__, 'w')
- f.write(header)
- f.write('# start\n')
- f.write('\n'.join(['from %s import %s' % imp for imp in imports]))
- f.write('\n\n')
- f.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
- f.write(footer)
- f.close()
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+ fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters))
+ fp.write(footer)
+
+    print('=== %d formatters processed.' % len(found_formatters))
diff --git a/pygments/formatters/html.py b/pygments/formatters/html.py
index 970e595b..2c6de4ca 100644
--- a/pygments/formatters/html.py
+++ b/pygments/formatters/html.py
@@ -628,24 +628,24 @@ class HtmlFormatter(Formatter):
style = 'background-color: #ffffc0; padding: 0 5px 0 5px'
else:
style = 'background-color: #f0f0f0; padding: 0 5px 0 5px'
- yield 1, '<span style="%s">%*s</span> ' % (
+ yield 1, '<span style="%s">%*s </span>' % (
style, mw, (num%st and ' ' or num)) + line
num += 1
else:
for t, line in lines:
yield 1, ('<span style="background-color: #f0f0f0; '
- 'padding: 0 5px 0 5px">%*s</span> ' % (
+ 'padding: 0 5px 0 5px">%*s </span>' % (
mw, (num%st and ' ' or num)) + line)
num += 1
elif sp:
for t, line in lines:
- yield 1, '<span class="lineno%s">%*s</span> ' % (
+ yield 1, '<span class="lineno%s">%*s </span>' % (
num%sp == 0 and ' special' or '', mw,
(num%st and ' ' or num)) + line
num += 1
else:
for t, line in lines:
- yield 1, '<span class="lineno">%*s</span> ' % (
+ yield 1, '<span class="lineno">%*s </span>' % (
mw, (num%st and ' ' or num)) + line
num += 1
diff --git a/pygments/formatters/img.py b/pygments/formatters/img.py
index 8e2b5f9e..db5bee3b 100644
--- a/pygments/formatters/img.py
+++ b/pygments/formatters/img.py
@@ -295,6 +295,7 @@ class ImageFormatter(Formatter):
raise PilNotAvailable(
'Python Imaging Library is required for this formatter')
Formatter.__init__(self, **options)
+ self.encoding = 'latin1' # let pygments.format() do the right thing
# Read the style
self.styles = dict(self.style)
if self.style.background_color is None:
@@ -315,20 +316,20 @@ class ImageFormatter(Formatter):
self.line_number_fg = options.get('line_number_fg', '#886')
self.line_number_bg = options.get('line_number_bg', '#eed')
self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
+ 'line_number_chars', 2)
self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
+ 'line_number_bold', False)
self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
+ 'line_number_italic', False)
self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
self.line_numbers = get_bool_opt(options, 'line_numbers', True)
self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
+ 'line_number_separator', True)
self.line_number_step = get_int_opt(options, 'line_number_step', 1)
self.line_number_start = get_int_opt(options, 'line_number_start', 1)
if self.line_numbers:
self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
+ self.line_number_pad * 2)
else:
self.line_number_width = 0
self.hl_lines = []
@@ -437,7 +438,7 @@ class ImageFormatter(Formatter):
# quite complex.
value = value.expandtabs(4)
lines = value.splitlines(True)
- #print lines
+ # print lines
for i, line in enumerate(lines):
temp = line.rstrip('\n')
if temp:
@@ -478,9 +479,8 @@ class ImageFormatter(Formatter):
draw = ImageDraw.Draw(im)
recth = im.size[-1]
rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0),
- (rectw, recth)],
- fill=self.line_number_bg)
+ draw.rectangle([(0, 0), (rectw, recth)],
+ fill=self.line_number_bg)
draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
del draw
diff --git a/pygments/formatters/latex.py b/pygments/formatters/latex.py
index 352684b0..4b22215f 100644
--- a/pygments/formatters/latex.py
+++ b/pygments/formatters/latex.py
@@ -236,6 +236,13 @@ class LatexFormatter(Formatter):
set. (default: ``''``).
.. versionadded:: 2.0
+
+ `envname`
+ Allows you to pick an alternative environment name replacing Verbatim.
+ The alternate environment still has to support Verbatim's option syntax.
+ (default: ``'Verbatim'``).
+
+ .. versionadded:: 2.0
"""
name = 'LaTeX'
aliases = ['latex', 'tex']
@@ -254,16 +261,15 @@ class LatexFormatter(Formatter):
self.texcomments = get_bool_opt(options, 'texcomments', False)
self.mathescape = get_bool_opt(options, 'mathescape', False)
self.escapeinside = options.get('escapeinside', '')
-
if len(self.escapeinside) == 2:
self.left = self.escapeinside[0]
self.right = self.escapeinside[1]
else:
self.escapeinside = ''
+ self.envname = options.get('envname', u'Verbatim')
self._create_stylesheet()
-
def _create_stylesheet(self):
t2n = self.ttype2name = {Token: ''}
c2d = self.cmd2def = {}
@@ -271,7 +277,7 @@ class LatexFormatter(Formatter):
def rgbcolor(col):
if col:
- return ','.join(['%.2f' %(int(col[i] + col[i + 1], 16) / 255.0)
+ return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
for i in (0, 2, 4)])
else:
return '1,1,1'
@@ -331,7 +337,7 @@ class LatexFormatter(Formatter):
realoutfile = outfile
outfile = StringIO()
- outfile.write(u'\\begin{Verbatim}[commandchars=\\\\\\{\\}')
+ outfile.write(u'\\begin{' + self.envname + u'}[commandchars=\\\\\\{\\}')
if self.linenos:
start, step = self.linenostart, self.linenostep
outfile.write(u',numbers=left' +
@@ -371,9 +377,9 @@ class LatexFormatter(Formatter):
text = value
value = ''
while len(text) > 0:
- a,sep1,text = text.partition(self.left)
+ a, sep1, text = text.partition(self.left)
if len(sep1) > 0:
- b,sep2,text = text.partition(self.right)
+ b, sep2, text = text.partition(self.right)
if len(sep2) > 0:
value += escape_tex(a, self.commandprefix) + b
else:
@@ -404,7 +410,7 @@ class LatexFormatter(Formatter):
else:
outfile.write(value)
- outfile.write(u'\\end{Verbatim}\n')
+ outfile.write(u'\\end{' + self.envname + u'}\n')
if self.full:
realoutfile.write(DOC_TEMPLATE %
@@ -436,6 +442,7 @@ class LatexEmbeddedLexer(Lexer):
def get_tokens_unprocessed(self, text):
buf = ''
+ idx = 0
for i, t, v in self.lang.get_tokens_unprocessed(text):
if t in Token.Comment or t in Token.String:
if buf:
@@ -467,4 +474,3 @@ class LatexEmbeddedLexer(Lexer):
yield index, Token.Error, sep1
index += len(sep1)
text = b
-
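
The new ``envname`` option documented above lets the generated LaTeX use a custom Verbatim-compatible environment instead of ``Verbatim`` itself. A hedged sketch; ``MyVerbatim`` is a made-up environment name that the user would have to define in their preamble:

    from pygments import highlight
    from pygments.lexers import PythonLexer
    from pygments.formatters import LatexFormatter

    tex = highlight('print("hi")', PythonLexer(),
                    LatexFormatter(envname='MyVerbatim'))
    # first line is now \begin{MyVerbatim}[commandchars=...] instead of \begin{Verbatim}[...]
    print(tex.splitlines()[0])
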
diff --git a/pygments/formatters/other.py b/pygments/formatters/other.py
index c8269b19..d8e5f4f7 100644
--- a/pygments/formatters/other.py
+++ b/pygments/formatters/other.py
@@ -62,9 +62,8 @@ class RawTokenFormatter(Formatter):
def __init__(self, **options):
Formatter.__init__(self, **options)
- if self.encoding:
- raise OptionError('the raw formatter does not support the '
- 'encoding option')
+ # We ignore self.encoding if it is set, since it gets set for lexer
+ # and formatter if given with -Oencoding on the command line.
self.encoding = 'ascii' # let pygments.format() do the right thing
self.compress = get_choice_opt(options, 'compress',
['', 'none', 'gz', 'bz2'], '')
diff --git a/pygments/formatters/rtf.py b/pygments/formatters/rtf.py
index cf65a927..abecd484 100644
--- a/pygments/formatters/rtf.py
+++ b/pygments/formatters/rtf.py
@@ -48,8 +48,6 @@ class RtfFormatter(Formatter):
aliases = ['rtf']
filenames = ['*.rtf']
- unicodeoutput = False
-
def __init__(self, **options):
"""
Additional options accepted:
@@ -67,14 +65,14 @@ class RtfFormatter(Formatter):
self.fontsize = get_int_opt(options, 'fontsize', 0)
def _escape(self, text):
- return text.replace('\\', '\\\\') \
- .replace('{', '\\{') \
- .replace('}', '\\}')
+ return text.replace(u'\\', u'\\\\') \
+ .replace(u'{', u'\\{') \
+ .replace(u'}', u'\\}')
def _escape_text(self, text):
        # empty strings, should give a small performance improvement
if not text:
- return ''
+ return u''
# escape text
text = self._escape(text)
@@ -87,22 +85,21 @@ class RtfFormatter(Formatter):
buf.append(str(c))
elif (2**7) <= cn < (2**16):
# single unicode escape sequence
- buf.append(r'{\u%d}' % cn)
+ buf.append(u'{\\u%d}' % cn)
elif (2**16) <= cn:
# RTF limits unicode to 16 bits.
# Force surrogate pairs
- h,l = _surrogatepair(cn)
- buf.append(r'{\u%d}{\u%d}' % (h,l))
+ buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn))
- return ''.join(buf).replace('\n', '\\par\n')
+ return u''.join(buf).replace(u'\n', u'\\par\n')
def format_unencoded(self, tokensource, outfile):
# rtf 1.8 header
- outfile.write(r'{\rtf1\ansi\uc0\deff0'
- r'{\fonttbl{\f0\fmodern\fprq1\fcharset0%s;}}'
- r'{\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
+ outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0'
+ u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ u'{\\colortbl;' % (self.fontface and
+ u' ' + self._escape(self.fontface) or
+ u''))
# convert colors and save them in a mapping to access them later.
color_mapping = {}
@@ -111,15 +108,15 @@ class RtfFormatter(Formatter):
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
- outfile.write(r'\red%d\green%d\blue%d;' % (
+ outfile.write(u'\\red%d\\green%d\\blue%d;' % (
int(color[0:2], 16),
int(color[2:4], 16),
int(color[4:6], 16)
))
offset += 1
- outfile.write(r'}\f0 ')
+ outfile.write(u'}\\f0 ')
if self.fontsize:
- outfile.write(r'\fs%d' % (self.fontsize))
+ outfile.write(u'\\fs%d' % (self.fontsize))
# highlight stream
for ttype, value in tokensource:
@@ -128,23 +125,23 @@ class RtfFormatter(Formatter):
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
- buf.append(r'\cb%d' % color_mapping[style['bgcolor']])
+ buf.append(u'\\cb%d' % color_mapping[style['bgcolor']])
if style['color']:
- buf.append(r'\cf%d' % color_mapping[style['color']])
+ buf.append(u'\\cf%d' % color_mapping[style['color']])
if style['bold']:
- buf.append(r'\b')
+ buf.append(u'\\b')
if style['italic']:
- buf.append(r'\i')
+ buf.append(u'\\i')
if style['underline']:
- buf.append(r'\ul')
+ buf.append(u'\\ul')
if style['border']:
- buf.append(r'\chbrdr\chcfpat%d' %
+ buf.append(u'\\chbrdr\\chcfpat%d' %
color_mapping[style['border']])
- start = ''.join(buf)
+ start = u''.join(buf)
if start:
- outfile.write('{%s ' % start)
+ outfile.write(u'{%s ' % start)
outfile.write(self._escape_text(value))
if start:
- outfile.write('}')
+ outfile.write(u'}')
- outfile.write('}')
+ outfile.write(u'}')
diff --git a/pygments/formatters/svg.py b/pygments/formatters/svg.py
index 07636943..4e534fa2 100644
--- a/pygments/formatters/svg.py
+++ b/pygments/formatters/svg.py
@@ -78,7 +78,6 @@ class SvgFormatter(Formatter):
filenames = ['*.svg']
def __init__(self, **options):
- # XXX outencoding
Formatter.__init__(self, **options)
self.nowrap = get_bool_opt(options, 'nowrap', False)
self.fontfamily = options.get('fontfamily', 'monospace')
diff --git a/pygments/lexer.py b/pygments/lexer.py
index 0ede7927..5b3ad358 100644
--- a/pygments/lexer.py
+++ b/pygments/lexer.py
@@ -8,17 +8,24 @@
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re, itertools
+
+from __future__ import print_function
+
+import re
+import sys
+import time
+import itertools
from pygments.filter import apply_filters, Filter
from pygments.filters import get_filter_by_name
from pygments.token import Error, Text, Other, _TokenType
from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- make_analysator, text_type, add_metaclass, iteritems
-
+ make_analysator, text_type, add_metaclass, iteritems, Future, guess_decode
+from pygments.regexopt import regex_opt
__all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer',
- 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', 'default']
+ 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this',
+ 'default', 'words']
_encoding_map = [(b'\xef\xbb\xbf', 'utf-8'),
@@ -65,8 +72,11 @@ class Lexer(object):
If given, must be an encoding name. This encoding will be used to
convert the input string to Unicode, if it is not already a Unicode
string (default: ``'latin1'``).
- Can also be ``'guess'`` to use a simple UTF-8 / Latin1 detection, or
- ``'chardet'`` to use the chardet library, if it is installed.
+ Can also be ``'guess'`` to use a simple UTF-8 / Locale / Latin1
+ detection, or ``'chardet'`` to use the chardet library, if it is
+ installed.
+ ``inencoding``
+ Overrides the ``encoding`` if given.
"""
#: Name of the lexer
@@ -94,7 +104,7 @@ class Lexer(object):
self.ensurenl = get_bool_opt(options, 'ensurenl', True)
self.tabsize = get_int_opt(options, 'tabsize', 0)
self.encoding = options.get('encoding', 'latin1')
- # self.encoding = options.get('inencoding', None) or self.encoding
+ self.encoding = options.get('inencoding') or self.encoding
self.filters = []
for filter_ in get_list_opt(options, 'filters', ()):
self.add_filter(filter_)
@@ -139,12 +149,7 @@ class Lexer(object):
"""
if not isinstance(text, text_type):
if self.encoding == 'guess':
- try:
- text = text.decode('utf-8')
- if text.startswith(u'\ufeff'):
- text = text[len(u'\ufeff'):]
- except UnicodeDecodeError:
- text = text.decode('latin1')
+ text, _ = guess_decode(text)
elif self.encoding == 'chardet':
try:
import chardet
@@ -160,7 +165,7 @@ class Lexer(object):
break
# no BOM found, so use chardet
if decoded is None:
- enc = chardet.detect(text[:1024]) # Guess using first 1KB
+ enc = chardet.detect(text[:1024]) # Guess using first 1KB
decoded = text.decode(enc.get('encoding') or 'utf-8',
'replace')
text = decoded
@@ -237,7 +242,7 @@ class DelegatingLexer(Lexer):
self.root_lexer.get_tokens_unprocessed(buffered))
-#-------------------------------------------------------------------------------
+# ------------------------------------------------------------------------------
# RegexLexer and ExtendedRegexLexer
#
@@ -387,26 +392,46 @@ class default:
"""
Indicates a state or state action (e.g. #pop) to apply.
For example default('#pop') is equivalent to ('', Token, '#pop')
- Note that state tuples may be used as well
+ Note that state tuples may be used as well.
+
+ .. versionadded:: 2.0
"""
def __init__(self, state):
self.state = state
+class words(Future):
+ """
+ Indicates a list of literal words that is transformed into an optimized
+ regex that matches any of the words.
+
+ .. versionadded:: 2.0
+ """
+ def __init__(self, words, prefix='', suffix=''):
+ self.words = words
+ self.prefix = prefix
+ self.suffix = suffix
+
+ def get(self):
+ return regex_opt(self.words, prefix=self.prefix, suffix=self.suffix)
+
+
class RegexLexerMeta(LexerMeta):
"""
Metaclass for RegexLexer, creates the self._tokens attribute from
self.tokens on the first instantiation.
"""
- def _process_regex(cls, regex, rflags):
+ def _process_regex(cls, regex, rflags, state):
"""Preprocess the regular expression component of a token definition."""
+ if isinstance(regex, Future):
+ regex = regex.get()
return re.compile(regex, rflags).match
def _process_token(cls, token):
"""Preprocess the token component of a token definition."""
assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
+ 'token type must be simple type or callable, not %r' % (token,)
return token
def _process_new_state(cls, new_state, unprocessed, processed):
@@ -439,7 +464,7 @@ class RegexLexerMeta(LexerMeta):
for istate in new_state:
assert (istate in unprocessed or
istate in ('#pop', '#push')), \
- 'unknown new state ' + istate
+ 'unknown new state ' + istate
return new_state
else:
assert False, 'unknown new state def %r' % new_state
@@ -470,7 +495,7 @@ class RegexLexerMeta(LexerMeta):
assert type(tdef) is tuple, "wrong rule def %r" % tdef
try:
- rex = cls._process_regex(tdef[0], rflags)
+ rex = cls._process_regex(tdef[0], rflags, state)
except Exception as err:
raise ValueError("uncompilable regex %r in state %r of %r: %s" %
(tdef[0], state, cls, err))
@@ -645,7 +670,7 @@ class LexerContext(object):
def __init__(self, text, pos, stack=None, end=None):
self.text = text
self.pos = pos
- self.end = end or len(text) # end=0 not supported ;-)
+ self.end = end or len(text) # end=0 not supported ;-)
self.stack = stack or ['root']
def __repr__(self):
@@ -783,3 +808,56 @@ def do_insertions(insertions, tokens):
except StopIteration:
insleft = False
break # not strictly necessary
+
+
+class ProfilingRegexLexerMeta(RegexLexerMeta):
+ """Metaclass for ProfilingRegexLexer, collects regex timing info."""
+
+ def _process_regex(cls, regex, rflags, state):
+ if isinstance(regex, words):
+ rex = regex_opt(regex.words, prefix=regex.prefix,
+ suffix=regex.suffix)
+ else:
+ rex = regex
+ compiled = re.compile(rex, rflags)
+
+ def match_func(text, pos, endpos=sys.maxsize):
+ info = cls._prof_data[-1].setdefault((state, rex), [0, 0.0])
+ t0 = time.time()
+ res = compiled.match(text, pos, endpos)
+ t1 = time.time()
+ info[0] += 1
+ info[1] += t1 - t0
+ return res
+ return match_func
+
+
+@add_metaclass(ProfilingRegexLexerMeta)
+class ProfilingRegexLexer(RegexLexer):
+ """Drop-in replacement for RegexLexer that does profiling of its regexes."""
+
+ _prof_data = []
+ _prof_sort_index = 4 # defaults to time per call
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ # this needs to be a stack, since using(this) will produce nested calls
+ self.__class__._prof_data.append({})
+ for tok in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ yield tok
+ rawdata = self.__class__._prof_data.pop()
+ data = sorted(((s, repr(r).strip('u\'').replace('\\\\', '\\')[:65],
+ n, 1000 * t, 1000 * t / n)
+ for ((s, r), (n, t)) in rawdata.items()),
+ key=lambda x: x[self._prof_sort_index],
+ reverse=True)
+ sum_total = sum(x[3] for x in data)
+
+ print()
+ print('Profiling result for %s lexing %d chars in %.3f ms' %
+ (self.__class__.__name__, len(text), sum_total))
+ print('=' * 110)
+ print('%-20s %-64s ncalls tottime percall' % ('state', 'regex'))
+ print('-' * 110)
+ for d in data:
+ print('%-20s %-65s %5d %8.4f %8.4f' % d)
+ print('=' * 110)
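
Two additions above are easy to miss in the hunks: ``words()`` turns a literal word list into one optimized regex (via ``regex_opt``), and ``ProfilingRegexLexer`` is a drop-in base class that prints per-regex timing after lexing. A minimal sketch of ``words()`` in a token definition (the toy lexer itself is illustrative, not from the patch):

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text

    class ToyLexer(RegexLexer):
        name = 'Toy'
        tokens = {
            'root': [
                # words() compiles to a single optimized regex, roughly (?:else|if|while)\b
                (words(('if', 'else', 'while'), suffix=r'\b'), Keyword),
                (r'\w+', Name),
                (r'\s+', Text),
            ],
        }

    print(list(ToyLexer().get_tokens('if x else y')))
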
diff --git a/pygments/lexers/__init__.py b/pygments/lexers/__init__.py
index caedd479..578a9101 100644
--- a/pygments/lexers/__init__.py
+++ b/pygments/lexers/__init__.py
@@ -9,10 +9,10 @@
:license: BSD, see LICENSE for details.
"""
+import re
import sys
import types
import fnmatch
-import re
from os.path import basename
from pygments.lexers._mapping import LEXERS
@@ -27,23 +27,17 @@ __all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class',
_lexer_cache = {}
_pattern_cache = {}
+
def _fn_matches(fn, glob):
- """
- Return whether the supplied file name fn matches pattern filename
- """
+    """Return whether the supplied file name fn matches the glob pattern."""
if glob not in _pattern_cache:
- pattern = re.compile(fnmatch.translate(glob))
- _pattern_cache[glob] = pattern
- else:
- pattern = _pattern_cache[glob]
-
- return pattern.match(fn)
+ pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
+ return pattern.match(fn)
+ return _pattern_cache[glob].match(fn)
def _load_lexers(module_name):
- """
- Load a lexer (and all others in the module too).
- """
+ """Load a lexer (and all others in the module too)."""
mod = __import__(module_name, None, None, ['__all__'])
for lexer_name in mod.__all__:
cls = getattr(mod, lexer_name)
@@ -51,8 +45,7 @@ def _load_lexers(module_name):
def get_all_lexers():
- """
- Return a generator of tuples in the form ``(name, aliases,
+ """Return a generator of tuples in the form ``(name, aliases,
    filenames, mimetypes)`` of all known lexers.
"""
for item in itervalues(LEXERS):
@@ -62,8 +55,9 @@ def get_all_lexers():
def find_lexer_class(name):
- """
- Lookup a lexer class by name. Return None if not found.
+ """Lookup a lexer class by name.
+
+ Return None if not found.
"""
if name in _lexer_cache:
return _lexer_cache[name]
@@ -79,8 +73,9 @@ def find_lexer_class(name):
def get_lexer_by_name(_alias, **options):
- """
- Get a lexer by an alias.
+ """Get a lexer by an alias.
+
+ Raises ClassNotFound if not found.
"""
if not _alias:
raise ClassNotFound('no lexer for alias %r found' % _alias)
@@ -98,11 +93,13 @@ def get_lexer_by_name(_alias, **options):
raise ClassNotFound('no lexer for alias %r found' % _alias)
-def get_lexer_for_filename(_fn, code=None, **options):
- """
- Get a lexer for a filename. If multiple lexers match the filename
- pattern, use ``analyse_text()`` to figure out which one is more
- appropriate.
+def find_lexer_class_for_filename(_fn, code=None):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Returns None if not found.
"""
matches = []
fn = basename(_fn)
@@ -135,14 +132,28 @@ def get_lexer_for_filename(_fn, code=None, **options):
if matches:
matches.sort(key=get_rating)
- #print "Possible lexers, after sort:", matches
- return matches[-1][0](**options)
- raise ClassNotFound('no lexer for filename %r found' % _fn)
+ # print "Possible lexers, after sort:", matches
+ return matches[-1][0]
-def get_lexer_for_mimetype(_mime, **options):
+def get_lexer_for_filename(_fn, code=None, **options):
+ """Get a lexer for a filename.
+
+ If multiple lexers match the filename pattern, use ``analyse_text()`` to
+ figure out which one is more appropriate.
+
+ Raises ClassNotFound if not found.
"""
- Get a lexer for a mimetype.
+ res = find_lexer_class_for_filename(_fn, code)
+ if not res:
+ raise ClassNotFound('no lexer for filename %r found' % _fn)
+ return res(**options)
+
+
+def get_lexer_for_mimetype(_mime, **options):
+ """Get a lexer for a mimetype.
+
+ Raises ClassNotFound if not found.
"""
for modname, name, _, _, mimetypes in itervalues(LEXERS):
if _mime in mimetypes:
@@ -155,17 +166,16 @@ def get_lexer_for_mimetype(_mime, **options):
raise ClassNotFound('no lexer for mimetype %r found' % _mime)
-def _iter_lexerclasses():
- """
- Return an iterator over all lexer classes.
- """
+def _iter_lexerclasses(plugins=True):
+ """Return an iterator over all lexer classes."""
for key in sorted(LEXERS):
module_name, name = LEXERS[key][:2]
if name not in _lexer_cache:
_load_lexers(module_name)
yield _lexer_cache[name]
- for lexer in find_plugin_lexers():
- yield lexer
+ if plugins:
+ for lexer in find_plugin_lexers():
+ yield lexer
def guess_lexer_for_filename(_fn, _text, **options):
@@ -218,9 +228,7 @@ def guess_lexer_for_filename(_fn, _text, **options):
def guess_lexer(_text, **options):
- """
- Guess a lexer by strong distinctions in the text (eg, shebang).
- """
+ """Guess a lexer by strong distinctions in the text (eg, shebang)."""
# try to get a vim modeline first
ft = get_filetype_from_buffer(_text)
@@ -256,8 +264,8 @@ class _automodule(types.ModuleType):
raise AttributeError(name)
-oldmod = sys.modules['pygments.lexers']
-newmod = _automodule('pygments.lexers')
+oldmod = sys.modules[__name__]
+newmod = _automodule(__name__)
newmod.__dict__.update(oldmod.__dict__)
-sys.modules['pygments.lexers'] = newmod
+sys.modules[__name__] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
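
The refactor above splits filename handling into ``find_lexer_class_for_filename`` (returns a class or None) and ``get_lexer_for_filename`` (instantiates or raises ``ClassNotFound``), alongside the existing guessing helpers. A short usage sketch:

    from pygments.lexers import (get_lexer_for_filename,
                                 find_lexer_class_for_filename, guess_lexer)
    from pygments.util import ClassNotFound

    print(find_lexer_class_for_filename('setup.py'))   # lexer class or None

    try:
        lexer = get_lexer_for_filename('notes.unknown')
    except ClassNotFound:
        # fall back to content-based guessing (shebang, modeline, analyse_text)
        lexer = guess_lexer('#!/usr/bin/env python\nprint(42)\n')
    print(lexer)
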
diff --git a/pygments/lexers/_asybuiltins.py b/pygments/lexers/_asy_builtins.py
index 5472cb63..2dcd60d7 100644
--- a/pygments/lexers/_asybuiltins.py
+++ b/pygments/lexers/_asy_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._asybuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._asy_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the asy-function names and asy-variable names of
Asymptote.
@@ -14,7 +14,7 @@
:license: BSD, see LICENSE for details.
"""
-ASYFUNCNAME = set([
+ASYFUNCNAME = set((
'AND',
'Arc',
'ArcArrow',
@@ -1038,9 +1038,9 @@ ASYFUNCNAME = set([
'ztick',
'ztick3',
'ztrans'
-])
+))
-ASYVARNAME = set([
+ASYVARNAME = set((
'AliceBlue',
'Align',
'Allow',
@@ -1642,4 +1642,4 @@ ASYVARNAME = set([
'ylabelwidth',
'zerotickfuzz',
'zerowinding'
-])
+))
diff --git a/pygments/lexers/_clbuiltins.py b/pygments/lexers/_cl_builtins.py
index 3f9adf2f..9ed13b4a 100644
--- a/pygments/lexers/_clbuiltins.py
+++ b/pygments/lexers/_cl_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._clbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._cl_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ANSI Common Lisp builtins.
@@ -9,7 +9,7 @@
:license: BSD, see LICENSE for details.
"""
-BUILTIN_FUNCTIONS = set([ # 638 functions
+BUILTIN_FUNCTIONS = set(( # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
@@ -157,17 +157,17 @@ BUILTIN_FUNCTIONS = set([ # 638 functions
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
-])
+))
-SPECIAL_FORMS = set([
+SPECIAL_FORMS = set((
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
-])
+))
-MACROS = set([
+MACROS = set((
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
@@ -188,19 +188,19 @@ MACROS = set([
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
-])
+))
-LAMBDA_LIST_KEYWORDS = set([
+LAMBDA_LIST_KEYWORDS = set((
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
-])
+))
-DECLARATIONS = set([
+DECLARATIONS = set((
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
-])
+))
-BUILTIN_TYPES = set([
+BUILTIN_TYPES = set((
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
@@ -217,9 +217,9 @@ BUILTIN_TYPES = set([
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
-])
+))
-BUILTIN_CLASSES = set([
+BUILTIN_CLASSES = set((
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
@@ -229,4 +229,4 @@ BUILTIN_CLASSES = set([
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
-])
+))
diff --git a/pygments/lexers/_cocoa_builtins.py b/pygments/lexers/_cocoa_builtins.py
new file mode 100644
index 00000000..024bfc01
--- /dev/null
+++ b/pygments/lexers/_cocoa_builtins.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._cocoa_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+    This file defines a set of types used across the Cocoa frameworks from Apple:
+    a list of @interfaces, @protocols and some others (structs, unions).
+
+    The file may also be used as a standalone generator for the lists above.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+COCOA_INTERFACES = set(['UITableViewCell', 'HKCorrelationQuery', 'NSURLSessionDataTask', 'PHFetchOptions', 'NSLinguisticTagger', 'NSStream', 'AVAudioUnitDelay', 'GCMotion', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'AVAudioEnvironmentDistanceAttenuationParameters', 'HKStatisticsCollection', 'SCNPlane', 'CBPeer', 'JSContext', 'SCNTransaction', 'SCNTorus', 'AVAudioUnitEffect', 'UICollectionReusableView', 'MTLSamplerDescriptor', 'AVAssetReaderSampleReferenceOutput', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'SKRange', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UIVisualEffectView', 'CIWarpKernel', 'PKObject', 'MKRoute', 'MPVolumeView', 'UIPrintInfo', 'SCNText', 'ADClient', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'WKScriptMessage', 'AVMIDIPlayer', 'PHCollectionListChangeRequest', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'SKPaymentTransaction', 'NEOnDemandRuleConnect', 'NSShadow', 'SCNView', 'NSURLSessionConfiguration', 'MTLVertexAttributeDescriptor', 'CBCharacteristic', 'HKQuantityType', 'CKLocationSortDescriptor', 'NEVPNIKEv2SecurityAssociationParameters', 'CMStepCounter', 'NSNetService', 'AVAssetWriterInputMetadataAdaptor', 'UICollectionView', 'UIViewPrintFormatter', 'SCNLevelOfDetail', 'CAShapeLayer', 'MCPeerID', 'MPRatingCommand', 'WKNavigation', 'NSDictionary', 'NSFileVersion', 'CMGyroData', 'AVAudioUnitDistortion', 'CKFetchRecordsOperation', 'SKPhysicsJointSpring', 'SCNHitTestResult', 'AVAudioTime', 'CIFilter', 'UIView', 'SCNConstraint', 'CAPropertyAnimation', 'MKMapItem', 'MPRemoteCommandCenter', 'UICollectionViewFlowLayoutInvalidationContext', 'UIInputViewController', 'PKPass', 'SCNPhysicsBehavior', 'MTLRenderPassColorAttachmentDescriptor', 'MKPolygonRenderer', 'CKNotification', 'JSValue', 'PHCollectionList', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'MPFeedbackCommand', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'NSLengthFormatter', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'SCNPyramid', 'PHAssetCollection', 'MKMapView', 'HMHomeManager', 'CATransition', 'MTLCompileOptions', 'UIVibrancyEffect', 'CLCircularRegion', 'MKTileOverlay', 'SCNShape', 'ACAccountCredential', 'SKPhysicsJointLimit', 'MKMapSnapshotter', 'AVMediaSelectionGroup', 'NSIndexSet', 'CBPeripheralManager', 'CKRecordZone', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'AVAudioOutputNode', 'MTLVertexAttributeDescriptorArray', 'MKETAResponse', 'SKTransition', 'SSReadingList', 'HKSourceQuery', 'UITableViewRowAction', 'UITableView', 'SCNParticlePropertyController', 'AVCaptureStillImageOutput', 'GCController', 'AVAudioPlayerNode', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NEOnDemandRuleEvaluateConnection', 'SKEffectNode', 'HKQuantity', 'GCControllerElement', 'AVPlayerItemAccessLogEvent', 'SCNBox', 'NSExtensionContext', 'MKOverlayRenderer', 'SCNPhysicsVehicle', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'CKQuery', 'AVAudioMixerNode', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'NSBatchUpdateRequest', 'PHContentEditingOutput', 'PHObjectChangeDetails', 'MPMoviePlayerController', 'AVAudioFormat', 'HMTrigger', 'MTLRenderPassDepthAttachmentDescriptor', 'SCNRenderer', 'GKScore', 'UISplitViewController', 'HKSource', 'NSURLConnection', 'ABUnknownPersonViewController', 'SCNTechnique', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 
'NSSimpleCString', 'AVAudioPCMBuffer', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'SKProductsRequest', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SCNFloor', 'NSSortDescriptor', 'CKFetchNotificationChangesOperation', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVAudioUnitGenerator', 'WKBackForwardList', 'SKMutableTexture', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'MPRatingCommandEvent', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'MPRemoteCommand', 'AVCaptureSession', 'UIStepper', 'UIRefreshControl', 'NEEvaluateConnectionRule', 'CKModifyRecordsOperation', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UILexiconEntry', 'CMMotionActivity', 'SKAction', 'SKShader', 'AVPlayerItemOutput', 'MTLRenderPassAttachmentDescriptor', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'CAInterAppAudioTransportView', 'SKMutablePayment', 'UIImage', 'PHCachingImageManager', 'SCNTransformConstraint', 'UIColor', 'SCNGeometrySource', 'AVCaptureAutoExposureBracketedStillImageSettings', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'AVAssetReaderOutputMetadataAdaptor', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'SCNCamera', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'HMServiceGroup', 'UIPasteboard', 'PHContentEditingInput', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSEnergyFormatter', 'SLRequest', 'HMCharacteristic', 'AVPlayerLayer', 'MTLRenderPassDescriptor', 'SKPayment', 'NSPointerArray', 'AVAudioMix', 'SCNLight', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'HKCategorySample', 'AVAudioEnvironmentReverbParameters', 'SCNMorpher', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'HMActionSet', 'CTSubscriber', 'PHAssetChangeRequest', 'NSPersistentStoreRequest', 'UITabBarController', 'HKQuantitySample', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UIPresentationController', 'UINavigationItem', 'PHFetchResultChangeDetails', 'PHImageManager', 'AVCaptureManualExposureBracketedStillImageSettings', 'UIStoryboardPopoverSegue', 'SCNLookAtConstraint', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'NEOnDemandRuleDisconnect', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'NSAssertionHandler', 'MKReverseGeocoder', 'GCControllerAxisInput', 'NSArray', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVMutableVideoCompositionInstruction', 'AVAssetReaderOutput', 'EAGLContext', 'WKFrameInfo', 'CMPedometer', 'MyClass', 'CKModifyBadgeOperation', 'AVCaptureAudioFileOutput', 'SKEmitterNode', 'NSMachPort', 'AVVideoCompositionCoreAnimationTool', 'PHCollection', 'SCNPhysicsWorld', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'CLFloor', 'AVAsynchronousVideoCompositionRequest', 'SCNGeometry', 'SCNIKConstraint', 'CIKernel', 'CAGradientLayer', 'HKCharacteristicType', 
'NSFormatter', 'SCNAction', 'CATransaction', 'CBUUID', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'AVAudioUnit', 'MCBrowserViewController', 'NSRelationshipDescription', 'HKSample', 'WKWebView', 'NSMutableAttributedString', 'NSPersistentStoreAsynchronousResult', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'HKCorrelation', 'CATextLayer', 'NSNotificationQueue', 'UINib', 'GLKTextureLoader', 'HKObjectType', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'AVPlayerViewController', 'CAScrollLayer', 'GKSavedGame', 'NSTextCheckingResult', 'PHObjectPlaceholder', 'SKConstraint', 'EKEventEditViewController', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'SCNNode', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'NEOnDemandRule', 'MTLRenderPassStencilAttachmentDescriptor', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'NEOnDemandRuleIgnore', 'SKRegion', 'AVAssetResourceLoader', 'EAWiFiUnconfiguredAccessoryBrowser', 'NSUserActivity', 'CTCall', 'UIPrinterPickerController', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'CKRecordZoneID', 'HKAnchoredObjectQuery', 'CKFetchRecordZonesOperation', 'UIStoryboardSegue', 'ACAccountType', 'GKSession', 'SKVideoNode', 'PHChange', 'SKReceiptRefreshRequest', 'GCExtendedGamepadSnapshot', 'MPSeekCommandEvent', 'GCExtendedGamepad', 'CAValueFunction', 'SCNCylinder', 'NSNotification', 'NSBatchUpdateResult', 'PKPushCredentials', 'SCNPhysicsSliderJoint', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'UILexicon', 'MTLArrayType', 'AVAudioUnitReverb', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIPrinter', 'NSOrderedSet', 'CBAttribute', 'PKPushPayload', 'NSIncrementalStoreNode', 'EKEventStore', 'MPRemoteCommandEvent', 'UISlider', 'UIBlurEffect', 'CKAsset', 'AVCaptureInput', 'AVAudioEngine', 'MTLVertexDescriptor', 'SKPhysicsBody', 'NSOperation', 'UIImageAsset', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CABTMIDILocalPeripheralViewController', 'NEVPNManager', 'HKQuery', 'SCNPhysicsContact', 'CBMutableService', 'AVSampleBufferDisplayLayer', 'SCNSceneSource', 'SKLightNode', 'CKDiscoveredUserInfo', 'NSMutableArray', 'MTLDepthStencilDescriptor', 'MTLArgument', 'NSMassFormatter', 'CIRectangleFeature', 'PKPushRegistry', 'NEVPNConnection', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'CIBarcodeFeature', 'MPChangePlaybackRateCommandEvent', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'AVAudioUnitSampler', 'NSItemProvider', 'SKUniform', 'MPMediaPickerController', 'CKOperation', 'MTLRenderPipelineDescriptor', 'EAWiFiUnconfiguredAccessory', 'NSFileCoordinator', 'SKRequest', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'SCNCone', 'MPMediaItem', 'SCNMaterial', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NEVPNProtocol', 'NSCompoundPredicate', 'HKHealthStore', 'MKMultiPoint', 'HKSampleType', 'UIPrintFormatter', 'AVAudioUnitEQFilterParameters', 'SKView', 'NSConstantString', 'UIPopoverController', 
'CKDatabase', 'AVMetadataFaceObject', 'UIAccelerometer', 'EKEventViewController', 'CMAltitudeData', 'MTLStencilDescriptor', 'UISwipeGestureRecognizer', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'NSAsynchronousFetchRequest', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'CKNotificationID', 'AVAudioSession', 'HKUnit', 'NSNull', 'NSPersistentStoreResult', 'MKCircleView', 'AVAudioChannelLayout', 'NEVPNProtocolIKEv2', 'WKProcessPool', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'PHFetchResult', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'AVAudioUnitTimePitch', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'UIDocumentPickerViewController', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'AVAudioFile', 'UIActivity', 'NSPipe', 'MKShape', 'NSMergeConflict', 'CIImage', 'HKObject', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'CKMarkNotificationsReadOperation', 'CKSubscription', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'HMAccessoryBrowser', 'GKTurnBasedEventHandler', 'UIWebView', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'WKNavigationResponse', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'HKStatistics', 'UILocalNotification', 'HKBiologicalSexObject', 'AVURLAsset', 'CBPeripheral', 'NSDateComponentsFormatter', 'SKSpriteNode', 'UIAccessibilityElement', 'AVAssetWriterInputGroup', 'HMZone', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'PHPhotoLibrary', 'CBService', 'UIDocumentMenuViewController', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'ASIdentifierManager', 'AVAssetResourceLoadingRequest', 'SLComposeServiceViewController', 'UIPinchGestureRecognizer', 'PHObject', 'NSExtensionItem', 'HKSampleQuery', 'MTLRenderPipelineColorAttachmentDescriptorArray', 'MKRouteStep', 'SCNCapsule', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'UITraitCollection', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'MTLRenderPipelineReflection', 'CLHeading', 'CLVisit', 'MKDirectionsResponse', 'HMAccessory', 'MTLStructType', 'UITextView', 'CMMagnetometerData', 'UICollisionBehavior', 'UIProgressView', 'CKServerChangeToken', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'NSPropertyMapping', 'ALAssetsFilter', 'SK3DNode', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'SCNSphere', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSFileProviderExtension', 'NSDecimalNumberHandler', 'NSAtomicStoreCacheNode', 'NSAtomicStore', 'EKAlarm', 'CKNotificationInfo', 'AVAudioUnitEQ', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'MTLVertexAttribute', 'NSExpressionDescription', 'HKStatisticsCollectionQuery', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 
'UIAlertAction', 'NSPropertyListSerialization', 'GKPeerPickerController', 'UIUserNotificationSettings', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'MTLRenderPassColorAttachmentDescriptorArray', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'UIViewController', 'MFMessageComposeViewController', 'AVAudioInputNode', 'NSDataDetector', 'CABTMIDICentralViewController', 'AVAudioUnitMIDIInstrument', 'AVCaptureVideoPreviewLayer', 'AVAssetWriterInputPassDescription', 'MPChangePlaybackRateCommand', 'NSURLComponents', 'CAMetalLayer', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'CKDiscoverUserInfosOperation', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'SCNGeometryElement', 'NCWidgetController', 'CAEmitterLayer', 'MKUserLocation', 'UIImagePickerController', 'CIFeature', 'AVCaptureDeviceInput', 'ALAsset', 'NSURLSessionDownloadTask', 'SCNPhysicsHingeJoint', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'SCNMaterialProperty', 'UIFont', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'WKWebViewConfiguration', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'AVComposition', 'CKDiscoverAllContactsOperation', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'UIMutableUserNotificationCategory', 'NSLock', 'NEVPNProtocolIPSec', 'ADBannerView', 'UIDocumentPickerExtensionViewController', 'UIActivityIndicatorView', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIAccessibilityCustomAction', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'HKBloodTypeObject', 'MTLVertexBufferLayoutDescriptorArray', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'WKNavigationAction', 'AVPlayerItemMetadataOutput', 'EKRecurrenceDayOfWeek', 'NSNumberFormatter', 'MTLComputePipelineReflection', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'SCNSkinner', 'AVCaptureMetadataOutput', 'SCNAnimationEvent', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'WKBackForwardListItem', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKCalendar', 'NSMutableURLRequest', 'UIVideoEditorController', 'HMTimerTrigger', 'AVAudioUnitVarispeed', 'UIDynamicAnimator', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'UIPushBehavior', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'WKUserScript', 'EKEvent', 'NSDateFormatter', 'NSAsynchronousFetchResult', 'AVAssetWriterInputPixelBufferAdaptor', 'UIVisualEffect', 'UICollectionViewCell', 'UITextField', 'CLPlacemark', 'MPPlayableContentManager', 'AVCaptureOutput', 'HMCharacteristicWriteAction', 'CKModifySubscriptionsOperation', 'NSPropertyDescription', 'GCGamepad', 'UIMarkupTextPrintFormatter', 'SCNTube', 'NSPersistentStoreCoordinator', 'AVAudioEnvironmentNode', 'GKMatchmaker', 'CIContext', 'NSThread', 'SLComposeSheetConfigurationItem', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'SCNPhysicsBallSocketJoint', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'AVAudioIONode', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CKRecordZoneNotification', 'SCNScene', 
'MPMovieAccessLogEvent', 'CKFetchSubscriptionsOperation', 'CAEmitterCell', 'AVAudioUnitTimeEffect', 'HMCharacteristicMetadata', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'UIUserNotificationCategory', 'SCNPhysicsVehicleWheel', 'HKCategoryType', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'MTLRenderPipelineColorAttachmentDescriptor', 'SCNPhysicsShape', 'UISearchController', 'SCNPhysicsBody', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'SKNode', 'UIMutableUserNotificationAction', 'SCNProgram', 'AVSpeechSynthesisVoice', 'CMAltimeter', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SCNPhysicsField', 'CKReference', 'LAContext', 'CKRecordID', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'AVAudioBuffer', 'CIColorKernel', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'CKQueryNotification', 'PHAdjustmentData', 'EASession', 'AVAssetResourceRenewalRequest', 'UIInputView', 'NSFileWrapper', 'UIResponder', 'NSPointerFunctions', 'NSHTTPCookieStorage', 'AVMediaSelectionOption', 'NSRunLoop', 'NSFileAccessIntent', 'CAAnimationGroup', 'MKCircle', 'UIAlertController', 'NSMigrationManager', 'NSDateIntervalFormatter', 'UICollectionViewUpdateItem', 'CKDatabaseOperation', 'PHImageRequestOptions', 'SKReachConstraints', 'CKRecord', 'CAInterAppAudioSwitcherView', 'WKWindowFeatures', 'GKInvite', 'NSMutableData', 'PHAssetCollectionChangeRequest', 'NSMutableParagraphStyle', 'UIDynamicBehavior', 'GLKEffectProperty', 'CKFetchRecordChangesOperation', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'MPContentItem', 'HMAction', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'CKContainer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'AVCaptureMovieFileOutput', 'HMRoom', 'GKChallenge', 'UITextRange', 'NSURLProtectionSpace', 'ACAccountStore', 'MPSkipIntervalCommand', 'NSComparisonPredicate', 'HMHome', 'PHVideoRequestOptions', 'NSOutputStream', 'MPSkipIntervalCommandEvent', 'PKAddPassesViewController', 'UITextSelectionRect', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'UICollectionViewController', 'AVAudioNode', 'MCNearbyServiceAdvertiser', 'NSObject', 'PHAsset', 'GKLeaderboardViewController', 'CKQueryCursor', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'CMPedometerData', 'HMService', 'SKFieldNode', 'GKAchievement', 'WKUserContentController', 'AVAssetTrack', 'TWRequest', 'SKLabelNode', 'AVCaptureBracketedStillImageSettings', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'MTLTextureDescriptor', 'MTLVertexBufferLayoutDescriptor', 'MPFeedbackCommandEvent', 'UIUserNotificationAction', 'HKStatisticsQuery', 'SCNParticleSystem', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'HKObserverQuery', 'UIPopoverPresentationController', 'CKQueryOperation', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'NSURLQueryItem', 'MTLStructMember', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'GKTurnBasedParticipant', 'NSURLProtocol', 'NSUserDefaults', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'CKModifyRecordZonesOperation', 'WKPreferences', 
'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'PHContentEditingInputRequestOptions', 'GKMatch', 'CIColor', 'UIDictationPhrase'])
+COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'MTLComputeCommandEncoder', 'SCNSceneExportDelegate', 'UISearchResultsUpdating', 'MFMailComposeViewControllerDelegate', 'MTLBlitCommandEncoder', 'NSDecimalNumberBehaviors', 'PHContentEditingController', 'NSMutableCopying', 'UIActionSheetDelegate', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UIAdaptivePresentationControllerDelegate', 'GKAchievementViewControllerDelegate', 'UIViewControllerTransitionCoordinator', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'UITableViewDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'AVAudio3DMixing', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'HMAccessoryBrowserDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'CKRecordValue', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'SKSceneDelegate', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'MyClassJavaScriptMethods', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'SCNBoundingVolume', 'NSPortDelegate', 'UIContentContainer', 'SCNNodeRendererDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'HMAccessoryDelegate', 'UIPageViewControllerDataSource', 'SCNSceneRendererDelegate', 'SCNPhysicsContactDelegate', 'MKMapViewDelegate', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'MTLRenderCommandEncoder', 'UIToolbarDelegate', 'WKUIDelegate', 'SCNActionable', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'PKPushRegistryDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSLayoutManagerDelegate', 'MTLLibrary', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'MTLResource', 'NSDiscardableContent', 'UITextFieldDelegate', 'MTLBuffer', 'MTLSamplerState', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UISplitViewControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UITraitEnvironment', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'MPPlayableContentDataSource', 'MTLComputePipelineState', 'NSURLSessionDelegate', 'MTLCommandBuffer', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'MTLDepthStencilState', 'GKTurnBasedMatchmakerViewControllerDelegate', 'MPPlayableContentDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'UITextDocumentProxy', 'MTLDrawable', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'AVAudioStereoMixing', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 
'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'SCNProgramDelegate', 'AVVideoCompositing', 'SCNAnimatable', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'NSExtensionRequestHandling', 'UITextSelecting', 'UIPrinterPickerControllerDelegate', 'NCWidgetProviding', 'MTLCommandEncoder', 'NSURLProtocolClient', 'MFMessageComposeViewControllerDelegate', 'UIVideoEditorControllerDelegate', 'WKNavigationDelegate', 'GKSavedGameListener', 'UITableViewDataSource', 'MTLFunction', 'EKCalendarChooserDelegate', 'NSUserActivityDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'HMHomeDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIDocumentPickerDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'SCNSceneRenderer', 'MTLTexture', 'GLKViewDelegate', 'EAAccessoryDelegate', 'WKScriptMessageHandler', 'PHPhotoLibraryChangeObserver', 'NSKeyedUnarchiverDelegate', 'AVPlayerItemMetadataOutputPushDelegate', 'NSMachPortDelegate', 'SCNShadable', 'UIPopoverBackgroundViewMethods', 'UIDocumentMenuDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'AVAudioMixing', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'MTLParallelRenderCommandEncoder', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UIPopoverPresentationControllerDelegate', 'UIDynamicAnimatorDelegate', 'NSTextAttachmentContainer', 'MKAnnotation', 'UIAccessibilityIdentification', 'UICoordinateSpace', 'ABNewPersonViewControllerDelegate', 'MTLDevice', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'HMHomeManagerDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'UISearchControllerDelegate', 'EAWiFiUnconfiguredAccessoryBrowserDelegate', 'UITextInputTraits', 'MTLRenderPipelineState', 'GKVoiceChatClient', 'UIKeyInput', 'UICollectionViewDataSource', 'SCNTechniqueSupport', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'MTLCommandQueue', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'CAMetalDrawable', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'NSURLSessionDataDelegate'])
+COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', '_GLKMatrix3', '_GLKMatrix2', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'OpaqueCMBlockBuffer', 'CGTextDrawingMode', 'EKErrorCode', 'GCAcceleration', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'AudioFile_SMPTE_Time', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', '__CFBinaryHeap', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', 'SFNTLookupSegment', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__SecCertificate', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', 'AnchorPoint', 'JustTable', '__CFNetService', 'CF_BRIDGED_TYPE', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'KerxSubtableHeader', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', 'OpaqueJSClass', 'CGGradient', 'OpaqueMIDISetup', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', 'MIDIThruConnectionParams', 'BslnFormat0Part', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'AnkrTable', '__SCNetworkProtocol', 'gss_buffer_desc_struct', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'SCNMatrix4', 'KerxControlPointEntry', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'KernStateEntry', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'AVAudio3DAngularOrientation', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'KernVersion0Header', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', 'MorxChain', '__CTFontCollection', 'STEntryOne', 
'STXEntryTwo', 'ExtendedNoteOnEvent', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'GCQuaternion', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'CGImagePropertyOrientation', 'CGPDFScanner', 'OpaqueMusicEventIterator', 'sfntDescriptorHeader', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'BslnFormat1Part', 'sfntFontDescriptor', 'KernSimpleArrayHeader', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGInterpolationQuality', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', '__CTRubyAnnotation', 'SCNVector4', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__GLsync', '__CFStringTokenizer', 'JustWidthDeltaEntry', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', '__CVMetalTextureCache', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'AVAudio3DPoint', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', 'CGDataConsumerCallbacks', 'ALMXHeader', 'CGLineCap', 'MIDIControlTransform', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 
'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'AVAudio3DVectorOrientation', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'OpaqueVTCompressionSession', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'VTDecompressionOutputCallbackRecord', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'OpaqueVTDecompressionSession', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'PropTable', 'KernOrderedListEntry', 'CF_BRIDGED_MUTABLE_TYPE', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'SCNVector3', 'OpaqueAudioConverter', 'MIDIRawData', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', '__SecAccessControl', 'JustPCAction', 'OpaqueVTFrameSilo', 'OpaqueVTMultiPassStorage', 'CGPathElementType', 'AudioFormatListItem', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'sfntCMapHeader', '__CFURLEnumerator', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'])
+
+
+if __name__ == '__main__':
+ import os
+ import re
+
+ FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.0.sdk/System/Library/Frameworks/'
+ frameworks = os.listdir(FRAMEWORKS_PATH)
+
+ all_interfaces = set()
+ all_protocols = set()
+ all_primitives = set()
+ for framework in frameworks:
+ frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
+ if not os.path.exists(frameworkHeadersDir):
+ continue
+
+ headerFilenames = os.listdir(frameworkHeadersDir)
+
+ for f in headerFilenames:
+ if not f.endswith('.h'):
+ continue
+
+ headerFilePath = frameworkHeadersDir + f
+ content = open(headerFilePath).read()
+ res = re.findall('(?<=@interface )\w+', content)
+ for r in res:
+ all_interfaces.add(r)
+
+ res = re.findall('(?<=@protocol )\w+', content)
+ for r in res:
+ all_protocols.add(r)
+
+ res = re.findall('(?<=typedef enum )\w+', content)
+ for r in res:
+ all_primitives.add(r)
+
+ res = re.findall('(?<=typedef struct )\w+', content)
+ for r in res:
+ all_primitives.add(r)
+
+ res = re.findall('(?<=typedef const struct )\w+', content)
+ for r in res:
+ all_primitives.add(r)
+
+
+ print("ALL interfaces: \n")
+ print(all_interfaces)
+
+ print("\nALL protocols: \n")
+ print(all_protocols)
+
+ print("\nALL primitives: \n")
+ print(all_primitives)
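The __main__ block added above regenerates these sets by walking every framework's Headers/ directory in the bundled iOS SDK and harvesting identifiers with simple lookbehind regexes. A minimal standalone sketch of the same idea follows (assumptions: Python 3, the SDK path hard-coded in the script above, and the helper name collect_cocoa_names, which is illustrative and not part of Pygments):

import os
import re

# Path taken from the script above; an assumption about the local Xcode install.
FRAMEWORKS_PATH = ('/Applications/Xcode.app/Contents/Developer/Platforms/'
                   'iPhoneOS.platform/Developer/SDKs/iPhoneOS8.0.sdk/'
                   'System/Library/Frameworks/')

# The same lookbehind patterns used above, written as raw strings.
PATTERNS = {
    'interfaces': (re.compile(r'(?<=@interface )\w+'),),
    'protocols': (re.compile(r'(?<=@protocol )\w+'),),
    'primitives': (re.compile(r'(?<=typedef enum )\w+'),
                   re.compile(r'(?<=typedef struct )\w+'),
                   re.compile(r'(?<=typedef const struct )\w+')),
}

def collect_cocoa_names(frameworks_path=FRAMEWORKS_PATH):
    """Return a dict of name sets keyed by 'interfaces', 'protocols', 'primitives'."""
    found = {key: set() for key in PATTERNS}
    for framework in os.listdir(frameworks_path):
        headers_dir = os.path.join(frameworks_path, framework, 'Headers')
        if not os.path.isdir(headers_dir):
            continue
        for filename in os.listdir(headers_dir):
            if not filename.endswith('.h'):
                continue
            # Some SDK headers are not clean UTF-8, so undecodable bytes are skipped.
            with open(os.path.join(headers_dir, filename), errors='ignore') as fp:
                content = fp.read()
            for key, patterns in PATTERNS.items():
                for pattern in patterns:
                    found[key].update(pattern.findall(content))
    return found

if __name__ == '__main__':
    for key, names in sorted(collect_cocoa_names().items()):
        print(key, len(names))

The committed script prints the full sets rather than the counts, presumably so its output can be pasted back into COCOA_INTERFACES, COCOA_PROTOCOLS and COCOA_PRIMITIVES.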
diff --git a/pygments/lexers/_cocoabuiltins.py b/pygments/lexers/_cocoabuiltins.py
deleted file mode 100644
index 26179594..00000000
--- a/pygments/lexers/_cocoabuiltins.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._cocoabuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file defines a set of types used across Cocoa frameworks from Apple.
- It contains lists of @interface and @protocol names, plus some other types (structs, unions).
-
- The file may also be used as a standalone generator for the lists above.
-
- :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-COCOA_INTERFACES = set(['UITableViewCell', 'NSURLSessionDataTask', 'NSLinguisticTagger', 'NSStream', 'UIPrintInfo', 'SKPaymentTransaction', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'SKSpriteNode', 'JSContext', 'UICollectionReusableView', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UITextSelectionRect', 'MKRoute', 'MPVolumeView', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'UIAccessibilityElement', 'NSShadow', 'NSAtomicStoreCacheNode', 'UIPushBehavior', 'CBCharacteristic', 'CBUUID', 'CMStepCounter', 'NSNetService', 'UICollectionView', 'UIViewPrintFormatter', 'CAShapeLayer', 'MCPeerID', 'NSFileVersion', 'CMGyroData', 'SKPhysicsJointSpring', 'CIFilter', 'UIView', 'MKMapItem', 'PKPass', 'MKPolygonRenderer', 'JSValue', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'MKMapView', 'CATransition', 'CLCircularRegion', 'MKTileOverlay', 'UICollisionBehavior', 'ACAccountCredential', 'SKPhysicsJointLimit', 'AVMediaSelectionGroup', 'NSIndexSet', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'UITableView', 'AVCaptureStillImageOutput', 'GCController', 'NSAssertionHandler', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NSPropertyListSerialization', 'AVPlayerItemAccessLogEvent', 'UISwipeGestureRecognizer', 'MKOverlayRenderer', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'AVCaptureMovieFileOutput', 'UIImagePickerController', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'GKLeaderboardViewController', 'MPMoviePlayerController', 'GKScore', 'NSURLConnection', 'ABUnknownPersonViewController', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SSReadingList', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'UIStepper', 'UIRefreshControl', 'GKTurnBasedParticipant', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UITabBarController', 'CMMotionActivity', 'SKAction', 'AVPlayerItemOutput', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'SKMutablePayment', 'UIStoryboardSegue', 'NSOrderedSet', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'UIPasteboard', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSUserDefaults', 'SLRequest', 'AVPlayerLayer', 'NSPointerArray', 'AVAudioMix', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'GKMatch', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 
'NSFetchRequestExpression', 'NSPipe', 'AVComposition', 'ADBannerView', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UINavigationItem', 'CBPeripheralManager', 'UIStoryboardPopoverSegue', 'SKProductsRequest', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'MKReverseGeocoder', 'GCControllerAxisInput', 'MKMapSnapshotter', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVAssetReaderOutput', 'EAGLContext', 'UICollectionViewController', 'AVAssetTrack', 'SKEmitterNode', 'AVCaptureDeviceInput', 'AVVideoCompositionCoreAnimationTool', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'AVAsynchronousVideoCompositionRequest', 'CAGradientLayer', 'NSFormatter', 'CATransaction', 'MPMovieAccessLogEvent', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'MCBrowserViewController', 'NSRelationshipDescription', 'NSMutableAttributedString', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'MKETAResponse', 'CATextLayer', 'NSNotificationQueue', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'CAScrollLayer', 'NSTextCheckingResult', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'AVAssetResourceLoader', 'AVMutableVideoCompositionInstruction', 'CTCall', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'ACAccountType', 'GKSession', 'SKVideoNode', 'GCExtendedGamepadSnapshot', 'GCExtendedGamepad', 'CAValueFunction', 'UIActivityIndicatorView', 'NSNotification', 'SKReceiptRefreshRequest', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIWebView', 'NSIncrementalStoreNode', 'EKEventStore', 'UISlider', 'AVAssetResourceLoadingRequest', 'AVCaptureInput', 'SKPhysicsBody', 'NSOperation', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CBMutableService', 'SKTransition', 'UIDynamicAnimator', 'NSMutableArray', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'MPMediaPickerController', 'NSFileCoordinator', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'MPMediaItem', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NSCompoundPredicate', 'MKMultiPoint', 'UIPrintFormatter', 'SKView', 'NSConstantString', 'UIPopoverController', 'AVMetadataFaceObject', 'EKEventViewController', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'UINib', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'UISplitViewController', 'AVAudioSession', 'CAEmitterLayer', 'NSNull', 'MKCircleView', 'UIColor', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 
'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'EKEventEditViewController', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'UIActivity', 'MKShape', 'NSMergeConflict', 'CIImage', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'NSSortDescriptor', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'GKTurnBasedEventHandler', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'AVURLAsset', 'CBPeripheral', 'AVAssetWriterInputGroup', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'CBService', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'NSNumberFormatter', 'UIPinchGestureRecognizer', 'UIMarkupTextPrintFormatter', 'MKRouteStep', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'CTSubscriber', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'CLHeading', 'NSFileWrapper', 'MKDirectionsResponse', 'UILocalNotification', 'UICollectionViewCell', 'UITextView', 'CMMagnetometerData', 'UIProgressView', 'GKInvite', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'ALAssetsFilter', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSDecimalNumberHandler', 'NSURLSessionConfiguration', 'EKCalendar', 'NSDictionary', 'CAPropertyAnimation', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'NSExpressionDescription', 'UIViewController', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'GCControllerElement', 'GKPeerPickerController', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'MFMessageComposeViewController', 'AVCaptureSession', 'NSDataDetector', 'AVCaptureVideoPreviewLayer', 'NSURLComponents', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'GLKTextureLoader', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'MKUserLocation', 'CIFeature', 'NSMachPort', 'ALAsset', 'NSURLSessionDownloadTask', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'NSPersistentStoreRequest', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'NSLock', 'UIDynamicBehavior', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'SKEffectNode', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'EKRecurrenceDayOfWeek', 'ASIdentifierManager', 'UIScreen', 'CLRegion', 
'NSProcessInfo', 'GLKTextureInfo', 'AVCaptureMetadataOutput', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKAlarm', 'NSMutableURLRequest', 'UIVideoEditorController', 'NSAtomicStore', 'UIResponder', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'EKEvent', 'NSDateFormatter', 'AVAssetWriterInputPixelBufferAdaptor', 'UICollectionViewFlowLayoutInvalidationContext', 'UITextField', 'CLPlacemark', 'AVCaptureOutput', 'NSPropertyDescription', 'GCGamepad', 'NSPersistentStoreCoordinator', 'GKMatchmaker', 'CIContext', 'NSThread', 'SKRequest', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CAEmitterCell', 'UIFont', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'SKNode', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'AVSpeechSynthesisVoice', 'UIImage', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SKPayment', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'NSArray', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'EASession', 'UIInputView', 'NSHTTPCookieStorage', 'NSPointerFunctions', 'AVMediaSelectionOption', 'NSRunLoop', 'CAAnimationGroup', 'MKCircle', 'NSMigrationManager', 'UICollectionViewUpdateItem', 'NSMutableData', 'NSMutableParagraphStyle', 'GLKEffectProperty', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'UIAccelerometer', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'NSPropertyMapping', 'GKChallenge', 'NSURLProtectionSpace', 'ACAccountStore', 'UITextRange', 'NSComparisonPredicate', 'NSOutputStream', 'PKAddPassesViewController', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'MCNearbyServiceAdvertiser', 'NSObject', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'GKAchievement', 'AVCaptureAudioFileOutput', 'TWRequest', 'SKLabelNode', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'NSURLProtocol', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'CIColor', 'UIDictationPhrase'])
-COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSTextAttachmentContainer', 'NSDecimalNumberBehaviors', 'NSMutableCopying', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UITableViewDelegate', 'GKAchievementViewControllerDelegate', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'MFMailComposeViewControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'NSPortDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'UIPageViewControllerDataSource', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'UIToolbarDelegate', 'UIViewControllerTransitionCoordinator', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'UITextInputTraits', 'NSLayoutManagerDelegate', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'NSDiscardableContent', 'UITextFieldDelegate', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'NSURLSessionDelegate', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'GKTurnBasedMatchmakerViewControllerDelegate', 'UIActionSheetDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'AVVideoCompositing', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'MFMessageComposeViewControllerDelegate', 'UITextSelecting', 'NSURLProtocolClient', 'UIVideoEditorControllerDelegate', 'UITableViewDataSource', 'UIDynamicAnimatorDelegate', 'NSURLSessionDataDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'GLKViewDelegate', 'EAAccessoryDelegate', 'NSKeyedUnarchiverDelegate', 'NSMachPortDelegate', 'UIBarPositioningDelegate', 
'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'GKVoiceChatClient', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UISplitViewControllerDelegate', 'MKAnnotation', 'UIAccessibilityIdentification', 'ABNewPersonViewControllerDelegate', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'MKMapViewDelegate', 'UIKeyInput', 'UICollectionViewDataSource', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'EKCalendarChooserDelegate', 'NSTextField', 'NSInteger', 'NSUInteger'])
-COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', 'CFStreamErrorHTTP', '__CFMachPort', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'KernVersion0Header', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'CFStreamSocketSecurityProtocol', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', 'BslnFormat0Part', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', '__CFDictionary', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'ALMXHeader', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'PKErrorCode', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__CFNull', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', '__CFString', 'AnchorPoint', 'JustTable', '__CFNetService', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'MIDIThruConnectionParams', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', '_GLKMatrix3', 'CGGradient', 'OpaqueMIDISetup', '_GLKMatrix2', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', '__CFBinaryHeap', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', '__CFBoolean', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'KernStateEntry', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'KerxControlPointEntry', '__CFCharacterSet', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'AnkrTable', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'AudioFile_SMPTE_Time', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', '__CFURL', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'CFHostInfoType', 'KernSimpleArrayHeader', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'OpaqueCMBlockBuffer', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 
'AudioUnitMeterClipping', '__CFNumber', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', '__CFArray', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'PropTable', 'CGPDFScanner', 'OpaqueMusicEventIterator', '__CFFileSecurity', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'CGRectEdge', 'sfntFontDescriptor', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'SFNTLookupSegment', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', '__CFData', '__CFDate', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__CFWriteStream', '__CFAttributedString', '__CFStringTokenizer', 'JustWidthDeltaEntry', '__CFSet', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', '__CFTimeZone', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'CFStreamErrorHTTPAuthentication', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'sfntDescriptorHeader', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', '__SecCertificate', 'CGDataConsumerCallbacks', 'CGInterpolationQuality', 'CGLineCap', 'MIDIControlTransform', 'BslnFormat1Part', 'CGPDFArray', '__SecPolicy', 
'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', 'KerxSubtableHeader', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'OpaqueJSClass', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'CFNetServiceMonitorType', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'CFNetServicesError', 'KernOrderedListEntry', '__CFLocale', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'OpaqueAudioConverter', 'MIDIRawData', 'CFNetDiagnosticStatusValues', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', 'JustPCAction', 'CGPathElementType', '__CFRunLoopTimer', '__CFError', 'AudioFormatListItem', '__CFReadStream', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'CFNetworkErrors', 'sfntCMapHeader', '__CFURLEnumerator', '__CFCalendar', '__CFMessagePort', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader'])
-
-
-if __name__ == '__main__':
- import os
- import re
-
- FRAMEWORKS_PATH = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS7.0.sdk/System/Library/Frameworks/'
- frameworks = os.listdir(FRAMEWORKS_PATH)
-
- all_interfaces = set()
- all_protocols = set()
- all_primitives = set()
- for framework in frameworks:
- frameworkHeadersDir = FRAMEWORKS_PATH + framework + '/Headers/'
- if not os.path.exists(frameworkHeadersDir):
- continue
-
- headerFilenames = os.listdir(frameworkHeadersDir)
-
- for f in headerFilenames:
- if not f.endswith('.h'):
- continue
-
- headerFilePath = frameworkHeadersDir + f
- content = open(headerFilePath).read()
- res = re.findall('(?<=@interface )\w+', content)
- for r in res:
- all_interfaces.add(r)
-
- res = re.findall('(?<=@protocol )\w+', content)
- for r in res:
- all_protocols.add(r)
-
- res = re.findall('(?<=typedef enum )\w+', content)
- for r in res:
- all_primitives.add(r)
-
- res = re.findall('(?<=typedef struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
- res = re.findall('(?<=typedef const struct )\w+', content)
- for r in res:
- all_primitives.add(r)
-
-
- print("ALL interfaces: \n")
- print(all_interfaces)
-
- print("\nALL protocols: \n")
- print(all_protocols)
-
- print("\nALL primitives: \n")
- print(all_primitives)
diff --git a/pygments/lexers/_lassobuiltins.py b/pygments/lexers/_lasso_builtins.py
index 9a0a89da..f7413fce 100644
--- a/pygments/lexers/_lassobuiltins.py
+++ b/pygments/lexers/_lasso_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._lassobuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._lasso_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Built-in Lasso types, traits, methods, and members.
@@ -10,7 +10,7 @@
"""
BUILTINS = {
- 'Types': [
+ 'Types': (
'null',
'void',
'tag',
@@ -270,8 +270,8 @@ BUILTINS = {
'web_error_atend',
'web_response_impl',
'web_router'
- ],
- 'Traits': [
+ ),
+ 'Traits': (
'trait_asstring',
'any',
'trait_generator',
@@ -344,8 +344,8 @@ BUILTINS = {
'web_node_content_html_specialized',
'web_node_content_css_specialized',
'web_node_content_js_specialized'
- ],
- 'Unbound Methods': [
+ ),
+ 'Unbound Methods': (
'fail_now',
'register',
'register_thread',
@@ -1845,8 +1845,8 @@ BUILTINS = {
'web_response',
'web_router_database',
'web_router_initialize'
- ],
- 'Lasso 8 Tags': [
+ ),
+ 'Lasso 8 Tags': (
'__char',
'__sync_timestamp__',
'_admin_addgroup',
@@ -3030,10 +3030,10 @@ BUILTINS = {
'xsd_processsimpletype',
'xsd_ref',
'xsd_type'
- ]
+ )
}
MEMBERS = {
- 'Member Methods': [
+ 'Member Methods': (
'escape_member',
'oncompare',
'sameas',
@@ -4720,8 +4720,8 @@ MEMBERS = {
'acceptpost',
'csscontent',
'jscontent'
- ],
- 'Lasso 8 Member Tags': [
+ ),
+ 'Lasso 8 Member Tags': (
'accept',
'add',
'addattachment',
@@ -5178,5 +5178,5 @@ MEMBERS = {
'xmllang',
'xmlschematype',
'year'
- ]
+ )
}
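
The hunks above change only the container type: each group of Lasso builtin names becomes a tuple instead of a list, matching the read-only way the data is consumed. As an illustration of that consumption (a sketch with a made-up subset, not the actual LassoLexer code), the tuples concatenate and feed straight into a single alternation pattern:

    import re

    # Illustrative subset; the real BUILTINS dict holds thousands of names.
    BUILTINS = {
        'Types': ('null', 'void', 'tag'),
        'Traits': ('any', 'trait_asstring'),
    }

    # Join every group into one word-boundary alternation.
    all_names = sum(BUILTINS.values(), ())
    pattern = re.compile(r'\b(?:%s)\b' % '|'.join(map(re.escape, all_names)),
                         re.IGNORECASE)

    print(bool(pattern.search('local(x = null)')))  # True

The same list-to-tuple swap is applied to both BUILTINS and MEMBERS above, and to the Lua MODULES table in the next file.
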
diff --git a/pygments/lexers/_luabuiltins.py b/pygments/lexers/_lua_builtins.py
index 40037357..79a8da62 100644
--- a/pygments/lexers/_luabuiltins.py
+++ b/pygments/lexers/_lua_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._luabuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._lua_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names and modules of lua functions
It is able to re-generate itself, but for adding new functions you
@@ -16,7 +16,7 @@
from __future__ import print_function
-MODULES = {'basic': ['_G',
+MODULES = {'basic': ('_G',
'_VERSION',
'assert',
'collectgarbage',
@@ -42,14 +42,14 @@ MODULES = {'basic': ['_G',
'tostring',
'type',
'unpack',
- 'xpcall'],
- 'coroutine': ['coroutine.create',
+ 'xpcall'),
+ 'coroutine': ('coroutine.create',
'coroutine.resume',
'coroutine.running',
'coroutine.status',
'coroutine.wrap',
- 'coroutine.yield'],
- 'debug': ['debug.debug',
+ 'coroutine.yield'),
+ 'debug': ('debug.debug',
'debug.getfenv',
'debug.gethook',
'debug.getinfo',
@@ -62,8 +62,8 @@ MODULES = {'basic': ['_G',
'debug.setlocal',
'debug.setmetatable',
'debug.setupvalue',
- 'debug.traceback'],
- 'io': ['io.close',
+ 'debug.traceback'),
+ 'io': ('io.close',
'io.flush',
'io.input',
'io.lines',
@@ -73,8 +73,8 @@ MODULES = {'basic': ['_G',
'io.read',
'io.tmpfile',
'io.type',
- 'io.write'],
- 'math': ['math.abs',
+ 'io.write'),
+ 'math': ('math.abs',
'math.acos',
'math.asin',
'math.atan2',
@@ -103,16 +103,16 @@ MODULES = {'basic': ['_G',
'math.sin',
'math.sqrt',
'math.tanh',
- 'math.tan'],
- 'modules': ['module',
+ 'math.tan'),
+ 'modules': ('module',
'require',
'package.cpath',
'package.loaded',
'package.loadlib',
'package.path',
'package.preload',
- 'package.seeall'],
- 'os': ['os.clock',
+ 'package.seeall'),
+ 'os': ('os.clock',
'os.date',
'os.difftime',
'os.execute',
@@ -122,8 +122,8 @@ MODULES = {'basic': ['_G',
'os.rename',
'os.setlocale',
'os.time',
- 'os.tmpname'],
- 'string': ['string.byte',
+ 'os.tmpname'),
+ 'string': ('string.byte',
'string.char',
'string.dump',
'string.find',
@@ -136,12 +136,12 @@ MODULES = {'basic': ['_G',
'string.rep',
'string.reverse',
'string.sub',
- 'string.upper'],
- 'table': ['table.concat',
+ 'string.upper'),
+ 'table': ('table.concat',
'table.insert',
'table.maxn',
'table.remove',
- 'table.sort']}
+ 'table.sort')}
if __name__ == '__main__':
import re
@@ -221,21 +221,17 @@ if __name__ == '__main__':
return 'basic'
def regenerate(filename, modules):
- f = open(filename)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(filename) as fp:
+ content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
- f = open(filename, 'w')
- f.write(header)
- f.write('MODULES = %s\n\n' % pprint.pformat(modules))
- f.write(footer)
- f.close()
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
def run():
version = get_newest_version()
@@ -251,5 +247,4 @@ if __name__ == '__main__':
regenerate(__file__, modules)
-
run()
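
The final hunks above modernize the Lua regenerator: the open()/read()/close() and open()/write()/close() pairs become with blocks, while the splice logic is unchanged, and run() still regenerates __file__ in place after scraping the module list. Pulled together as one self-contained function (same marker strings as the module; the data argument here is arbitrary), the splice-in-place pattern is:

    import pprint

    def regenerate(filename, modules):
        # Read the whole file, keep the header and the __main__ footer,
        # and rewrite only the MODULES assignment in between.
        with open(filename) as fp:
            content = fp.read()
        header = content[:content.find('MODULES = {')]
        footer = content[content.find("if __name__ == '__main__':"):]
        with open(filename, 'w') as fp:
            fp.write(header)
            fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
            fp.write(footer)

    # e.g. regenerate(__file__, {'basic': ('_G', 'assert')})
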
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py
index b028aee6..92dc45f8 100644
--- a/pygments/lexers/_mapping.py
+++ b/pygments/lexers/_mapping.py
@@ -16,14 +16,14 @@
from __future__ import print_function
LEXERS = {
- 'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
- 'APLLexer': ('pygments.lexers.other', 'APL', ('apl',), ('*.apl',), ()),
- 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
- 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
- 'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
- 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
- 'AlloyLexer': ('pygments.lexers.other', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
- 'AmbientTalkLexer': ('pygments.lexers.other', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
+ 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)),
+ 'APLLexer': ('pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()),
+ 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
+ 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
+ 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
+ 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
+ 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
+ 'AmbientTalkLexer': ('pygments.lexers.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)),
'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()),
'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
@@ -33,251 +33,254 @@ LEXERS = {
'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
- 'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
- 'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()),
+ 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
+ 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
- 'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
- 'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
- 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
- 'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
- 'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
- 'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
+ 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)),
+ 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
+ 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
+ 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
+ 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
+ 'BaseMakefileLexer': ('pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
+ 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')),
'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)),
'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
- 'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
- 'BlitzBasicLexer': ('pygments.lexers.compiled', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
- 'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
+ 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
+ 'BlitzBasicLexer': ('pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
+ 'BlitzMaxLexer': ('pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
- 'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
- 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()),
- 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
- 'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
- 'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
+ 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
+ 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()),
+ 'BugsLexer': ('pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
+ 'CLexer': ('pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')),
+ 'CMakeLexer': ('pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)),
- 'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()),
- 'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
+ 'Ca65Lexer': ('pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
+ 'CbmBasicV2Lexer': ('pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
- 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
- 'ChaiscriptLexer': ('pygments.lexers.agile', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
- 'ChapelLexer': ('pygments.lexers.compiled', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
+ 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
+ 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
+ 'ChapelLexer': ('pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
- 'CirruLexer': ('pygments.lexers.web', 'Cirru', ('cirru',), ('*.cirru', '*.cr'), ('text/x-cirru',)),
- 'ClayLexer': ('pygments.lexers.compiled', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
+ 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru', '*.cr'), ('text/x-cirru',)),
+ 'ClayLexer': ('pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')),
'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
- 'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
- 'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
- 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
+ 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
+ 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
+ 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
- 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
- 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
- 'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
+ 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)),
+ 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
+ 'CppLexer': ('pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')),
'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
- 'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
- 'CryptolLexer': ('pygments.lexers.functional', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
+ 'CrocLexer': ('pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
+ 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')),
'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)),
'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
- 'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)),
+ 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
- 'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
+ 'CudaLexer': ('pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
- 'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
- 'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
+ 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
+ 'DLexer': ('pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
- 'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
- 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
- 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
- 'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
- 'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
- 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
+ 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
+ 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
+ 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()),
+ 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)),
+ 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
+ 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
- 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
- 'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
- 'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
- 'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
- 'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
- 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
- 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
- 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
- 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
- 'EiffelLexer': ('pygments.lexers.compiled', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
- 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
- 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
+ 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
+ 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
+ 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
+ 'DylanConsoleLexer': ('pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
+ 'DylanLexer': ('pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
+ 'DylanLidLexer': ('pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
+ 'ECLLexer': ('pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
+ 'ECLexer': ('pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
+ 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
+ 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
+ 'ElixirConsoleLexer': ('pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
+ 'ElixirLexer': ('pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)),
'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
- 'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
- 'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
+ 'ErlangLexer': ('pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
+ 'ErlangShellLexer': ('pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)),
- 'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
- 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
- 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
- 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
- 'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
+ 'FactorLexer': ('pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
+ 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
+ 'FantomLexer': ('pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
+ 'FelixLexer': ('pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
+ 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)),
'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
- 'GAPLexer': ('pygments.lexers.math', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
- 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
+ 'GAPLexer': ('pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
+ 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
- 'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
- 'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
- 'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
- 'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
+ 'GettextLexer': ('pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
+ 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)),
+ 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
+ 'GoLexer': ('pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)),
'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
- 'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
+ 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
- 'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
+ 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')),
'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)),
- 'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
+ 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
- 'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
- 'HaxeLexer': ('pygments.lexers.web', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
+ 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
+ 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')),
'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
- 'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
+ 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
- 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()),
- 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pygments.lexers.agile', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
- 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
- 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
- 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
- 'Inform6Lexer': ('pygments.lexers.compiled', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
- 'Inform6TemplateLexer': ('pygments.lexers.compiled', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
- 'Inform7Lexer': ('pygments.lexers.compiled', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
- 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
- 'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
+ 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
+ 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
+ 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'IDLLexer': ('pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
+ 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
+ 'IgorLexer': ('pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
+ 'Inform6Lexer': ('pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
+ 'Inform6TemplateLexer': ('pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
+ 'Inform7Lexer': ('pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
+ 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)),
+ 'IoLexer': ('pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
- 'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
- 'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)),
- 'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
+ 'IsabelleLexer': ('pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
+ 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)),
+ 'JagsLexer': ('pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
- 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
+ 'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
- 'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)),
+ 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)),
'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
- 'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()),
- 'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
- 'KalLexer': ('pygments.lexers.web', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
- 'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
- 'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
+ 'JuliaConsoleLexer': ('pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()),
+ 'JuliaLexer': ('pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
+ 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
+ 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
+ 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)),
- 'LSLLexer': ('pygments.lexers.other', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
+ 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
- 'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
+ 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
- 'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
+ 'LeanLexer': ('pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
+ 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)),
'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
- 'LiterateAgdaLexer': ('pygments.lexers.functional', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
- 'LiterateCryptolLexer': ('pygments.lexers.functional', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
- 'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
- 'LiterateIdrisLexer': ('pygments.lexers.functional', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
- 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
+ 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)),
+ 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)),
+ 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)),
+ 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)),
+ 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)),
'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
- 'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
- 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)),
- 'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
- 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
- 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
+ 'LogosLexer': ('pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
+ 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
+ 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
+ 'MakefileLexer': ('pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
- 'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
- 'MaskLexer': ('pygments.lexers.web', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
+ 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
+ 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
- 'MathematicaLexer': ('pygments.lexers.math', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
- 'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
- 'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()),
- 'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
- 'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
- 'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
- 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
- 'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
- 'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
- 'MqlLexer': ('pygments.lexers.compiled', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
- 'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
- 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()),
- 'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()),
+ 'MathematicaLexer': ('pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
+ 'MatlabLexer': ('pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
+ 'MatlabSessionLexer': ('pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
+ 'MiniDLexer': ('pygments.lexers.d', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)),
+ 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
+ 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
+ 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MonkeyLexer': ('pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
+ 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
+ 'MqlLexer': ('pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
+ 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
+ 'MuPADLexer': ('pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
+ 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
- 'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
+ 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)),
'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
- 'NesCLexer': ('pygments.lexers.compiled', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
- 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
- 'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
- 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
- 'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
- 'NixLexer': ('pygments.lexers.functional', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
- 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()),
+ 'NesCLexer': ('pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
+ 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')),
+ 'NewspeakLexer': ('pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
+ 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)),
+ 'NimrodLexer': ('pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)),
+ 'NitLexer': ('pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
+ 'NixLexer': ('pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
+ 'NumPyLexer': ('pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
- 'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
- 'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
- 'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
- 'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
- 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
- 'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
- 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
- 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
- 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()),
- 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
- 'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
- 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
- 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
+ 'ObjectiveCLexer': ('pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
+ 'ObjectiveCppLexer': ('pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
+ 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
+ 'OcamlLexer': ('pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
+ 'OctaveLexer': ('pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
+ 'OocLexer': ('pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
+ 'OpaLexer': ('pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
+ 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
+ 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
+ 'PawnLexer': ('pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
+ 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')),
+ 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')),
+ 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
- 'PikeLexer': ('pygments.lexers.compiled', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
+ 'PikeLexer': ('pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
- 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
+ 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
- 'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
+ 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
- 'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
- 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
- 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
- 'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()),
- 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
- 'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
- 'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
- 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
- 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
- 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
- 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)),
- 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
- 'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
- 'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
+ 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
+ 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
+ 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
+ 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
+ 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
+ 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')),
+ 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)),
+ 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')),
+ 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)),
+ 'QBasicLexer': ('pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
+ 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)),
+ 'RConsoleLexer': ('pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
+ 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
+ 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
@@ -287,57 +290,57 @@ LEXERS = {
'RagelObjectiveCLexer': ('pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)),
- 'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
- 'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
- 'RedLexer': ('pygments.lexers.other', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
- 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()),
- 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
- 'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
+ 'RdLexer': ('pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
+ 'RebolLexer': ('pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
+ 'RedLexer': ('pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
+ 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
+ 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
- 'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
+ 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)),
'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
- 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
- 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
- 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
- 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
- 'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)),
- 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
- 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
- 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
+ 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
+ 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
+ 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
+ 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')),
+ 'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)),
+ 'SLexer': ('pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
+ 'SMLLexer': ('pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
+ 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
- 'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
- 'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
- 'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
- 'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
+ 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
+ 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
+ 'ScilabLexer': ('pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
+ 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)),
- 'SlimLexer': ('pygments.lexers.web', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
+ 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
- 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
+ 'SmalltalkLexer': ('pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
- 'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
- 'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
- 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
+ 'SnobolLexer': ('pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
+ 'SourcePawnLexer': ('pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
+ 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()),
'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
- 'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
+ 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
- 'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()),
- 'SwiftLexer': ('pygments.lexers.compiled', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
- 'SwigLexer': ('pygments.lexers.compiled', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
+ 'StanLexer': ('pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
+ 'SwiftLexer': ('pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
+ 'SwigLexer': ('pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
- 'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
+ 'TclLexer': ('pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
- 'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
+ 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
- 'TodotxtLexer': ('pygments.lexers.text', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
+ 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
- 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
- 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
- 'VCTreeStatusLexer': ('pygments.lexers.other', 'VCTreeStatus', ('vctreestatus',), (), ()),
- 'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()),
- 'ValaLexer': ('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
+ 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)),
+ 'UrbiscriptLexer': ('pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
+ 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
+ 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
+ 'ValaLexer': ('pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
@@ -345,18 +348,18 @@ LEXERS = {
'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
- 'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
- 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
+ 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
+ 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')),
'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)),
- 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
+ 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
- 'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
+ 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')),
- 'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
- 'ZephirLexer': ('pygments.lexers.web', 'Zephir', ('zephir',), ('*.zep',), ()),
+ 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
+ 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
}
if __name__ == '__main__':
@@ -366,35 +369,35 @@ if __name__ == '__main__':
# lookup lexers
found_lexers = []
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
- for filename in os.listdir('.'):
- if filename.endswith('.py') and not filename.startswith('_'):
- module_name = 'pygments.lexers.%s' % filename[:-3]
- print(module_name)
- module = __import__(module_name, None, None, [''])
- for lexer_name in module.__all__:
- lexer = getattr(module, lexer_name)
- found_lexers.append(
- '%r: %r' % (lexer_name,
- (module_name,
- lexer.name,
- tuple(lexer.aliases),
- tuple(lexer.filenames),
- tuple(lexer.mimetypes))))
- # sort them, that should make the diff files for svn smaller
+ for root, dirs, files in os.walk('.'):
+ for filename in files:
+ if filename.endswith('.py') and not filename.startswith('_'):
+ module_name = 'pygments.lexers%s.%s' % (
+ root[1:].replace('/', '.'), filename[:-3])
+ print(module_name)
+ module = __import__(module_name, None, None, [''])
+ for lexer_name in module.__all__:
+ lexer = getattr(module, lexer_name)
+ found_lexers.append(
+ '%r: %r' % (lexer_name,
+ (module_name,
+ lexer.name,
+ tuple(lexer.aliases),
+ tuple(lexer.filenames),
+ tuple(lexer.mimetypes))))
+ # sort them to make the diff minimal
found_lexers.sort()
# extract useful sourcecode from this file
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(__file__) as fp:
+ content = fp.read()
header = content[:content.find('LEXERS = {')]
footer = content[content.find("if __name__ == '__main__':"):]
# write new file
- f = open(__file__, 'wb')
- f.write(header)
- f.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
- f.write(footer)
- f.close()
+ with open(__file__, 'w') as fp:
+ fp.write(header)
+ fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers))
+ fp.write(footer)
+
+ print ('=== %d lexers processed.' % len(found_lexers))
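
[Note on the regenerated table above] The LEXERS mapping is what keeps the split into many small lexer modules cheap at runtime: a lexer is looked up by alias or filename pattern, and only the one module named in its tuple is imported. The following is a minimal sketch of that lazy-lookup pattern, not the actual pygments.lexers API; the helper name is invented for illustration, and the two sample entries are copied verbatim from the mapping above (they assume a Pygments installation with the new split module layout).

# Sketch only: resolve a lexer class lazily from a LEXERS-style mapping of
# (module, name, aliases, filename patterns, mimetypes) tuples.  The helper
# name is not part of the pygments API; the entries are copied from the new
# mapping and assume a pygments >= 2.0 layout with the split lexer modules.
import importlib

LEXERS = {
    'RustLexer': ('pygments.lexers.rust', 'Rust', ('rust',), ('*.rs',),
                  ('text/x-rustsrc',)),
    'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'),
                  ('text/x-yaml',)),
}

def find_lexer_class_by_alias(alias):
    """Import and return the lexer class registered for *alias*."""
    for class_name, (module_name, _, aliases, _, _) in LEXERS.items():
        if alias in aliases:
            # Only the single module that defines the lexer gets imported.
            module = importlib.import_module(module_name)
            return getattr(module, class_name)
    raise ValueError('no lexer found for alias %r' % alias)

if __name__ == '__main__':
    print(find_lexer_class_by_alias('yaml'))  # e.g. <class 'pygments.lexers.data.YamlLexer'>

Essentially the same idea sits behind get_lexer_by_name() in the real package, which is why the regeneration script above only has to keep this mapping current instead of importing every lexer module eagerly.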
diff --git a/pygments/lexers/_openedge_builtins.py b/pygments/lexers/_openedge_builtins.py
new file mode 100644
index 00000000..1bd97ca8
--- /dev/null
+++ b/pygments/lexers/_openedge_builtins.py
@@ -0,0 +1,2569 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._openedge_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Builtin list for the OpenEdgeLexer.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+OPENEDGEKEYWORDS = (
+ 'ABSOLUTE',
+ 'ABS',
+ 'ABSO',
+ 'ABSOL',
+ 'ABSOLU',
+ 'ABSOLUT',
+ 'ACCELERATOR',
+ 'ACCUM',
+ 'ACCUMULATE',
+ 'ACCUM',
+ 'ACCUMU',
+ 'ACCUMUL',
+ 'ACCUMULA',
+ 'ACCUMULAT',
+ 'ACTIVE-FORM',
+ 'ACTIVE-WINDOW',
+ 'ADD',
+ 'ADD-BUFFER',
+ 'ADD-CALC-COLUMN',
+ 'ADD-COLUMNS-FROM',
+ 'ADD-EVENTS-PROCEDURE',
+ 'ADD-FIELDS-FROM',
+ 'ADD-FIRST',
+ 'ADD-INDEX-FIELD',
+ 'ADD-LAST',
+ 'ADD-LIKE-COLUMN',
+ 'ADD-LIKE-FIELD',
+ 'ADD-LIKE-INDEX',
+ 'ADD-NEW-FIELD',
+ 'ADD-NEW-INDEX',
+ 'ADD-SCHEMA-LOCATION',
+ 'ADD-SUPER-PROCEDURE',
+ 'ADM-DATA',
+ 'ADVISE',
+ 'ALERT-BOX',
+ 'ALIAS',
+ 'ALL',
+ 'ALLOW-COLUMN-SEARCHING',
+ 'ALLOW-REPLICATION',
+ 'ALTER',
+ 'ALWAYS-ON-TOP',
+ 'AMBIGUOUS',
+ 'AMBIG',
+ 'AMBIGU',
+ 'AMBIGUO',
+ 'AMBIGUOU',
+ 'ANALYZE',
+ 'ANALYZ',
+ 'AND',
+ 'ANSI-ONLY',
+ 'ANY',
+ 'ANYWHERE',
+ 'APPEND',
+ 'APPL-ALERT-BOXES',
+ 'APPL-ALERT',
+ 'APPL-ALERT-',
+ 'APPL-ALERT-B',
+ 'APPL-ALERT-BO',
+ 'APPL-ALERT-BOX',
+ 'APPL-ALERT-BOXE',
+ 'APPL-CONTEXT-ID',
+ 'APPLICATION',
+ 'APPLY',
+ 'APPSERVER-INFO',
+ 'APPSERVER-PASSWORD',
+ 'APPSERVER-USERID',
+ 'ARRAY-MESSAGE',
+ 'AS',
+ 'ASC',
+ 'ASCENDING',
+ 'ASCE',
+ 'ASCEN',
+ 'ASCEND',
+ 'ASCENDI',
+ 'ASCENDIN',
+ 'ASK-OVERWRITE',
+ 'ASSEMBLY',
+ 'ASSIGN',
+ 'ASYNCHRONOUS',
+ 'ASYNC-REQUEST-COUNT',
+ 'ASYNC-REQUEST-HANDLE',
+ 'AT',
+ 'ATTACHED-PAIRLIST',
+ 'ATTR-SPACE',
+ 'ATTR',
+ 'ATTRI',
+ 'ATTRIB',
+ 'ATTRIBU',
+ 'ATTRIBUT',
+ 'AUDIT-CONTROL',
+ 'AUDIT-ENABLED',
+ 'AUDIT-EVENT-CONTEXT',
+ 'AUDIT-POLICY',
+ 'AUTHENTICATION-FAILED',
+ 'AUTHORIZATION',
+ 'AUTO-COMPLETION',
+ 'AUTO-COMP',
+ 'AUTO-COMPL',
+ 'AUTO-COMPLE',
+ 'AUTO-COMPLET',
+ 'AUTO-COMPLETI',
+ 'AUTO-COMPLETIO',
+ 'AUTO-ENDKEY',
+ 'AUTO-END-KEY',
+ 'AUTO-GO',
+ 'AUTO-INDENT',
+ 'AUTO-IND',
+ 'AUTO-INDE',
+ 'AUTO-INDEN',
+ 'AUTOMATIC',
+ 'AUTO-RESIZE',
+ 'AUTO-RETURN',
+ 'AUTO-RET',
+ 'AUTO-RETU',
+ 'AUTO-RETUR',
+ 'AUTO-SYNCHRONIZE',
+ 'AUTO-ZAP',
+ 'AUTO-Z',
+ 'AUTO-ZA',
+ 'AVAILABLE',
+ 'AVAIL',
+ 'AVAILA',
+ 'AVAILAB',
+ 'AVAILABL',
+ 'AVAILABLE-FORMATS',
+ 'AVERAGE',
+ 'AVE',
+ 'AVER',
+ 'AVERA',
+ 'AVERAG',
+ 'AVG',
+ 'BACKGROUND',
+ 'BACK',
+ 'BACKG',
+ 'BACKGR',
+ 'BACKGRO',
+ 'BACKGROU',
+ 'BACKGROUN',
+ 'BACKWARDS',
+ 'BACKWARD',
+ 'BASE64-DECODE',
+ 'BASE64-ENCODE',
+ 'BASE-ADE',
+ 'BASE-KEY',
+ 'BATCH-MODE',
+ 'BATCH',
+ 'BATCH-',
+ 'BATCH-M',
+ 'BATCH-MO',
+ 'BATCH-MOD',
+ 'BATCH-SIZE',
+ 'BEFORE-HIDE',
+ 'BEFORE-H',
+ 'BEFORE-HI',
+ 'BEFORE-HID',
+ 'BEGIN-EVENT-GROUP',
+ 'BEGINS',
+ 'BELL',
+ 'BETWEEN',
+ 'BGCOLOR',
+ 'BGC',
+ 'BGCO',
+ 'BGCOL',
+ 'BGCOLO',
+ 'BIG-ENDIAN',
+ 'BINARY',
+ 'BIND',
+ 'BIND-WHERE',
+ 'BLANK',
+ 'BLOCK-ITERATION-DISPLAY',
+ 'BORDER-BOTTOM-CHARS',
+ 'BORDER-B',
+ 'BORDER-BO',
+ 'BORDER-BOT',
+ 'BORDER-BOTT',
+ 'BORDER-BOTTO',
+ 'BORDER-BOTTOM-PIXELS',
+ 'BORDER-BOTTOM-P',
+ 'BORDER-BOTTOM-PI',
+ 'BORDER-BOTTOM-PIX',
+ 'BORDER-BOTTOM-PIXE',
+ 'BORDER-BOTTOM-PIXEL',
+ 'BORDER-LEFT-CHARS',
+ 'BORDER-L',
+ 'BORDER-LE',
+ 'BORDER-LEF',
+ 'BORDER-LEFT',
+ 'BORDER-LEFT-',
+ 'BORDER-LEFT-C',
+ 'BORDER-LEFT-CH',
+ 'BORDER-LEFT-CHA',
+ 'BORDER-LEFT-CHAR',
+ 'BORDER-LEFT-PIXELS',
+ 'BORDER-LEFT-P',
+ 'BORDER-LEFT-PI',
+ 'BORDER-LEFT-PIX',
+ 'BORDER-LEFT-PIXE',
+ 'BORDER-LEFT-PIXEL',
+ 'BORDER-RIGHT-CHARS',
+ 'BORDER-R',
+ 'BORDER-RI',
+ 'BORDER-RIG',
+ 'BORDER-RIGH',
+ 'BORDER-RIGHT',
+ 'BORDER-RIGHT-',
+ 'BORDER-RIGHT-C',
+ 'BORDER-RIGHT-CH',
+ 'BORDER-RIGHT-CHA',
+ 'BORDER-RIGHT-CHAR',
+ 'BORDER-RIGHT-PIXELS',
+ 'BORDER-RIGHT-P',
+ 'BORDER-RIGHT-PI',
+ 'BORDER-RIGHT-PIX',
+ 'BORDER-RIGHT-PIXE',
+ 'BORDER-RIGHT-PIXEL',
+ 'BORDER-TOP-CHARS',
+ 'BORDER-T',
+ 'BORDER-TO',
+ 'BORDER-TOP',
+ 'BORDER-TOP-',
+ 'BORDER-TOP-C',
+ 'BORDER-TOP-CH',
+ 'BORDER-TOP-CHA',
+ 'BORDER-TOP-CHAR',
+ 'BORDER-TOP-PIXELS',
+ 'BORDER-TOP-P',
+ 'BORDER-TOP-PI',
+ 'BORDER-TOP-PIX',
+ 'BORDER-TOP-PIXE',
+ 'BORDER-TOP-PIXEL',
+ 'BOX',
+ 'BOX-SELECTABLE',
+ 'BOX-SELECT',
+ 'BOX-SELECTA',
+ 'BOX-SELECTAB',
+ 'BOX-SELECTABL',
+ 'BREAK',
+ 'BROWSE',
+ 'BUFFER',
+ 'BUFFER-CHARS',
+ 'BUFFER-COMPARE',
+ 'BUFFER-COPY',
+ 'BUFFER-CREATE',
+ 'BUFFER-DELETE',
+ 'BUFFER-FIELD',
+ 'BUFFER-HANDLE',
+ 'BUFFER-LINES',
+ 'BUFFER-NAME',
+ 'BUFFER-RELEASE',
+ 'BUFFER-VALUE',
+ 'BUTTON',
+ 'BUTTONS',
+ 'BUTTON',
+ 'BY',
+ 'BY-POINTER',
+ 'BY-VARIANT-POINTER',
+ 'CACHE',
+ 'CACHE-SIZE',
+ 'CALL',
+ 'CALL-NAME',
+ 'CALL-TYPE',
+ 'CANCEL-BREAK',
+ 'CANCEL-BUTTON',
+ 'CAN-CREATE',
+ 'CAN-DELETE',
+ 'CAN-DO',
+ 'CAN-FIND',
+ 'CAN-QUERY',
+ 'CAN-READ',
+ 'CAN-SET',
+ 'CAN-WRITE',
+ 'CAPS',
+ 'CAREFUL-PAINT',
+ 'CASE',
+ 'CASE-SENSITIVE',
+ 'CASE-SEN',
+ 'CASE-SENS',
+ 'CASE-SENSI',
+ 'CASE-SENSIT',
+ 'CASE-SENSITI',
+ 'CASE-SENSITIV',
+ 'CAST',
+ 'CATCH',
+ 'CDECL',
+ 'CENTERED',
+ 'CENTER',
+ 'CENTERE',
+ 'CHAINED',
+ 'CHARACTER_LENGTH',
+ 'CHARSET',
+ 'CHECK',
+ 'CHECKED',
+ 'CHOOSE',
+ 'CHR',
+ 'CLASS',
+ 'CLASS-TYPE',
+ 'CLEAR',
+ 'CLEAR-APPL-CONTEXT',
+ 'CLEAR-LOG',
+ 'CLEAR-SELECTION',
+ 'CLEAR-SELECT',
+ 'CLEAR-SELECTI',
+ 'CLEAR-SELECTIO',
+ 'CLEAR-SORT-ARROWS',
+ 'CLEAR-SORT-ARROW',
+ 'CLIENT-CONNECTION-ID',
+ 'CLIENT-PRINCIPAL',
+ 'CLIENT-TTY',
+ 'CLIENT-TYPE',
+ 'CLIENT-WORKSTATION',
+ 'CLIPBOARD',
+ 'CLOSE',
+ 'CLOSE-LOG',
+ 'CODE',
+ 'CODEBASE-LOCATOR',
+ 'CODEPAGE',
+ 'CODEPAGE-CONVERT',
+ 'COLLATE',
+ 'COL-OF',
+ 'COLON',
+ 'COLON-ALIGNED',
+ 'COLON-ALIGN',
+ 'COLON-ALIGNE',
+ 'COLOR',
+ 'COLOR-TABLE',
+ 'COLUMN',
+ 'COL',
+ 'COLU',
+ 'COLUM',
+ 'COLUMN-BGCOLOR',
+ 'COLUMN-DCOLOR',
+ 'COLUMN-FGCOLOR',
+ 'COLUMN-FONT',
+ 'COLUMN-LABEL',
+ 'COLUMN-LAB',
+ 'COLUMN-LABE',
+ 'COLUMN-MOVABLE',
+ 'COLUMN-OF',
+ 'COLUMN-PFCOLOR',
+ 'COLUMN-READ-ONLY',
+ 'COLUMN-RESIZABLE',
+ 'COLUMNS',
+ 'COLUMN-SCROLLING',
+ 'COMBO-BOX',
+ 'COMMAND',
+ 'COMPARES',
+ 'COMPILE',
+ 'COMPILER',
+ 'COMPLETE',
+ 'COM-SELF',
+ 'CONFIG-NAME',
+ 'CONNECT',
+ 'CONNECTED',
+ 'CONSTRUCTOR',
+ 'CONTAINS',
+ 'CONTENTS',
+ 'CONTEXT',
+ 'CONTEXT-HELP',
+ 'CONTEXT-HELP-FILE',
+ 'CONTEXT-HELP-ID',
+ 'CONTEXT-POPUP',
+ 'CONTROL',
+ 'CONTROL-BOX',
+ 'CONTROL-FRAME',
+ 'CONVERT',
+ 'CONVERT-3D-COLORS',
+ 'CONVERT-TO-OFFSET',
+ 'CONVERT-TO-OFFS',
+ 'CONVERT-TO-OFFSE',
+ 'COPY-DATASET',
+ 'COPY-LOB',
+ 'COPY-SAX-ATTRIBUTES',
+ 'COPY-TEMP-TABLE',
+ 'COUNT',
+ 'COUNT-OF',
+ 'CPCASE',
+ 'CPCOLL',
+ 'CPINTERNAL',
+ 'CPLOG',
+ 'CPPRINT',
+ 'CPRCODEIN',
+ 'CPRCODEOUT',
+ 'CPSTREAM',
+ 'CPTERM',
+ 'CRC-VALUE',
+ 'CREATE',
+ 'CREATE-LIKE',
+ 'CREATE-LIKE-SEQUENTIAL',
+ 'CREATE-NODE-NAMESPACE',
+ 'CREATE-RESULT-LIST-ENTRY',
+ 'CREATE-TEST-FILE',
+ 'CURRENT',
+ 'CURRENT_DATE',
+ 'CURRENT_DATE',
+ 'CURRENT-CHANGED',
+ 'CURRENT-COLUMN',
+ 'CURRENT-ENVIRONMENT',
+ 'CURRENT-ENV',
+ 'CURRENT-ENVI',
+ 'CURRENT-ENVIR',
+ 'CURRENT-ENVIRO',
+ 'CURRENT-ENVIRON',
+ 'CURRENT-ENVIRONM',
+ 'CURRENT-ENVIRONME',
+ 'CURRENT-ENVIRONMEN',
+ 'CURRENT-ITERATION',
+ 'CURRENT-LANGUAGE',
+ 'CURRENT-LANG',
+ 'CURRENT-LANGU',
+ 'CURRENT-LANGUA',
+ 'CURRENT-LANGUAG',
+ 'CURRENT-QUERY',
+ 'CURRENT-RESULT-ROW',
+ 'CURRENT-ROW-MODIFIED',
+ 'CURRENT-VALUE',
+ 'CURRENT-WINDOW',
+ 'CURSOR',
+ 'CURS',
+ 'CURSO',
+ 'CURSOR-CHAR',
+ 'CURSOR-LINE',
+ 'CURSOR-OFFSET',
+ 'DATABASE',
+ 'DATA-BIND',
+ 'DATA-ENTRY-RETURN',
+ 'DATA-ENTRY-RET',
+ 'DATA-ENTRY-RETU',
+ 'DATA-ENTRY-RETUR',
+ 'DATA-RELATION',
+ 'DATA-REL',
+ 'DATA-RELA',
+ 'DATA-RELAT',
+ 'DATA-RELATI',
+ 'DATA-RELATIO',
+ 'DATASERVERS',
+ 'DATASET',
+ 'DATASET-HANDLE',
+ 'DATA-SOURCE',
+ 'DATA-SOURCE-COMPLETE-MAP',
+ 'DATA-SOURCE-MODIFIED',
+ 'DATA-SOURCE-ROWID',
+ 'DATA-TYPE',
+ 'DATA-T',
+ 'DATA-TY',
+ 'DATA-TYP',
+ 'DATE-FORMAT',
+ 'DATE-F',
+ 'DATE-FO',
+ 'DATE-FOR',
+ 'DATE-FORM',
+ 'DATE-FORMA',
+ 'DAY',
+ 'DBCODEPAGE',
+ 'DBCOLLATION',
+ 'DBNAME',
+ 'DBPARAM',
+ 'DB-REFERENCES',
+ 'DBRESTRICTIONS',
+ 'DBREST',
+ 'DBRESTR',
+ 'DBRESTRI',
+ 'DBRESTRIC',
+ 'DBRESTRICT',
+ 'DBRESTRICTI',
+ 'DBRESTRICTIO',
+ 'DBRESTRICTION',
+ 'DBTASKID',
+ 'DBTYPE',
+ 'DBVERSION',
+ 'DBVERS',
+ 'DBVERSI',
+ 'DBVERSIO',
+ 'DCOLOR',
+ 'DDE',
+ 'DDE-ERROR',
+ 'DDE-ID',
+ 'DDE-I',
+ 'DDE-ITEM',
+ 'DDE-NAME',
+ 'DDE-TOPIC',
+ 'DEBLANK',
+ 'DEBUG',
+ 'DEBU',
+ 'DEBUG-ALERT',
+ 'DEBUGGER',
+ 'DEBUG-LIST',
+ 'DECIMALS',
+ 'DECLARE',
+ 'DECLARE-NAMESPACE',
+ 'DECRYPT',
+ 'DEFAULT',
+ 'DEFAULT-BUFFER-HANDLE',
+ 'DEFAULT-BUTTON',
+ 'DEFAUT-B',
+ 'DEFAUT-BU',
+ 'DEFAUT-BUT',
+ 'DEFAUT-BUTT',
+ 'DEFAUT-BUTTO',
+ 'DEFAULT-COMMIT',
+ 'DEFAULT-EXTENSION',
+ 'DEFAULT-EX',
+ 'DEFAULT-EXT',
+ 'DEFAULT-EXTE',
+ 'DEFAULT-EXTEN',
+ 'DEFAULT-EXTENS',
+ 'DEFAULT-EXTENSI',
+ 'DEFAULT-EXTENSIO',
+ 'DEFAULT-NOXLATE',
+ 'DEFAULT-NOXL',
+ 'DEFAULT-NOXLA',
+ 'DEFAULT-NOXLAT',
+ 'DEFAULT-VALUE',
+ 'DEFAULT-WINDOW',
+ 'DEFINED',
+ 'DEFINE-USER-EVENT-MANAGER',
+ 'DELETE',
+ 'DEL',
+ 'DELE',
+ 'DELET',
+ 'DELETE-CHARACTER',
+ 'DELETE-CHAR',
+ 'DELETE-CHARA',
+ 'DELETE-CHARAC',
+ 'DELETE-CHARACT',
+ 'DELETE-CHARACTE',
+ 'DELETE-CURRENT-ROW',
+ 'DELETE-LINE',
+ 'DELETE-RESULT-LIST-ENTRY',
+ 'DELETE-SELECTED-ROW',
+ 'DELETE-SELECTED-ROWS',
+ 'DELIMITER',
+ 'DESC',
+ 'DESCENDING',
+ 'DESC',
+ 'DESCE',
+ 'DESCEN',
+ 'DESCEND',
+ 'DESCENDI',
+ 'DESCENDIN',
+ 'DESELECT-FOCUSED-ROW',
+ 'DESELECTION',
+ 'DESELECT-ROWS',
+ 'DESELECT-SELECTED-ROW',
+ 'DESTRUCTOR',
+ 'DIALOG-BOX',
+ 'DICTIONARY',
+ 'DICT',
+ 'DICTI',
+ 'DICTIO',
+ 'DICTION',
+ 'DICTIONA',
+ 'DICTIONAR',
+ 'DIR',
+ 'DISABLE',
+ 'DISABLE-AUTO-ZAP',
+ 'DISABLED',
+ 'DISABLE-DUMP-TRIGGERS',
+ 'DISABLE-LOAD-TRIGGERS',
+ 'DISCONNECT',
+ 'DISCON',
+ 'DISCONN',
+ 'DISCONNE',
+ 'DISCONNEC',
+ 'DISP',
+ 'DISPLAY',
+ 'DISP',
+ 'DISPL',
+ 'DISPLA',
+ 'DISPLAY-MESSAGE',
+ 'DISPLAY-TYPE',
+ 'DISPLAY-T',
+ 'DISPLAY-TY',
+ 'DISPLAY-TYP',
+ 'DISTINCT',
+ 'DO',
+ 'DOMAIN-DESCRIPTION',
+ 'DOMAIN-NAME',
+ 'DOMAIN-TYPE',
+ 'DOS',
+ 'DOUBLE',
+ 'DOWN',
+ 'DRAG-ENABLED',
+ 'DROP',
+ 'DROP-DOWN',
+ 'DROP-DOWN-LIST',
+ 'DROP-FILE-NOTIFY',
+ 'DROP-TARGET',
+ 'DUMP',
+ 'DYNAMIC',
+ 'DYNAMIC-FUNCTION',
+ 'EACH',
+ 'ECHO',
+ 'EDGE-CHARS',
+ 'EDGE',
+ 'EDGE-',
+ 'EDGE-C',
+ 'EDGE-CH',
+ 'EDGE-CHA',
+ 'EDGE-CHAR',
+ 'EDGE-PIXELS',
+ 'EDGE-P',
+ 'EDGE-PI',
+ 'EDGE-PIX',
+ 'EDGE-PIXE',
+ 'EDGE-PIXEL',
+ 'EDIT-CAN-PASTE',
+ 'EDIT-CAN-UNDO',
+ 'EDIT-CLEAR',
+ 'EDIT-COPY',
+ 'EDIT-CUT',
+ 'EDITING',
+ 'EDITOR',
+ 'EDIT-PASTE',
+ 'EDIT-UNDO',
+ 'ELSE',
+ 'EMPTY',
+ 'EMPTY-TEMP-TABLE',
+ 'ENABLE',
+ 'ENABLED-FIELDS',
+ 'ENCODE',
+ 'ENCRYPT',
+ 'ENCRYPT-AUDIT-MAC-KEY',
+ 'ENCRYPTION-SALT',
+ 'END',
+ 'END-DOCUMENT',
+ 'END-ELEMENT',
+ 'END-EVENT-GROUP',
+ 'END-FILE-DROP',
+ 'ENDKEY',
+ 'END-KEY',
+ 'END-MOVE',
+ 'END-RESIZE',
+ 'END-ROW-RESIZE',
+ 'END-USER-PROMPT',
+ 'ENTERED',
+ 'ENTRY',
+ 'EQ',
+ 'ERROR',
+ 'ERROR-COLUMN',
+ 'ERROR-COL',
+ 'ERROR-COLU',
+ 'ERROR-COLUM',
+ 'ERROR-ROW',
+ 'ERROR-STACK-TRACE',
+ 'ERROR-STATUS',
+ 'ERROR-STAT',
+ 'ERROR-STATU',
+ 'ESCAPE',
+ 'ETIME',
+ 'EVENT-GROUP-ID',
+ 'EVENT-PROCEDURE',
+ 'EVENT-PROCEDURE-CONTEXT',
+ 'EVENTS',
+ 'EVENT',
+ 'EVENT-TYPE',
+ 'EVENT-T',
+ 'EVENT-TY',
+ 'EVENT-TYP',
+ 'EXCEPT',
+ 'EXCLUSIVE-ID',
+ 'EXCLUSIVE-LOCK',
+ 'EXCLUSIVE',
+ 'EXCLUSIVE-',
+ 'EXCLUSIVE-L',
+ 'EXCLUSIVE-LO',
+ 'EXCLUSIVE-LOC',
+ 'EXCLUSIVE-WEB-USER',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXP',
+ 'EXPAND',
+ 'EXPANDABLE',
+ 'EXPLICIT',
+ 'EXPORT',
+ 'EXPORT-PRINCIPAL',
+ 'EXTENDED',
+ 'EXTENT',
+ 'EXTERNAL',
+ 'FALSE',
+ 'FETCH',
+ 'FETCH-SELECTED-ROW',
+ 'FGCOLOR',
+ 'FGC',
+ 'FGCO',
+ 'FGCOL',
+ 'FGCOLO',
+ 'FIELD',
+ 'FIELDS',
+ 'FIELD',
+ 'FILE',
+ 'FILE-CREATE-DATE',
+ 'FILE-CREATE-TIME',
+ 'FILE-INFORMATION',
+ 'FILE-INFO',
+ 'FILE-INFOR',
+ 'FILE-INFORM',
+ 'FILE-INFORMA',
+ 'FILE-INFORMAT',
+ 'FILE-INFORMATI',
+ 'FILE-INFORMATIO',
+ 'FILE-MOD-DATE',
+ 'FILE-MOD-TIME',
+ 'FILENAME',
+ 'FILE-NAME',
+ 'FILE-OFFSET',
+ 'FILE-OFF',
+ 'FILE-OFFS',
+ 'FILE-OFFSE',
+ 'FILE-SIZE',
+ 'FILE-TYPE',
+ 'FILL',
+ 'FILLED',
+ 'FILL-IN',
+ 'FILTERS',
+ 'FINAL',
+ 'FINALLY',
+ 'FIND',
+ 'FIND-BY-ROWID',
+ 'FIND-CASE-SENSITIVE',
+ 'FIND-CURRENT',
+ 'FINDER',
+ 'FIND-FIRST',
+ 'FIND-GLOBAL',
+ 'FIND-LAST',
+ 'FIND-NEXT-OCCURRENCE',
+ 'FIND-PREV-OCCURRENCE',
+ 'FIND-SELECT',
+ 'FIND-UNIQUE',
+ 'FIND-WRAP-AROUND',
+ 'FIRST',
+ 'FIRST-ASYNCH-REQUEST',
+ 'FIRST-CHILD',
+ 'FIRST-COLUMN',
+ 'FIRST-FORM',
+ 'FIRST-OBJECT',
+ 'FIRST-OF',
+ 'FIRST-PROCEDURE',
+ 'FIRST-PROC',
+ 'FIRST-PROCE',
+ 'FIRST-PROCED',
+ 'FIRST-PROCEDU',
+ 'FIRST-PROCEDUR',
+ 'FIRST-SERVER',
+ 'FIRST-TAB-ITEM',
+ 'FIRST-TAB-I',
+ 'FIRST-TAB-IT',
+ 'FIRST-TAB-ITE',
+ 'FIT-LAST-COLUMN',
+ 'FIXED-ONLY',
+ 'FLAT-BUTTON',
+ 'FLOAT',
+ 'FOCUS',
+ 'FOCUSED-ROW',
+ 'FOCUSED-ROW-SELECTED',
+ 'FONT',
+ 'FONT-TABLE',
+ 'FOR',
+ 'FORCE-FILE',
+ 'FOREGROUND',
+ 'FORE',
+ 'FOREG',
+ 'FOREGR',
+ 'FOREGRO',
+ 'FOREGROU',
+ 'FOREGROUN',
+ 'FORM',
+ 'FORMAT',
+ 'FORM',
+ 'FORMA',
+ 'FORMATTED',
+ 'FORMATTE',
+ 'FORM-LONG-INPUT',
+ 'FORWARD',
+ 'FORWARDS',
+ 'FORWARD',
+ 'FRAGMENT',
+ 'FRAGMEN',
+ 'FRAME',
+ 'FRAM',
+ 'FRAME-COL',
+ 'FRAME-DB',
+ 'FRAME-DOWN',
+ 'FRAME-FIELD',
+ 'FRAME-FILE',
+ 'FRAME-INDEX',
+ 'FRAME-INDE',
+ 'FRAME-LINE',
+ 'FRAME-NAME',
+ 'FRAME-ROW',
+ 'FRAME-SPACING',
+ 'FRAME-SPA',
+ 'FRAME-SPAC',
+ 'FRAME-SPACI',
+ 'FRAME-SPACIN',
+ 'FRAME-VALUE',
+ 'FRAME-VAL',
+ 'FRAME-VALU',
+ 'FRAME-X',
+ 'FRAME-Y',
+ 'FREQUENCY',
+ 'FROM',
+ 'FROM-CHARS',
+ 'FROM-C',
+ 'FROM-CH',
+ 'FROM-CHA',
+ 'FROM-CHAR',
+ 'FROM-CURRENT',
+ 'FROM-CUR',
+ 'FROM-CURR',
+ 'FROM-CURRE',
+ 'FROM-CURREN',
+ 'FROM-PIXELS',
+ 'FROM-P',
+ 'FROM-PI',
+ 'FROM-PIX',
+ 'FROM-PIXE',
+ 'FROM-PIXEL',
+ 'FULL-HEIGHT-CHARS',
+ 'FULL-HEIGHT',
+ 'FULL-HEIGHT-',
+ 'FULL-HEIGHT-C',
+ 'FULL-HEIGHT-CH',
+ 'FULL-HEIGHT-CHA',
+ 'FULL-HEIGHT-CHAR',
+ 'FULL-HEIGHT-PIXELS',
+ 'FULL-HEIGHT-P',
+ 'FULL-HEIGHT-PI',
+ 'FULL-HEIGHT-PIX',
+ 'FULL-HEIGHT-PIXE',
+ 'FULL-HEIGHT-PIXEL',
+ 'FULL-PATHNAME',
+ 'FULL-PATHN',
+ 'FULL-PATHNA',
+ 'FULL-PATHNAM',
+ 'FULL-WIDTH-CHARS',
+ 'FULL-WIDTH',
+ 'FULL-WIDTH-',
+ 'FULL-WIDTH-C',
+ 'FULL-WIDTH-CH',
+ 'FULL-WIDTH-CHA',
+ 'FULL-WIDTH-CHAR',
+ 'FULL-WIDTH-PIXELS',
+ 'FULL-WIDTH-P',
+ 'FULL-WIDTH-PI',
+ 'FULL-WIDTH-PIX',
+ 'FULL-WIDTH-PIXE',
+ 'FULL-WIDTH-PIXEL',
+ 'FUNCTION',
+ 'FUNCTION-CALL-TYPE',
+ 'GATEWAYS',
+ 'GATEWAY',
+ 'GE',
+ 'GENERATE-MD5',
+ 'GENERATE-PBE-KEY',
+ 'GENERATE-PBE-SALT',
+ 'GENERATE-RANDOM-KEY',
+ 'GENERATE-UUID',
+ 'GET',
+ 'GET-ATTR-CALL-TYPE',
+ 'GET-ATTRIBUTE-NODE',
+ 'GET-BINARY-DATA',
+ 'GET-BLUE-VALUE',
+ 'GET-BLUE',
+ 'GET-BLUE-',
+ 'GET-BLUE-V',
+ 'GET-BLUE-VA',
+ 'GET-BLUE-VAL',
+ 'GET-BLUE-VALU',
+ 'GET-BROWSE-COLUMN',
+ 'GET-BUFFER-HANDLEGETBYTE',
+ 'GET-BYTE',
+ 'GET-CALLBACK-PROC-CONTEXT',
+ 'GET-CALLBACK-PROC-NAME',
+ 'GET-CGI-LIST',
+ 'GET-CGI-LONG-VALUE',
+ 'GET-CGI-VALUE',
+ 'GET-CODEPAGES',
+ 'GET-COLLATIONS',
+ 'GET-CONFIG-VALUE',
+ 'GET-CURRENT',
+ 'GET-DOUBLE',
+ 'GET-DROPPED-FILE',
+ 'GET-DYNAMIC',
+ 'GET-ERROR-COLUMN',
+ 'GET-ERROR-ROW',
+ 'GET-FILE',
+ 'GET-FILE-NAME',
+ 'GET-FILE-OFFSET',
+ 'GET-FILE-OFFSE',
+ 'GET-FIRST',
+ 'GET-FLOAT',
+ 'GET-GREEN-VALUE',
+ 'GET-GREEN',
+ 'GET-GREEN-',
+ 'GET-GREEN-V',
+ 'GET-GREEN-VA',
+ 'GET-GREEN-VAL',
+ 'GET-GREEN-VALU',
+ 'GET-INDEX-BY-NAMESPACE-NAME',
+ 'GET-INDEX-BY-QNAME',
+ 'GET-INT64',
+ 'GET-ITERATION',
+ 'GET-KEY-VALUE',
+ 'GET-KEY-VAL',
+ 'GET-KEY-VALU',
+ 'GET-LAST',
+ 'GET-LOCALNAME-BY-INDEX',
+ 'GET-LONG',
+ 'GET-MESSAGE',
+ 'GET-NEXT',
+ 'GET-NUMBER',
+ 'GET-POINTER-VALUE',
+ 'GET-PREV',
+ 'GET-PRINTERS',
+ 'GET-PROPERTY',
+ 'GET-QNAME-BY-INDEX',
+ 'GET-RED-VALUE',
+ 'GET-RED',
+ 'GET-RED-',
+ 'GET-RED-V',
+ 'GET-RED-VA',
+ 'GET-RED-VAL',
+ 'GET-RED-VALU',
+ 'GET-REPOSITIONED-ROW',
+ 'GET-RGB-VALUE',
+ 'GET-SELECTED-WIDGET',
+ 'GET-SELECTED',
+ 'GET-SELECTED-',
+ 'GET-SELECTED-W',
+ 'GET-SELECTED-WI',
+ 'GET-SELECTED-WID',
+ 'GET-SELECTED-WIDG',
+ 'GET-SELECTED-WIDGE',
+ 'GET-SHORT',
+ 'GET-SIGNATURE',
+ 'GET-SIZE',
+ 'GET-STRING',
+ 'GET-TAB-ITEM',
+ 'GET-TEXT-HEIGHT-CHARS',
+ 'GET-TEXT-HEIGHT',
+ 'GET-TEXT-HEIGHT-',
+ 'GET-TEXT-HEIGHT-C',
+ 'GET-TEXT-HEIGHT-CH',
+ 'GET-TEXT-HEIGHT-CHA',
+ 'GET-TEXT-HEIGHT-CHAR',
+ 'GET-TEXT-HEIGHT-PIXELS',
+ 'GET-TEXT-HEIGHT-P',
+ 'GET-TEXT-HEIGHT-PI',
+ 'GET-TEXT-HEIGHT-PIX',
+ 'GET-TEXT-HEIGHT-PIXE',
+ 'GET-TEXT-HEIGHT-PIXEL',
+ 'GET-TEXT-WIDTH-CHARS',
+ 'GET-TEXT-WIDTH',
+ 'GET-TEXT-WIDTH-',
+ 'GET-TEXT-WIDTH-C',
+ 'GET-TEXT-WIDTH-CH',
+ 'GET-TEXT-WIDTH-CHA',
+ 'GET-TEXT-WIDTH-CHAR',
+ 'GET-TEXT-WIDTH-PIXELS',
+ 'GET-TEXT-WIDTH-P',
+ 'GET-TEXT-WIDTH-PI',
+ 'GET-TEXT-WIDTH-PIX',
+ 'GET-TEXT-WIDTH-PIXE',
+ 'GET-TEXT-WIDTH-PIXEL',
+ 'GET-TYPE-BY-INDEX',
+ 'GET-TYPE-BY-NAMESPACE-NAME',
+ 'GET-TYPE-BY-QNAME',
+ 'GET-UNSIGNED-LONG',
+ 'GET-UNSIGNED-SHORT',
+ 'GET-URI-BY-INDEX',
+ 'GET-VALUE-BY-INDEX',
+ 'GET-VALUE-BY-NAMESPACE-NAME',
+ 'GET-VALUE-BY-QNAME',
+ 'GET-WAIT-STATE',
+ 'GLOBAL',
+ 'GO-ON',
+ 'GO-PENDING',
+ 'GO-PEND',
+ 'GO-PENDI',
+ 'GO-PENDIN',
+ 'GRANT',
+ 'GRAPHIC-EDGE',
+ 'GRAPHIC-E',
+ 'GRAPHIC-ED',
+ 'GRAPHIC-EDG',
+ 'GRID-FACTOR-HORIZONTAL',
+ 'GRID-FACTOR-H',
+ 'GRID-FACTOR-HO',
+ 'GRID-FACTOR-HOR',
+ 'GRID-FACTOR-HORI',
+ 'GRID-FACTOR-HORIZ',
+ 'GRID-FACTOR-HORIZO',
+ 'GRID-FACTOR-HORIZON',
+ 'GRID-FACTOR-HORIZONT',
+ 'GRID-FACTOR-HORIZONTA',
+ 'GRID-FACTOR-VERTICAL',
+ 'GRID-FACTOR-V',
+ 'GRID-FACTOR-VE',
+ 'GRID-FACTOR-VER',
+ 'GRID-FACTOR-VERT',
+ 'GRID-FACTOR-VERT',
+ 'GRID-FACTOR-VERTI',
+ 'GRID-FACTOR-VERTIC',
+ 'GRID-FACTOR-VERTICA',
+ 'GRID-SNAP',
+ 'GRID-UNIT-HEIGHT-CHARS',
+ 'GRID-UNIT-HEIGHT',
+ 'GRID-UNIT-HEIGHT-',
+ 'GRID-UNIT-HEIGHT-C',
+ 'GRID-UNIT-HEIGHT-CH',
+ 'GRID-UNIT-HEIGHT-CHA',
+ 'GRID-UNIT-HEIGHT-PIXELS',
+ 'GRID-UNIT-HEIGHT-P',
+ 'GRID-UNIT-HEIGHT-PI',
+ 'GRID-UNIT-HEIGHT-PIX',
+ 'GRID-UNIT-HEIGHT-PIXE',
+ 'GRID-UNIT-HEIGHT-PIXEL',
+ 'GRID-UNIT-WIDTH-CHARS',
+ 'GRID-UNIT-WIDTH',
+ 'GRID-UNIT-WIDTH-',
+ 'GRID-UNIT-WIDTH-C',
+ 'GRID-UNIT-WIDTH-CH',
+ 'GRID-UNIT-WIDTH-CHA',
+ 'GRID-UNIT-WIDTH-CHAR',
+ 'GRID-UNIT-WIDTH-PIXELS',
+ 'GRID-UNIT-WIDTH-P',
+ 'GRID-UNIT-WIDTH-PI',
+ 'GRID-UNIT-WIDTH-PIX',
+ 'GRID-UNIT-WIDTH-PIXE',
+ 'GRID-UNIT-WIDTH-PIXEL',
+ 'GRID-VISIBLE',
+ 'GROUP',
+ 'GT',
+ 'GUID',
+ 'HANDLER',
+ 'HAS-RECORDS',
+ 'HAVING',
+ 'HEADER',
+ 'HEIGHT-CHARS',
+ 'HEIGHT',
+ 'HEIGHT-',
+ 'HEIGHT-C',
+ 'HEIGHT-CH',
+ 'HEIGHT-CHA',
+ 'HEIGHT-CHAR',
+ 'HEIGHT-PIXELS',
+ 'HEIGHT-P',
+ 'HEIGHT-PI',
+ 'HEIGHT-PIX',
+ 'HEIGHT-PIXE',
+ 'HEIGHT-PIXEL',
+ 'HELP',
+ 'HEX-DECODE',
+ 'HEX-ENCODE',
+ 'HIDDEN',
+ 'HIDE',
+ 'HORIZONTAL',
+ 'HORI',
+ 'HORIZ',
+ 'HORIZO',
+ 'HORIZON',
+ 'HORIZONT',
+ 'HORIZONTA',
+ 'HOST-BYTE-ORDER',
+ 'HTML-CHARSET',
+ 'HTML-END-OF-LINE',
+ 'HTML-END-OF-PAGE',
+ 'HTML-FRAME-BEGIN',
+ 'HTML-FRAME-END',
+ 'HTML-HEADER-BEGIN',
+ 'HTML-HEADER-END',
+ 'HTML-TITLE-BEGIN',
+ 'HTML-TITLE-END',
+ 'HWND',
+ 'ICON',
+ 'IF',
+ 'IMAGE',
+ 'IMAGE-DOWN',
+ 'IMAGE-INSENSITIVE',
+ 'IMAGE-SIZE',
+ 'IMAGE-SIZE-CHARS',
+ 'IMAGE-SIZE-C',
+ 'IMAGE-SIZE-CH',
+ 'IMAGE-SIZE-CHA',
+ 'IMAGE-SIZE-CHAR',
+ 'IMAGE-SIZE-PIXELS',
+ 'IMAGE-SIZE-P',
+ 'IMAGE-SIZE-PI',
+ 'IMAGE-SIZE-PIX',
+ 'IMAGE-SIZE-PIXE',
+ 'IMAGE-SIZE-PIXEL',
+ 'IMAGE-UP',
+ 'IMMEDIATE-DISPLAY',
+ 'IMPLEMENTS',
+ 'IMPORT',
+ 'IMPORT-PRINCIPAL',
+ 'IN',
+ 'INCREMENT-EXCLUSIVE-ID',
+ 'INDEX',
+ 'INDEXED-REPOSITION',
+ 'INDEX-HINT',
+ 'INDEX-INFORMATION',
+ 'INDICATOR',
+ 'INFORMATION',
+ 'INFO',
+ 'INFOR',
+ 'INFORM',
+ 'INFORMA',
+ 'INFORMAT',
+ 'INFORMATI',
+ 'INFORMATIO',
+ 'IN-HANDLE',
+ 'INHERIT-BGCOLOR',
+ 'INHERIT-BGC',
+ 'INHERIT-BGCO',
+ 'INHERIT-BGCOL',
+ 'INHERIT-BGCOLO',
+ 'INHERIT-FGCOLOR',
+ 'INHERIT-FGC',
+ 'INHERIT-FGCO',
+ 'INHERIT-FGCOL',
+ 'INHERIT-FGCOLO',
+ 'INHERITS',
+ 'INITIAL',
+ 'INIT',
+ 'INITI',
+ 'INITIA',
+ 'INITIAL-DIR',
+ 'INITIAL-FILTER',
+ 'INITIALIZE-DOCUMENT-TYPE',
+ 'INITIATE',
+ 'INNER-CHARS',
+ 'INNER-LINES',
+ 'INPUT',
+ 'INPUT-OUTPUT',
+ 'INPUT-O',
+ 'INPUT-OU',
+ 'INPUT-OUT',
+ 'INPUT-OUTP',
+ 'INPUT-OUTPU',
+ 'INPUT-VALUE',
+ 'INSERT',
+ 'INSERT-ATTRIBUTE',
+ 'INSERT-BACKTAB',
+ 'INSERT-B',
+ 'INSERT-BA',
+ 'INSERT-BAC',
+ 'INSERT-BACK',
+ 'INSERT-BACKT',
+ 'INSERT-BACKTA',
+ 'INSERT-FILE',
+ 'INSERT-ROW',
+ 'INSERT-STRING',
+ 'INSERT-TAB',
+ 'INSERT-T',
+ 'INSERT-TA',
+ 'INTERFACE',
+ 'INTERNAL-ENTRIES',
+ 'INTO',
+ 'INVOKE',
+ 'IS',
+ 'IS-ATTR-SPACE',
+ 'IS-ATTR',
+ 'IS-ATTR-',
+ 'IS-ATTR-S',
+ 'IS-ATTR-SP',
+ 'IS-ATTR-SPA',
+ 'IS-ATTR-SPAC',
+ 'IS-CLASS',
+ 'IS-CLAS',
+ 'IS-LEAD-BYTE',
+ 'IS-ATTR',
+ 'IS-OPEN',
+ 'IS-PARAMETER-SET',
+ 'IS-ROW-SELECTED',
+ 'IS-SELECTED',
+ 'ITEM',
+ 'ITEMS-PER-ROW',
+ 'JOIN',
+ 'JOIN-BY-SQLDB',
+ 'KBLABEL',
+ 'KEEP-CONNECTION-OPEN',
+ 'KEEP-FRAME-Z-ORDER',
+ 'KEEP-FRAME-Z',
+ 'KEEP-FRAME-Z-',
+ 'KEEP-FRAME-Z-O',
+ 'KEEP-FRAME-Z-OR',
+ 'KEEP-FRAME-Z-ORD',
+ 'KEEP-FRAME-Z-ORDE',
+ 'KEEP-MESSAGES',
+ 'KEEP-SECURITY-CACHE',
+ 'KEEP-TAB-ORDER',
+ 'KEY',
+ 'KEYCODE',
+ 'KEY-CODE',
+ 'KEYFUNCTION',
+ 'KEYFUNC',
+ 'KEYFUNCT',
+ 'KEYFUNCTI',
+ 'KEYFUNCTIO',
+ 'KEY-FUNCTION',
+ 'KEY-FUNC',
+ 'KEY-FUNCT',
+ 'KEY-FUNCTI',
+ 'KEY-FUNCTIO',
+ 'KEYLABEL',
+ 'KEY-LABEL',
+ 'KEYS',
+ 'KEYWORD',
+ 'KEYWORD-ALL',
+ 'LABEL',
+ 'LABEL-BGCOLOR',
+ 'LABEL-BGC',
+ 'LABEL-BGCO',
+ 'LABEL-BGCOL',
+ 'LABEL-BGCOLO',
+ 'LABEL-DCOLOR',
+ 'LABEL-DC',
+ 'LABEL-DCO',
+ 'LABEL-DCOL',
+ 'LABEL-DCOLO',
+ 'LABEL-FGCOLOR',
+ 'LABEL-FGC',
+ 'LABEL-FGCO',
+ 'LABEL-FGCOL',
+ 'LABEL-FGCOLO',
+ 'LABEL-FONT',
+ 'LABEL-PFCOLOR',
+ 'LABEL-PFC',
+ 'LABEL-PFCO',
+ 'LABEL-PFCOL',
+ 'LABEL-PFCOLO',
+ 'LABELS',
+ 'LANDSCAPE',
+ 'LANGUAGES',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LARGE-TO-SMALL',
+ 'LAST',
+ 'LAST-ASYNCH-REQUEST',
+ 'LAST-BATCH',
+ 'LAST-CHILD',
+ 'LAST-EVENT',
+ 'LAST-EVEN',
+ 'LAST-FORM',
+ 'LASTKEY',
+ 'LAST-KEY',
+ 'LAST-OBJECT',
+ 'LAST-OF',
+ 'LAST-PROCEDURE',
+ 'LAST-PROCE',
+ 'LAST-PROCED',
+ 'LAST-PROCEDU',
+ 'LAST-PROCEDUR',
+ 'LAST-SERVER',
+ 'LAST-TAB-ITEM',
+ 'LAST-TAB-I',
+ 'LAST-TAB-IT',
+ 'LAST-TAB-ITE',
+ 'LC',
+ 'LDBNAME',
+ 'LE',
+ 'LEAVE',
+ 'LEFT-ALIGNED',
+ 'LEFT-ALIGN',
+ 'LEFT-ALIGNE',
+ 'LEFT-TRIM',
+ 'LENGTH',
+ 'LIBRARY',
+ 'LIKE',
+ 'LIKE-SEQUENTIAL',
+ 'LINE',
+ 'LINE-COUNTER',
+ 'LINE-COUNT',
+ 'LINE-COUNTE',
+ 'LIST-EVENTS',
+ 'LISTING',
+ 'LISTI',
+ 'LISTIN',
+ 'LIST-ITEM-PAIRS',
+ 'LIST-ITEMS',
+ 'LIST-PROPERTY-NAMES',
+ 'LIST-QUERY-ATTRS',
+ 'LIST-SET-ATTRS',
+ 'LIST-WIDGETS',
+ 'LITERAL-QUESTION',
+ 'LITTLE-ENDIAN',
+ 'LOAD',
+ 'LOAD-DOMAINS',
+ 'LOAD-ICON',
+ 'LOAD-IMAGE',
+ 'LOAD-IMAGE-DOWN',
+ 'LOAD-IMAGE-INSENSITIVE',
+ 'LOAD-IMAGE-UP',
+ 'LOAD-MOUSE-POINTER',
+ 'LOAD-MOUSE-P',
+ 'LOAD-MOUSE-PO',
+ 'LOAD-MOUSE-POI',
+ 'LOAD-MOUSE-POIN',
+ 'LOAD-MOUSE-POINT',
+ 'LOAD-MOUSE-POINTE',
+ 'LOAD-PICTURE',
+ 'LOAD-SMALL-ICON',
+ 'LOCAL-NAME',
+ 'LOCATOR-COLUMN-NUMBER',
+ 'LOCATOR-LINE-NUMBER',
+ 'LOCATOR-PUBLIC-ID',
+ 'LOCATOR-SYSTEM-ID',
+ 'LOCATOR-TYPE',
+ 'LOCKED',
+ 'LOCK-REGISTRATION',
+ 'LOG',
+ 'LOG-AUDIT-EVENT',
+ 'LOGIN-EXPIRATION-TIMESTAMP',
+ 'LOGIN-HOST',
+ 'LOGIN-STATE',
+ 'LOG-MANAGER',
+ 'LOGOUT',
+ 'LOOKAHEAD',
+ 'LOOKUP',
+ 'LT',
+ 'MACHINE-CLASS',
+ 'MANDATORY',
+ 'MANUAL-HIGHLIGHT',
+ 'MAP',
+ 'MARGIN-EXTRA',
+ 'MARGIN-HEIGHT-CHARS',
+ 'MARGIN-HEIGHT',
+ 'MARGIN-HEIGHT-',
+ 'MARGIN-HEIGHT-C',
+ 'MARGIN-HEIGHT-CH',
+ 'MARGIN-HEIGHT-CHA',
+ 'MARGIN-HEIGHT-CHAR',
+ 'MARGIN-HEIGHT-PIXELS',
+ 'MARGIN-HEIGHT-P',
+ 'MARGIN-HEIGHT-PI',
+ 'MARGIN-HEIGHT-PIX',
+ 'MARGIN-HEIGHT-PIXE',
+ 'MARGIN-HEIGHT-PIXEL',
+ 'MARGIN-WIDTH-CHARS',
+ 'MARGIN-WIDTH',
+ 'MARGIN-WIDTH-',
+ 'MARGIN-WIDTH-C',
+ 'MARGIN-WIDTH-CH',
+ 'MARGIN-WIDTH-CHA',
+ 'MARGIN-WIDTH-CHAR',
+ 'MARGIN-WIDTH-PIXELS',
+ 'MARGIN-WIDTH-P',
+ 'MARGIN-WIDTH-PI',
+ 'MARGIN-WIDTH-PIX',
+ 'MARGIN-WIDTH-PIXE',
+ 'MARGIN-WIDTH-PIXEL',
+ 'MARK-NEW',
+ 'MARK-ROW-STATE',
+ 'MATCHES',
+ 'MAX',
+ 'MAX-BUTTON',
+ 'MAX-CHARS',
+ 'MAX-DATA-GUESS',
+ 'MAX-HEIGHT',
+ 'MAX-HEIGHT-CHARS',
+ 'MAX-HEIGHT-C',
+ 'MAX-HEIGHT-CH',
+ 'MAX-HEIGHT-CHA',
+ 'MAX-HEIGHT-CHAR',
+ 'MAX-HEIGHT-PIXELS',
+ 'MAX-HEIGHT-P',
+ 'MAX-HEIGHT-PI',
+ 'MAX-HEIGHT-PIX',
+ 'MAX-HEIGHT-PIXE',
+ 'MAX-HEIGHT-PIXEL',
+ 'MAXIMIZE',
+ 'MAXIMUM',
+ 'MAX',
+ 'MAXI',
+ 'MAXIM',
+ 'MAXIMU',
+ 'MAXIMUM-LEVEL',
+ 'MAX-ROWS',
+ 'MAX-SIZE',
+ 'MAX-VALUE',
+ 'MAX-VAL',
+ 'MAX-VALU',
+ 'MAX-WIDTH',
+ 'MAX-WIDTH-CHARS',
+ 'MAX-WIDTH',
+ 'MAX-WIDTH-',
+ 'MAX-WIDTH-C',
+ 'MAX-WIDTH-CH',
+ 'MAX-WIDTH-CHA',
+ 'MAX-WIDTH-CHAR',
+ 'MAX-WIDTH-PIXELS',
+ 'MAX-WIDTH-P',
+ 'MAX-WIDTH-PI',
+ 'MAX-WIDTH-PIX',
+ 'MAX-WIDTH-PIXE',
+ 'MAX-WIDTH-PIXEL',
+ 'MD5-DIGEST',
+ 'MEMBER',
+ 'MEMPTR-TO-NODE-VALUE',
+ 'MENU',
+ 'MENUBAR',
+ 'MENU-BAR',
+ 'MENU-ITEM',
+ 'MENU-KEY',
+ 'MENU-K',
+ 'MENU-KE',
+ 'MENU-MOUSE',
+ 'MENU-M',
+ 'MENU-MO',
+ 'MENU-MOU',
+ 'MENU-MOUS',
+ 'MERGE-BY-FIELD',
+ 'MESSAGE',
+ 'MESSAGE-AREA',
+ 'MESSAGE-AREA-FONT',
+ 'MESSAGE-LINES',
+ 'METHOD',
+ 'MIN',
+ 'MIN-BUTTON',
+ 'MIN-COLUMN-WIDTH-CHARS',
+ 'MIN-COLUMN-WIDTH-C',
+ 'MIN-COLUMN-WIDTH-CH',
+ 'MIN-COLUMN-WIDTH-CHA',
+ 'MIN-COLUMN-WIDTH-CHAR',
+ 'MIN-COLUMN-WIDTH-PIXELS',
+ 'MIN-COLUMN-WIDTH-P',
+ 'MIN-COLUMN-WIDTH-PI',
+ 'MIN-COLUMN-WIDTH-PIX',
+ 'MIN-COLUMN-WIDTH-PIXE',
+ 'MIN-COLUMN-WIDTH-PIXEL',
+ 'MIN-HEIGHT-CHARS',
+ 'MIN-HEIGHT',
+ 'MIN-HEIGHT-',
+ 'MIN-HEIGHT-C',
+ 'MIN-HEIGHT-CH',
+ 'MIN-HEIGHT-CHA',
+ 'MIN-HEIGHT-CHAR',
+ 'MIN-HEIGHT-PIXELS',
+ 'MIN-HEIGHT-P',
+ 'MIN-HEIGHT-PI',
+ 'MIN-HEIGHT-PIX',
+ 'MIN-HEIGHT-PIXE',
+ 'MIN-HEIGHT-PIXEL',
+ 'MINIMUM',
+ 'MIN',
+ 'MINI',
+ 'MINIM',
+ 'MINIMU',
+ 'MIN-SIZE',
+ 'MIN-VALUE',
+ 'MIN-VAL',
+ 'MIN-VALU',
+ 'MIN-WIDTH-CHARS',
+ 'MIN-WIDTH',
+ 'MIN-WIDTH-',
+ 'MIN-WIDTH-C',
+ 'MIN-WIDTH-CH',
+ 'MIN-WIDTH-CHA',
+ 'MIN-WIDTH-CHAR',
+ 'MIN-WIDTH-PIXELS',
+ 'MIN-WIDTH-P',
+ 'MIN-WIDTH-PI',
+ 'MIN-WIDTH-PIX',
+ 'MIN-WIDTH-PIXE',
+ 'MIN-WIDTH-PIXEL',
+ 'MODIFIED',
+ 'MODULO',
+ 'MOD',
+ 'MODU',
+ 'MODUL',
+ 'MONTH',
+ 'MOUSE',
+ 'MOUSE-POINTER',
+ 'MOUSE-P',
+ 'MOUSE-PO',
+ 'MOUSE-POI',
+ 'MOUSE-POIN',
+ 'MOUSE-POINT',
+ 'MOUSE-POINTE',
+ 'MOVABLE',
+ 'MOVE-AFTER-TAB-ITEM',
+ 'MOVE-AFTER',
+ 'MOVE-AFTER-',
+ 'MOVE-AFTER-T',
+ 'MOVE-AFTER-TA',
+ 'MOVE-AFTER-TAB',
+ 'MOVE-AFTER-TAB-',
+ 'MOVE-AFTER-TAB-I',
+ 'MOVE-AFTER-TAB-IT',
+ 'MOVE-AFTER-TAB-ITE',
+ 'MOVE-BEFORE-TAB-ITEM',
+ 'MOVE-BEFOR',
+ 'MOVE-BEFORE',
+ 'MOVE-BEFORE-',
+ 'MOVE-BEFORE-T',
+ 'MOVE-BEFORE-TA',
+ 'MOVE-BEFORE-TAB',
+ 'MOVE-BEFORE-TAB-',
+ 'MOVE-BEFORE-TAB-I',
+ 'MOVE-BEFORE-TAB-IT',
+ 'MOVE-BEFORE-TAB-ITE',
+ 'MOVE-COLUMN',
+ 'MOVE-COL',
+ 'MOVE-COLU',
+ 'MOVE-COLUM',
+ 'MOVE-TO-BOTTOM',
+ 'MOVE-TO-B',
+ 'MOVE-TO-BO',
+ 'MOVE-TO-BOT',
+ 'MOVE-TO-BOTT',
+ 'MOVE-TO-BOTTO',
+ 'MOVE-TO-EOF',
+ 'MOVE-TO-TOP',
+ 'MOVE-TO-T',
+ 'MOVE-TO-TO',
+ 'MPE',
+ 'MULTI-COMPILE',
+ 'MULTIPLE',
+ 'MULTIPLE-KEY',
+ 'MULTITASKING-INTERVAL',
+ 'MUST-EXIST',
+ 'NAME',
+ 'NAMESPACE-PREFIX',
+ 'NAMESPACE-URI',
+ 'NATIVE',
+ 'NE',
+ 'NEEDS-APPSERVER-PROMPT',
+ 'NEEDS-PROMPT',
+ 'NEW',
+ 'NEW-INSTANCE',
+ 'NEW-ROW',
+ 'NEXT',
+ 'NEXT-COLUMN',
+ 'NEXT-PROMPT',
+ 'NEXT-ROWID',
+ 'NEXT-SIBLING',
+ 'NEXT-TAB-ITEM',
+ 'NEXT-TAB-I',
+ 'NEXT-TAB-IT',
+ 'NEXT-TAB-ITE',
+ 'NEXT-VALUE',
+ 'NO',
+ 'NO-APPLY',
+ 'NO-ARRAY-MESSAGE',
+ 'NO-ASSIGN',
+ 'NO-ATTR-LIST',
+ 'NO-ATTR',
+ 'NO-ATTR-',
+ 'NO-ATTR-L',
+ 'NO-ATTR-LI',
+ 'NO-ATTR-LIS',
+ 'NO-ATTR-SPACE',
+ 'NO-ATTR',
+ 'NO-ATTR-',
+ 'NO-ATTR-S',
+ 'NO-ATTR-SP',
+ 'NO-ATTR-SPA',
+ 'NO-ATTR-SPAC',
+ 'NO-AUTO-VALIDATE',
+ 'NO-BIND-WHERE',
+ 'NO-BOX',
+ 'NO-CONSOLE',
+ 'NO-CONVERT',
+ 'NO-CONVERT-3D-COLORS',
+ 'NO-CURRENT-VALUE',
+ 'NO-DEBUG',
+ 'NODE-VALUE-TO-MEMPTR',
+ 'NO-DRAG',
+ 'NO-ECHO',
+ 'NO-EMPTY-SPACE',
+ 'NO-ERROR',
+ 'NO-FILL',
+ 'NO-F',
+ 'NO-FI',
+ 'NO-FIL',
+ 'NO-FOCUS',
+ 'NO-HELP',
+ 'NO-HIDE',
+ 'NO-INDEX-HINT',
+ 'NO-INHERIT-BGCOLOR',
+ 'NO-INHERIT-BGC',
+ 'NO-INHERIT-BGCO',
+ 'LABEL-BGCOL',
+ 'LABEL-BGCOLO',
+ 'NO-INHERIT-FGCOLOR',
+ 'NO-INHERIT-FGC',
+ 'NO-INHERIT-FGCO',
+ 'NO-INHERIT-FGCOL',
+ 'NO-INHERIT-FGCOLO',
+ 'NO-JOIN-BY-SQLDB',
+ 'NO-LABELS',
+ 'NO-LABE',
+ 'NO-LOBS',
+ 'NO-LOCK',
+ 'NO-LOOKAHEAD',
+ 'NO-MAP',
+ 'NO-MESSAGE',
+ 'NO-MES',
+ 'NO-MESS',
+ 'NO-MESSA',
+ 'NO-MESSAG',
+ 'NONAMESPACE-SCHEMA-LOCATION',
+ 'NONE',
+ 'NO-PAUSE',
+ 'NO-PREFETCH',
+ 'NO-PREFE',
+ 'NO-PREFET',
+ 'NO-PREFETC',
+ 'NORMALIZE',
+ 'NO-ROW-MARKERS',
+ 'NO-SCROLLBAR-VERTICAL',
+ 'NO-SEPARATE-CONNECTION',
+ 'NO-SEPARATORS',
+ 'NOT',
+ 'NO-TAB-STOP',
+ 'NOT-ACTIVE',
+ 'NO-UNDERLINE',
+ 'NO-UND',
+ 'NO-UNDE',
+ 'NO-UNDER',
+ 'NO-UNDERL',
+ 'NO-UNDERLI',
+ 'NO-UNDERLIN',
+ 'NO-UNDO',
+ 'NO-VALIDATE',
+ 'NO-VAL',
+ 'NO-VALI',
+ 'NO-VALID',
+ 'NO-VALIDA',
+ 'NO-VALIDAT',
+ 'NOW',
+ 'NO-WAIT',
+ 'NO-WORD-WRAP',
+ 'NULL',
+ 'NUM-ALIASES',
+ 'NUM-ALI',
+ 'NUM-ALIA',
+ 'NUM-ALIAS',
+ 'NUM-ALIASE',
+ 'NUM-BUFFERS',
+ 'NUM-BUTTONS',
+ 'NUM-BUT',
+ 'NUM-BUTT',
+ 'NUM-BUTTO',
+ 'NUM-BUTTON',
+ 'NUM-COLUMNS',
+ 'NUM-COL',
+ 'NUM-COLU',
+ 'NUM-COLUM',
+ 'NUM-COLUMN',
+ 'NUM-COPIES',
+ 'NUM-DBS',
+ 'NUM-DROPPED-FILES',
+ 'NUM-ENTRIES',
+ 'NUMERIC',
+ 'NUMERIC-FORMAT',
+ 'NUMERIC-F',
+ 'NUMERIC-FO',
+ 'NUMERIC-FOR',
+ 'NUMERIC-FORM',
+ 'NUMERIC-FORMA',
+ 'NUM-FIELDS',
+ 'NUM-FORMATS',
+ 'NUM-ITEMS',
+ 'NUM-ITERATIONS',
+ 'NUM-LINES',
+ 'NUM-LOCKED-COLUMNS',
+ 'NUM-LOCKED-COL',
+ 'NUM-LOCKED-COLU',
+ 'NUM-LOCKED-COLUM',
+ 'NUM-LOCKED-COLUMN',
+ 'NUM-MESSAGES',
+ 'NUM-PARAMETERS',
+ 'NUM-REFERENCES',
+ 'NUM-REPLACED',
+ 'NUM-RESULTS',
+ 'NUM-SELECTED-ROWS',
+ 'NUM-SELECTED-WIDGETS',
+ 'NUM-SELECTED',
+ 'NUM-SELECTED-',
+ 'NUM-SELECTED-W',
+ 'NUM-SELECTED-WI',
+ 'NUM-SELECTED-WID',
+ 'NUM-SELECTED-WIDG',
+ 'NUM-SELECTED-WIDGE',
+ 'NUM-SELECTED-WIDGET',
+ 'NUM-TABS',
+ 'NUM-TO-RETAIN',
+ 'NUM-VISIBLE-COLUMNS',
+ 'OCTET-LENGTH',
+ 'OF',
+ 'OFF',
+ 'OK',
+ 'OK-CANCEL',
+ 'OLD',
+ 'ON',
+ 'ON-FRAME-BORDER',
+ 'ON-FRAME',
+ 'ON-FRAME-',
+ 'ON-FRAME-B',
+ 'ON-FRAME-BO',
+ 'ON-FRAME-BOR',
+ 'ON-FRAME-BORD',
+ 'ON-FRAME-BORDE',
+ 'OPEN',
+ 'OPSYS',
+ 'OPTION',
+ 'OR',
+ 'ORDERED-JOIN',
+ 'ORDINAL',
+ 'OS-APPEND',
+ 'OS-COMMAND',
+ 'OS-COPY',
+ 'OS-CREATE-DIR',
+ 'OS-DELETE',
+ 'OS-DIR',
+ 'OS-DRIVES',
+ 'OS-DRIVE',
+ 'OS-ERROR',
+ 'OS-GETENV',
+ 'OS-RENAME',
+ 'OTHERWISE',
+ 'OUTPUT',
+ 'OVERLAY',
+ 'OVERRIDE',
+ 'OWNER',
+ 'PAGE',
+ 'PAGE-BOTTOM',
+ 'PAGE-BOT',
+ 'PAGE-BOTT',
+ 'PAGE-BOTTO',
+ 'PAGED',
+ 'PAGE-NUMBER',
+ 'PAGE-NUM',
+ 'PAGE-NUMB',
+ 'PAGE-NUMBE',
+ 'PAGE-SIZE',
+ 'PAGE-TOP',
+ 'PAGE-WIDTH',
+ 'PAGE-WID',
+ 'PAGE-WIDT',
+ 'PARAMETER',
+ 'PARAM',
+ 'PARAME',
+ 'PARAMET',
+ 'PARAMETE',
+ 'PARENT',
+ 'PARSE-STATUS',
+ 'PARTIAL-KEY',
+ 'PASCAL',
+ 'PASSWORD-FIELD',
+ 'PATHNAME',
+ 'PAUSE',
+ 'PBE-HASH-ALGORITHM',
+ 'PBE-HASH-ALG',
+ 'PBE-HASH-ALGO',
+ 'PBE-HASH-ALGOR',
+ 'PBE-HASH-ALGORI',
+ 'PBE-HASH-ALGORIT',
+ 'PBE-HASH-ALGORITH',
+ 'PBE-KEY-ROUNDS',
+ 'PDBNAME',
+ 'PERSISTENT',
+ 'PERSIST',
+ 'PERSISTE',
+ 'PERSISTEN',
+ 'PERSISTENT-CACHE-DISABLED',
+ 'PFCOLOR',
+ 'PFC',
+ 'PFCO',
+ 'PFCOL',
+ 'PFCOLO',
+ 'PIXELS',
+ 'PIXELS-PER-COLUMN',
+ 'PIXELS-PER-COL',
+ 'PIXELS-PER-COLU',
+ 'PIXELS-PER-COLUM',
+ 'PIXELS-PER-ROW',
+ 'POPUP-MENU',
+ 'POPUP-M',
+ 'POPUP-ME',
+ 'POPUP-MEN',
+ 'POPUP-ONLY',
+ 'POPUP-O',
+ 'POPUP-ON',
+ 'POPUP-ONL',
+ 'PORTRAIT',
+ 'POSITION',
+ 'PRECISION',
+ 'PREFER-DATASET',
+ 'PREPARED',
+ 'PREPARE-STRING',
+ 'PREPROCESS',
+ 'PREPROC',
+ 'PREPROCE',
+ 'PREPROCES',
+ 'PRESELECT',
+ 'PRESEL',
+ 'PRESELE',
+ 'PRESELEC',
+ 'PREV',
+ 'PREV-COLUMN',
+ 'PREV-SIBLING',
+ 'PREV-TAB-ITEM',
+ 'PREV-TAB-I',
+ 'PREV-TAB-IT',
+ 'PREV-TAB-ITE',
+ 'PRIMARY',
+ 'PRINTER',
+ 'PRINTER-CONTROL-HANDLE',
+ 'PRINTER-HDC',
+ 'PRINTER-NAME',
+ 'PRINTER-PORT',
+ 'PRINTER-SETUP',
+ 'PRIVATE',
+ 'PRIVATE-DATA',
+ 'PRIVATE-D',
+ 'PRIVATE-DA',
+ 'PRIVATE-DAT',
+ 'PRIVILEGES',
+ 'PROCEDURE',
+ 'PROCE',
+ 'PROCED',
+ 'PROCEDU',
+ 'PROCEDUR',
+ 'PROCEDURE-CALL-TYPE',
+ 'PROCESS',
+ 'PROC-HANDLE',
+ 'PROC-HA',
+ 'PROC-HAN',
+ 'PROC-HAND',
+ 'PROC-HANDL',
+ 'PROC-STATUS',
+ 'PROC-ST',
+ 'PROC-STA',
+ 'PROC-STAT',
+ 'PROC-STATU',
+ 'proc-text',
+ 'proc-text-buffe',
+ 'PROFILER',
+ 'PROGRAM-NAME',
+ 'PROGRESS',
+ 'PROGRESS-SOURCE',
+ 'PROGRESS-S',
+ 'PROGRESS-SO',
+ 'PROGRESS-SOU',
+ 'PROGRESS-SOUR',
+ 'PROGRESS-SOURC',
+ 'PROMPT',
+ 'PROMPT-FOR',
+ 'PROMPT-F',
+ 'PROMPT-FO',
+ 'PROMSGS',
+ 'PROPATH',
+ 'PROPERTY',
+ 'PROTECTED',
+ 'PROVERSION',
+ 'PROVERS',
+ 'PROVERSI',
+ 'PROVERSIO',
+ 'PROXY',
+ 'PROXY-PASSWORD',
+ 'PROXY-USERID',
+ 'PUBLIC',
+ 'PUBLIC-ID',
+ 'PUBLISH',
+ 'PUBLISHED-EVENTS',
+ 'PUT',
+ 'PUTBYTE',
+ 'PUT-BYTE',
+ 'PUT-DOUBLE',
+ 'PUT-FLOAT',
+ 'PUT-INT64',
+ 'PUT-KEY-VALUE',
+ 'PUT-KEY-VAL',
+ 'PUT-KEY-VALU',
+ 'PUT-LONG',
+ 'PUT-SHORT',
+ 'PUT-STRING',
+ 'PUT-UNSIGNED-LONG',
+ 'QUERY',
+ 'QUERY-CLOSE',
+ 'QUERY-OFF-END',
+ 'QUERY-OPEN',
+ 'QUERY-PREPARE',
+ 'QUERY-TUNING',
+ 'QUESTION',
+ 'QUIT',
+ 'QUOTER',
+ 'RADIO-BUTTONS',
+ 'RADIO-SET',
+ 'RANDOM',
+ 'RAW-TRANSFER',
+ 'RCODE-INFORMATION',
+ 'RCODE-INFO',
+ 'RCODE-INFOR',
+ 'RCODE-INFORM',
+ 'RCODE-INFORMA',
+ 'RCODE-INFORMAT',
+ 'RCODE-INFORMATI',
+ 'RCODE-INFORMATIO',
+ 'READ-AVAILABLE',
+ 'READ-EXACT-NUM',
+ 'READ-FILE',
+ 'READKEY',
+ 'READ-ONLY',
+ 'READ-XML',
+ 'READ-XMLSCHEMA',
+ 'REAL',
+ 'RECORD-LENGTH',
+ 'RECTANGLE',
+ 'RECT',
+ 'RECTA',
+ 'RECTAN',
+ 'RECTANG',
+ 'RECTANGL',
+ 'RECURSIVE',
+ 'REFERENCE-ONLY',
+ 'REFRESH',
+ 'REFRESHABLE',
+ 'REFRESH-AUDIT-POLICY',
+ 'REGISTER-DOMAIN',
+ 'RELEASE',
+ 'REMOTE',
+ 'REMOVE-EVENTS-PROCEDURE',
+ 'REMOVE-SUPER-PROCEDURE',
+ 'REPEAT',
+ 'REPLACE',
+ 'REPLACE-SELECTION-TEXT',
+ 'REPOSITION',
+ 'REPOSITION-BACKWARD',
+ 'REPOSITION-FORWARD',
+ 'REPOSITION-MODE',
+ 'REPOSITION-TO-ROW',
+ 'REPOSITION-TO-ROWID',
+ 'REQUEST',
+ 'RESET',
+ 'RESIZABLE',
+ 'RESIZA',
+ 'RESIZAB',
+ 'RESIZABL',
+ 'RESIZE',
+ 'RESTART-ROW',
+ 'RESTART-ROWID',
+ 'RETAIN',
+ 'RETAIN-SHAPE',
+ 'RETRY',
+ 'RETRY-CANCEL',
+ 'RETURN',
+ 'RETURN-INSERTED',
+ 'RETURN-INS',
+ 'RETURN-INSE',
+ 'RETURN-INSER',
+ 'RETURN-INSERT',
+ 'RETURN-INSERTE',
+ 'RETURNS',
+ 'RETURN-TO-START-DIR',
+ 'RETURN-TO-START-DI',
+ 'RETURN-VALUE',
+ 'RETURN-VAL',
+ 'RETURN-VALU',
+ 'RETURN-VALUE-DATA-TYPE',
+ 'REVERSE-FROM',
+ 'REVERT',
+ 'REVOKE',
+ 'RGB-VALUE',
+ 'RIGHT-ALIGNED',
+ 'RETURN-ALIGN',
+ 'RETURN-ALIGNE',
+ 'RIGHT-TRIM',
+ 'R-INDEX',
+ 'ROLES',
+ 'ROUND',
+ 'ROUTINE-LEVEL',
+ 'ROW',
+ 'ROW-HEIGHT-CHARS',
+ 'HEIGHT',
+ 'ROW-HEIGHT-PIXELS',
+ 'HEIGHT-P',
+ 'ROW-MARKERS',
+ 'ROW-OF',
+ 'ROW-RESIZABLE',
+ 'RULE',
+ 'RUN',
+ 'RUN-PROCEDURE',
+ 'SAVE',
+ 'SAVE-AS',
+ 'SAVE-FILE',
+ 'SAX-COMPLETE',
+ 'SAX-COMPLE',
+ 'SAX-COMPLET',
+ 'SAX-PARSE',
+ 'SAX-PARSE-FIRST',
+ 'SAX-PARSE-NEXT',
+ 'SAX-PARSER-ERROR',
+ 'SAX-RUNNING',
+ 'SAX-UNINITIALIZED',
+ 'SAX-WRITE-BEGIN',
+ 'SAX-WRITE-COMPLETE',
+ 'SAX-WRITE-CONTENT',
+ 'SAX-WRITE-ELEMENT',
+ 'SAX-WRITE-ERROR',
+ 'SAX-WRITE-IDLE',
+ 'SAX-WRITER',
+ 'SAX-WRITE-TAG',
+ 'SCHEMA',
+ 'SCHEMA-LOCATION',
+ 'SCHEMA-MARSHAL',
+ 'SCHEMA-PATH',
+ 'SCREEN',
+ 'SCREEN-IO',
+ 'SCREEN-LINES',
+ 'SCREEN-VALUE',
+ 'SCREEN-VAL',
+ 'SCREEN-VALU',
+ 'SCROLL',
+ 'SCROLLABLE',
+ 'SCROLLBAR-HORIZONTAL',
+ 'SCROLLBAR-H',
+ 'SCROLLBAR-HO',
+ 'SCROLLBAR-HOR',
+ 'SCROLLBAR-HORI',
+ 'SCROLLBAR-HORIZ',
+ 'SCROLLBAR-HORIZO',
+ 'SCROLLBAR-HORIZON',
+ 'SCROLLBAR-HORIZONT',
+ 'SCROLLBAR-HORIZONTA',
+ 'SCROLL-BARS',
+ 'SCROLLBAR-VERTICAL',
+ 'SCROLLBAR-V',
+ 'SCROLLBAR-VE',
+ 'SCROLLBAR-VER',
+ 'SCROLLBAR-VERT',
+ 'SCROLLBAR-VERTI',
+ 'SCROLLBAR-VERTIC',
+ 'SCROLLBAR-VERTICA',
+ 'SCROLL-DELTA',
+ 'SCROLLED-ROW-POSITION',
+ 'SCROLLED-ROW-POS',
+ 'SCROLLED-ROW-POSI',
+ 'SCROLLED-ROW-POSIT',
+ 'SCROLLED-ROW-POSITI',
+ 'SCROLLED-ROW-POSITIO',
+ 'SCROLLING',
+ 'SCROLL-OFFSET',
+ 'SCROLL-TO-CURRENT-ROW',
+ 'SCROLL-TO-ITEM',
+ 'SCROLL-TO-I',
+ 'SCROLL-TO-IT',
+ 'SCROLL-TO-ITE',
+ 'SCROLL-TO-SELECTED-ROW',
+ 'SDBNAME',
+ 'SEAL',
+ 'SEAL-TIMESTAMP',
+ 'SEARCH',
+ 'SEARCH-SELF',
+ 'SEARCH-TARGET',
+ 'SECTION',
+ 'SECURITY-POLICY',
+ 'SEEK',
+ 'SELECT',
+ 'SELECTABLE',
+ 'SELECT-ALL',
+ 'SELECTED',
+ 'SELECT-FOCUSED-ROW',
+ 'SELECTION',
+ 'SELECTION-END',
+ 'SELECTION-LIST',
+ 'SELECTION-START',
+ 'SELECTION-TEXT',
+ 'SELECT-NEXT-ROW',
+ 'SELECT-PREV-ROW',
+ 'SELECT-ROW',
+ 'SELF',
+ 'SEND',
+ 'send-sql-statement',
+ 'send-sql',
+ 'SENSITIVE',
+ 'SEPARATE-CONNECTION',
+ 'SEPARATOR-FGCOLOR',
+ 'SEPARATORS',
+ 'SERVER',
+ 'SERVER-CONNECTION-BOUND',
+ 'SERVER-CONNECTION-BOUND-REQUEST',
+ 'SERVER-CONNECTION-CONTEXT',
+ 'SERVER-CONNECTION-ID',
+ 'SERVER-OPERATING-MODE',
+ 'SESSION',
+ 'SESSION-ID',
+ 'SET',
+ 'SET-APPL-CONTEXT',
+ 'SET-ATTR-CALL-TYPE',
+ 'SET-ATTRIBUTE-NODE',
+ 'SET-BLUE-VALUE',
+ 'SET-BLUE',
+ 'SET-BLUE-',
+ 'SET-BLUE-V',
+ 'SET-BLUE-VA',
+ 'SET-BLUE-VAL',
+ 'SET-BLUE-VALU',
+ 'SET-BREAK',
+ 'SET-BUFFERS',
+ 'SET-CALLBACK',
+ 'SET-CLIENT',
+ 'SET-COMMIT',
+ 'SET-CONTENTS',
+ 'SET-CURRENT-VALUE',
+ 'SET-DB-CLIENT',
+ 'SET-DYNAMIC',
+ 'SET-EVENT-MANAGER-OPTION',
+ 'SET-GREEN-VALUE',
+ 'SET-GREEN',
+ 'SET-GREEN-',
+ 'SET-GREEN-V',
+ 'SET-GREEN-VA',
+ 'SET-GREEN-VAL',
+ 'SET-GREEN-VALU',
+ 'SET-INPUT-SOURCE',
+ 'SET-OPTION',
+ 'SET-OUTPUT-DESTINATION',
+ 'SET-PARAMETER',
+ 'SET-POINTER-VALUE',
+ 'SET-PROPERTY',
+ 'SET-RED-VALUE',
+ 'SET-RED',
+ 'SET-RED-',
+ 'SET-RED-V',
+ 'SET-RED-VA',
+ 'SET-RED-VAL',
+ 'SET-RED-VALU',
+ 'SET-REPOSITIONED-ROW',
+ 'SET-RGB-VALUE',
+ 'SET-ROLLBACK',
+ 'SET-SELECTION',
+ 'SET-SIZE',
+ 'SET-SORT-ARROW',
+ 'SETUSERID',
+ 'SETUSER',
+ 'SETUSERI',
+ 'SET-WAIT-STATE',
+ 'SHA1-DIGEST',
+ 'SHARED',
+ 'SHARE-LOCK',
+ 'SHARE',
+ 'SHARE-',
+ 'SHARE-L',
+ 'SHARE-LO',
+ 'SHARE-LOC',
+ 'SHOW-IN-TASKBAR',
+ 'SHOW-STATS',
+ 'SHOW-STAT',
+ 'SIDE-LABEL-HANDLE',
+ 'SIDE-LABEL-H',
+ 'SIDE-LABEL-HA',
+ 'SIDE-LABEL-HAN',
+ 'SIDE-LABEL-HAND',
+ 'SIDE-LABEL-HANDL',
+ 'SIDE-LABELS',
+ 'SIDE-LAB',
+ 'SIDE-LABE',
+ 'SIDE-LABEL',
+ 'SILENT',
+ 'SIMPLE',
+ 'SINGLE',
+ 'SIZE',
+ 'SIZE-CHARS',
+ 'SIZE-C',
+ 'SIZE-CH',
+ 'SIZE-CHA',
+ 'SIZE-CHAR',
+ 'SIZE-PIXELS',
+ 'SIZE-P',
+ 'SIZE-PI',
+ 'SIZE-PIX',
+ 'SIZE-PIXE',
+ 'SIZE-PIXEL',
+ 'SKIP',
+ 'SKIP-DELETED-RECORD',
+ 'SLIDER',
+ 'SMALL-ICON',
+ 'SMALLINT',
+ 'SMALL-TITLE',
+ 'SOME',
+ 'SORT',
+ 'SORT-ASCENDING',
+ 'SORT-NUMBER',
+ 'SOURCE',
+ 'SOURCE-PROCEDURE',
+ 'SPACE',
+ 'SQL',
+ 'SQRT',
+ 'SSL-SERVER-NAME',
+ 'STANDALONE',
+ 'START',
+ 'START-DOCUMENT',
+ 'START-ELEMENT',
+ 'START-MOVE',
+ 'START-RESIZE',
+ 'START-ROW-RESIZE',
+ 'STATE-DETAIL',
+ 'STATIC',
+ 'STATUS',
+ 'STATUS-AREA',
+ 'STATUS-AREA-FONT',
+ 'STDCALL',
+ 'STOP',
+ 'STOP-PARSING',
+ 'STOPPED',
+ 'STOPPE',
+ 'STORED-PROCEDURE',
+ 'STORED-PROC',
+ 'STORED-PROCE',
+ 'STORED-PROCED',
+ 'STORED-PROCEDU',
+ 'STORED-PROCEDUR',
+ 'STREAM',
+ 'STREAM-HANDLE',
+ 'STREAM-IO',
+ 'STRETCH-TO-FIT',
+ 'STRICT',
+ 'STRING',
+ 'STRING-VALUE',
+ 'STRING-XREF',
+ 'SUB-AVERAGE',
+ 'SUB-AVE',
+ 'SUB-AVER',
+ 'SUB-AVERA',
+ 'SUB-AVERAG',
+ 'SUB-COUNT',
+ 'SUB-MAXIMUM',
+ 'SUM-MAX',
+ 'SUM-MAXI',
+ 'SUM-MAXIM',
+ 'SUM-MAXIMU',
+ 'SUB-MENU',
+ 'SUBSUB-',
+ 'MINIMUM',
+ 'SUB-MIN',
+ 'SUBSCRIBE',
+ 'SUBSTITUTE',
+ 'SUBST',
+ 'SUBSTI',
+ 'SUBSTIT',
+ 'SUBSTITU',
+ 'SUBSTITUT',
+ 'SUBSTRING',
+ 'SUBSTR',
+ 'SUBSTRI',
+ 'SUBSTRIN',
+ 'SUB-TOTAL',
+ 'SUBTYPE',
+ 'SUM',
+ 'SUPER',
+ 'SUPER-PROCEDURES',
+ 'SUPPRESS-NAMESPACE-PROCESSING',
+ 'SUPPRESS-WARNINGS',
+ 'SUPPRESS-W',
+ 'SUPPRESS-WA',
+ 'SUPPRESS-WAR',
+ 'SUPPRESS-WARN',
+ 'SUPPRESS-WARNI',
+ 'SUPPRESS-WARNIN',
+ 'SUPPRESS-WARNING',
+ 'SYMMETRIC-ENCRYPTION-ALGORITHM',
+ 'SYMMETRIC-ENCRYPTION-IV',
+ 'SYMMETRIC-ENCRYPTION-KEY',
+ 'SYMMETRIC-SUPPORT',
+ 'SYSTEM-ALERT-BOXES',
+ 'SYSTEM-ALERT',
+ 'SYSTEM-ALERT-',
+ 'SYSTEM-ALERT-B',
+ 'SYSTEM-ALERT-BO',
+ 'SYSTEM-ALERT-BOX',
+ 'SYSTEM-ALERT-BOXE',
+ 'SYSTEM-DIALOG',
+ 'SYSTEM-HELP',
+ 'SYSTEM-ID',
+ 'TABLE',
+ 'TABLE-HANDLE',
+ 'TABLE-NUMBER',
+ 'TAB-POSITION',
+ 'TAB-STOP',
+ 'TARGET',
+ 'TARGET-PROCEDURE',
+ 'TEMP-DIRECTORY',
+ 'TEMP-DIR',
+ 'TEMP-DIRE',
+ 'TEMP-DIREC',
+ 'TEMP-DIRECT',
+ 'TEMP-DIRECTO',
+ 'TEMP-DIRECTOR',
+ 'TEMP-TABLE',
+ 'TEMP-TABLE-PREPARE',
+ 'TERM',
+ 'TERMINAL',
+ 'TERM',
+ 'TERMI',
+ 'TERMIN',
+ 'TERMINA',
+ 'TERMINATE',
+ 'TEXT',
+ 'TEXT-CURSOR',
+ 'TEXT-SEG-GROW',
+ 'TEXT-SELECTED',
+ 'THEN',
+ 'THIS-OBJECT',
+ 'THIS-PROCEDURE',
+ 'THREE-D',
+ 'THROW',
+ 'THROUGH',
+ 'THRU',
+ 'TIC-MARKS',
+ 'TIME',
+ 'TIME-SOURCE',
+ 'TITLE',
+ 'TITLE-BGCOLOR',
+ 'TITLE-BGC',
+ 'TITLE-BGCO',
+ 'TITLE-BGCOL',
+ 'TITLE-BGCOLO',
+ 'TITLE-DCOLOR',
+ 'TITLE-DC',
+ 'TITLE-DCO',
+ 'TITLE-DCOL',
+ 'TITLE-DCOLO',
+ 'TITLE-FGCOLOR',
+ 'TITLE-FGC',
+ 'TITLE-FGCO',
+ 'TITLE-FGCOL',
+ 'TITLE-FGCOLO',
+ 'TITLE-FONT',
+ 'TITLE-FO',
+ 'TITLE-FON',
+ 'TO',
+ 'TODAY',
+ 'TOGGLE-BOX',
+ 'TOOLTIP',
+ 'TOOLTIPS',
+ 'TOPIC',
+ 'TOP-NAV-QUERY',
+ 'TOP-ONLY',
+ 'TO-ROWID',
+ 'TOTAL',
+ 'TRAILING',
+ 'TRANS',
+ 'TRANSACTION',
+ 'TRANSACTION-MODE',
+ 'TRANS-INIT-PROCEDURE',
+ 'TRANSPARENT',
+ 'TRIGGER',
+ 'TRIGGERS',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUNC',
+ 'TRUNCA',
+ 'TRUNCAT',
+ 'TYPE',
+ 'TYPE-OF',
+ 'UNBOX',
+ 'UNBUFFERED',
+ 'UNBUFF',
+ 'UNBUFFE',
+ 'UNBUFFER',
+ 'UNBUFFERE',
+ 'UNDERLINE',
+ 'UNDERL',
+ 'UNDERLI',
+ 'UNDERLIN',
+ 'UNDO',
+ 'UNFORMATTED',
+ 'UNFORM',
+ 'UNFORMA',
+ 'UNFORMAT',
+ 'UNFORMATT',
+ 'UNFORMATTE',
+ 'UNION',
+ 'UNIQUE',
+ 'UNIQUE-ID',
+ 'UNIQUE-MATCH',
+ 'UNIX',
+ 'UNLESS-HIDDEN',
+ 'UNLOAD',
+ 'UNSIGNED-LONG',
+ 'UNSUBSCRIBE',
+ 'UP',
+ 'UPDATE',
+ 'UPDATE-ATTRIBUTE',
+ 'URL',
+ 'URL-DECODE',
+ 'URL-ENCODE',
+ 'URL-PASSWORD',
+ 'URL-USERID',
+ 'USE',
+ 'USE-DICT-EXPS',
+ 'USE-FILENAME',
+ 'USE-INDEX',
+ 'USER',
+ 'USE-REVVIDEO',
+ 'USERID',
+ 'USER-ID',
+ 'USE-TEXT',
+ 'USE-UNDERLINE',
+ 'USE-WIDGET-POOL',
+ 'USING',
+ 'V6DISPLAY',
+ 'V6FRAME',
+ 'VALIDATE',
+ 'VALIDATE-EXPRESSION',
+ 'VALIDATE-MESSAGE',
+ 'VALIDATE-SEAL',
+ 'VALIDATION-ENABLED',
+ 'VALID-EVENT',
+ 'VALID-HANDLE',
+ 'VALID-OBJECT',
+ 'VALUE',
+ 'VALUE-CHANGED',
+ 'VALUES',
+ 'VARIABLE',
+ 'VAR',
+ 'VARI',
+ 'VARIA',
+ 'VARIAB',
+ 'VARIABL',
+ 'VERBOSE',
+ 'VERSION',
+ 'VERTICAL',
+ 'VERT',
+ 'VERTI',
+ 'VERTIC',
+ 'VERTICA',
+ 'VIEW',
+ 'VIEW-AS',
+ 'VIEW-FIRST-COLUMN-ON-REOPEN',
+ 'VIRTUAL-HEIGHT-CHARS',
+ 'VIRTUAL-HEIGHT',
+ 'VIRTUAL-HEIGHT-',
+ 'VIRTUAL-HEIGHT-C',
+ 'VIRTUAL-HEIGHT-CH',
+ 'VIRTUAL-HEIGHT-CHA',
+ 'VIRTUAL-HEIGHT-CHAR',
+ 'VIRTUAL-HEIGHT-PIXELS',
+ 'VIRTUAL-HEIGHT-P',
+ 'VIRTUAL-HEIGHT-PI',
+ 'VIRTUAL-HEIGHT-PIX',
+ 'VIRTUAL-HEIGHT-PIXE',
+ 'VIRTUAL-HEIGHT-PIXEL',
+ 'VIRTUAL-WIDTH-CHARS',
+ 'VIRTUAL-WIDTH',
+ 'VIRTUAL-WIDTH-',
+ 'VIRTUAL-WIDTH-C',
+ 'VIRTUAL-WIDTH-CH',
+ 'VIRTUAL-WIDTH-CHA',
+ 'VIRTUAL-WIDTH-CHAR',
+ 'VIRTUAL-WIDTH-PIXELS',
+ 'VIRTUAL-WIDTH-P',
+ 'VIRTUAL-WIDTH-PI',
+ 'VIRTUAL-WIDTH-PIX',
+ 'VIRTUAL-WIDTH-PIXE',
+ 'VIRTUAL-WIDTH-PIXEL',
+ 'VISIBLE',
+ 'VOID',
+ 'WAIT',
+ 'WAIT-FOR',
+ 'WARNING',
+ 'WEB-CONTEXT',
+ 'WEEKDAY',
+ 'WHEN',
+ 'WHERE',
+ 'WHILE',
+ 'WIDGET',
+ 'WIDGET-ENTER',
+ 'WIDGET-E',
+ 'WIDGET-EN',
+ 'WIDGET-ENT',
+ 'WIDGET-ENTE',
+ 'WIDGET-ID',
+ 'WIDGET-LEAVE',
+ 'WIDGET-L',
+ 'WIDGET-LE',
+ 'WIDGET-LEA',
+ 'WIDGET-LEAV',
+ 'WIDGET-POOL',
+ 'WIDTH',
+ 'WIDTH-CHARS',
+ 'WIDTH',
+ 'WIDTH-',
+ 'WIDTH-C',
+ 'WIDTH-CH',
+ 'WIDTH-CHA',
+ 'WIDTH-CHAR',
+ 'WIDTH-PIXELS',
+ 'WIDTH-P',
+ 'WIDTH-PI',
+ 'WIDTH-PIX',
+ 'WIDTH-PIXE',
+ 'WIDTH-PIXEL',
+ 'WINDOW',
+ 'WINDOW-MAXIMIZED',
+ 'WINDOW-MAXIM',
+ 'WINDOW-MAXIMI',
+ 'WINDOW-MAXIMIZ',
+ 'WINDOW-MAXIMIZE',
+ 'WINDOW-MINIMIZED',
+ 'WINDOW-MINIM',
+ 'WINDOW-MINIMI',
+ 'WINDOW-MINIMIZ',
+ 'WINDOW-MINIMIZE',
+ 'WINDOW-NAME',
+ 'WINDOW-NORMAL',
+ 'WINDOW-STATE',
+ 'WINDOW-STA',
+ 'WINDOW-STAT',
+ 'WINDOW-SYSTEM',
+ 'WITH',
+ 'WORD-INDEX',
+ 'WORD-WRAP',
+ 'WORK-AREA-HEIGHT-PIXELS',
+ 'WORK-AREA-WIDTH-PIXELS',
+ 'WORK-AREA-X',
+ 'WORK-AREA-Y',
+ 'WORKFILE',
+ 'WORK-TABLE',
+ 'WORK-TAB',
+ 'WORK-TABL',
+ 'WRITE',
+ 'WRITE-CDATA',
+ 'WRITE-CHARACTERS',
+ 'WRITE-COMMENT',
+ 'WRITE-DATA-ELEMENT',
+ 'WRITE-EMPTY-ELEMENT',
+ 'WRITE-ENTITY-REF',
+ 'WRITE-EXTERNAL-DTD',
+ 'WRITE-FRAGMENT',
+ 'WRITE-MESSAGE',
+ 'WRITE-PROCESSING-INSTRUCTION',
+ 'WRITE-STATUS',
+ 'WRITE-XML',
+ 'WRITE-XMLSCHEMA',
+ 'X',
+ 'XCODE',
+ 'XML-DATA-TYPE',
+ 'XML-NODE-TYPE',
+ 'XML-SCHEMA-PATH',
+ 'XML-SUPPRESS-NAMESPACE-PROCESSING',
+ 'X-OF',
+ 'XREF',
+ 'XREF-XML',
+ 'Y',
+ 'YEAR',
+ 'YEAR-OFFSET',
+ 'YES',
+ 'YES-NO',
+ 'YES-NO-CANCEL',
+ 'Y-OF'
+)
diff --git a/pygments/lexers/_openedgebuiltins.py b/pygments/lexers/_openedgebuiltins.py
deleted file mode 100644
index 4750e80e..00000000
--- a/pygments/lexers/_openedgebuiltins.py
+++ /dev/null
@@ -1,562 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._openedgebuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Builtin list for the OpenEdgeLexer.
-
- :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-OPENEDGEKEYWORDS = [
- 'ABSOLUTE', 'ABS', 'ABSO', 'ABSOL', 'ABSOLU', 'ABSOLUT', 'ACCELERATOR',
- 'ACCUM', 'ACCUMULATE', 'ACCUM', 'ACCUMU', 'ACCUMUL', 'ACCUMULA',
- 'ACCUMULAT', 'ACTIVE-FORM', 'ACTIVE-WINDOW', 'ADD', 'ADD-BUFFER',
- 'ADD-CALC-COLUMN', 'ADD-COLUMNS-FROM', 'ADD-EVENTS-PROCEDURE',
- 'ADD-FIELDS-FROM', 'ADD-FIRST', 'ADD-INDEX-FIELD', 'ADD-LAST',
- 'ADD-LIKE-COLUMN', 'ADD-LIKE-FIELD', 'ADD-LIKE-INDEX', 'ADD-NEW-FIELD',
- 'ADD-NEW-INDEX', 'ADD-SCHEMA-LOCATION', 'ADD-SUPER-PROCEDURE', 'ADM-DATA',
- 'ADVISE', 'ALERT-BOX', 'ALIAS', 'ALL', 'ALLOW-COLUMN-SEARCHING',
- 'ALLOW-REPLICATION', 'ALTER', 'ALWAYS-ON-TOP', 'AMBIGUOUS', 'AMBIG',
- 'AMBIGU', 'AMBIGUO', 'AMBIGUOU', 'ANALYZE', 'ANALYZ', 'AND', 'ANSI-ONLY',
- 'ANY', 'ANYWHERE', 'APPEND', 'APPL-ALERT-BOXES', 'APPL-ALERT',
- 'APPL-ALERT-', 'APPL-ALERT-B', 'APPL-ALERT-BO', 'APPL-ALERT-BOX',
- 'APPL-ALERT-BOXE', 'APPL-CONTEXT-ID', 'APPLICATION', 'APPLY',
- 'APPSERVER-INFO', 'APPSERVER-PASSWORD', 'APPSERVER-USERID', 'ARRAY-MESSAGE',
- 'AS', 'ASC', 'ASCENDING', 'ASCE', 'ASCEN', 'ASCEND', 'ASCENDI', 'ASCENDIN',
- 'ASK-OVERWRITE', 'ASSEMBLY', 'ASSIGN', 'ASYNCHRONOUS',
- 'ASYNC-REQUEST-COUNT', 'ASYNC-REQUEST-HANDLE', 'AT', 'ATTACHED-PAIRLIST',
- 'ATTR-SPACE', 'ATTR', 'ATTRI', 'ATTRIB', 'ATTRIBU', 'ATTRIBUT',
- 'AUDIT-CONTROL', 'AUDIT-ENABLED', 'AUDIT-EVENT-CONTEXT', 'AUDIT-POLICY',
- 'AUTHENTICATION-FAILED', 'AUTHORIZATION', 'AUTO-COMPLETION', 'AUTO-COMP',
- 'AUTO-COMPL', 'AUTO-COMPLE', 'AUTO-COMPLET', 'AUTO-COMPLETI',
- 'AUTO-COMPLETIO', 'AUTO-ENDKEY', 'AUTO-END-KEY', 'AUTO-GO', 'AUTO-INDENT',
- 'AUTO-IND', 'AUTO-INDE', 'AUTO-INDEN', 'AUTOMATIC', 'AUTO-RESIZE',
- 'AUTO-RETURN', 'AUTO-RET', 'AUTO-RETU', 'AUTO-RETUR', 'AUTO-SYNCHRONIZE',
- 'AUTO-ZAP', 'AUTO-Z', 'AUTO-ZA', 'AVAILABLE', 'AVAIL', 'AVAILA', 'AVAILAB',
- 'AVAILABL', 'AVAILABLE-FORMATS', 'AVERAGE', 'AVE', 'AVER', 'AVERA',
- 'AVERAG', 'AVG', 'BACKGROUND', 'BACK', 'BACKG', 'BACKGR', 'BACKGRO',
- 'BACKGROU', 'BACKGROUN', 'BACKWARDS', 'BACKWARD', 'BASE64-DECODE',
- 'BASE64-ENCODE', 'BASE-ADE', 'BASE-KEY', 'BATCH-MODE', 'BATCH', 'BATCH-',
- 'BATCH-M', 'BATCH-MO', 'BATCH-MOD', 'BATCH-SIZE', 'BEFORE-HIDE', 'BEFORE-H',
- 'BEFORE-HI', 'BEFORE-HID', 'BEGIN-EVENT-GROUP', 'BEGINS', 'BELL', 'BETWEEN',
- 'BGCOLOR', 'BGC', 'BGCO', 'BGCOL', 'BGCOLO', 'BIG-ENDIAN', 'BINARY', 'BIND',
- 'BIND-WHERE', 'BLANK', 'BLOCK-ITERATION-DISPLAY', 'BORDER-BOTTOM-CHARS',
- 'BORDER-B', 'BORDER-BO', 'BORDER-BOT', 'BORDER-BOTT', 'BORDER-BOTTO',
- 'BORDER-BOTTOM-PIXELS', 'BORDER-BOTTOM-P', 'BORDER-BOTTOM-PI',
- 'BORDER-BOTTOM-PIX', 'BORDER-BOTTOM-PIXE', 'BORDER-BOTTOM-PIXEL',
- 'BORDER-LEFT-CHARS', 'BORDER-L', 'BORDER-LE', 'BORDER-LEF', 'BORDER-LEFT',
- 'BORDER-LEFT-', 'BORDER-LEFT-C', 'BORDER-LEFT-CH', 'BORDER-LEFT-CHA',
- 'BORDER-LEFT-CHAR', 'BORDER-LEFT-PIXELS', 'BORDER-LEFT-P', 'BORDER-LEFT-PI',
- 'BORDER-LEFT-PIX', 'BORDER-LEFT-PIXE', 'BORDER-LEFT-PIXEL',
- 'BORDER-RIGHT-CHARS', 'BORDER-R', 'BORDER-RI', 'BORDER-RIG', 'BORDER-RIGH',
- 'BORDER-RIGHT', 'BORDER-RIGHT-', 'BORDER-RIGHT-C', 'BORDER-RIGHT-CH',
- 'BORDER-RIGHT-CHA', 'BORDER-RIGHT-CHAR', 'BORDER-RIGHT-PIXELS',
- 'BORDER-RIGHT-P', 'BORDER-RIGHT-PI', 'BORDER-RIGHT-PIX',
- 'BORDER-RIGHT-PIXE', 'BORDER-RIGHT-PIXEL', 'BORDER-TOP-CHARS', 'BORDER-T',
- 'BORDER-TO', 'BORDER-TOP', 'BORDER-TOP-', 'BORDER-TOP-C', 'BORDER-TOP-CH',
- 'BORDER-TOP-CHA', 'BORDER-TOP-CHAR', 'BORDER-TOP-PIXELS', 'BORDER-TOP-P',
- 'BORDER-TOP-PI', 'BORDER-TOP-PIX', 'BORDER-TOP-PIXE', 'BORDER-TOP-PIXEL',
- 'BOX', 'BOX-SELECTABLE', 'BOX-SELECT', 'BOX-SELECTA', 'BOX-SELECTAB',
- 'BOX-SELECTABL', 'BREAK', 'BROWSE', 'BUFFER', 'BUFFER-CHARS',
- 'BUFFER-COMPARE', 'BUFFER-COPY', 'BUFFER-CREATE', 'BUFFER-DELETE',
- 'BUFFER-FIELD', 'BUFFER-HANDLE', 'BUFFER-LINES', 'BUFFER-NAME',
- 'BUFFER-RELEASE', 'BUFFER-VALUE', 'BUTTON', 'BUTTONS', 'BUTTON', 'BY',
- 'BY-POINTER', 'BY-VARIANT-POINTER', 'CACHE', 'CACHE-SIZE', 'CALL',
- 'CALL-NAME', 'CALL-TYPE', 'CANCEL-BREAK', 'CANCEL-BUTTON', 'CAN-CREATE',
- 'CAN-DELETE', 'CAN-DO', 'CAN-FIND', 'CAN-QUERY', 'CAN-READ', 'CAN-SET',
- 'CAN-WRITE', 'CAPS', 'CAREFUL-PAINT', 'CASE', 'CASE-SENSITIVE', 'CASE-SEN',
- 'CASE-SENS', 'CASE-SENSI', 'CASE-SENSIT', 'CASE-SENSITI', 'CASE-SENSITIV',
- 'CAST', 'CATCH', 'CDECL', 'CENTERED', 'CENTER', 'CENTERE', 'CHAINED',
- 'CHARACTER_LENGTH', 'CHARSET', 'CHECK', 'CHECKED', 'CHOOSE', 'CHR', 'CLASS',
- 'CLASS-TYPE', 'CLEAR', 'CLEAR-APPL-CONTEXT', 'CLEAR-LOG', 'CLEAR-SELECTION',
- 'CLEAR-SELECT', 'CLEAR-SELECTI', 'CLEAR-SELECTIO', 'CLEAR-SORT-ARROWS',
- 'CLEAR-SORT-ARROW', 'CLIENT-CONNECTION-ID', 'CLIENT-PRINCIPAL',
- 'CLIENT-TTY', 'CLIENT-TYPE', 'CLIENT-WORKSTATION', 'CLIPBOARD', 'CLOSE',
- 'CLOSE-LOG', 'CODE', 'CODEBASE-LOCATOR', 'CODEPAGE', 'CODEPAGE-CONVERT',
- 'COLLATE', 'COL-OF', 'COLON', 'COLON-ALIGNED', 'COLON-ALIGN',
- 'COLON-ALIGNE', 'COLOR', 'COLOR-TABLE', 'COLUMN', 'COL', 'COLU', 'COLUM',
- 'COLUMN-BGCOLOR', 'COLUMN-DCOLOR', 'COLUMN-FGCOLOR', 'COLUMN-FONT',
- 'COLUMN-LABEL', 'COLUMN-LAB', 'COLUMN-LABE', 'COLUMN-MOVABLE', 'COLUMN-OF',
- 'COLUMN-PFCOLOR', 'COLUMN-READ-ONLY', 'COLUMN-RESIZABLE', 'COLUMNS',
- 'COLUMN-SCROLLING', 'COMBO-BOX', 'COMMAND', 'COMPARES', 'COMPILE',
- 'COMPILER', 'COMPLETE', 'COM-SELF', 'CONFIG-NAME', 'CONNECT', 'CONNECTED',
- 'CONSTRUCTOR', 'CONTAINS', 'CONTENTS', 'CONTEXT', 'CONTEXT-HELP',
- 'CONTEXT-HELP-FILE', 'CONTEXT-HELP-ID', 'CONTEXT-POPUP', 'CONTROL',
- 'CONTROL-BOX', 'CONTROL-FRAME', 'CONVERT', 'CONVERT-3D-COLORS',
- 'CONVERT-TO-OFFSET', 'CONVERT-TO-OFFS', 'CONVERT-TO-OFFSE', 'COPY-DATASET',
- 'COPY-LOB', 'COPY-SAX-ATTRIBUTES', 'COPY-TEMP-TABLE', 'COUNT', 'COUNT-OF',
- 'CPCASE', 'CPCOLL', 'CPINTERNAL', 'CPLOG', 'CPPRINT', 'CPRCODEIN',
- 'CPRCODEOUT', 'CPSTREAM', 'CPTERM', 'CRC-VALUE', 'CREATE', 'CREATE-LIKE',
- 'CREATE-LIKE-SEQUENTIAL', 'CREATE-NODE-NAMESPACE',
- 'CREATE-RESULT-LIST-ENTRY', 'CREATE-TEST-FILE', 'CURRENT', 'CURRENT_DATE',
- 'CURRENT_DATE', 'CURRENT-CHANGED', 'CURRENT-COLUMN', 'CURRENT-ENVIRONMENT',
- 'CURRENT-ENV', 'CURRENT-ENVI', 'CURRENT-ENVIR', 'CURRENT-ENVIRO',
- 'CURRENT-ENVIRON', 'CURRENT-ENVIRONM', 'CURRENT-ENVIRONME',
- 'CURRENT-ENVIRONMEN', 'CURRENT-ITERATION', 'CURRENT-LANGUAGE',
- 'CURRENT-LANG', 'CURRENT-LANGU', 'CURRENT-LANGUA', 'CURRENT-LANGUAG',
- 'CURRENT-QUERY', 'CURRENT-RESULT-ROW', 'CURRENT-ROW-MODIFIED',
- 'CURRENT-VALUE', 'CURRENT-WINDOW', 'CURSOR', 'CURS', 'CURSO', 'CURSOR-CHAR',
- 'CURSOR-LINE', 'CURSOR-OFFSET', 'DATABASE', 'DATA-BIND',
- 'DATA-ENTRY-RETURN', 'DATA-ENTRY-RET', 'DATA-ENTRY-RETU',
- 'DATA-ENTRY-RETUR', 'DATA-RELATION', 'DATA-REL', 'DATA-RELA', 'DATA-RELAT',
- 'DATA-RELATI', 'DATA-RELATIO', 'DATASERVERS', 'DATASET', 'DATASET-HANDLE',
- 'DATA-SOURCE', 'DATA-SOURCE-COMPLETE-MAP', 'DATA-SOURCE-MODIFIED',
- 'DATA-SOURCE-ROWID', 'DATA-TYPE', 'DATA-T', 'DATA-TY', 'DATA-TYP',
- 'DATE-FORMAT', 'DATE-F', 'DATE-FO', 'DATE-FOR', 'DATE-FORM', 'DATE-FORMA',
- 'DAY', 'DBCODEPAGE', 'DBCOLLATION', 'DBNAME', 'DBPARAM', 'DB-REFERENCES',
- 'DBRESTRICTIONS', 'DBREST', 'DBRESTR', 'DBRESTRI', 'DBRESTRIC',
- 'DBRESTRICT', 'DBRESTRICTI', 'DBRESTRICTIO', 'DBRESTRICTION', 'DBTASKID',
- 'DBTYPE', 'DBVERSION', 'DBVERS', 'DBVERSI', 'DBVERSIO', 'DCOLOR', 'DDE',
- 'DDE-ERROR', 'DDE-ID', 'DDE-I', 'DDE-ITEM', 'DDE-NAME', 'DDE-TOPIC',
- 'DEBLANK', 'DEBUG', 'DEBU', 'DEBUG-ALERT', 'DEBUGGER', 'DEBUG-LIST',
- 'DECIMALS', 'DECLARE', 'DECLARE-NAMESPACE', 'DECRYPT', 'DEFAULT',
- 'DEFAULT-BUFFER-HANDLE', 'DEFAULT-BUTTON', 'DEFAUT-B', 'DEFAUT-BU',
- 'DEFAUT-BUT', 'DEFAUT-BUTT', 'DEFAUT-BUTTO', 'DEFAULT-COMMIT',
- 'DEFAULT-EXTENSION', 'DEFAULT-EX', 'DEFAULT-EXT', 'DEFAULT-EXTE',
- 'DEFAULT-EXTEN', 'DEFAULT-EXTENS', 'DEFAULT-EXTENSI', 'DEFAULT-EXTENSIO',
- 'DEFAULT-NOXLATE', 'DEFAULT-NOXL', 'DEFAULT-NOXLA', 'DEFAULT-NOXLAT',
- 'DEFAULT-VALUE', 'DEFAULT-WINDOW', 'DEFINED', 'DEFINE-USER-EVENT-MANAGER',
- 'DELETE', 'DEL', 'DELE', 'DELET', 'DELETE-CHARACTER', 'DELETE-CHAR',
- 'DELETE-CHARA', 'DELETE-CHARAC', 'DELETE-CHARACT', 'DELETE-CHARACTE',
- 'DELETE-CURRENT-ROW', 'DELETE-LINE', 'DELETE-RESULT-LIST-ENTRY',
- 'DELETE-SELECTED-ROW', 'DELETE-SELECTED-ROWS', 'DELIMITER', 'DESC',
- 'DESCENDING', 'DESC', 'DESCE', 'DESCEN', 'DESCEND', 'DESCENDI', 'DESCENDIN',
- 'DESELECT-FOCUSED-ROW', 'DESELECTION', 'DESELECT-ROWS',
- 'DESELECT-SELECTED-ROW', 'DESTRUCTOR', 'DIALOG-BOX', 'DICTIONARY', 'DICT',
- 'DICTI', 'DICTIO', 'DICTION', 'DICTIONA', 'DICTIONAR', 'DIR', 'DISABLE',
- 'DISABLE-AUTO-ZAP', 'DISABLED', 'DISABLE-DUMP-TRIGGERS',
- 'DISABLE-LOAD-TRIGGERS', 'DISCONNECT', 'DISCON', 'DISCONN', 'DISCONNE',
- 'DISCONNEC', 'DISP', 'DISPLAY', 'DISP', 'DISPL', 'DISPLA',
- 'DISPLAY-MESSAGE', 'DISPLAY-TYPE', 'DISPLAY-T', 'DISPLAY-TY', 'DISPLAY-TYP',
- 'DISTINCT', 'DO', 'DOMAIN-DESCRIPTION', 'DOMAIN-NAME', 'DOMAIN-TYPE', 'DOS',
- 'DOUBLE', 'DOWN', 'DRAG-ENABLED', 'DROP', 'DROP-DOWN', 'DROP-DOWN-LIST',
- 'DROP-FILE-NOTIFY', 'DROP-TARGET', 'DUMP', 'DYNAMIC', 'DYNAMIC-FUNCTION',
- 'EACH', 'ECHO', 'EDGE-CHARS', 'EDGE', 'EDGE-', 'EDGE-C', 'EDGE-CH',
- 'EDGE-CHA', 'EDGE-CHAR', 'EDGE-PIXELS', 'EDGE-P', 'EDGE-PI', 'EDGE-PIX',
- 'EDGE-PIXE', 'EDGE-PIXEL', 'EDIT-CAN-PASTE', 'EDIT-CAN-UNDO', 'EDIT-CLEAR',
- 'EDIT-COPY', 'EDIT-CUT', 'EDITING', 'EDITOR', 'EDIT-PASTE', 'EDIT-UNDO',
- 'ELSE', 'EMPTY', 'EMPTY-TEMP-TABLE', 'ENABLE', 'ENABLED-FIELDS', 'ENCODE',
- 'ENCRYPT', 'ENCRYPT-AUDIT-MAC-KEY', 'ENCRYPTION-SALT', 'END',
- 'END-DOCUMENT', 'END-ELEMENT', 'END-EVENT-GROUP', 'END-FILE-DROP', 'ENDKEY',
- 'END-KEY', 'END-MOVE', 'END-RESIZE', 'END-ROW-RESIZE', 'END-USER-PROMPT',
- 'ENTERED', 'ENTRY', 'EQ', 'ERROR', 'ERROR-COLUMN', 'ERROR-COL',
- 'ERROR-COLU', 'ERROR-COLUM', 'ERROR-ROW', 'ERROR-STACK-TRACE',
- 'ERROR-STATUS', 'ERROR-STAT', 'ERROR-STATU', 'ESCAPE', 'ETIME',
- 'EVENT-GROUP-ID', 'EVENT-PROCEDURE', 'EVENT-PROCEDURE-CONTEXT', 'EVENTS',
- 'EVENT', 'EVENT-TYPE', 'EVENT-T', 'EVENT-TY', 'EVENT-TYP', 'EXCEPT',
- 'EXCLUSIVE-ID', 'EXCLUSIVE-LOCK', 'EXCLUSIVE', 'EXCLUSIVE-', 'EXCLUSIVE-L',
- 'EXCLUSIVE-LO', 'EXCLUSIVE-LOC', 'EXCLUSIVE-WEB-USER', 'EXECUTE', 'EXISTS',
- 'EXP', 'EXPAND', 'EXPANDABLE', 'EXPLICIT', 'EXPORT', 'EXPORT-PRINCIPAL',
- 'EXTENDED', 'EXTENT', 'EXTERNAL', 'FALSE', 'FETCH', 'FETCH-SELECTED-ROW',
- 'FGCOLOR', 'FGC', 'FGCO', 'FGCOL', 'FGCOLO', 'FIELD', 'FIELDS', 'FIELD',
- 'FILE', 'FILE-CREATE-DATE', 'FILE-CREATE-TIME', 'FILE-INFORMATION',
- 'FILE-INFO', 'FILE-INFOR', 'FILE-INFORM', 'FILE-INFORMA', 'FILE-INFORMAT',
- 'FILE-INFORMATI', 'FILE-INFORMATIO', 'FILE-MOD-DATE', 'FILE-MOD-TIME',
- 'FILENAME', 'FILE-NAME', 'FILE-OFFSET', 'FILE-OFF', 'FILE-OFFS',
- 'FILE-OFFSE', 'FILE-SIZE', 'FILE-TYPE', 'FILL', 'FILLED', 'FILL-IN',
- 'FILTERS', 'FINAL', 'FINALLY', 'FIND', 'FIND-BY-ROWID',
- 'FIND-CASE-SENSITIVE', 'FIND-CURRENT', 'FINDER', 'FIND-FIRST',
- 'FIND-GLOBAL', 'FIND-LAST', 'FIND-NEXT-OCCURRENCE', 'FIND-PREV-OCCURRENCE',
- 'FIND-SELECT', 'FIND-UNIQUE', 'FIND-WRAP-AROUND', 'FIRST',
- 'FIRST-ASYNCH-REQUEST', 'FIRST-CHILD', 'FIRST-COLUMN', 'FIRST-FORM',
- 'FIRST-OBJECT', 'FIRST-OF', 'FIRST-PROCEDURE', 'FIRST-PROC', 'FIRST-PROCE',
- 'FIRST-PROCED', 'FIRST-PROCEDU', 'FIRST-PROCEDUR', 'FIRST-SERVER',
- 'FIRST-TAB-ITEM', 'FIRST-TAB-I', 'FIRST-TAB-IT', 'FIRST-TAB-ITE',
- 'FIT-LAST-COLUMN', 'FIXED-ONLY', 'FLAT-BUTTON', 'FLOAT', 'FOCUS',
- 'FOCUSED-ROW', 'FOCUSED-ROW-SELECTED', 'FONT', 'FONT-TABLE', 'FOR',
- 'FORCE-FILE', 'FOREGROUND', 'FORE', 'FOREG', 'FOREGR', 'FOREGRO',
- 'FOREGROU', 'FOREGROUN', 'FORM', 'FORMAT', 'FORM', 'FORMA', 'FORMATTED',
- 'FORMATTE', 'FORM-LONG-INPUT', 'FORWARD', 'FORWARDS', 'FORWARD', 'FRAGMENT',
- 'FRAGMEN', 'FRAME', 'FRAM', 'FRAME-COL', 'FRAME-DB', 'FRAME-DOWN',
- 'FRAME-FIELD', 'FRAME-FILE', 'FRAME-INDEX', 'FRAME-INDE', 'FRAME-LINE',
- 'FRAME-NAME', 'FRAME-ROW', 'FRAME-SPACING', 'FRAME-SPA', 'FRAME-SPAC',
- 'FRAME-SPACI', 'FRAME-SPACIN', 'FRAME-VALUE', 'FRAME-VAL', 'FRAME-VALU',
- 'FRAME-X', 'FRAME-Y', 'FREQUENCY', 'FROM', 'FROM-CHARS', 'FROM-C',
- 'FROM-CH', 'FROM-CHA', 'FROM-CHAR', 'FROM-CURRENT', 'FROM-CUR', 'FROM-CURR',
- 'FROM-CURRE', 'FROM-CURREN', 'FROM-PIXELS', 'FROM-P', 'FROM-PI', 'FROM-PIX',
- 'FROM-PIXE', 'FROM-PIXEL', 'FULL-HEIGHT-CHARS', 'FULL-HEIGHT',
- 'FULL-HEIGHT-', 'FULL-HEIGHT-C', 'FULL-HEIGHT-CH', 'FULL-HEIGHT-CHA',
- 'FULL-HEIGHT-CHAR', 'FULL-HEIGHT-PIXELS', 'FULL-HEIGHT-P', 'FULL-HEIGHT-PI',
- 'FULL-HEIGHT-PIX', 'FULL-HEIGHT-PIXE', 'FULL-HEIGHT-PIXEL', 'FULL-PATHNAME',
- 'FULL-PATHN', 'FULL-PATHNA', 'FULL-PATHNAM', 'FULL-WIDTH-CHARS',
- 'FULL-WIDTH', 'FULL-WIDTH-', 'FULL-WIDTH-C', 'FULL-WIDTH-CH',
- 'FULL-WIDTH-CHA', 'FULL-WIDTH-CHAR', 'FULL-WIDTH-PIXELS', 'FULL-WIDTH-P',
- 'FULL-WIDTH-PI', 'FULL-WIDTH-PIX', 'FULL-WIDTH-PIXE', 'FULL-WIDTH-PIXEL',
- 'FUNCTION', 'FUNCTION-CALL-TYPE', 'GATEWAYS', 'GATEWAY', 'GE',
- 'GENERATE-MD5', 'GENERATE-PBE-KEY', 'GENERATE-PBE-SALT',
- 'GENERATE-RANDOM-KEY', 'GENERATE-UUID', 'GET', 'GET-ATTR-CALL-TYPE',
- 'GET-ATTRIBUTE-NODE', 'GET-BINARY-DATA', 'GET-BLUE-VALUE', 'GET-BLUE',
- 'GET-BLUE-', 'GET-BLUE-V', 'GET-BLUE-VA', 'GET-BLUE-VAL', 'GET-BLUE-VALU',
- 'GET-BROWSE-COLUMN', 'GET-BUFFER-HANDLEGETBYTE', 'GET-BYTE',
- 'GET-CALLBACK-PROC-CONTEXT', 'GET-CALLBACK-PROC-NAME', 'GET-CGI-LIST',
- 'GET-CGI-LONG-VALUE', 'GET-CGI-VALUE', 'GET-CODEPAGES', 'GET-COLLATIONS',
- 'GET-CONFIG-VALUE', 'GET-CURRENT', 'GET-DOUBLE', 'GET-DROPPED-FILE',
- 'GET-DYNAMIC', 'GET-ERROR-COLUMN', 'GET-ERROR-ROW', 'GET-FILE',
- 'GET-FILE-NAME', 'GET-FILE-OFFSET', 'GET-FILE-OFFSE', 'GET-FIRST',
- 'GET-FLOAT', 'GET-GREEN-VALUE', 'GET-GREEN', 'GET-GREEN-', 'GET-GREEN-V',
- 'GET-GREEN-VA', 'GET-GREEN-VAL', 'GET-GREEN-VALU',
- 'GET-INDEX-BY-NAMESPACE-NAME', 'GET-INDEX-BY-QNAME', 'GET-INT64',
- 'GET-ITERATION', 'GET-KEY-VALUE', 'GET-KEY-VAL', 'GET-KEY-VALU', 'GET-LAST',
- 'GET-LOCALNAME-BY-INDEX', 'GET-LONG', 'GET-MESSAGE', 'GET-NEXT',
- 'GET-NUMBER', 'GET-POINTER-VALUE', 'GET-PREV', 'GET-PRINTERS',
- 'GET-PROPERTY', 'GET-QNAME-BY-INDEX', 'GET-RED-VALUE', 'GET-RED',
- 'GET-RED-', 'GET-RED-V', 'GET-RED-VA', 'GET-RED-VAL', 'GET-RED-VALU',
- 'GET-REPOSITIONED-ROW', 'GET-RGB-VALUE', 'GET-SELECTED-WIDGET',
- 'GET-SELECTED', 'GET-SELECTED-', 'GET-SELECTED-W', 'GET-SELECTED-WI',
- 'GET-SELECTED-WID', 'GET-SELECTED-WIDG', 'GET-SELECTED-WIDGE', 'GET-SHORT',
- 'GET-SIGNATURE', 'GET-SIZE', 'GET-STRING', 'GET-TAB-ITEM',
- 'GET-TEXT-HEIGHT-CHARS', 'GET-TEXT-HEIGHT', 'GET-TEXT-HEIGHT-',
- 'GET-TEXT-HEIGHT-C', 'GET-TEXT-HEIGHT-CH', 'GET-TEXT-HEIGHT-CHA',
- 'GET-TEXT-HEIGHT-CHAR', 'GET-TEXT-HEIGHT-PIXELS', 'GET-TEXT-HEIGHT-P',
- 'GET-TEXT-HEIGHT-PI', 'GET-TEXT-HEIGHT-PIX', 'GET-TEXT-HEIGHT-PIXE',
- 'GET-TEXT-HEIGHT-PIXEL', 'GET-TEXT-WIDTH-CHARS', 'GET-TEXT-WIDTH',
- 'GET-TEXT-WIDTH-', 'GET-TEXT-WIDTH-C', 'GET-TEXT-WIDTH-CH',
- 'GET-TEXT-WIDTH-CHA', 'GET-TEXT-WIDTH-CHAR', 'GET-TEXT-WIDTH-PIXELS',
- 'GET-TEXT-WIDTH-P', 'GET-TEXT-WIDTH-PI', 'GET-TEXT-WIDTH-PIX',
- 'GET-TEXT-WIDTH-PIXE', 'GET-TEXT-WIDTH-PIXEL', 'GET-TYPE-BY-INDEX',
- 'GET-TYPE-BY-NAMESPACE-NAME', 'GET-TYPE-BY-QNAME', 'GET-UNSIGNED-LONG',
- 'GET-UNSIGNED-SHORT', 'GET-URI-BY-INDEX', 'GET-VALUE-BY-INDEX',
- 'GET-VALUE-BY-NAMESPACE-NAME', 'GET-VALUE-BY-QNAME', 'GET-WAIT-STATE',
- 'GLOBAL', 'GO-ON', 'GO-PENDING', 'GO-PEND', 'GO-PENDI', 'GO-PENDIN',
- 'GRANT', 'GRAPHIC-EDGE', 'GRAPHIC-E', 'GRAPHIC-ED', 'GRAPHIC-EDG',
- 'GRID-FACTOR-HORIZONTAL', 'GRID-FACTOR-H', 'GRID-FACTOR-HO',
- 'GRID-FACTOR-HOR', 'GRID-FACTOR-HORI', 'GRID-FACTOR-HORIZ',
- 'GRID-FACTOR-HORIZO', 'GRID-FACTOR-HORIZON', 'GRID-FACTOR-HORIZONT',
- 'GRID-FACTOR-HORIZONTA', 'GRID-FACTOR-VERTICAL', 'GRID-FACTOR-V',
- 'GRID-FACTOR-VE', 'GRID-FACTOR-VER', 'GRID-FACTOR-VERT', 'GRID-FACTOR-VERT',
- 'GRID-FACTOR-VERTI', 'GRID-FACTOR-VERTIC', 'GRID-FACTOR-VERTICA',
- 'GRID-SNAP', 'GRID-UNIT-HEIGHT-CHARS', 'GRID-UNIT-HEIGHT',
- 'GRID-UNIT-HEIGHT-', 'GRID-UNIT-HEIGHT-C', 'GRID-UNIT-HEIGHT-CH',
- 'GRID-UNIT-HEIGHT-CHA', 'GRID-UNIT-HEIGHT-PIXELS', 'GRID-UNIT-HEIGHT-P',
- 'GRID-UNIT-HEIGHT-PI', 'GRID-UNIT-HEIGHT-PIX', 'GRID-UNIT-HEIGHT-PIXE',
- 'GRID-UNIT-HEIGHT-PIXEL', 'GRID-UNIT-WIDTH-CHARS', 'GRID-UNIT-WIDTH',
- 'GRID-UNIT-WIDTH-', 'GRID-UNIT-WIDTH-C', 'GRID-UNIT-WIDTH-CH',
- 'GRID-UNIT-WIDTH-CHA', 'GRID-UNIT-WIDTH-CHAR', 'GRID-UNIT-WIDTH-PIXELS',
- 'GRID-UNIT-WIDTH-P', 'GRID-UNIT-WIDTH-PI', 'GRID-UNIT-WIDTH-PIX',
- 'GRID-UNIT-WIDTH-PIXE', 'GRID-UNIT-WIDTH-PIXEL', 'GRID-VISIBLE', 'GROUP',
- 'GT', 'GUID', 'HANDLER', 'HAS-RECORDS', 'HAVING', 'HEADER', 'HEIGHT-CHARS',
- 'HEIGHT', 'HEIGHT-', 'HEIGHT-C', 'HEIGHT-CH', 'HEIGHT-CHA', 'HEIGHT-CHAR',
- 'HEIGHT-PIXELS', 'HEIGHT-P', 'HEIGHT-PI', 'HEIGHT-PIX', 'HEIGHT-PIXE',
- 'HEIGHT-PIXEL', 'HELP', 'HEX-DECODE', 'HEX-ENCODE', 'HIDDEN', 'HIDE',
- 'HORIZONTAL', 'HORI', 'HORIZ', 'HORIZO', 'HORIZON', 'HORIZONT', 'HORIZONTA',
- 'HOST-BYTE-ORDER', 'HTML-CHARSET', 'HTML-END-OF-LINE', 'HTML-END-OF-PAGE',
- 'HTML-FRAME-BEGIN', 'HTML-FRAME-END', 'HTML-HEADER-BEGIN',
- 'HTML-HEADER-END', 'HTML-TITLE-BEGIN', 'HTML-TITLE-END', 'HWND', 'ICON',
- 'IF', 'IMAGE', 'IMAGE-DOWN', 'IMAGE-INSENSITIVE', 'IMAGE-SIZE',
- 'IMAGE-SIZE-CHARS', 'IMAGE-SIZE-C', 'IMAGE-SIZE-CH', 'IMAGE-SIZE-CHA',
- 'IMAGE-SIZE-CHAR', 'IMAGE-SIZE-PIXELS', 'IMAGE-SIZE-P', 'IMAGE-SIZE-PI',
- 'IMAGE-SIZE-PIX', 'IMAGE-SIZE-PIXE', 'IMAGE-SIZE-PIXEL', 'IMAGE-UP',
- 'IMMEDIATE-DISPLAY', 'IMPLEMENTS', 'IMPORT', 'IMPORT-PRINCIPAL', 'IN',
- 'INCREMENT-EXCLUSIVE-ID', 'INDEX', 'INDEXED-REPOSITION', 'INDEX-HINT',
- 'INDEX-INFORMATION', 'INDICATOR', 'INFORMATION', 'INFO', 'INFOR', 'INFORM',
- 'INFORMA', 'INFORMAT', 'INFORMATI', 'INFORMATIO', 'IN-HANDLE',
- 'INHERIT-BGCOLOR', 'INHERIT-BGC', 'INHERIT-BGCO', 'INHERIT-BGCOL',
- 'INHERIT-BGCOLO', 'INHERIT-FGCOLOR', 'INHERIT-FGC', 'INHERIT-FGCO',
- 'INHERIT-FGCOL', 'INHERIT-FGCOLO', 'INHERITS', 'INITIAL', 'INIT', 'INITI',
- 'INITIA', 'INITIAL-DIR', 'INITIAL-FILTER', 'INITIALIZE-DOCUMENT-TYPE',
- 'INITIATE', 'INNER-CHARS', 'INNER-LINES', 'INPUT', 'INPUT-OUTPUT',
- 'INPUT-O', 'INPUT-OU', 'INPUT-OUT', 'INPUT-OUTP', 'INPUT-OUTPU',
- 'INPUT-VALUE', 'INSERT', 'INSERT-ATTRIBUTE', 'INSERT-BACKTAB', 'INSERT-B',
- 'INSERT-BA', 'INSERT-BAC', 'INSERT-BACK', 'INSERT-BACKT', 'INSERT-BACKTA',
- 'INSERT-FILE', 'INSERT-ROW', 'INSERT-STRING', 'INSERT-TAB', 'INSERT-T',
- 'INSERT-TA', 'INTERFACE', 'INTERNAL-ENTRIES', 'INTO', 'INVOKE', 'IS',
- 'IS-ATTR-SPACE', 'IS-ATTR', 'IS-ATTR-', 'IS-ATTR-S', 'IS-ATTR-SP',
- 'IS-ATTR-SPA', 'IS-ATTR-SPAC', 'IS-CLASS', 'IS-CLAS', 'IS-LEAD-BYTE',
- 'IS-ATTR', 'IS-OPEN', 'IS-PARAMETER-SET', 'IS-ROW-SELECTED', 'IS-SELECTED',
- 'ITEM', 'ITEMS-PER-ROW', 'JOIN', 'JOIN-BY-SQLDB', 'KBLABEL',
- 'KEEP-CONNECTION-OPEN', 'KEEP-FRAME-Z-ORDER', 'KEEP-FRAME-Z',
- 'KEEP-FRAME-Z-', 'KEEP-FRAME-Z-O', 'KEEP-FRAME-Z-OR', 'KEEP-FRAME-Z-ORD',
- 'KEEP-FRAME-Z-ORDE', 'KEEP-MESSAGES', 'KEEP-SECURITY-CACHE',
- 'KEEP-TAB-ORDER', 'KEY', 'KEYCODE', 'KEY-CODE', 'KEYFUNCTION', 'KEYFUNC',
- 'KEYFUNCT', 'KEYFUNCTI', 'KEYFUNCTIO', 'KEY-FUNCTION', 'KEY-FUNC',
- 'KEY-FUNCT', 'KEY-FUNCTI', 'KEY-FUNCTIO', 'KEYLABEL', 'KEY-LABEL', 'KEYS',
- 'KEYWORD', 'KEYWORD-ALL', 'LABEL', 'LABEL-BGCOLOR', 'LABEL-BGC',
- 'LABEL-BGCO', 'LABEL-BGCOL', 'LABEL-BGCOLO', 'LABEL-DCOLOR', 'LABEL-DC',
- 'LABEL-DCO', 'LABEL-DCOL', 'LABEL-DCOLO', 'LABEL-FGCOLOR', 'LABEL-FGC',
- 'LABEL-FGCO', 'LABEL-FGCOL', 'LABEL-FGCOLO', 'LABEL-FONT', 'LABEL-PFCOLOR',
- 'LABEL-PFC', 'LABEL-PFCO', 'LABEL-PFCOL', 'LABEL-PFCOLO', 'LABELS',
- 'LANDSCAPE', 'LANGUAGES', 'LANGUAGE', 'LARGE', 'LARGE-TO-SMALL', 'LAST',
- 'LAST-ASYNCH-REQUEST', 'LAST-BATCH', 'LAST-CHILD', 'LAST-EVENT',
- 'LAST-EVEN', 'LAST-FORM', 'LASTKEY', 'LAST-KEY', 'LAST-OBJECT', 'LAST-OF',
- 'LAST-PROCEDURE', 'LAST-PROCE', 'LAST-PROCED', 'LAST-PROCEDU',
- 'LAST-PROCEDUR', 'LAST-SERVER', 'LAST-TAB-ITEM', 'LAST-TAB-I',
- 'LAST-TAB-IT', 'LAST-TAB-ITE', 'LC', 'LDBNAME', 'LE', 'LEAVE',
- 'LEFT-ALIGNED', 'LEFT-ALIGN', 'LEFT-ALIGNE', 'LEFT-TRIM', 'LENGTH',
- 'LIBRARY', 'LIKE', 'LIKE-SEQUENTIAL', 'LINE', 'LINE-COUNTER', 'LINE-COUNT',
- 'LINE-COUNTE', 'LIST-EVENTS', 'LISTING', 'LISTI', 'LISTIN',
- 'LIST-ITEM-PAIRS', 'LIST-ITEMS', 'LIST-PROPERTY-NAMES', 'LIST-QUERY-ATTRS',
- 'LIST-SET-ATTRS', 'LIST-WIDGETS', 'LITERAL-QUESTION', 'LITTLE-ENDIAN',
- 'LOAD', 'LOAD-DOMAINS', 'LOAD-ICON', 'LOAD-IMAGE', 'LOAD-IMAGE-DOWN',
- 'LOAD-IMAGE-INSENSITIVE', 'LOAD-IMAGE-UP', 'LOAD-MOUSE-POINTER',
- 'LOAD-MOUSE-P', 'LOAD-MOUSE-PO', 'LOAD-MOUSE-POI', 'LOAD-MOUSE-POIN',
- 'LOAD-MOUSE-POINT', 'LOAD-MOUSE-POINTE', 'LOAD-PICTURE', 'LOAD-SMALL-ICON',
- 'LOCAL-NAME', 'LOCATOR-COLUMN-NUMBER', 'LOCATOR-LINE-NUMBER',
- 'LOCATOR-PUBLIC-ID', 'LOCATOR-SYSTEM-ID', 'LOCATOR-TYPE', 'LOCKED',
- 'LOCK-REGISTRATION', 'LOG', 'LOG-AUDIT-EVENT', 'LOGIN-EXPIRATION-TIMESTAMP',
- 'LOGIN-HOST', 'LOGIN-STATE', 'LOG-MANAGER', 'LOGOUT', 'LOOKAHEAD', 'LOOKUP',
- 'LT', 'MACHINE-CLASS', 'MANDATORY', 'MANUAL-HIGHLIGHT', 'MAP',
- 'MARGIN-EXTRA', 'MARGIN-HEIGHT-CHARS', 'MARGIN-HEIGHT', 'MARGIN-HEIGHT-',
- 'MARGIN-HEIGHT-C', 'MARGIN-HEIGHT-CH', 'MARGIN-HEIGHT-CHA',
- 'MARGIN-HEIGHT-CHAR', 'MARGIN-HEIGHT-PIXELS', 'MARGIN-HEIGHT-P',
- 'MARGIN-HEIGHT-PI', 'MARGIN-HEIGHT-PIX', 'MARGIN-HEIGHT-PIXE',
- 'MARGIN-HEIGHT-PIXEL', 'MARGIN-WIDTH-CHARS', 'MARGIN-WIDTH',
- 'MARGIN-WIDTH-', 'MARGIN-WIDTH-C', 'MARGIN-WIDTH-CH', 'MARGIN-WIDTH-CHA',
- 'MARGIN-WIDTH-CHAR', 'MARGIN-WIDTH-PIXELS', 'MARGIN-WIDTH-P',
- 'MARGIN-WIDTH-PI', 'MARGIN-WIDTH-PIX', 'MARGIN-WIDTH-PIXE',
- 'MARGIN-WIDTH-PIXEL', 'MARK-NEW', 'MARK-ROW-STATE', 'MATCHES', 'MAX',
- 'MAX-BUTTON', 'MAX-CHARS', 'MAX-DATA-GUESS', 'MAX-HEIGHT',
- 'MAX-HEIGHT-CHARS', 'MAX-HEIGHT-C', 'MAX-HEIGHT-CH', 'MAX-HEIGHT-CHA',
- 'MAX-HEIGHT-CHAR', 'MAX-HEIGHT-PIXELS', 'MAX-HEIGHT-P', 'MAX-HEIGHT-PI',
- 'MAX-HEIGHT-PIX', 'MAX-HEIGHT-PIXE', 'MAX-HEIGHT-PIXEL', 'MAXIMIZE',
- 'MAXIMUM', 'MAX', 'MAXI', 'MAXIM', 'MAXIMU', 'MAXIMUM-LEVEL', 'MAX-ROWS',
- 'MAX-SIZE', 'MAX-VALUE', 'MAX-VAL', 'MAX-VALU', 'MAX-WIDTH',
- 'MAX-WIDTH-CHARS', 'MAX-WIDTH', 'MAX-WIDTH-', 'MAX-WIDTH-C', 'MAX-WIDTH-CH',
- 'MAX-WIDTH-CHA', 'MAX-WIDTH-CHAR', 'MAX-WIDTH-PIXELS', 'MAX-WIDTH-P',
- 'MAX-WIDTH-PI', 'MAX-WIDTH-PIX', 'MAX-WIDTH-PIXE', 'MAX-WIDTH-PIXEL',
- 'MD5-DIGEST', 'MEMBER', 'MEMPTR-TO-NODE-VALUE', 'MENU', 'MENUBAR',
- 'MENU-BAR', 'MENU-ITEM', 'MENU-KEY', 'MENU-K', 'MENU-KE', 'MENU-MOUSE',
- 'MENU-M', 'MENU-MO', 'MENU-MOU', 'MENU-MOUS', 'MERGE-BY-FIELD', 'MESSAGE',
- 'MESSAGE-AREA', 'MESSAGE-AREA-FONT', 'MESSAGE-LINES', 'METHOD', 'MIN',
- 'MIN-BUTTON', 'MIN-COLUMN-WIDTH-CHARS', 'MIN-COLUMN-WIDTH-C',
- 'MIN-COLUMN-WIDTH-CH', 'MIN-COLUMN-WIDTH-CHA', 'MIN-COLUMN-WIDTH-CHAR',
- 'MIN-COLUMN-WIDTH-PIXELS', 'MIN-COLUMN-WIDTH-P', 'MIN-COLUMN-WIDTH-PI',
- 'MIN-COLUMN-WIDTH-PIX', 'MIN-COLUMN-WIDTH-PIXE', 'MIN-COLUMN-WIDTH-PIXEL',
- 'MIN-HEIGHT-CHARS', 'MIN-HEIGHT', 'MIN-HEIGHT-', 'MIN-HEIGHT-C',
- 'MIN-HEIGHT-CH', 'MIN-HEIGHT-CHA', 'MIN-HEIGHT-CHAR', 'MIN-HEIGHT-PIXELS',
- 'MIN-HEIGHT-P', 'MIN-HEIGHT-PI', 'MIN-HEIGHT-PIX', 'MIN-HEIGHT-PIXE',
- 'MIN-HEIGHT-PIXEL', 'MINIMUM', 'MIN', 'MINI', 'MINIM', 'MINIMU', 'MIN-SIZE',
- 'MIN-VALUE', 'MIN-VAL', 'MIN-VALU', 'MIN-WIDTH-CHARS', 'MIN-WIDTH',
- 'MIN-WIDTH-', 'MIN-WIDTH-C', 'MIN-WIDTH-CH', 'MIN-WIDTH-CHA',
- 'MIN-WIDTH-CHAR', 'MIN-WIDTH-PIXELS', 'MIN-WIDTH-P', 'MIN-WIDTH-PI',
- 'MIN-WIDTH-PIX', 'MIN-WIDTH-PIXE', 'MIN-WIDTH-PIXEL', 'MODIFIED', 'MODULO',
- 'MOD', 'MODU', 'MODUL', 'MONTH', 'MOUSE', 'MOUSE-POINTER', 'MOUSE-P',
- 'MOUSE-PO', 'MOUSE-POI', 'MOUSE-POIN', 'MOUSE-POINT', 'MOUSE-POINTE',
- 'MOVABLE', 'MOVE-AFTER-TAB-ITEM', 'MOVE-AFTER', 'MOVE-AFTER-',
- 'MOVE-AFTER-T', 'MOVE-AFTER-TA', 'MOVE-AFTER-TAB', 'MOVE-AFTER-TAB-',
- 'MOVE-AFTER-TAB-I', 'MOVE-AFTER-TAB-IT', 'MOVE-AFTER-TAB-ITE',
- 'MOVE-BEFORE-TAB-ITEM', 'MOVE-BEFOR', 'MOVE-BEFORE', 'MOVE-BEFORE-',
- 'MOVE-BEFORE-T', 'MOVE-BEFORE-TA', 'MOVE-BEFORE-TAB', 'MOVE-BEFORE-TAB-',
- 'MOVE-BEFORE-TAB-I', 'MOVE-BEFORE-TAB-IT', 'MOVE-BEFORE-TAB-ITE',
- 'MOVE-COLUMN', 'MOVE-COL', 'MOVE-COLU', 'MOVE-COLUM', 'MOVE-TO-BOTTOM',
- 'MOVE-TO-B', 'MOVE-TO-BO', 'MOVE-TO-BOT', 'MOVE-TO-BOTT', 'MOVE-TO-BOTTO',
- 'MOVE-TO-EOF', 'MOVE-TO-TOP', 'MOVE-TO-T', 'MOVE-TO-TO', 'MPE',
- 'MULTI-COMPILE', 'MULTIPLE', 'MULTIPLE-KEY', 'MULTITASKING-INTERVAL',
- 'MUST-EXIST', 'NAME', 'NAMESPACE-PREFIX', 'NAMESPACE-URI', 'NATIVE', 'NE',
- 'NEEDS-APPSERVER-PROMPT', 'NEEDS-PROMPT', 'NEW', 'NEW-INSTANCE', 'NEW-ROW',
- 'NEXT', 'NEXT-COLUMN', 'NEXT-PROMPT', 'NEXT-ROWID', 'NEXT-SIBLING',
- 'NEXT-TAB-ITEM', 'NEXT-TAB-I', 'NEXT-TAB-IT', 'NEXT-TAB-ITE', 'NEXT-VALUE',
- 'NO', 'NO-APPLY', 'NO-ARRAY-MESSAGE', 'NO-ASSIGN', 'NO-ATTR-LIST',
- 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-L', 'NO-ATTR-LI', 'NO-ATTR-LIS',
- 'NO-ATTR-SPACE', 'NO-ATTR', 'NO-ATTR-', 'NO-ATTR-S', 'NO-ATTR-SP',
- 'NO-ATTR-SPA', 'NO-ATTR-SPAC', 'NO-AUTO-VALIDATE', 'NO-BIND-WHERE',
- 'NO-BOX', 'NO-CONSOLE', 'NO-CONVERT', 'NO-CONVERT-3D-COLORS',
- 'NO-CURRENT-VALUE', 'NO-DEBUG', 'NODE-VALUE-TO-MEMPTR', 'NO-DRAG',
- 'NO-ECHO', 'NO-EMPTY-SPACE', 'NO-ERROR', 'NO-FILL', 'NO-F', 'NO-FI',
- 'NO-FIL', 'NO-FOCUS', 'NO-HELP', 'NO-HIDE', 'NO-INDEX-HINT',
- 'NO-INHERIT-BGCOLOR', 'NO-INHERIT-BGC', 'NO-INHERIT-BGCO', 'LABEL-BGCOL',
- 'LABEL-BGCOLO', 'NO-INHERIT-FGCOLOR', 'NO-INHERIT-FGC', 'NO-INHERIT-FGCO',
- 'NO-INHERIT-FGCOL', 'NO-INHERIT-FGCOLO', 'NO-JOIN-BY-SQLDB', 'NO-LABELS',
- 'NO-LABE', 'NO-LOBS', 'NO-LOCK', 'NO-LOOKAHEAD', 'NO-MAP', 'NO-MESSAGE',
- 'NO-MES', 'NO-MESS', 'NO-MESSA', 'NO-MESSAG', 'NONAMESPACE-SCHEMA-LOCATION',
- 'NONE', 'NO-PAUSE', 'NO-PREFETCH', 'NO-PREFE', 'NO-PREFET', 'NO-PREFETC',
- 'NORMALIZE', 'NO-ROW-MARKERS', 'NO-SCROLLBAR-VERTICAL',
- 'NO-SEPARATE-CONNECTION', 'NO-SEPARATORS', 'NOT', 'NO-TAB-STOP',
- 'NOT-ACTIVE', 'NO-UNDERLINE', 'NO-UND', 'NO-UNDE', 'NO-UNDER', 'NO-UNDERL',
- 'NO-UNDERLI', 'NO-UNDERLIN', 'NO-UNDO', 'NO-VALIDATE', 'NO-VAL', 'NO-VALI',
- 'NO-VALID', 'NO-VALIDA', 'NO-VALIDAT', 'NOW', 'NO-WAIT', 'NO-WORD-WRAP',
- 'NULL', 'NUM-ALIASES', 'NUM-ALI', 'NUM-ALIA', 'NUM-ALIAS', 'NUM-ALIASE',
- 'NUM-BUFFERS', 'NUM-BUTTONS', 'NUM-BUT', 'NUM-BUTT', 'NUM-BUTTO',
- 'NUM-BUTTON', 'NUM-COLUMNS', 'NUM-COL', 'NUM-COLU', 'NUM-COLUM',
- 'NUM-COLUMN', 'NUM-COPIES', 'NUM-DBS', 'NUM-DROPPED-FILES', 'NUM-ENTRIES',
- 'NUMERIC', 'NUMERIC-FORMAT', 'NUMERIC-F', 'NUMERIC-FO', 'NUMERIC-FOR',
- 'NUMERIC-FORM', 'NUMERIC-FORMA', 'NUM-FIELDS', 'NUM-FORMATS', 'NUM-ITEMS',
- 'NUM-ITERATIONS', 'NUM-LINES', 'NUM-LOCKED-COLUMNS', 'NUM-LOCKED-COL',
- 'NUM-LOCKED-COLU', 'NUM-LOCKED-COLUM', 'NUM-LOCKED-COLUMN', 'NUM-MESSAGES',
- 'NUM-PARAMETERS', 'NUM-REFERENCES', 'NUM-REPLACED', 'NUM-RESULTS',
- 'NUM-SELECTED-ROWS', 'NUM-SELECTED-WIDGETS', 'NUM-SELECTED',
- 'NUM-SELECTED-', 'NUM-SELECTED-W', 'NUM-SELECTED-WI', 'NUM-SELECTED-WID',
- 'NUM-SELECTED-WIDG', 'NUM-SELECTED-WIDGE', 'NUM-SELECTED-WIDGET',
- 'NUM-TABS', 'NUM-TO-RETAIN', 'NUM-VISIBLE-COLUMNS', 'OCTET-LENGTH', 'OF',
- 'OFF', 'OK', 'OK-CANCEL', 'OLD', 'ON', 'ON-FRAME-BORDER', 'ON-FRAME',
- 'ON-FRAME-', 'ON-FRAME-B', 'ON-FRAME-BO', 'ON-FRAME-BOR', 'ON-FRAME-BORD',
- 'ON-FRAME-BORDE', 'OPEN', 'OPSYS', 'OPTION', 'OR', 'ORDERED-JOIN',
- 'ORDINAL', 'OS-APPEND', 'OS-COMMAND', 'OS-COPY', 'OS-CREATE-DIR',
- 'OS-DELETE', 'OS-DIR', 'OS-DRIVES', 'OS-DRIVE', 'OS-ERROR', 'OS-GETENV',
- 'OS-RENAME', 'OTHERWISE', 'OUTPUT', 'OVERLAY', 'OVERRIDE', 'OWNER', 'PAGE',
- 'PAGE-BOTTOM', 'PAGE-BOT', 'PAGE-BOTT', 'PAGE-BOTTO', 'PAGED',
- 'PAGE-NUMBER', 'PAGE-NUM', 'PAGE-NUMB', 'PAGE-NUMBE', 'PAGE-SIZE',
- 'PAGE-TOP', 'PAGE-WIDTH', 'PAGE-WID', 'PAGE-WIDT', 'PARAMETER', 'PARAM',
- 'PARAME', 'PARAMET', 'PARAMETE', 'PARENT', 'PARSE-STATUS', 'PARTIAL-KEY',
- 'PASCAL', 'PASSWORD-FIELD', 'PATHNAME', 'PAUSE', 'PBE-HASH-ALGORITHM',
- 'PBE-HASH-ALG', 'PBE-HASH-ALGO', 'PBE-HASH-ALGOR', 'PBE-HASH-ALGORI',
- 'PBE-HASH-ALGORIT', 'PBE-HASH-ALGORITH', 'PBE-KEY-ROUNDS', 'PDBNAME',
- 'PERSISTENT', 'PERSIST', 'PERSISTE', 'PERSISTEN',
- 'PERSISTENT-CACHE-DISABLED', 'PFCOLOR', 'PFC', 'PFCO', 'PFCOL', 'PFCOLO',
- 'PIXELS', 'PIXELS-PER-COLUMN', 'PIXELS-PER-COL', 'PIXELS-PER-COLU',
- 'PIXELS-PER-COLUM', 'PIXELS-PER-ROW', 'POPUP-MENU', 'POPUP-M', 'POPUP-ME',
- 'POPUP-MEN', 'POPUP-ONLY', 'POPUP-O', 'POPUP-ON', 'POPUP-ONL', 'PORTRAIT',
- 'POSITION', 'PRECISION', 'PREFER-DATASET', 'PREPARED', 'PREPARE-STRING',
- 'PREPROCESS', 'PREPROC', 'PREPROCE', 'PREPROCES', 'PRESELECT', 'PRESEL',
- 'PRESELE', 'PRESELEC', 'PREV', 'PREV-COLUMN', 'PREV-SIBLING',
- 'PREV-TAB-ITEM', 'PREV-TAB-I', 'PREV-TAB-IT', 'PREV-TAB-ITE', 'PRIMARY',
- 'PRINTER', 'PRINTER-CONTROL-HANDLE', 'PRINTER-HDC', 'PRINTER-NAME',
- 'PRINTER-PORT', 'PRINTER-SETUP', 'PRIVATE', 'PRIVATE-DATA', 'PRIVATE-D',
- 'PRIVATE-DA', 'PRIVATE-DAT', 'PRIVILEGES', 'PROCEDURE', 'PROCE', 'PROCED',
- 'PROCEDU', 'PROCEDUR', 'PROCEDURE-CALL-TYPE', 'PROCESS', 'PROC-HANDLE',
- 'PROC-HA', 'PROC-HAN', 'PROC-HAND', 'PROC-HANDL', 'PROC-STATUS', 'PROC-ST',
- 'PROC-STA', 'PROC-STAT', 'PROC-STATU', 'proc-text', 'proc-text-buffe',
- 'PROFILER', 'PROGRAM-NAME', 'PROGRESS', 'PROGRESS-SOURCE', 'PROGRESS-S',
- 'PROGRESS-SO', 'PROGRESS-SOU', 'PROGRESS-SOUR', 'PROGRESS-SOURC', 'PROMPT',
- 'PROMPT-FOR', 'PROMPT-F', 'PROMPT-FO', 'PROMSGS', 'PROPATH', 'PROPERTY',
- 'PROTECTED', 'PROVERSION', 'PROVERS', 'PROVERSI', 'PROVERSIO', 'PROXY',
- 'PROXY-PASSWORD', 'PROXY-USERID', 'PUBLIC', 'PUBLIC-ID', 'PUBLISH',
- 'PUBLISHED-EVENTS', 'PUT', 'PUTBYTE', 'PUT-BYTE', 'PUT-DOUBLE', 'PUT-FLOAT',
- 'PUT-INT64', 'PUT-KEY-VALUE', 'PUT-KEY-VAL', 'PUT-KEY-VALU', 'PUT-LONG',
- 'PUT-SHORT', 'PUT-STRING', 'PUT-UNSIGNED-LONG', 'QUERY', 'QUERY-CLOSE',
- 'QUERY-OFF-END', 'QUERY-OPEN', 'QUERY-PREPARE', 'QUERY-TUNING', 'QUESTION',
- 'QUIT', 'QUOTER', 'RADIO-BUTTONS', 'RADIO-SET', 'RANDOM', 'RAW-TRANSFER',
- 'RCODE-INFORMATION', 'RCODE-INFO', 'RCODE-INFOR', 'RCODE-INFORM',
- 'RCODE-INFORMA', 'RCODE-INFORMAT', 'RCODE-INFORMATI', 'RCODE-INFORMATIO',
- 'READ-AVAILABLE', 'READ-EXACT-NUM', 'READ-FILE', 'READKEY', 'READ-ONLY',
- 'READ-XML', 'READ-XMLSCHEMA', 'REAL', 'RECORD-LENGTH', 'RECTANGLE', 'RECT',
- 'RECTA', 'RECTAN', 'RECTANG', 'RECTANGL', 'RECURSIVE', 'REFERENCE-ONLY',
- 'REFRESH', 'REFRESHABLE', 'REFRESH-AUDIT-POLICY', 'REGISTER-DOMAIN',
- 'RELEASE', 'REMOTE', 'REMOVE-EVENTS-PROCEDURE', 'REMOVE-SUPER-PROCEDURE',
- 'REPEAT', 'REPLACE', 'REPLACE-SELECTION-TEXT', 'REPOSITION',
- 'REPOSITION-BACKWARD', 'REPOSITION-FORWARD', 'REPOSITION-MODE',
- 'REPOSITION-TO-ROW', 'REPOSITION-TO-ROWID', 'REQUEST', 'RESET', 'RESIZABLE',
- 'RESIZA', 'RESIZAB', 'RESIZABL', 'RESIZE', 'RESTART-ROW', 'RESTART-ROWID',
- 'RETAIN', 'RETAIN-SHAPE', 'RETRY', 'RETRY-CANCEL', 'RETURN',
- 'RETURN-INSERTED', 'RETURN-INS', 'RETURN-INSE', 'RETURN-INSER',
- 'RETURN-INSERT', 'RETURN-INSERTE', 'RETURNS', 'RETURN-TO-START-DIR',
- 'RETURN-TO-START-DI', 'RETURN-VALUE', 'RETURN-VAL', 'RETURN-VALU',
- 'RETURN-VALUE-DATA-TYPE', 'REVERSE-FROM', 'REVERT', 'REVOKE', 'RGB-VALUE',
- 'RIGHT-ALIGNED', 'RETURN-ALIGN', 'RETURN-ALIGNE', 'RIGHT-TRIM', 'R-INDEX',
- 'ROLES', 'ROUND', 'ROUTINE-LEVEL', 'ROW', 'ROW-HEIGHT-CHARS', 'HEIGHT',
- 'ROW-HEIGHT-PIXELS', 'HEIGHT-P', 'ROW-MARKERS', 'ROW-OF', 'ROW-RESIZABLE',
- 'RULE', 'RUN', 'RUN-PROCEDURE', 'SAVE', 'SAVE-AS', 'SAVE-FILE',
- 'SAX-COMPLETE', 'SAX-COMPLE', 'SAX-COMPLET', 'SAX-PARSE', 'SAX-PARSE-FIRST',
- 'SAX-PARSE-NEXT', 'SAX-PARSER-ERROR', 'SAX-RUNNING', 'SAX-UNINITIALIZED',
- 'SAX-WRITE-BEGIN', 'SAX-WRITE-COMPLETE', 'SAX-WRITE-CONTENT',
- 'SAX-WRITE-ELEMENT', 'SAX-WRITE-ERROR', 'SAX-WRITE-IDLE', 'SAX-WRITER',
- 'SAX-WRITE-TAG', 'SCHEMA', 'SCHEMA-LOCATION', 'SCHEMA-MARSHAL',
- 'SCHEMA-PATH', 'SCREEN', 'SCREEN-IO', 'SCREEN-LINES', 'SCREEN-VALUE',
- 'SCREEN-VAL', 'SCREEN-VALU', 'SCROLL', 'SCROLLABLE', 'SCROLLBAR-HORIZONTAL',
- 'SCROLLBAR-H', 'SCROLLBAR-HO', 'SCROLLBAR-HOR', 'SCROLLBAR-HORI',
- 'SCROLLBAR-HORIZ', 'SCROLLBAR-HORIZO', 'SCROLLBAR-HORIZON',
- 'SCROLLBAR-HORIZONT', 'SCROLLBAR-HORIZONTA', 'SCROLL-BARS',
- 'SCROLLBAR-VERTICAL', 'SCROLLBAR-V', 'SCROLLBAR-VE', 'SCROLLBAR-VER',
- 'SCROLLBAR-VERT', 'SCROLLBAR-VERTI', 'SCROLLBAR-VERTIC',
- 'SCROLLBAR-VERTICA', 'SCROLL-DELTA', 'SCROLLED-ROW-POSITION',
- 'SCROLLED-ROW-POS', 'SCROLLED-ROW-POSI', 'SCROLLED-ROW-POSIT',
- 'SCROLLED-ROW-POSITI', 'SCROLLED-ROW-POSITIO', 'SCROLLING', 'SCROLL-OFFSET',
- 'SCROLL-TO-CURRENT-ROW', 'SCROLL-TO-ITEM', 'SCROLL-TO-I', 'SCROLL-TO-IT',
- 'SCROLL-TO-ITE', 'SCROLL-TO-SELECTED-ROW', 'SDBNAME', 'SEAL',
- 'SEAL-TIMESTAMP', 'SEARCH', 'SEARCH-SELF', 'SEARCH-TARGET', 'SECTION',
- 'SECURITY-POLICY', 'SEEK', 'SELECT', 'SELECTABLE', 'SELECT-ALL', 'SELECTED',
- 'SELECT-FOCUSED-ROW', 'SELECTION', 'SELECTION-END', 'SELECTION-LIST',
- 'SELECTION-START', 'SELECTION-TEXT', 'SELECT-NEXT-ROW', 'SELECT-PREV-ROW',
- 'SELECT-ROW', 'SELF', 'SEND', 'send-sql-statement', 'send-sql', 'SENSITIVE',
- 'SEPARATE-CONNECTION', 'SEPARATOR-FGCOLOR', 'SEPARATORS', 'SERVER',
- 'SERVER-CONNECTION-BOUND', 'SERVER-CONNECTION-BOUND-REQUEST',
- 'SERVER-CONNECTION-CONTEXT', 'SERVER-CONNECTION-ID',
- 'SERVER-OPERATING-MODE', 'SESSION', 'SESSION-ID', 'SET', 'SET-APPL-CONTEXT',
- 'SET-ATTR-CALL-TYPE', 'SET-ATTRIBUTE-NODE', 'SET-BLUE-VALUE', 'SET-BLUE',
- 'SET-BLUE-', 'SET-BLUE-V', 'SET-BLUE-VA', 'SET-BLUE-VAL', 'SET-BLUE-VALU',
- 'SET-BREAK', 'SET-BUFFERS', 'SET-CALLBACK', 'SET-CLIENT', 'SET-COMMIT',
- 'SET-CONTENTS', 'SET-CURRENT-VALUE', 'SET-DB-CLIENT', 'SET-DYNAMIC',
- 'SET-EVENT-MANAGER-OPTION', 'SET-GREEN-VALUE', 'SET-GREEN', 'SET-GREEN-',
- 'SET-GREEN-V', 'SET-GREEN-VA', 'SET-GREEN-VAL', 'SET-GREEN-VALU',
- 'SET-INPUT-SOURCE', 'SET-OPTION', 'SET-OUTPUT-DESTINATION', 'SET-PARAMETER',
- 'SET-POINTER-VALUE', 'SET-PROPERTY', 'SET-RED-VALUE', 'SET-RED', 'SET-RED-',
- 'SET-RED-V', 'SET-RED-VA', 'SET-RED-VAL', 'SET-RED-VALU',
- 'SET-REPOSITIONED-ROW', 'SET-RGB-VALUE', 'SET-ROLLBACK', 'SET-SELECTION',
- 'SET-SIZE', 'SET-SORT-ARROW', 'SETUSERID', 'SETUSER', 'SETUSERI',
- 'SET-WAIT-STATE', 'SHA1-DIGEST', 'SHARED', 'SHARE-LOCK', 'SHARE', 'SHARE-',
- 'SHARE-L', 'SHARE-LO', 'SHARE-LOC', 'SHOW-IN-TASKBAR', 'SHOW-STATS',
- 'SHOW-STAT', 'SIDE-LABEL-HANDLE', 'SIDE-LABEL-H', 'SIDE-LABEL-HA',
- 'SIDE-LABEL-HAN', 'SIDE-LABEL-HAND', 'SIDE-LABEL-HANDL', 'SIDE-LABELS',
- 'SIDE-LAB', 'SIDE-LABE', 'SIDE-LABEL', 'SILENT', 'SIMPLE', 'SINGLE', 'SIZE',
- 'SIZE-CHARS', 'SIZE-C', 'SIZE-CH', 'SIZE-CHA', 'SIZE-CHAR', 'SIZE-PIXELS',
- 'SIZE-P', 'SIZE-PI', 'SIZE-PIX', 'SIZE-PIXE', 'SIZE-PIXEL', 'SKIP',
- 'SKIP-DELETED-RECORD', 'SLIDER', 'SMALL-ICON', 'SMALLINT', 'SMALL-TITLE',
- 'SOME', 'SORT', 'SORT-ASCENDING', 'SORT-NUMBER', 'SOURCE',
- 'SOURCE-PROCEDURE', 'SPACE', 'SQL', 'SQRT', 'SSL-SERVER-NAME', 'STANDALONE',
- 'START', 'START-DOCUMENT', 'START-ELEMENT', 'START-MOVE', 'START-RESIZE',
- 'START-ROW-RESIZE', 'STATE-DETAIL', 'STATIC', 'STATUS', 'STATUS-AREA',
- 'STATUS-AREA-FONT', 'STDCALL', 'STOP', 'STOP-PARSING', 'STOPPED', 'STOPPE',
- 'STORED-PROCEDURE', 'STORED-PROC', 'STORED-PROCE', 'STORED-PROCED',
- 'STORED-PROCEDU', 'STORED-PROCEDUR', 'STREAM', 'STREAM-HANDLE', 'STREAM-IO',
- 'STRETCH-TO-FIT', 'STRICT', 'STRING', 'STRING-VALUE', 'STRING-XREF',
- 'SUB-AVERAGE', 'SUB-AVE', 'SUB-AVER', 'SUB-AVERA', 'SUB-AVERAG',
- 'SUB-COUNT', 'SUB-MAXIMUM', 'SUM-MAX', 'SUM-MAXI', 'SUM-MAXIM',
- 'SUM-MAXIMU', 'SUB-MENU', 'SUBSUB-', 'MINIMUM', 'SUB-MIN', 'SUBSCRIBE',
- 'SUBSTITUTE', 'SUBST', 'SUBSTI', 'SUBSTIT', 'SUBSTITU', 'SUBSTITUT',
- 'SUBSTRING', 'SUBSTR', 'SUBSTRI', 'SUBSTRIN', 'SUB-TOTAL', 'SUBTYPE', 'SUM',
- 'SUPER', 'SUPER-PROCEDURES', 'SUPPRESS-NAMESPACE-PROCESSING',
- 'SUPPRESS-WARNINGS', 'SUPPRESS-W', 'SUPPRESS-WA', 'SUPPRESS-WAR',
- 'SUPPRESS-WARN', 'SUPPRESS-WARNI', 'SUPPRESS-WARNIN', 'SUPPRESS-WARNING',
- 'SYMMETRIC-ENCRYPTION-ALGORITHM', 'SYMMETRIC-ENCRYPTION-IV',
- 'SYMMETRIC-ENCRYPTION-KEY', 'SYMMETRIC-SUPPORT', 'SYSTEM-ALERT-BOXES',
- 'SYSTEM-ALERT', 'SYSTEM-ALERT-', 'SYSTEM-ALERT-B', 'SYSTEM-ALERT-BO',
- 'SYSTEM-ALERT-BOX', 'SYSTEM-ALERT-BOXE', 'SYSTEM-DIALOG', 'SYSTEM-HELP',
- 'SYSTEM-ID', 'TABLE', 'TABLE-HANDLE', 'TABLE-NUMBER', 'TAB-POSITION',
- 'TAB-STOP', 'TARGET', 'TARGET-PROCEDURE', 'TEMP-DIRECTORY', 'TEMP-DIR',
- 'TEMP-DIRE', 'TEMP-DIREC', 'TEMP-DIRECT', 'TEMP-DIRECTO', 'TEMP-DIRECTOR',
- 'TEMP-TABLE', 'TEMP-TABLE-PREPARE', 'TERM', 'TERMINAL', 'TERM', 'TERMI',
- 'TERMIN', 'TERMINA', 'TERMINATE', 'TEXT', 'TEXT-CURSOR', 'TEXT-SEG-GROW',
- 'TEXT-SELECTED', 'THEN', 'THIS-OBJECT', 'THIS-PROCEDURE', 'THREE-D',
- 'THROW', 'THROUGH', 'THRU', 'TIC-MARKS', 'TIME', 'TIME-SOURCE', 'TITLE',
- 'TITLE-BGCOLOR', 'TITLE-BGC', 'TITLE-BGCO', 'TITLE-BGCOL', 'TITLE-BGCOLO',
- 'TITLE-DCOLOR', 'TITLE-DC', 'TITLE-DCO', 'TITLE-DCOL', 'TITLE-DCOLO',
- 'TITLE-FGCOLOR', 'TITLE-FGC', 'TITLE-FGCO', 'TITLE-FGCOL', 'TITLE-FGCOLO',
- 'TITLE-FONT', 'TITLE-FO', 'TITLE-FON', 'TO', 'TODAY', 'TOGGLE-BOX',
- 'TOOLTIP', 'TOOLTIPS', 'TOPIC', 'TOP-NAV-QUERY', 'TOP-ONLY', 'TO-ROWID',
- 'TOTAL', 'TRAILING', 'TRANS', 'TRANSACTION', 'TRANSACTION-MODE',
- 'TRANS-INIT-PROCEDURE', 'TRANSPARENT', 'TRIGGER', 'TRIGGERS', 'TRIM',
- 'TRUE', 'TRUNCATE', 'TRUNC', 'TRUNCA', 'TRUNCAT', 'TYPE', 'TYPE-OF',
- 'UNBOX', 'UNBUFFERED', 'UNBUFF', 'UNBUFFE', 'UNBUFFER', 'UNBUFFERE',
- 'UNDERLINE', 'UNDERL', 'UNDERLI', 'UNDERLIN', 'UNDO', 'UNFORMATTED',
- 'UNFORM', 'UNFORMA', 'UNFORMAT', 'UNFORMATT', 'UNFORMATTE', 'UNION',
- 'UNIQUE', 'UNIQUE-ID', 'UNIQUE-MATCH', 'UNIX', 'UNLESS-HIDDEN', 'UNLOAD',
- 'UNSIGNED-LONG', 'UNSUBSCRIBE', 'UP', 'UPDATE', 'UPDATE-ATTRIBUTE', 'URL',
- 'URL-DECODE', 'URL-ENCODE', 'URL-PASSWORD', 'URL-USERID', 'USE',
- 'USE-DICT-EXPS', 'USE-FILENAME', 'USE-INDEX', 'USER', 'USE-REVVIDEO',
- 'USERID', 'USER-ID', 'USE-TEXT', 'USE-UNDERLINE', 'USE-WIDGET-POOL',
- 'USING', 'V6DISPLAY', 'V6FRAME', 'VALIDATE', 'VALIDATE-EXPRESSION',
- 'VALIDATE-MESSAGE', 'VALIDATE-SEAL', 'VALIDATION-ENABLED', 'VALID-EVENT',
- 'VALID-HANDLE', 'VALID-OBJECT', 'VALUE', 'VALUE-CHANGED', 'VALUES',
- 'VARIABLE', 'VAR', 'VARI', 'VARIA', 'VARIAB', 'VARIABL', 'VERBOSE',
- 'VERSION', 'VERTICAL', 'VERT', 'VERTI', 'VERTIC', 'VERTICA', 'VIEW',
- 'VIEW-AS', 'VIEW-FIRST-COLUMN-ON-REOPEN', 'VIRTUAL-HEIGHT-CHARS',
- 'VIRTUAL-HEIGHT', 'VIRTUAL-HEIGHT-', 'VIRTUAL-HEIGHT-C',
- 'VIRTUAL-HEIGHT-CH', 'VIRTUAL-HEIGHT-CHA', 'VIRTUAL-HEIGHT-CHAR',
- 'VIRTUAL-HEIGHT-PIXELS', 'VIRTUAL-HEIGHT-P', 'VIRTUAL-HEIGHT-PI',
- 'VIRTUAL-HEIGHT-PIX', 'VIRTUAL-HEIGHT-PIXE', 'VIRTUAL-HEIGHT-PIXEL',
- 'VIRTUAL-WIDTH-CHARS', 'VIRTUAL-WIDTH', 'VIRTUAL-WIDTH-', 'VIRTUAL-WIDTH-C',
- 'VIRTUAL-WIDTH-CH', 'VIRTUAL-WIDTH-CHA', 'VIRTUAL-WIDTH-CHAR',
- 'VIRTUAL-WIDTH-PIXELS', 'VIRTUAL-WIDTH-P', 'VIRTUAL-WIDTH-PI',
- 'VIRTUAL-WIDTH-PIX', 'VIRTUAL-WIDTH-PIXE', 'VIRTUAL-WIDTH-PIXEL', 'VISIBLE',
- 'VOID', 'WAIT', 'WAIT-FOR', 'WARNING', 'WEB-CONTEXT', 'WEEKDAY', 'WHEN',
- 'WHERE', 'WHILE', 'WIDGET', 'WIDGET-ENTER', 'WIDGET-E', 'WIDGET-EN',
- 'WIDGET-ENT', 'WIDGET-ENTE', 'WIDGET-ID', 'WIDGET-LEAVE', 'WIDGET-L',
- 'WIDGET-LE', 'WIDGET-LEA', 'WIDGET-LEAV', 'WIDGET-POOL', 'WIDTH',
- 'WIDTH-CHARS', 'WIDTH', 'WIDTH-', 'WIDTH-C', 'WIDTH-CH', 'WIDTH-CHA',
- 'WIDTH-CHAR', 'WIDTH-PIXELS', 'WIDTH-P', 'WIDTH-PI', 'WIDTH-PIX',
- 'WIDTH-PIXE', 'WIDTH-PIXEL', 'WINDOW', 'WINDOW-MAXIMIZED', 'WINDOW-MAXIM',
- 'WINDOW-MAXIMI', 'WINDOW-MAXIMIZ', 'WINDOW-MAXIMIZE', 'WINDOW-MINIMIZED',
- 'WINDOW-MINIM', 'WINDOW-MINIMI', 'WINDOW-MINIMIZ', 'WINDOW-MINIMIZE',
- 'WINDOW-NAME', 'WINDOW-NORMAL', 'WINDOW-STATE', 'WINDOW-STA', 'WINDOW-STAT',
- 'WINDOW-SYSTEM', 'WITH', 'WORD-INDEX', 'WORD-WRAP',
- 'WORK-AREA-HEIGHT-PIXELS', 'WORK-AREA-WIDTH-PIXELS', 'WORK-AREA-X',
- 'WORK-AREA-Y', 'WORKFILE', 'WORK-TABLE', 'WORK-TAB', 'WORK-TABL', 'WRITE',
- 'WRITE-CDATA', 'WRITE-CHARACTERS', 'WRITE-COMMENT', 'WRITE-DATA-ELEMENT',
- 'WRITE-EMPTY-ELEMENT', 'WRITE-ENTITY-REF', 'WRITE-EXTERNAL-DTD',
- 'WRITE-FRAGMENT', 'WRITE-MESSAGE', 'WRITE-PROCESSING-INSTRUCTION',
- 'WRITE-STATUS', 'WRITE-XML', 'WRITE-XMLSCHEMA', 'X', 'XCODE',
- 'XML-DATA-TYPE', 'XML-NODE-TYPE', 'XML-SCHEMA-PATH',
- 'XML-SUPPRESS-NAMESPACE-PROCESSING', 'X-OF', 'XREF', 'XREF-XML', 'Y',
- 'YEAR', 'YEAR-OFFSET', 'YES', 'YES-NO', 'YES-NO-CANCEL', 'Y-OF'
-]
diff --git a/pygments/lexers/_phpbuiltins.py b/pygments/lexers/_php_builtins.py
index 354e995c..343b4416 100644
--- a/pygments/lexers/_phpbuiltins.py
+++ b/pygments/lexers/_php_builtins.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._phpbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers._php_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file loads the function names and their modules from the
php webpage and generates itself.
@@ -18,8 +18,8 @@
from __future__ import print_function
-MODULES = {'.NET': ['dotnet_load'],
- 'APC': ['apc_add',
+MODULES = {'.NET': ('dotnet_load',),
+ 'APC': ('apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
@@ -37,8 +37,8 @@ MODULES = {'.NET': ['dotnet_load'],
'apc_inc',
'apc_load_constants',
'apc_sma_info',
- 'apc_store'],
- 'APD': ['apd_breakpoint',
+ 'apc_store'),
+ 'APD': ('apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
@@ -53,8 +53,8 @@ MODULES = {'.NET': ['dotnet_load'],
'apd_set_session_trace',
'apd_set_session',
'override_function',
- 'rename_function'],
- 'Aliases and deprecated Mysqli': ['mysqli_bind_param',
+ 'rename_function'),
+ 'Aliases and deprecated Mysqli': ('mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
@@ -72,8 +72,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_send_long_data',
- 'mysqli_slave_query'],
- 'Apache': ['apache_child_terminate',
+ 'mysqli_slave_query'),
+ 'Apache': ('apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
@@ -84,8 +84,8 @@ MODULES = {'.NET': ['dotnet_load'],
'apache_response_headers',
'apache_setenv',
'getallheaders',
- 'virtual'],
- 'Array': ['array_change_key_case',
+ 'virtual'),
+ 'Array': ('array_change_key_case',
'array_chunk',
'array_column',
'array_combine',
@@ -163,15 +163,15 @@ MODULES = {'.NET': ['dotnet_load'],
'sort',
'uasort',
'uksort',
- 'usort'],
- 'BBCode': ['bbcode_add_element',
+ 'usort'),
+ 'BBCode': ('bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
- 'bbcode_set_flags'],
- 'BC Math': ['bcadd',
+ 'bbcode_set_flags'),
+ 'BC Math': ('bcadd',
'bccomp',
'bcdiv',
'bcmod',
@@ -180,9 +180,9 @@ MODULES = {'.NET': ['dotnet_load'],
'bcpowmod',
'bcscale',
'bcsqrt',
- 'bcsub'],
- 'Blenc': ['blenc_encrypt'],
- 'Bzip2': ['bzclose',
+ 'bcsub'),
+ 'Blenc': ('blenc_encrypt',),
+ 'Bzip2': ('bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
@@ -191,8 +191,8 @@ MODULES = {'.NET': ['dotnet_load'],
'bzflush',
'bzopen',
'bzread',
- 'bzwrite'],
- 'COM': ['com_addref',
+ 'bzwrite'),
+ 'COM': ('com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
@@ -233,8 +233,8 @@ MODULES = {'.NET': ['dotnet_load'],
'variant_set_type',
'variant_set',
'variant_sub',
- 'variant_xor'],
- 'CUBRID': ['cubrid_bind',
+ 'variant_xor'),
+ 'CUBRID': ('cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
@@ -301,8 +301,8 @@ MODULES = {'.NET': ['dotnet_load'],
'cubrid_set_db_parameter',
'cubrid_set_drop',
'cubrid_set_query_timeout',
- 'cubrid_version'],
- 'Cairo': ['cairo_create',
+ 'cubrid_version'),
+ 'Cairo': ('cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
@@ -396,8 +396,8 @@ MODULES = {'.NET': ['dotnet_load'],
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
- 'cairo_svg_version_to_string'],
- 'Calendar': ['cal_days_in_month',
+ 'cairo_svg_version_to_string'),
+ 'Calendar': ('cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
@@ -414,8 +414,8 @@ MODULES = {'.NET': ['dotnet_load'],
'jdtounix',
'JewishToJD',
'JulianToJD',
- 'unixtojd'],
- 'Classes/Object': ['__autoload',
+ 'unixtojd'),
+ 'Classes/Object': ('__autoload',
'call_user_method_array',
'call_user_method',
'class_alias',
@@ -434,18 +434,18 @@ MODULES = {'.NET': ['dotnet_load'],
'is_subclass_of',
'method_exists',
'property_exists',
- 'trait_exists'],
- 'Classkit': ['classkit_import',
+ 'trait_exists'),
+ 'Classkit': ('classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
- 'classkit_method_rename'],
- 'Crack': ['crack_check',
+ 'classkit_method_rename'),
+ 'Crack': ('crack_check',
'crack_closedict',
'crack_getlastmessage',
- 'crack_opendict'],
- 'Ctype': ['ctype_alnum',
+ 'crack_opendict'),
+ 'Ctype': ('ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
@@ -455,14 +455,14 @@ MODULES = {'.NET': ['dotnet_load'],
'ctype_punct',
'ctype_space',
'ctype_upper',
- 'ctype_xdigit'],
- 'Cyrus': ['cyrus_authenticate',
+ 'ctype_xdigit'),
+ 'Cyrus': ('cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
- 'cyrus_unbind'],
- 'DB++': ['dbplus_add',
+ 'cyrus_unbind'),
+ 'DB++': ('dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
@@ -508,8 +508,8 @@ MODULES = {'.NET': ['dotnet_load'],
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
- 'dbplus_xunlockrel'],
- 'DBA': ['dba_close',
+ 'dbplus_xunlockrel'),
+ 'DBA': ('dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
@@ -523,9 +523,9 @@ MODULES = {'.NET': ['dotnet_load'],
'dba_optimize',
'dba_popen',
'dba_replace',
- 'dba_sync'],
- 'DOM': ['dom_import_simplexml'],
- 'Date/Time': ['checkdate',
+ 'dba_sync'),
+ 'DOM': ('dom_import_simplexml',),
+ 'Date/Time': ('checkdate',
'date_add',
'date_create_from_format',
'date_create_immutable_from_format',
@@ -575,8 +575,8 @@ MODULES = {'.NET': ['dotnet_load'],
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
- 'timezone_version_get'],
- 'Direct IO': ['dio_close',
+ 'timezone_version_get'),
+ 'Direct IO': ('dio_close',
'dio_fcntl',
'dio_open',
'dio_read',
@@ -584,8 +584,8 @@ MODULES = {'.NET': ['dotnet_load'],
'dio_stat',
'dio_tcsetattr',
'dio_truncate',
- 'dio_write'],
- 'Directory': ['chdir',
+ 'dio_write'),
+ 'Directory': ('chdir',
'chroot',
'closedir',
'dir',
@@ -593,8 +593,8 @@ MODULES = {'.NET': ['dotnet_load'],
'opendir',
'readdir',
'rewinddir',
- 'scandir'],
- 'Eio': ['eio_busy',
+ 'scandir'),
+ 'Eio': ('eio_busy',
'eio_cancel',
'eio_chmod',
'eio_chown',
@@ -652,8 +652,8 @@ MODULES = {'.NET': ['dotnet_load'],
'eio_truncate',
'eio_unlink',
'eio_utime',
- 'eio_write'],
- 'Enchant': ['enchant_broker_describe',
+ 'eio_write'),
+ 'Enchant': ('enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
@@ -671,8 +671,8 @@ MODULES = {'.NET': ['dotnet_load'],
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
- 'enchant_dict_suggest'],
- 'Error Handling': ['debug_backtrace',
+ 'enchant_dict_suggest'),
+ 'Error Handling': ('debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
@@ -682,14 +682,14 @@ MODULES = {'.NET': ['dotnet_load'],
'set_error_handler',
'set_exception_handler',
'trigger_error',
- 'user_error'],
- 'Exif': ['exif_imagetype',
+ 'user_error'),
+ 'Exif': ('exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
- 'read_exif_data'],
- 'Expect': ['expect_expectl', 'expect_popen'],
- 'FAM': ['fam_cancel_monitor',
+ 'read_exif_data'),
+ 'Expect': ('expect_expectl', 'expect_popen'),
+ 'FAM': ('fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
@@ -698,8 +698,8 @@ MODULES = {'.NET': ['dotnet_load'],
'fam_open',
'fam_pending',
'fam_resume_monitor',
- 'fam_suspend_monitor'],
- 'FDF': ['fdf_add_doc_javascript',
+ 'fam_suspend_monitor'),
+ 'FDF': ('fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
@@ -733,9 +733,9 @@ MODULES = {'.NET': ['dotnet_load'],
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
- 'fdf_set_version'],
- 'FPM': ['fastcgi_finish_request'],
- 'FTP': ['ftp_alloc',
+ 'fdf_set_version'),
+ 'FPM': ('fastcgi_finish_request',),
+ 'FTP': ('ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
@@ -768,8 +768,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
- 'ftp_systype'],
- 'Fann': ['fann_cascadetrain_on_data',
+ 'ftp_systype'),
+ 'Fann': ('fann_cascadetrain_on_data',
'fann_cascadetrain_on_file',
'fann_clear_scaling_params',
'fann_copy',
@@ -909,14 +909,14 @@ MODULES = {'.NET': ['dotnet_load'],
'fann_train_epoch',
'fann_train_on_data',
'fann_train_on_file',
- 'fann_train'],
- 'Fileinfo': ['finfo_buffer',
+ 'fann_train'),
+ 'Fileinfo': ('finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
- 'mime_content_type'],
- 'Filesystem': ['basename',
+ 'mime_content_type'),
+ 'Filesystem': ('basename',
'chgrp',
'chmod',
'chown',
@@ -995,15 +995,15 @@ MODULES = {'.NET': ['dotnet_load'],
'tmpfile',
'touch',
'umask',
- 'unlink'],
- 'Filter': ['filter_has_var',
+ 'unlink'),
+ 'Filter': ('filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
- 'filter_var'],
- 'Firebird/InterBase': ['ibase_add_user',
+ 'filter_var'),
+ 'Firebird/InterBase': ('ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
@@ -1050,9 +1050,9 @@ MODULES = {'.NET': ['dotnet_load'],
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_trans',
- 'ibase_wait_event'],
- 'FriBiDi': ['fribidi_log2vis'],
- 'FrontBase': ['fbsql_affected_rows',
+ 'ibase_wait_event'),
+ 'FriBiDi': ('fribidi_log2vis',),
+ 'FrontBase': ('fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
@@ -1111,8 +1111,8 @@ MODULES = {'.NET': ['dotnet_load'],
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
- 'fbsql_warnings'],
- 'Function handling': ['call_user_func_array',
+ 'fbsql_warnings'),
+ 'Function handling': ('call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
@@ -1124,8 +1124,8 @@ MODULES = {'.NET': ['dotnet_load'],
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
- 'unregister_tick_function'],
- 'GD and Image': ['gd_info',
+ 'unregister_tick_function'),
+ 'GD and Image': ('gd_info',
'getimagesize',
'getimagesizefromstring',
'image_type_to_extension',
@@ -1238,8 +1238,8 @@ MODULES = {'.NET': ['dotnet_load'],
'iptcembed',
'iptcparse',
'jpeg2wbmp',
- 'png2wbmp'],
- 'GMP': ['gmp_abs',
+ 'png2wbmp'),
+ 'GMP': ('gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
@@ -1279,8 +1279,8 @@ MODULES = {'.NET': ['dotnet_load'],
'gmp_strval',
'gmp_sub',
'gmp_testbit',
- 'gmp_xor'],
- 'GeoIP': ['geoip_asnum_by_name',
+ 'gmp_xor'),
+ 'GeoIP': ('geoip_asnum_by_name',
'geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
@@ -1298,8 +1298,8 @@ MODULES = {'.NET': ['dotnet_load'],
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_setup_custom_directory',
- 'geoip_time_zone_by_country_and_region'],
- 'Gettext': ['bind_textdomain_codeset',
+ 'geoip_time_zone_by_country_and_region'),
+ 'Gettext': ('bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
@@ -1307,8 +1307,8 @@ MODULES = {'.NET': ['dotnet_load'],
'dngettext',
'gettext',
'ngettext',
- 'textdomain'],
- 'GnuPG': ['gnupg_adddecryptkey',
+ 'textdomain'),
+ 'GnuPG': ('gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
@@ -1328,9 +1328,9 @@ MODULES = {'.NET': ['dotnet_load'],
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
- 'gnupg_verify'],
- 'Gopher': ['gopher_parsedir'],
- 'Grapheme': ['grapheme_extract',
+ 'gnupg_verify'),
+ 'Gopher': ('gopher_parsedir',),
+ 'Grapheme': ('grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
@@ -1338,8 +1338,8 @@ MODULES = {'.NET': ['dotnet_load'],
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
- 'grapheme_substr'],
- 'Gupnp': ['gupnp_context_get_host_ip',
+ 'grapheme_substr'),
+ 'Gupnp': ('gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
@@ -1376,8 +1376,8 @@ MODULES = {'.NET': ['dotnet_load'],
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
- 'gupnp_service_thaw_notify'],
- 'HTTP': ['http_cache_etag',
+ 'gupnp_service_thaw_notify'),
+ 'HTTP': ('http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
@@ -1427,8 +1427,8 @@ MODULES = {'.NET': ['dotnet_load'],
'http_send_stream',
'http_throttle',
'http_build_str',
- 'http_build_url'],
- 'Hash': ['hash_algos',
+ 'http_build_url'),
+ 'Hash': ('hash_algos',
'hash_copy',
'hash_file',
'hash_final',
@@ -1439,8 +1439,8 @@ MODULES = {'.NET': ['dotnet_load'],
'hash_update_file',
'hash_update_stream',
'hash_update',
- 'hash'],
- 'Hyperwave': ['hw_Array2Objrec',
+ 'hash'),
+ 'Hyperwave': ('hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
@@ -1501,12 +1501,12 @@ MODULES = {'.NET': ['dotnet_load'],
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
- 'hw_Who'],
- 'Hyperwave API': ['hwapi_attribute_new',
+ 'hw_Who'),
+ 'Hyperwave API': ('hwapi_attribute_new',
'hwapi_content_new',
'hwapi_hgcsp',
- 'hwapi_object_new'],
- 'IBM DB2': ['db2_autocommit',
+ 'hwapi_object_new'),
+ 'IBM DB2': ('db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
@@ -1556,8 +1556,8 @@ MODULES = {'.NET': ['dotnet_load'],
'db2_stmt_error',
'db2_stmt_errormsg',
'db2_table_privileges',
- 'db2_tables'],
- 'ID3': ['id3_get_frame_long_name',
+ 'db2_tables'),
+ 'ID3': ('id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
@@ -1565,9 +1565,9 @@ MODULES = {'.NET': ['dotnet_load'],
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
- 'id3_set_tag'],
- 'IDN': ['grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
- 'IIS': ['iis_add_server',
+ 'id3_set_tag'),
+ 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'),
+ 'IIS': ('iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
@@ -1582,8 +1582,8 @@ MODULES = {'.NET': ['dotnet_load'],
'iis_start_server',
'iis_start_service',
'iis_stop_server',
- 'iis_stop_service'],
- 'IMAP': ['imap_8bit',
+ 'iis_stop_service'),
+ 'IMAP': ('imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
@@ -1655,8 +1655,8 @@ MODULES = {'.NET': ['dotnet_load'],
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
- 'imap_utf8'],
- 'Informix': ['ifx_affected_rows',
+ 'imap_utf8'),
+ 'Informix': ('ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
@@ -1693,8 +1693,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
- 'ifxus_write_slob'],
- 'Ingres': ['ingres_autocommit_state',
+ 'ifxus_write_slob'),
+ 'Ingres': ('ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
@@ -1727,19 +1727,19 @@ MODULES = {'.NET': ['dotnet_load'],
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
- 'ingres_unbuffered_query'],
- 'Inotify': ['inotify_add_watch',
+ 'ingres_unbuffered_query'),
+ 'Inotify': ('inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
- 'inotify_rm_watch'],
- 'JSON': ['json_decode',
+ 'inotify_rm_watch'),
+ 'JSON': ('json_decode',
'json_encode',
'json_last_error_msg',
- 'json_last_error'],
- 'Java': ['java_last_exception_clear', 'java_last_exception_get'],
- 'Judy': ['judy_type', 'judy_version'],
- 'KADM5': ['kadm5_chpass_principal',
+ 'json_last_error'),
+ 'Java': ('java_last_exception_clear', 'java_last_exception_get'),
+ 'Judy': ('judy_type', 'judy_version'),
+ 'KADM5': ('kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
@@ -1748,8 +1748,8 @@ MODULES = {'.NET': ['dotnet_load'],
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
- 'kadm5_modify_principal'],
- 'LDAP': ['ldap_8859_to_t61',
+ 'kadm5_modify_principal'),
+ 'LDAP': ('ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
@@ -1793,9 +1793,9 @@ MODULES = {'.NET': ['dotnet_load'],
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
- 'ldap_unbind'],
- 'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
- 'Libevent': ['event_add',
+ 'ldap_unbind'),
+ 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'),
+ 'Libevent': ('event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
@@ -1818,8 +1818,8 @@ MODULES = {'.NET': ['dotnet_load'],
'event_del',
'event_free',
'event_new',
- 'event_set'],
- 'Lotus Notes': ['notes_body',
+ 'event_set'),
+ 'Lotus Notes': ('notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
@@ -1832,8 +1832,8 @@ MODULES = {'.NET': ['dotnet_load'],
'notes_nav_create',
'notes_search',
'notes_unread',
- 'notes_version'],
- 'MCVE': ['m_checkstatus',
+ 'notes_version'),
+ 'MCVE': ('m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
@@ -1871,9 +1871,9 @@ MODULES = {'.NET': ['dotnet_load'],
'm_uwait',
'm_validateidentifier',
'm_verifyconnection',
- 'm_verifysslcert'],
- 'Mail': ['ezmlm_hash', 'mail'],
- 'Mailparse': ['mailparse_determine_best_xfer_encoding',
+ 'm_verifysslcert'),
+ 'Mail': ('ezmlm_hash', 'mail'),
+ 'Mailparse': ('mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
@@ -1886,8 +1886,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
- 'mailparse_uudecode_all'],
- 'Math': ['abs',
+ 'mailparse_uudecode_all'),
+ 'Math': ('abs',
'acos',
'acosh',
'asin',
@@ -1934,8 +1934,8 @@ MODULES = {'.NET': ['dotnet_load'],
'sqrt',
'srand',
'tan',
- 'tanh'],
- 'MaxDB': ['maxdb_affected_rows',
+ 'tanh'),
+ 'MaxDB': ('maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
@@ -2036,8 +2036,8 @@ MODULES = {'.NET': ['dotnet_load'],
'maxdb_thread_id',
'maxdb_thread_safe',
'maxdb_use_result',
- 'maxdb_warning_count'],
- 'Mcrypt': ['mcrypt_cbc',
+ 'maxdb_warning_count'),
+ 'Mcrypt': ('mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
@@ -2073,20 +2073,20 @@ MODULES = {'.NET': ['dotnet_load'],
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
- 'mdecrypt_generic'],
- 'Memcache': ['memcache_debug'],
- 'Mhash': ['mhash_count',
+ 'mdecrypt_generic'),
+ 'Memcache': ('memcache_debug',),
+ 'Mhash': ('mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
- 'mhash'],
- 'Ming': ['ming_keypress',
+ 'mhash'),
+ 'Ming': ('ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
- 'ming_useswfversion'],
- 'Misc.': ['connection_aborted',
+ 'ming_useswfversion'),
+ 'Misc.': ('connection_aborted',
'connection_status',
'connection_timeout',
'constant',
@@ -2110,9 +2110,9 @@ MODULES = {'.NET': ['dotnet_load'],
'time_sleep_until',
'uniqid',
'unpack',
- 'usleep'],
- 'Mongo': ['bson_decode', 'bson_encode'],
- 'Msession': ['msession_connect',
+ 'usleep'),
+ 'Mongo': ('bson_decode', 'bson_encode'),
+ 'Msession': ('msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
@@ -2132,8 +2132,8 @@ MODULES = {'.NET': ['dotnet_load'],
'msession_set',
'msession_timeout',
'msession_uniq',
- 'msession_unlock'],
- 'Mssql': ['mssql_bind',
+ 'msession_unlock'),
+ 'Mssql': ('mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
@@ -2162,8 +2162,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mssql_query',
'mssql_result',
'mssql_rows_affected',
- 'mssql_select_db'],
- 'Multibyte String': ['mb_check_encoding',
+ 'mssql_select_db'),
+ 'Multibyte String': ('mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
@@ -2217,8 +2217,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
- 'mb_substr'],
- 'MySQL': ['mysql_affected_rows',
+ 'mb_substr'),
+ 'MySQL': ('mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
@@ -2265,9 +2265,9 @@ MODULES = {'.NET': ['dotnet_load'],
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
- 'mysql_unbuffered_query'],
- 'Mysqlnd_memcache': ['mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'],
- 'Mysqlnd_ms': ['mysqlnd_ms_dump_servers',
+ 'mysql_unbuffered_query'),
+ 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'),
+ 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers',
'mysqlnd_ms_fabric_select_global',
'mysqlnd_ms_fabric_select_shard',
'mysqlnd_ms_get_last_gtid',
@@ -2276,12 +2276,12 @@ MODULES = {'.NET': ['dotnet_load'],
'mysqlnd_ms_match_wild',
'mysqlnd_ms_query_is_select',
'mysqlnd_ms_set_qos',
- 'mysqlnd_ms_set_user_pick_server'],
- 'Mysqlnd_uh': ['mysqlnd_uh_convert_to_mysqlnd',
+ 'mysqlnd_ms_set_user_pick_server'),
+ 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd',
'mysqlnd_uh_set_connection_proxy',
- 'mysqlnd_uh_set_statement_proxy'],
- 'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
- 'Ncurses': ['ncurses_addch',
+ 'mysqlnd_uh_set_statement_proxy'),
+ 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'),
+ 'Ncurses': ('ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
@@ -2440,8 +2440,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
- 'ncurses_wvline'],
- 'Network': ['checkdnsrr',
+ 'ncurses_wvline'),
+ 'Network': ('checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
@@ -2474,8 +2474,8 @@ MODULES = {'.NET': ['dotnet_load'],
'socket_get_status',
'socket_set_blocking',
'socket_set_timeout',
- 'syslog'],
- 'Newt': ['newt_bell',
+ 'syslog'),
+ 'Newt': ('newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
@@ -2590,9 +2590,9 @@ MODULES = {'.NET': ['dotnet_load'],
'newt_win_menu',
'newt_win_message',
'newt_win_messagev',
- 'newt_win_ternary'],
- 'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
- 'OCI8': ['oci_bind_array_by_name',
+ 'newt_win_ternary'),
+ 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'),
+ 'OCI8': ('oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_client_version',
@@ -2639,8 +2639,8 @@ MODULES = {'.NET': ['dotnet_load'],
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
- 'oci_statement_type'],
- 'ODBC': ['odbc_autocommit',
+ 'oci_statement_type'),
+ 'ODBC': ('odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
@@ -2684,13 +2684,13 @@ MODULES = {'.NET': ['dotnet_load'],
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
- 'odbc_tables'],
- 'OPcache': ['opcache_compile_file',
+ 'odbc_tables'),
+ 'OPcache': ('opcache_compile_file',
'opcache_get_configuration',
'opcache_get_status',
'opcache_invalidate',
- 'opcache_reset'],
- 'Object Aggregation': ['aggregate_info',
+ 'opcache_reset'),
+ 'Object Aggregation': ('aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp',
'aggregate_methods',
@@ -2699,8 +2699,8 @@ MODULES = {'.NET': ['dotnet_load'],
'aggregate_properties',
'aggregate',
'aggregation_info',
- 'deaggregate'],
- 'OpenAL': ['openal_buffer_create',
+ 'deaggregate'),
+ 'OpenAL': ('openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
@@ -2722,8 +2722,8 @@ MODULES = {'.NET': ['dotnet_load'],
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
- 'openal_stream'],
- 'OpenSSL': ['openssl_cipher_iv_length',
+ 'openal_stream'),
+ 'OpenSSL': ('openssl_cipher_iv_length',
'openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
@@ -2774,8 +2774,8 @@ MODULES = {'.NET': ['dotnet_load'],
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
- 'openssl_x509_read'],
- 'Output Control': ['flush',
+ 'openssl_x509_read'),
+ 'Output Control': ('flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
@@ -2791,8 +2791,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
- 'output_reset_rewrite_vars'],
- 'Ovrimos SQL': ['ovrimos_close',
+ 'output_reset_rewrite_vars'),
+ 'Ovrimos SQL': ('ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
@@ -2811,8 +2811,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
- 'ovrimos_rollback'],
- 'PCNTL': ['pcntl_alarm',
+ 'ovrimos_rollback'),
+ 'PCNTL': ('pcntl_alarm',
'pcntl_errno',
'pcntl_exec',
'pcntl_fork',
@@ -2832,8 +2832,8 @@ MODULES = {'.NET': ['dotnet_load'],
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
- 'pcntl_wtermsig'],
- 'PCRE': ['preg_filter',
+ 'pcntl_wtermsig'),
+ 'PCRE': ('preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
@@ -2841,8 +2841,8 @@ MODULES = {'.NET': ['dotnet_load'],
'preg_quote',
'preg_replace_callback',
'preg_replace',
- 'preg_split'],
- 'PDF': ['PDF_activate_item',
+ 'preg_split'),
+ 'PDF': ('PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
@@ -3020,8 +3020,8 @@ MODULES = {'.NET': ['dotnet_load'],
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
- 'PDF_utf8_to_utf16'],
- 'PHP Options/Info': ['assert_options',
+ 'PDF_utf8_to_utf16'),
+ 'PHP Options/Info': ('assert_options',
'assert',
'cli_get_process_title',
'cli_set_process_title',
@@ -3074,8 +3074,8 @@ MODULES = {'.NET': ['dotnet_load'],
'version_compare',
'zend_logo_guid',
'zend_thread_id',
- 'zend_version'],
- 'POSIX': ['posix_access',
+ 'zend_version'),
+ 'POSIX': ('posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
@@ -3110,15 +3110,15 @@ MODULES = {'.NET': ['dotnet_load'],
'posix_strerror',
'posix_times',
'posix_ttyname',
- 'posix_uname'],
- 'POSIX Regex': ['ereg_replace',
+ 'posix_uname'),
+ 'POSIX Regex': ('ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
- 'sql_regcase'],
- 'PS': ['ps_add_bookmark',
+ 'sql_regcase'),
+ 'PS': ('ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
@@ -3195,8 +3195,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
- 'ps_translate'],
- 'Paradox': ['px_close',
+ 'ps_translate'),
+ 'Paradox': ('px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
@@ -3220,15 +3220,15 @@ MODULES = {'.NET': ['dotnet_load'],
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
- 'px_update_record'],
- 'Parsekit': ['parsekit_compile_file',
+ 'px_update_record'),
+ 'Parsekit': ('parsekit_compile_file',
'parsekit_compile_string',
- 'parsekit_func_arginfo'],
- 'Password Hashing': ['password_get_info',
+ 'parsekit_func_arginfo'),
+ 'Password Hashing': ('password_get_info',
'password_hash',
'password_needs_rehash',
- 'password_verify'],
- 'PostgreSQL': ['pg_affected_rows',
+ 'password_verify'),
+ 'PostgreSQL': ('pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
@@ -3312,8 +3312,8 @@ MODULES = {'.NET': ['dotnet_load'],
'pg_unescape_bytea',
'pg_untrace',
'pg_update',
- 'pg_version'],
- 'Printer': ['printer_abort',
+ 'pg_version'),
+ 'Printer': ('printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
@@ -3343,9 +3343,9 @@ MODULES = {'.NET': ['dotnet_load'],
'printer_set_option',
'printer_start_doc',
'printer_start_page',
- 'printer_write'],
- 'Proctitle': ['setproctitle', 'setthreadtitle'],
- 'Program execution': ['escapeshellarg',
+ 'printer_write'),
+ 'Proctitle': ('setproctitle', 'setthreadtitle'),
+ 'Program execution': ('escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
@@ -3355,8 +3355,8 @@ MODULES = {'.NET': ['dotnet_load'],
'proc_open',
'proc_terminate',
'shell_exec',
- 'system'],
- 'Pspell': ['pspell_add_to_personal',
+ 'system'),
+ 'Pspell': ('pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
@@ -3374,13 +3374,13 @@ MODULES = {'.NET': ['dotnet_load'],
'pspell_new',
'pspell_save_wordlist',
'pspell_store_replacement',
- 'pspell_suggest'],
- 'RPM Reader': ['rpm_close',
+ 'pspell_suggest'),
+ 'RPM Reader': ('rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
- 'rpm_version'],
- 'RRD': ['rrd_create',
+ 'rpm_version'),
+ 'RRD': ('rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
@@ -3393,8 +3393,8 @@ MODULES = {'.NET': ['dotnet_load'],
'rrd_update',
'rrd_version',
'rrd_xport',
- 'rrdc_disconnect'],
- 'Radius': ['radius_acct_open',
+ 'rrdc_disconnect'),
+ 'Radius': ('radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
@@ -3421,9 +3421,9 @@ MODULES = {'.NET': ['dotnet_load'],
'radius_salt_encrypt_attr',
'radius_send_request',
'radius_server_secret',
- 'radius_strerror'],
- 'Rar': ['rar_wrapper_cache_stats'],
- 'Readline': ['readline_add_history',
+ 'radius_strerror'),
+ 'Rar': ('rar_wrapper_cache_stats',),
+ 'Readline': ('readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
@@ -3435,9 +3435,9 @@ MODULES = {'.NET': ['dotnet_load'],
'readline_read_history',
'readline_redisplay',
'readline_write_history',
- 'readline'],
- 'Recode': ['recode_file', 'recode_string', 'recode'],
- 'SNMP': ['snmp_get_quick_print',
+ 'readline'),
+ 'Recode': ('recode_file', 'recode_string', 'recode'),
+ 'SNMP': ('snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
@@ -3460,9 +3460,9 @@ MODULES = {'.NET': ['dotnet_load'],
'snmprealwalk',
'snmpset',
'snmpwalk',
- 'snmpwalkoid'],
- 'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
- 'SPL': ['class_implements',
+ 'snmpwalkoid'),
+ 'SOAP': ('is_soap_fault', 'use_soap_error_handler'),
+ 'SPL': ('class_implements',
'class_parents',
'class_uses',
'iterator_apply',
@@ -3475,9 +3475,9 @@ MODULES = {'.NET': ['dotnet_load'],
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
- 'spl_object_hash'],
- 'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
- 'SQLSRV': ['sqlsrv_begin_transaction',
+ 'spl_object_hash'),
+ 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'),
+ 'SQLSRV': ('sqlsrv_begin_transaction',
'sqlsrv_cancel',
'sqlsrv_client_info',
'sqlsrv_close',
@@ -3502,8 +3502,8 @@ MODULES = {'.NET': ['dotnet_load'],
'sqlsrv_rollback',
'sqlsrv_rows_affected',
'sqlsrv_send_stream_data',
- 'sqlsrv_server_info'],
- 'SQLite': ['sqlite_array_query',
+ 'sqlsrv_server_info'),
+ 'SQLite': ('sqlite_array_query',
'sqlite_busy_timeout',
'sqlite_changes',
'sqlite_close',
@@ -3542,8 +3542,8 @@ MODULES = {'.NET': ['dotnet_load'],
'sqlite_udf_decode_binary',
'sqlite_udf_encode_binary',
'sqlite_unbuffered_query',
- 'sqlite_valid'],
- 'SSH2': ['ssh2_auth_agent',
+ 'sqlite_valid'),
+ 'SSH2': ('ssh2_auth_agent',
'ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
@@ -3571,8 +3571,8 @@ MODULES = {'.NET': ['dotnet_load'],
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
- 'ssh2_tunnel'],
- 'SVN': ['svn_add',
+ 'ssh2_tunnel'),
+ 'SVN': ('svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
@@ -3619,8 +3619,8 @@ MODULES = {'.NET': ['dotnet_load'],
'svn_repos_recover',
'svn_revert',
'svn_status',
- 'svn_update'],
- 'SWF': ['swf_actiongeturl',
+ 'svn_update'),
+ 'SWF': ('swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
@@ -3686,8 +3686,8 @@ MODULES = {'.NET': ['dotnet_load'],
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
- 'swf_viewport'],
- 'Semaphore': ['ftok',
+ 'swf_viewport'),
+ 'Semaphore': ('ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
@@ -3705,8 +3705,8 @@ MODULES = {'.NET': ['dotnet_load'],
'shm_has_var',
'shm_put_var',
'shm_remove_var',
- 'shm_remove'],
- 'Session': ['session_cache_expire',
+ 'shm_remove'),
+ 'Session': ('session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
@@ -3727,23 +3727,23 @@ MODULES = {'.NET': ['dotnet_load'],
'session_status',
'session_unregister',
'session_unset',
- 'session_write_close'],
- 'Session PgSQL': ['session_pgsql_add_error',
+ 'session_write_close'),
+ 'Session PgSQL': ('session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
- 'session_pgsql_status'],
- 'Shared Memory': ['shmop_close',
+ 'session_pgsql_status'),
+ 'Shared Memory': ('shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
- 'shmop_write'],
- 'SimpleXML': ['simplexml_import_dom',
+ 'shmop_write'),
+ 'SimpleXML': ('simplexml_import_dom',
'simplexml_load_file',
- 'simplexml_load_string'],
- 'Socket': ['socket_accept',
+ 'simplexml_load_string'),
+ 'Socket': ('socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
@@ -3771,9 +3771,9 @@ MODULES = {'.NET': ['dotnet_load'],
'socket_set_option',
'socket_shutdown',
'socket_strerror',
- 'socket_write'],
- 'Solr': ['solr_get_version'],
- 'Statistic': ['stats_absolute_deviation',
+ 'socket_write'),
+ 'Solr': ('solr_get_version',),
+ 'Statistic': ('stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
@@ -3840,9 +3840,9 @@ MODULES = {'.NET': ['dotnet_load'],
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
- 'stats_variance'],
- 'Stomp': ['stomp_connect_error', 'stomp_version'],
- 'Stream': ['set_socket_blocking',
+ 'stats_variance'),
+ 'Stomp': ('stomp_connect_error', 'stomp_version'),
+ 'Stream': ('set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
@@ -3888,8 +3888,8 @@ MODULES = {'.NET': ['dotnet_load'],
'stream_supports_lock',
'stream_wrapper_register',
'stream_wrapper_restore',
- 'stream_wrapper_unregister'],
- 'String': ['addcslashes',
+ 'stream_wrapper_unregister'),
+ 'String': ('addcslashes',
'addslashes',
'bin2hex',
'chop',
@@ -3986,8 +3986,8 @@ MODULES = {'.NET': ['dotnet_load'],
'vfprintf',
'vprintf',
'vsprintf',
- 'wordwrap'],
- 'Sybase': ['sybase_affected_rows',
+ 'wordwrap'),
+ 'Sybase': ('sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
@@ -4011,10 +4011,10 @@ MODULES = {'.NET': ['dotnet_load'],
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
- 'sybase_unbuffered_query'],
- 'TCP': ['tcpwrap_check'],
- 'Taint': ['is_tainted', 'taint', 'untaint'],
- 'Tidy': ['ob_tidyhandler',
+ 'sybase_unbuffered_query'),
+ 'TCP': ('tcpwrap_check',),
+ 'Taint': ('is_tainted', 'taint', 'untaint'),
+ 'Tidy': ('ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
@@ -4024,9 +4024,9 @@ MODULES = {'.NET': ['dotnet_load'],
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
- 'tidy_warning_count'],
- 'Tokenizer': ['token_get_all', 'token_name'],
- 'Trader': ['trader_acos',
+ 'tidy_warning_count'),
+ 'Tokenizer': ('token_get_all', 'token_name'),
+ 'Trader': ('trader_acos',
'trader_ad',
'trader_add',
'trader_adosc',
@@ -4188,8 +4188,8 @@ MODULES = {'.NET': ['dotnet_load'],
'trader_var',
'trader_wclprice',
'trader_willr',
- 'trader_wma'],
- 'URL': ['base64_decode',
+ 'trader_wma'),
+ 'URL': ('base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
@@ -4198,8 +4198,8 @@ MODULES = {'.NET': ['dotnet_load'],
'rawurldecode',
'rawurlencode',
'urldecode',
- 'urlencode'],
- 'Uopz': ['uopz_backup',
+ 'urlencode'),
+ 'Uopz': ('uopz_backup',
'uopz_compose',
'uopz_copy',
'uopz_delete',
@@ -4211,8 +4211,8 @@ MODULES = {'.NET': ['dotnet_load'],
'uopz_redefine',
'uopz_rename',
'uopz_restore',
- 'uopz_undefine'],
- 'Variable handling': ['boolval',
+ 'uopz_undefine'),
+ 'Variable handling': ('boolval',
'debug_zval_dump',
'doubleval',
'empty',
@@ -4245,19 +4245,19 @@ MODULES = {'.NET': ['dotnet_load'],
'unserialize',
'unset',
'var_dump',
- 'var_export'],
- 'W32api': ['w32api_deftype',
+ 'var_export'),
+ 'W32api': ('w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
- 'w32api_set_call_method'],
- 'WDDX': ['wddx_add_vars',
+ 'w32api_set_call_method'),
+ 'WDDX': ('wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
- 'wddx_serialize_vars'],
- 'WinCache': ['wincache_fcache_fileinfo',
+ 'wddx_serialize_vars'),
+ 'WinCache': ('wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
@@ -4278,8 +4278,8 @@ MODULES = {'.NET': ['dotnet_load'],
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
- 'wincache_unlock'],
- 'XML Parser': ['utf8_decode',
+ 'wincache_unlock'),
+ 'XML Parser': ('utf8_decode',
'utf8_encode',
'xml_error_string',
'xml_get_current_byte_index',
@@ -4302,8 +4302,8 @@ MODULES = {'.NET': ['dotnet_load'],
'xml_set_object',
'xml_set_processing_instruction_handler',
'xml_set_start_namespace_decl_handler',
- 'xml_set_unparsed_entity_decl_handler'],
- 'XML-RPC': ['xmlrpc_decode_request',
+ 'xml_set_unparsed_entity_decl_handler'),
+ 'XML-RPC': ('xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
@@ -4316,8 +4316,8 @@ MODULES = {'.NET': ['dotnet_load'],
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
- 'xmlrpc_set_type'],
- 'XSLT (PHP 4)': ['xslt_backend_info',
+ 'xmlrpc_set_type'),
+ 'XSLT (PHP 4)': ('xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
@@ -4335,12 +4335,12 @@ MODULES = {'.NET': ['dotnet_load'],
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
- 'xslt_setopt'],
- 'Xhprof': ['xhprof_disable',
+ 'xslt_setopt'),
+ 'Xhprof': ('xhprof_disable',
'xhprof_enable',
'xhprof_sample_disable',
- 'xhprof_sample_enable'],
- 'YAZ': ['yaz_addinfo',
+ 'xhprof_sample_enable'),
+ 'YAZ': ('yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
@@ -4364,8 +4364,8 @@ MODULES = {'.NET': ['dotnet_load'],
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
- 'yaz_wait'],
- 'YP/NIS': ['yp_all',
+ 'yaz_wait'),
+ 'YP/NIS': ('yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
@@ -4374,13 +4374,13 @@ MODULES = {'.NET': ['dotnet_load'],
'yp_master',
'yp_match',
'yp_next',
- 'yp_order'],
- 'Yaml': ['yaml_emit_file',
+ 'yp_order'),
+ 'Yaml': ('yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
- 'yaml_parse'],
- 'Zip': ['zip_close',
+ 'yaml_parse'),
+ 'Zip': ('zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
@@ -4389,8 +4389,8 @@ MODULES = {'.NET': ['dotnet_load'],
'zip_entry_open',
'zip_entry_read',
'zip_open',
- 'zip_read'],
- 'Zlib': ['gzclose',
+ 'zip_read'),
+ 'Zlib': ('gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
@@ -4413,8 +4413,8 @@ MODULES = {'.NET': ['dotnet_load'],
'readgzfile',
'zlib_decode',
'zlib_encode',
- 'zlib_get_coding_type'],
- 'bcompiler': ['bcompiler_load_exe',
+ 'zlib_get_coding_type'),
+ 'bcompiler': ('bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
@@ -4426,8 +4426,8 @@ MODULES = {'.NET': ['dotnet_load'],
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
- 'bcompiler_write_included_filename'],
- 'cURL': ['curl_close',
+ 'bcompiler_write_included_filename'),
+ 'cURL': ('curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
@@ -4455,9 +4455,9 @@ MODULES = {'.NET': ['dotnet_load'],
'curl_share_setopt',
'curl_strerror',
'curl_unescape',
- 'curl_version'],
- 'chdb': ['chdb_create'],
- 'dBase': ['dbase_add_record',
+ 'curl_version'),
+ 'chdb': ('chdb_create',),
+ 'dBase': ('dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
@@ -4468,23 +4468,23 @@ MODULES = {'.NET': ['dotnet_load'],
'dbase_numrecords',
'dbase_open',
'dbase_pack',
- 'dbase_replace_record'],
- 'dbx': ['dbx_close',
+ 'dbase_replace_record'),
+ 'dbx': ('dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row',
'dbx_query',
- 'dbx_sort'],
- 'filePro': ['filepro_fieldcount',
+ 'dbx_sort'),
+ 'filePro': ('filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
- 'filepro'],
- 'iconv': ['iconv_get_encoding',
+ 'filepro'),
+ 'iconv': ('iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
@@ -4494,20 +4494,20 @@ MODULES = {'.NET': ['dotnet_load'],
'iconv_strrpos',
'iconv_substr',
'iconv',
- 'ob_iconv_handler'],
- 'inclued': ['inclued_get_data'],
- 'intl': ['intl_error_name',
+ 'ob_iconv_handler'),
+ 'inclued': ('inclued_get_data',),
+ 'intl': ('intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
- 'intl_is_failure'],
- 'libxml': ['libxml_clear_errors',
+ 'intl_is_failure'),
+ 'libxml': ('libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_external_entity_loader',
'libxml_set_streams_context',
- 'libxml_use_internal_errors'],
- 'mSQL': ['msql_affected_rows',
+ 'libxml_use_internal_errors'),
+ 'mSQL': ('msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
@@ -4546,8 +4546,8 @@ MODULES = {'.NET': ['dotnet_load'],
'msql_result',
'msql_select_db',
'msql_tablename',
- 'msql'],
- 'mnoGoSearch': ['udm_add_search_limit',
+ 'msql'),
+ 'mnoGoSearch': ('udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
@@ -4570,8 +4570,8 @@ MODULES = {'.NET': ['dotnet_load'],
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
- 'udm_set_agent_param'],
- 'mqseries': ['mqseries_back',
+ 'udm_set_agent_param'),
+ 'mqseries': ('mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
@@ -4584,8 +4584,8 @@ MODULES = {'.NET': ['dotnet_load'],
'mqseries_put1',
'mqseries_put',
'mqseries_set',
- 'mqseries_strerror'],
- 'mysqlnd_qc': ['mysqlnd_qc_clear_cache',
+ 'mqseries_strerror'),
+ 'mysqlnd_qc': ('mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_available_handlers',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
@@ -4594,9 +4594,9 @@ MODULES = {'.NET': ['dotnet_load'],
'mysqlnd_qc_set_cache_condition',
'mysqlnd_qc_set_is_select',
'mysqlnd_qc_set_storage_handler',
- 'mysqlnd_qc_set_user_handlers'],
- 'qtdom': ['qdom_error', 'qdom_tree'],
- 'runkit': ['runkit_class_adopt',
+ 'mysqlnd_qc_set_user_handlers'),
+ 'qtdom': ('qdom_error', 'qdom_tree'),
+ 'runkit': ('runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
@@ -4616,11 +4616,11 @@ MODULES = {'.NET': ['dotnet_load'],
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
- 'runkit_superglobals'],
- 'ssdeep': ['ssdeep_fuzzy_compare',
+ 'runkit_superglobals'),
+ 'ssdeep': ('ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
- 'ssdeep_fuzzy_hash'],
- 'vpopmail': ['vpopmail_add_alias_domain_ex',
+ 'ssdeep_fuzzy_hash'),
+ 'vpopmail': ('vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
@@ -4636,9 +4636,9 @@ MODULES = {'.NET': ['dotnet_load'],
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
- 'vpopmail_set_user_quota'],
- 'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
- 'win32service': ['win32_continue_service',
+ 'vpopmail_set_user_quota'),
+ 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'),
+ 'win32service': ('win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
@@ -4647,13 +4647,13 @@ MODULES = {'.NET': ['dotnet_load'],
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
- 'win32_stop_service'],
- 'xattr': ['xattr_get',
+ 'win32_stop_service'),
+ 'xattr': ('xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
- 'xattr_supported'],
- 'xdiff': ['xdiff_file_bdiff_size',
+ 'xattr_supported'),
+ 'xdiff': ('xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
@@ -4670,7 +4670,7 @@ MODULES = {'.NET': ['dotnet_load'],
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
- 'xdiff_string_rabdiff']}
+ 'xdiff_string_rabdiff')}
if __name__ == '__main__':
import glob
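
The hunks above convert every category in MODULES from a list literal to a tuple. The one detail worth noting is that single-function categories such as 'Memcache', 'Rar', 'Solr', 'TCP', 'chdb' and 'inclued' gain a trailing comma, because parentheses alone do not make a tuple. A small illustrative sketch (not part of the patch):

# Sketch only: why single-function categories keep a trailing comma
# after the list-to-tuple conversion shown above.
MODULES = {
    'Memcache': ('memcache_debug',),            # one-element tuple
    'Mongo': ('bson_decode', 'bson_encode'),    # multi-element tuple
}

assert isinstance(MODULES['Memcache'], tuple)
assert ('memcache_debug') == 'memcache_debug'   # no comma: just a string
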
@@ -4733,20 +4733,16 @@ if __name__ == '__main__':
os.remove(download[0])
def regenerate(filename, modules):
- f = open(filename)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(filename) as fp:
+ content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
- f = open(filename, 'w')
- f.write(header)
- f.write('MODULES = %s\n\n' % pprint.pformat(modules))
- f.write(footer)
- f.close()
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
+ fp.write(footer)
def run():
print('>> Downloading Function Index')
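
The regenerate() hunk above replaces the explicit open/try/finally/close pairs with `with` blocks, which close the file even if an exception is raised while reading or writing. A minimal self-contained sketch of the same read/splice/write pattern (the file name is hypothetical):

# Sketch of the pattern adopted in regenerate() above.
import pprint

def regenerate_sketch(filename, modules):
    # read the current file, keeping everything before and after MODULES
    with open(filename) as fp:
        content = fp.read()
    header = content[:content.find('MODULES = {')]
    footer = content[content.find("if __name__ == '__main__':"):]
    # rewrite the file with a freshly pretty-printed MODULES dict
    with open(filename, 'w') as fp:
        fp.write(header)
        fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
        fp.write(footer)
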
diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py
index 11dc6dec..c71cfb66 100644
--- a/pygments/lexers/_postgres_builtins.py
+++ b/pygments/lexers/_postgres_builtins.py
@@ -15,6 +15,8 @@ try:
except ImportError:
from urllib.request import urlopen
+from pygments.util import format_lines
+
# One man's constant is another man's variable.
SOURCE_URL = 'https://github.com/postgres/postgres/raw/master'
KEYWORDS_URL = SOURCE_URL + '/doc/src/sgml/keywords.sgml'
@@ -100,134 +102,519 @@ def parse_pseudos(f):
return dt
def update_consts(filename, constname, content):
- f = open(filename)
- lines = f.readlines()
- f.close()
+ with open(filename) as f:
+ data = f.read()
# Line to start/end inserting
- re_start = re.compile(r'^%s\s*=\s*\[\s*$' % constname)
- re_end = re.compile(r'^\s*\]\s*$')
- start = [ n for n, l in enumerate(lines) if re_start.match(l) ]
- if not start:
- raise ValueError("couldn't find line containing '%s = ['" % constname)
- if len(start) > 1:
- raise ValueError("too many lines containing '%s = ['" % constname)
- start = start[0] + 1
-
- end = [ n for n, l in enumerate(lines) if n >= start and re_end.match(l) ]
- if not end:
- raise ValueError("couldn't find line containing ']' after %s " % constname)
- end = end[0]
-
- # Pack the new content in lines not too long
- content = [repr(item) for item in content ]
- new_lines = [[]]
- for item in content:
- if sum(map(len, new_lines[-1])) + 2 * len(new_lines[-1]) + len(item) + 4 > 75:
- new_lines.append([])
- new_lines[-1].append(item)
-
- lines[start:end] = [ " %s,\n" % ", ".join(items) for items in new_lines ]
-
- f = open(filename, 'w')
- f.write(''.join(lines))
- f.close()
+ re_match = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
+ m = re_match.search(data)
+ if not m:
+ raise ValueError('Could not find existing definition for %s' %
+ (constname,))
+
+ new_block = format_lines(constname, content)
+ data = data[:m.start()] + new_block + data[m.end():]
+
+ with open(filename, 'w') as f:
+ f.write(data)
# Autogenerated: please edit them if you like wasting your time.
-KEYWORDS = [
- 'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER',
- 'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE',
- 'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT',
- 'ASYMMETRIC', 'AT', 'ATTRIBUTE', 'AUTHORIZATION', 'BACKWARD', 'BEFORE',
- 'BEGIN', 'BETWEEN', 'BIGINT', 'BINARY', 'BIT', 'BOOLEAN', 'BOTH', 'BY',
- 'CACHE', 'CALLED', 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG',
- 'CHAIN', 'CHAR', 'CHARACTER', 'CHARACTERISTICS', 'CHECK', 'CHECKPOINT',
- 'CLASS', 'CLOSE', 'CLUSTER', 'COALESCE', 'COLLATE', 'COLLATION',
- 'COLUMN', 'COMMENT', 'COMMENTS', 'COMMIT', 'COMMITTED', 'CONCURRENTLY',
- 'CONFIGURATION', 'CONNECTION', 'CONSTRAINT', 'CONSTRAINTS', 'CONTENT',
- 'CONTINUE', 'CONVERSION', 'COPY', 'COST', 'CREATE', 'CROSS', 'CSV',
- 'CURRENT', 'CURRENT_CATALOG', 'CURRENT_DATE', 'CURRENT_ROLE',
- 'CURRENT_SCHEMA', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'CURRENT_USER',
- 'CURSOR', 'CYCLE', 'DATA', 'DATABASE', 'DAY', 'DEALLOCATE', 'DEC',
- 'DECIMAL', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
- 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DESC', 'DICTIONARY',
- 'DISABLE', 'DISCARD', 'DISTINCT', 'DO', 'DOCUMENT', 'DOMAIN', 'DOUBLE',
- 'DROP', 'EACH', 'ELSE', 'ENABLE', 'ENCODING', 'ENCRYPTED', 'END',
- 'ENUM', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXCLUDING', 'EXCLUSIVE',
- 'EXECUTE', 'EXISTS', 'EXPLAIN', 'EXTENSION', 'EXTERNAL', 'EXTRACT',
- 'FALSE', 'FAMILY', 'FETCH', 'FIRST', 'FLOAT', 'FOLLOWING', 'FOR',
- 'FORCE', 'FOREIGN', 'FORWARD', 'FREEZE', 'FROM', 'FULL', 'FUNCTION',
- 'FUNCTIONS', 'GLOBAL', 'GRANT', 'GRANTED', 'GREATEST', 'GROUP',
- 'HANDLER', 'HAVING', 'HEADER', 'HOLD', 'HOUR', 'IDENTITY', 'IF',
- 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLICIT', 'IN', 'INCLUDING',
- 'INCREMENT', 'INDEX', 'INDEXES', 'INHERIT', 'INHERITS', 'INITIALLY',
- 'INLINE', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTEAD',
- 'INT', 'INTEGER', 'INTERSECT', 'INTERVAL', 'INTO', 'INVOKER', 'IS',
- 'ISNULL', 'ISOLATION', 'JOIN', 'KEY', 'LABEL', 'LANGUAGE', 'LARGE',
- 'LAST', 'LC_COLLATE', 'LC_CTYPE', 'LEADING', 'LEAST', 'LEFT', 'LEVEL',
- 'LIKE', 'LIMIT', 'LISTEN', 'LOAD', 'LOCAL', 'LOCALTIME',
- 'LOCALTIMESTAMP', 'LOCATION', 'LOCK', 'MAPPING', 'MATCH', 'MAXVALUE',
- 'MINUTE', 'MINVALUE', 'MODE', 'MONTH', 'MOVE', 'NAME', 'NAMES',
- 'NATIONAL', 'NATURAL', 'NCHAR', 'NEXT', 'NO', 'NONE', 'NOT', 'NOTHING',
- 'NOTIFY', 'NOTNULL', 'NOWAIT', 'NULL', 'NULLIF', 'NULLS', 'NUMERIC',
- 'OBJECT', 'OF', 'OFF', 'OFFSET', 'OIDS', 'ON', 'ONLY', 'OPERATOR',
- 'OPTION', 'OPTIONS', 'OR', 'ORDER', 'OUT', 'OUTER', 'OVER', 'OVERLAPS',
- 'OVERLAY', 'OWNED', 'OWNER', 'PARSER', 'PARTIAL', 'PARTITION',
- 'PASSING', 'PASSWORD', 'PLACING', 'PLANS', 'POSITION', 'PRECEDING',
- 'PRECISION', 'PREPARE', 'PREPARED', 'PRESERVE', 'PRIMARY', 'PRIOR',
- 'PRIVILEGES', 'PROCEDURAL', 'PROCEDURE', 'QUOTE', 'RANGE', 'READ',
- 'REAL', 'REASSIGN', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
- 'REINDEX', 'RELATIVE', 'RELEASE', 'RENAME', 'REPEATABLE', 'REPLACE',
- 'REPLICA', 'RESET', 'RESTART', 'RESTRICT', 'RETURNING', 'RETURNS',
- 'REVOKE', 'RIGHT', 'ROLE', 'ROLLBACK', 'ROW', 'ROWS', 'RULE',
- 'SAVEPOINT', 'SCHEMA', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY',
- 'SELECT', 'SEQUENCE', 'SEQUENCES', 'SERIALIZABLE', 'SERVER', 'SESSION',
- 'SESSION_USER', 'SET', 'SETOF', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE',
- 'SMALLINT', 'SOME', 'STABLE', 'STANDALONE', 'START', 'STATEMENT',
- 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRIP',
- 'SUBSTRING', 'SYMMETRIC', 'SYSID', 'SYSTEM', 'TABLE', 'TABLES',
- 'TABLESPACE', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TEXT', 'THEN', 'TIME',
- 'TIMESTAMP', 'TO', 'TRAILING', 'TRANSACTION', 'TREAT', 'TRIGGER',
- 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', 'UNBOUNDED',
- 'UNCOMMITTED', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
- 'UNLOGGED', 'UNTIL', 'UPDATE', 'USER', 'USING', 'VACUUM', 'VALID',
- 'VALIDATE', 'VALIDATOR', 'VALUE', 'VALUES', 'VARCHAR', 'VARIADIC',
- 'VARYING', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHERE',
- 'WHITESPACE', 'WINDOW', 'WITH', 'WITHOUT', 'WORK', 'WRAPPER', 'WRITE',
- 'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS',
- 'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR',
- 'YES', 'ZONE',
- ]
-
-DATATYPES = [
- 'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box',
- 'bytea', 'char', 'character', 'character varying', 'cidr', 'circle',
- 'date', 'decimal', 'double precision', 'float4', 'float8', 'inet',
- 'int', 'int2', 'int4', 'int8', 'integer', 'interval', 'json', 'line',
- 'lseg', 'macaddr', 'money', 'numeric', 'path', 'point', 'polygon',
- 'real', 'serial', 'serial2', 'serial4', 'serial8', 'smallint',
- 'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz',
- 'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar',
- 'with time zone', 'without time zone', 'xml',
- ]
-
-PSEUDO_TYPES = [
- 'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange',
- 'cstring', 'internal', 'language_handler', 'fdw_handler', 'record',
- 'trigger', 'void', 'opaque',
- ]
+KEYWORDS = (
+ 'ABORT',
+ 'ABSOLUTE',
+ 'ACCESS',
+ 'ACTION',
+ 'ADD',
+ 'ADMIN',
+ 'AFTER',
+ 'AGGREGATE',
+ 'ALL',
+ 'ALSO',
+ 'ALTER',
+ 'ALWAYS',
+ 'ANALYSE',
+ 'ANALYZE',
+ 'AND',
+ 'ANY',
+ 'ARRAY',
+ 'AS',
+ 'ASC',
+ 'ASSERTION',
+ 'ASSIGNMENT',
+ 'ASYMMETRIC',
+ 'AT',
+ 'ATTRIBUTE',
+ 'AUTHORIZATION',
+ 'BACKWARD',
+ 'BEFORE',
+ 'BEGIN',
+ 'BETWEEN',
+ 'BIGINT',
+ 'BINARY',
+ 'BIT',
+ 'BOOLEAN',
+ 'BOTH',
+ 'BY',
+ 'CACHE',
+ 'CALLED',
+ 'CASCADE',
+ 'CASCADED',
+ 'CASE',
+ 'CAST',
+ 'CATALOG',
+ 'CHAIN',
+ 'CHAR',
+ 'CHARACTER',
+ 'CHARACTERISTICS',
+ 'CHECK',
+ 'CHECKPOINT',
+ 'CLASS',
+ 'CLOSE',
+ 'CLUSTER',
+ 'COALESCE',
+ 'COLLATE',
+ 'COLLATION',
+ 'COLUMN',
+ 'COMMENT',
+ 'COMMENTS',
+ 'COMMIT',
+ 'COMMITTED',
+ 'CONCURRENTLY',
+ 'CONFIGURATION',
+ 'CONNECTION',
+ 'CONSTRAINT',
+ 'CONSTRAINTS',
+ 'CONTENT',
+ 'CONTINUE',
+ 'CONVERSION',
+ 'COPY',
+ 'COST',
+ 'CREATE',
+ 'CROSS',
+ 'CSV',
+ 'CURRENT',
+ 'CURRENT_CATALOG',
+ 'CURRENT_DATE',
+ 'CURRENT_ROLE',
+ 'CURRENT_SCHEMA',
+ 'CURRENT_TIME',
+ 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER',
+ 'CURSOR',
+ 'CYCLE',
+ 'DATA',
+ 'DATABASE',
+ 'DAY',
+ 'DEALLOCATE',
+ 'DEC',
+ 'DECIMAL',
+ 'DECLARE',
+ 'DEFAULT',
+ 'DEFAULTS',
+ 'DEFERRABLE',
+ 'DEFERRED',
+ 'DEFINER',
+ 'DELETE',
+ 'DELIMITER',
+ 'DELIMITERS',
+ 'DESC',
+ 'DICTIONARY',
+ 'DISABLE',
+ 'DISCARD',
+ 'DISTINCT',
+ 'DO',
+ 'DOCUMENT',
+ 'DOMAIN',
+ 'DOUBLE',
+ 'DROP',
+ 'EACH',
+ 'ELSE',
+ 'ENABLE',
+ 'ENCODING',
+ 'ENCRYPTED',
+ 'END',
+ 'ENUM',
+ 'ESCAPE',
+ 'EVENT',
+ 'EXCEPT',
+ 'EXCLUDE',
+ 'EXCLUDING',
+ 'EXCLUSIVE',
+ 'EXECUTE',
+ 'EXISTS',
+ 'EXPLAIN',
+ 'EXTENSION',
+ 'EXTERNAL',
+ 'EXTRACT',
+ 'FALSE',
+ 'FAMILY',
+ 'FETCH',
+ 'FILTER',
+ 'FIRST',
+ 'FLOAT',
+ 'FOLLOWING',
+ 'FOR',
+ 'FORCE',
+ 'FOREIGN',
+ 'FORWARD',
+ 'FREEZE',
+ 'FROM',
+ 'FULL',
+ 'FUNCTION',
+ 'FUNCTIONS',
+ 'GLOBAL',
+ 'GRANT',
+ 'GRANTED',
+ 'GREATEST',
+ 'GROUP',
+ 'HANDLER',
+ 'HAVING',
+ 'HEADER',
+ 'HOLD',
+ 'HOUR',
+ 'IDENTITY',
+ 'IF',
+ 'ILIKE',
+ 'IMMEDIATE',
+ 'IMMUTABLE',
+ 'IMPLICIT',
+ 'IN',
+ 'INCLUDING',
+ 'INCREMENT',
+ 'INDEX',
+ 'INDEXES',
+ 'INHERIT',
+ 'INHERITS',
+ 'INITIALLY',
+ 'INLINE',
+ 'INNER',
+ 'INOUT',
+ 'INPUT',
+ 'INSENSITIVE',
+ 'INSERT',
+ 'INSTEAD',
+ 'INT',
+ 'INTEGER',
+ 'INTERSECT',
+ 'INTERVAL',
+ 'INTO',
+ 'INVOKER',
+ 'IS',
+ 'ISNULL',
+ 'ISOLATION',
+ 'JOIN',
+ 'KEY',
+ 'LABEL',
+ 'LANGUAGE',
+ 'LARGE',
+ 'LAST',
+ 'LATERAL',
+ 'LC_COLLATE',
+ 'LC_CTYPE',
+ 'LEADING',
+ 'LEAKPROOF',
+ 'LEAST',
+ 'LEFT',
+ 'LEVEL',
+ 'LIKE',
+ 'LIMIT',
+ 'LISTEN',
+ 'LOAD',
+ 'LOCAL',
+ 'LOCALTIME',
+ 'LOCALTIMESTAMP',
+ 'LOCATION',
+ 'LOCK',
+ 'MAPPING',
+ 'MATCH',
+ 'MATERIALIZED',
+ 'MAXVALUE',
+ 'MINUTE',
+ 'MINVALUE',
+ 'MODE',
+ 'MONTH',
+ 'MOVE',
+ 'NAME',
+ 'NAMES',
+ 'NATIONAL',
+ 'NATURAL',
+ 'NCHAR',
+ 'NEXT',
+ 'NO',
+ 'NONE',
+ 'NOT',
+ 'NOTHING',
+ 'NOTIFY',
+ 'NOTNULL',
+ 'NOWAIT',
+ 'NULL',
+ 'NULLIF',
+ 'NULLS',
+ 'NUMERIC',
+ 'OBJECT',
+ 'OF',
+ 'OFF',
+ 'OFFSET',
+ 'OIDS',
+ 'ON',
+ 'ONLY',
+ 'OPERATOR',
+ 'OPTION',
+ 'OPTIONS',
+ 'OR',
+ 'ORDER',
+ 'ORDINALITY',
+ 'OUT',
+ 'OUTER',
+ 'OVER',
+ 'OVERLAPS',
+ 'OVERLAY',
+ 'OWNED',
+ 'OWNER',
+ 'PARSER',
+ 'PARTIAL',
+ 'PARTITION',
+ 'PASSING',
+ 'PASSWORD',
+ 'PLACING',
+ 'PLANS',
+ 'POLICY',
+ 'POSITION',
+ 'PRECEDING',
+ 'PRECISION',
+ 'PREPARE',
+ 'PREPARED',
+ 'PRESERVE',
+ 'PRIMARY',
+ 'PRIOR',
+ 'PRIVILEGES',
+ 'PROCEDURAL',
+ 'PROCEDURE',
+ 'PROGRAM',
+ 'QUOTE',
+ 'RANGE',
+ 'READ',
+ 'REAL',
+ 'REASSIGN',
+ 'RECHECK',
+ 'RECURSIVE',
+ 'REF',
+ 'REFERENCES',
+ 'REFRESH',
+ 'REINDEX',
+ 'RELATIVE',
+ 'RELEASE',
+ 'RENAME',
+ 'REPEATABLE',
+ 'REPLACE',
+ 'REPLICA',
+ 'RESET',
+ 'RESTART',
+ 'RESTRICT',
+ 'RETURNING',
+ 'RETURNS',
+ 'REVOKE',
+ 'RIGHT',
+ 'ROLE',
+ 'ROLLBACK',
+ 'ROW',
+ 'ROWS',
+ 'RULE',
+ 'SAVEPOINT',
+ 'SCHEMA',
+ 'SCROLL',
+ 'SEARCH',
+ 'SECOND',
+ 'SECURITY',
+ 'SELECT',
+ 'SEQUENCE',
+ 'SEQUENCES',
+ 'SERIALIZABLE',
+ 'SERVER',
+ 'SESSION',
+ 'SESSION_USER',
+ 'SET',
+ 'SETOF',
+ 'SHARE',
+ 'SHOW',
+ 'SIMILAR',
+ 'SIMPLE',
+ 'SMALLINT',
+ 'SNAPSHOT',
+ 'SOME',
+ 'STABLE',
+ 'STANDALONE',
+ 'START',
+ 'STATEMENT',
+ 'STATISTICS',
+ 'STDIN',
+ 'STDOUT',
+ 'STORAGE',
+ 'STRICT',
+ 'STRIP',
+ 'SUBSTRING',
+ 'SYMMETRIC',
+ 'SYSID',
+ 'SYSTEM',
+ 'TABLE',
+ 'TABLES',
+ 'TABLESPACE',
+ 'TEMP',
+ 'TEMPLATE',
+ 'TEMPORARY',
+ 'TEXT',
+ 'THEN',
+ 'TIME',
+ 'TIMESTAMP',
+ 'TO',
+ 'TRAILING',
+ 'TRANSACTION',
+ 'TREAT',
+ 'TRIGGER',
+ 'TRIM',
+ 'TRUE',
+ 'TRUNCATE',
+ 'TRUSTED',
+ 'TYPE',
+ 'TYPES',
+ 'UNBOUNDED',
+ 'UNCOMMITTED',
+ 'UNENCRYPTED',
+ 'UNION',
+ 'UNIQUE',
+ 'UNKNOWN',
+ 'UNLISTEN',
+ 'UNLOGGED',
+ 'UNTIL',
+ 'UPDATE',
+ 'USER',
+ 'USING',
+ 'VACUUM',
+ 'VALID',
+ 'VALIDATE',
+ 'VALIDATOR',
+ 'VALUE',
+ 'VALUES',
+ 'VARCHAR',
+ 'VARIADIC',
+ 'VARYING',
+ 'VERBOSE',
+ 'VERSION',
+ 'VIEW',
+ 'VIEWS',
+ 'VOLATILE',
+ 'WHEN',
+ 'WHERE',
+ 'WHITESPACE',
+ 'WINDOW',
+ 'WITH',
+ 'WITHIN',
+ 'WITHOUT',
+ 'WORK',
+ 'WRAPPER',
+ 'WRITE',
+ 'XML',
+ 'XMLATTRIBUTES',
+ 'XMLCONCAT',
+ 'XMLELEMENT',
+ 'XMLEXISTS',
+ 'XMLFOREST',
+ 'XMLPARSE',
+ 'XMLPI',
+ 'XMLROOT',
+ 'XMLSERIALIZE',
+ 'YEAR',
+ 'YES',
+ 'ZONE',
+)
+
+DATATYPES = (
+ 'bigint',
+ 'bigserial',
+ 'bit',
+ 'bit varying',
+ 'bool',
+ 'boolean',
+ 'box',
+ 'bytea',
+ 'char',
+ 'character',
+ 'character varying',
+ 'cidr',
+ 'circle',
+ 'date',
+ 'decimal',
+ 'double precision',
+ 'float4',
+ 'float8',
+ 'inet',
+ 'int',
+ 'int2',
+ 'int4',
+ 'int8',
+ 'integer',
+ 'interval',
+ 'json',
+ 'jsonb',
+ 'line',
+ 'lseg',
+ 'macaddr',
+ 'money',
+ 'numeric',
+ 'path',
+ 'pg_lsn',
+ 'point',
+ 'polygon',
+ 'real',
+ 'serial',
+ 'serial2',
+ 'serial4',
+ 'serial8',
+ 'smallint',
+ 'smallserial',
+ 'text',
+ 'time',
+ 'timestamp',
+ 'timestamptz',
+ 'timetz',
+ 'tsquery',
+ 'tsvector',
+ 'txid_snapshot',
+ 'uuid',
+ 'varbit',
+ 'varchar',
+ 'with time zone',
+ 'without time zone',
+ 'xml',
+)
+
+PSEUDO_TYPES = (
+ 'any',
+ 'anyelement',
+ 'anyarray',
+ 'anynonarray',
+ 'anyenum',
+ 'anyrange',
+ 'cstring',
+ 'internal',
+ 'language_handler',
+ 'fdw_handler',
+ 'record',
+ 'trigger',
+ 'void',
+ 'opaque',
+)
# Remove 'trigger' from types
-PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))
+PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))))
-PLPGSQL_KEYWORDS = [
+PLPGSQL_KEYWORDS = (
'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT',
'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE',
'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE',
- ]
+)
if __name__ == '__main__':
update_myself()
-
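
The update_consts() rewrite above drops the line-by-line scan for the start and end of each constant in favour of a single multiline regex that matches the whole `NAME = ( ... )` block and splices in a freshly formatted tuple; `format_lines`, imported from pygments.util in this patch, is assumed to produce that block. A self-contained sketch of the same splice, with a simplified stand-in formatter whose output format may differ from the real helper:

# Sketch of the regex-splice approach used by the new update_consts().
import re

def fmt(constname, items):
    # simplified stand-in for pygments.util.format_lines
    body = '\n'.join('    %r,' % item for item in sorted(items))
    return '%s = (\n%s\n)' % (constname, body)

def splice_const(source, constname, items):
    pattern = re.compile(r'^%s\s*=\s*\($.*?^\s*\)$' % constname, re.M | re.S)
    m = pattern.search(source)
    if not m:
        raise ValueError('Could not find existing definition for %s' % constname)
    return source[:m.start()] + fmt(constname, items) + source[m.end():]

src = "KEYWORDS = (\n    'OLD',\n)\n"
print(splice_const(src, 'KEYWORDS', ['ABORT', 'ABSOLUTE']))
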
diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py
index 7b27daab..456edc48 100644
--- a/pygments/lexers/_scilab_builtins.py
+++ b/pygments/lexers/_scilab_builtins.py
@@ -9,32 +9,3089 @@
:license: BSD, see LICENSE for details.
"""
-# These lists are generated automatically.
-# Run the following in a Scilab script:
-#
-# varType=["functions", "commands", "macros", "variables" ];
-# fd = mopen('list.txt','wt');
-#
-# for j=1:size(varType,"*")
-# myStr="";
-# a=completion("",varType(j));
-# myStr=varType(j)+"_kw = [";
-# for i=1:size(a,"*")
-# myStr = myStr + """" + a(i) + """";
-# if size(a,"*") <> i then
-# myStr = myStr + ","; end
-# end
-# myStr = myStr + "]";
-# mputl(myStr,fd);
-# end
-# mclose(fd);
-#
-# Then replace "$" by "\\$" manually.
-
-functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","contr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater","drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplot","grep","gsort
","gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswaitingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfinit","show_pixmap","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlockByUID","warnin
g","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect"]
-
-commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"]
-
-macros_kw = ["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%ls
s_size","%lss_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s",
"%sp_y_sp","%sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default","accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","config","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_messag
e","demo_run","demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2hex","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","m
tlb_fliplr","mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","nmplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","op
timsimplex_xbar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pinv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","tf2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell"]
-
-builtin_consts = ["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib"]
+# Autogenerated
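+# The keyword tuples below are regenerated by running this module directly
+# against a local Scilab installation; see the __main__ block at the end of
+# this file.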
+
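+# Language commands and control-flow keywords (Scilab completion category
+# 'commands').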
+commands_kw = (
+ 'abort',
+ 'apropos',
+ 'break',
+ 'case',
+ 'catch',
+ 'clc',
+ 'clear',
+ 'continue',
+ 'do',
+ 'else',
+ 'elseif',
+ 'end',
+ 'endfunction',
+ 'exit',
+ 'for',
+ 'function',
+ 'help',
+ 'if',
+ 'pause',
+ 'pwd',
+ 'quit',
+ 'resume',
+ 'return',
+ 'select',
+ 'then',
+ 'try',
+ 'what',
+ 'while',
+ 'who',
+)
+
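+# Built-in functions (Scilab completion category 'functions').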
+functions_kw = (
+ '!!_invoke_',
+ '%H5Object_e',
+ '%H5Object_fieldnames',
+ '%H5Object_p',
+ '%XMLAttr_6',
+ '%XMLAttr_e',
+ '%XMLAttr_i_XMLElem',
+ '%XMLAttr_length',
+ '%XMLAttr_p',
+ '%XMLAttr_size',
+ '%XMLDoc_6',
+ '%XMLDoc_e',
+ '%XMLDoc_i_XMLList',
+ '%XMLDoc_p',
+ '%XMLElem_6',
+ '%XMLElem_e',
+ '%XMLElem_i_XMLDoc',
+ '%XMLElem_i_XMLElem',
+ '%XMLElem_i_XMLList',
+ '%XMLElem_p',
+ '%XMLList_6',
+ '%XMLList_e',
+ '%XMLList_i_XMLElem',
+ '%XMLList_i_XMLList',
+ '%XMLList_length',
+ '%XMLList_p',
+ '%XMLList_size',
+ '%XMLNs_6',
+ '%XMLNs_e',
+ '%XMLNs_i_XMLElem',
+ '%XMLNs_p',
+ '%XMLSet_6',
+ '%XMLSet_e',
+ '%XMLSet_length',
+ '%XMLSet_p',
+ '%XMLSet_size',
+ '%XMLValid_p',
+ '%_EClass_6',
+ '%_EClass_e',
+ '%_EClass_p',
+ '%_EObj_0',
+ '%_EObj_1__EObj',
+ '%_EObj_1_b',
+ '%_EObj_1_c',
+ '%_EObj_1_i',
+ '%_EObj_1_s',
+ '%_EObj_2__EObj',
+ '%_EObj_2_b',
+ '%_EObj_2_c',
+ '%_EObj_2_i',
+ '%_EObj_2_s',
+ '%_EObj_3__EObj',
+ '%_EObj_3_b',
+ '%_EObj_3_c',
+ '%_EObj_3_i',
+ '%_EObj_3_s',
+ '%_EObj_4__EObj',
+ '%_EObj_4_b',
+ '%_EObj_4_c',
+ '%_EObj_4_i',
+ '%_EObj_4_s',
+ '%_EObj_5',
+ '%_EObj_6',
+ '%_EObj_a__EObj',
+ '%_EObj_a_b',
+ '%_EObj_a_c',
+ '%_EObj_a_i',
+ '%_EObj_a_s',
+ '%_EObj_d__EObj',
+ '%_EObj_d_b',
+ '%_EObj_d_c',
+ '%_EObj_d_i',
+ '%_EObj_d_s',
+ '%_EObj_disp',
+ '%_EObj_e',
+ '%_EObj_g__EObj',
+ '%_EObj_g_b',
+ '%_EObj_g_c',
+ '%_EObj_g_i',
+ '%_EObj_g_s',
+ '%_EObj_h__EObj',
+ '%_EObj_h_b',
+ '%_EObj_h_c',
+ '%_EObj_h_i',
+ '%_EObj_h_s',
+ '%_EObj_i__EObj',
+ '%_EObj_j__EObj',
+ '%_EObj_j_b',
+ '%_EObj_j_c',
+ '%_EObj_j_i',
+ '%_EObj_j_s',
+ '%_EObj_k__EObj',
+ '%_EObj_k_b',
+ '%_EObj_k_c',
+ '%_EObj_k_i',
+ '%_EObj_k_s',
+ '%_EObj_l__EObj',
+ '%_EObj_l_b',
+ '%_EObj_l_c',
+ '%_EObj_l_i',
+ '%_EObj_l_s',
+ '%_EObj_m__EObj',
+ '%_EObj_m_b',
+ '%_EObj_m_c',
+ '%_EObj_m_i',
+ '%_EObj_m_s',
+ '%_EObj_n__EObj',
+ '%_EObj_n_b',
+ '%_EObj_n_c',
+ '%_EObj_n_i',
+ '%_EObj_n_s',
+ '%_EObj_o__EObj',
+ '%_EObj_o_b',
+ '%_EObj_o_c',
+ '%_EObj_o_i',
+ '%_EObj_o_s',
+ '%_EObj_p',
+ '%_EObj_p__EObj',
+ '%_EObj_p_b',
+ '%_EObj_p_c',
+ '%_EObj_p_i',
+ '%_EObj_p_s',
+ '%_EObj_q__EObj',
+ '%_EObj_q_b',
+ '%_EObj_q_c',
+ '%_EObj_q_i',
+ '%_EObj_q_s',
+ '%_EObj_r__EObj',
+ '%_EObj_r_b',
+ '%_EObj_r_c',
+ '%_EObj_r_i',
+ '%_EObj_r_s',
+ '%_EObj_s__EObj',
+ '%_EObj_s_b',
+ '%_EObj_s_c',
+ '%_EObj_s_i',
+ '%_EObj_s_s',
+ '%_EObj_t',
+ '%_EObj_x__EObj',
+ '%_EObj_x_b',
+ '%_EObj_x_c',
+ '%_EObj_x_i',
+ '%_EObj_x_s',
+ '%_EObj_y__EObj',
+ '%_EObj_y_b',
+ '%_EObj_y_c',
+ '%_EObj_y_i',
+ '%_EObj_y_s',
+ '%_EObj_z__EObj',
+ '%_EObj_z_b',
+ '%_EObj_z_c',
+ '%_EObj_z_i',
+ '%_EObj_z_s',
+ '%_eigs',
+ '%_load',
+ '%b_1__EObj',
+ '%b_2__EObj',
+ '%b_3__EObj',
+ '%b_4__EObj',
+ '%b_a__EObj',
+ '%b_d__EObj',
+ '%b_g__EObj',
+ '%b_h__EObj',
+ '%b_i_XMLList',
+ '%b_i__EObj',
+ '%b_j__EObj',
+ '%b_k__EObj',
+ '%b_l__EObj',
+ '%b_m__EObj',
+ '%b_n__EObj',
+ '%b_o__EObj',
+ '%b_p__EObj',
+ '%b_q__EObj',
+ '%b_r__EObj',
+ '%b_s__EObj',
+ '%b_x__EObj',
+ '%b_y__EObj',
+ '%b_z__EObj',
+ '%c_1__EObj',
+ '%c_2__EObj',
+ '%c_3__EObj',
+ '%c_4__EObj',
+ '%c_a__EObj',
+ '%c_d__EObj',
+ '%c_g__EObj',
+ '%c_h__EObj',
+ '%c_i_XMLAttr',
+ '%c_i_XMLDoc',
+ '%c_i_XMLElem',
+ '%c_i_XMLList',
+ '%c_i__EObj',
+ '%c_j__EObj',
+ '%c_k__EObj',
+ '%c_l__EObj',
+ '%c_m__EObj',
+ '%c_n__EObj',
+ '%c_o__EObj',
+ '%c_p__EObj',
+ '%c_q__EObj',
+ '%c_r__EObj',
+ '%c_s__EObj',
+ '%c_x__EObj',
+ '%c_y__EObj',
+ '%c_z__EObj',
+ '%ce_i_XMLList',
+ '%fptr_i_XMLList',
+ '%h_i_XMLList',
+ '%hm_i_XMLList',
+ '%i_1__EObj',
+ '%i_2__EObj',
+ '%i_3__EObj',
+ '%i_4__EObj',
+ '%i_a__EObj',
+ '%i_abs',
+ '%i_cumprod',
+ '%i_cumsum',
+ '%i_d__EObj',
+ '%i_diag',
+ '%i_g__EObj',
+ '%i_h__EObj',
+ '%i_i_XMLList',
+ '%i_i__EObj',
+ '%i_j__EObj',
+ '%i_k__EObj',
+ '%i_l__EObj',
+ '%i_m__EObj',
+ '%i_matrix',
+ '%i_max',
+ '%i_maxi',
+ '%i_min',
+ '%i_mini',
+ '%i_mput',
+ '%i_n__EObj',
+ '%i_o__EObj',
+ '%i_p',
+ '%i_p__EObj',
+ '%i_prod',
+ '%i_q__EObj',
+ '%i_r__EObj',
+ '%i_s__EObj',
+ '%i_sum',
+ '%i_tril',
+ '%i_triu',
+ '%i_x__EObj',
+ '%i_y__EObj',
+ '%i_z__EObj',
+ '%ip_i_XMLList',
+ '%l_i_XMLList',
+ '%l_i__EObj',
+ '%lss_i_XMLList',
+ '%mc_i_XMLList',
+ '%msp_full',
+ '%msp_i_XMLList',
+ '%msp_spget',
+ '%p_i_XMLList',
+ '%ptr_i_XMLList',
+ '%r_i_XMLList',
+ '%s_1__EObj',
+ '%s_2__EObj',
+ '%s_3__EObj',
+ '%s_4__EObj',
+ '%s_a__EObj',
+ '%s_d__EObj',
+ '%s_g__EObj',
+ '%s_h__EObj',
+ '%s_i_XMLList',
+ '%s_i__EObj',
+ '%s_j__EObj',
+ '%s_k__EObj',
+ '%s_l__EObj',
+ '%s_m__EObj',
+ '%s_n__EObj',
+ '%s_o__EObj',
+ '%s_p__EObj',
+ '%s_q__EObj',
+ '%s_r__EObj',
+ '%s_s__EObj',
+ '%s_x__EObj',
+ '%s_y__EObj',
+ '%s_z__EObj',
+ '%sp_i_XMLList',
+ '%spb_i_XMLList',
+ '%st_i_XMLList',
+ 'Calendar',
+ 'ClipBoard',
+ 'Matplot',
+ 'Matplot1',
+ 'PlaySound',
+ 'TCL_DeleteInterp',
+ 'TCL_DoOneEvent',
+ 'TCL_EvalFile',
+ 'TCL_EvalStr',
+ 'TCL_ExistArray',
+ 'TCL_ExistInterp',
+ 'TCL_ExistVar',
+ 'TCL_GetVar',
+ 'TCL_GetVersion',
+ 'TCL_SetVar',
+ 'TCL_UnsetVar',
+ 'TCL_UpVar',
+ '_',
+ '_code2str',
+ '_d',
+ '_str2code',
+ 'about',
+ 'abs',
+ 'acos',
+ 'addModulePreferences',
+ 'addcolor',
+ 'addf',
+ 'addhistory',
+ 'addinter',
+ 'addlocalizationdomain',
+ 'amell',
+ 'and',
+ 'argn',
+ 'arl2_ius',
+ 'ascii',
+ 'asin',
+ 'atan',
+ 'backslash',
+ 'balanc',
+ 'banner',
+ 'base2dec',
+ 'basename',
+ 'bdiag',
+ 'beep',
+ 'besselh',
+ 'besseli',
+ 'besselj',
+ 'besselk',
+ 'bessely',
+ 'beta',
+ 'bezout',
+ 'bfinit',
+ 'blkfc1i',
+ 'blkslvi',
+ 'bool2s',
+ 'browsehistory',
+ 'browsevar',
+ 'bsplin3val',
+ 'buildDoc',
+ 'buildouttb',
+ 'bvode',
+ 'c_link',
+ 'call',
+ 'callblk',
+ 'captions',
+ 'cd',
+ 'cdfbet',
+ 'cdfbin',
+ 'cdfchi',
+ 'cdfchn',
+ 'cdff',
+ 'cdffnc',
+ 'cdfgam',
+ 'cdfnbn',
+ 'cdfnor',
+ 'cdfpoi',
+ 'cdft',
+ 'ceil',
+ 'champ',
+ 'champ1',
+ 'chdir',
+ 'chol',
+ 'clc',
+ 'clean',
+ 'clear',
+ 'clearfun',
+ 'clearglobal',
+ 'closeEditor',
+ 'closeEditvar',
+ 'closeXcos',
+ 'code2str',
+ 'coeff',
+ 'color',
+ 'comp',
+ 'completion',
+ 'conj',
+ 'contour2di',
+ 'contr',
+ 'conv2',
+ 'convstr',
+ 'copy',
+ 'copyfile',
+ 'corr',
+ 'cos',
+ 'coserror',
+ 'createdir',
+ 'cshep2d',
+ 'csvDefault',
+ 'csvIsnum',
+ 'csvRead',
+ 'csvStringToDouble',
+ 'csvTextScan',
+ 'csvWrite',
+ 'ctree2',
+ 'ctree3',
+ 'ctree4',
+ 'cumprod',
+ 'cumsum',
+ 'curblock',
+ 'curblockc',
+ 'daskr',
+ 'dasrt',
+ 'dassl',
+ 'data2sig',
+ 'datatipCreate',
+ 'datatipManagerMode',
+ 'datatipMove',
+ 'datatipRemove',
+ 'datatipSetDisplay',
+ 'datatipSetInterp',
+ 'datatipSetOrientation',
+ 'datatipSetStyle',
+ 'datatipToggle',
+ 'dawson',
+ 'dct',
+ 'debug',
+ 'dec2base',
+ 'deff',
+ 'definedfields',
+ 'degree',
+ 'delbpt',
+ 'delete',
+ 'deletefile',
+ 'delip',
+ 'delmenu',
+ 'det',
+ 'dgettext',
+ 'dhinf',
+ 'diag',
+ 'diary',
+ 'diffobjs',
+ 'disp',
+ 'dispbpt',
+ 'displayhistory',
+ 'disposefftwlibrary',
+ 'dlgamma',
+ 'dnaupd',
+ 'dneupd',
+ 'double',
+ 'drawaxis',
+ 'drawlater',
+ 'drawnow',
+ 'driver',
+ 'dsaupd',
+ 'dsearch',
+ 'dseupd',
+ 'dst',
+ 'duplicate',
+ 'editvar',
+ 'emptystr',
+ 'end_scicosim',
+ 'ereduc',
+ 'erf',
+ 'erfc',
+ 'erfcx',
+ 'erfi',
+ 'errcatch',
+ 'errclear',
+ 'error',
+ 'eval_cshep2d',
+ 'exec',
+ 'execstr',
+ 'exists',
+ 'exit',
+ 'exp',
+ 'expm',
+ 'exportUI',
+ 'export_to_hdf5',
+ 'eye',
+ 'fadj2sp',
+ 'fec',
+ 'feval',
+ 'fft',
+ 'fftw',
+ 'fftw_flags',
+ 'fftw_forget_wisdom',
+ 'fftwlibraryisloaded',
+ 'figure',
+ 'file',
+ 'filebrowser',
+ 'fileext',
+ 'fileinfo',
+ 'fileparts',
+ 'filesep',
+ 'find',
+ 'findBD',
+ 'findfiles',
+ 'fire_closing_finished',
+ 'floor',
+ 'format',
+ 'fort',
+ 'fprintfMat',
+ 'freq',
+ 'frexp',
+ 'fromc',
+ 'fromjava',
+ 'fscanfMat',
+ 'fsolve',
+ 'fstair',
+ 'full',
+ 'fullpath',
+ 'funcprot',
+ 'funptr',
+ 'gamma',
+ 'gammaln',
+ 'geom3d',
+ 'get',
+ 'getURL',
+ 'get_absolute_file_path',
+ 'get_fftw_wisdom',
+ 'getblocklabel',
+ 'getcallbackobject',
+ 'getdate',
+ 'getdebuginfo',
+ 'getdefaultlanguage',
+ 'getdrives',
+ 'getdynlibext',
+ 'getenv',
+ 'getfield',
+ 'gethistory',
+ 'gethistoryfile',
+ 'getinstalledlookandfeels',
+ 'getio',
+ 'getlanguage',
+ 'getlongpathname',
+ 'getlookandfeel',
+ 'getmd5',
+ 'getmemory',
+ 'getmodules',
+ 'getos',
+ 'getpid',
+ 'getrelativefilename',
+ 'getscicosvars',
+ 'getscilabmode',
+ 'getshortpathname',
+ 'gettext',
+ 'getvariablesonstack',
+ 'getversion',
+ 'glist',
+ 'global',
+ 'glue',
+ 'grand',
+ 'graphicfunction',
+ 'grayplot',
+ 'grep',
+ 'gsort',
+ 'gstacksize',
+ 'h5attr',
+ 'h5close',
+ 'h5cp',
+ 'h5dataset',
+ 'h5dump',
+ 'h5exists',
+ 'h5flush',
+ 'h5get',
+ 'h5group',
+ 'h5isArray',
+ 'h5isAttr',
+ 'h5isCompound',
+ 'h5isFile',
+ 'h5isGroup',
+ 'h5isList',
+ 'h5isRef',
+ 'h5isSet',
+ 'h5isSpace',
+ 'h5isType',
+ 'h5isVlen',
+ 'h5label',
+ 'h5ln',
+ 'h5ls',
+ 'h5mount',
+ 'h5mv',
+ 'h5open',
+ 'h5read',
+ 'h5readattr',
+ 'h5rm',
+ 'h5umount',
+ 'h5write',
+ 'h5writeattr',
+ 'havewindow',
+ 'helpbrowser',
+ 'hess',
+ 'hinf',
+ 'historymanager',
+ 'historysize',
+ 'host',
+ 'htmlDump',
+ 'htmlRead',
+ 'htmlReadStr',
+ 'htmlWrite',
+ 'iconvert',
+ 'ieee',
+ 'ilib_verbose',
+ 'imag',
+ 'impl',
+ 'import_from_hdf5',
+ 'imult',
+ 'inpnvi',
+ 'int',
+ 'int16',
+ 'int2d',
+ 'int32',
+ 'int3d',
+ 'int8',
+ 'interp',
+ 'interp2d',
+ 'interp3d',
+ 'intg',
+ 'intppty',
+ 'inttype',
+ 'inv',
+ 'invoke_lu',
+ 'is_handle_valid',
+ 'is_hdf5_file',
+ 'isalphanum',
+ 'isascii',
+ 'isdef',
+ 'isdigit',
+ 'isdir',
+ 'isequal',
+ 'isequalbitwise',
+ 'iserror',
+ 'isfile',
+ 'isglobal',
+ 'isletter',
+ 'isnum',
+ 'isreal',
+ 'iswaitingforinput',
+ 'jallowClassReloading',
+ 'jarray',
+ 'jautoTranspose',
+ 'jautoUnwrap',
+ 'javaclasspath',
+ 'javalibrarypath',
+ 'jcast',
+ 'jcompile',
+ 'jconvMatrixMethod',
+ 'jcreatejar',
+ 'jdeff',
+ 'jdisableTrace',
+ 'jenableTrace',
+ 'jexists',
+ 'jgetclassname',
+ 'jgetfield',
+ 'jgetfields',
+ 'jgetinfo',
+ 'jgetmethods',
+ 'jimport',
+ 'jinvoke',
+ 'jinvoke_db',
+ 'jnewInstance',
+ 'jremove',
+ 'jsetfield',
+ 'junwrap',
+ 'junwraprem',
+ 'jwrap',
+ 'jwrapinfloat',
+ 'kron',
+ 'lasterror',
+ 'ldiv',
+ 'ldivf',
+ 'legendre',
+ 'length',
+ 'lib',
+ 'librarieslist',
+ 'libraryinfo',
+ 'light',
+ 'linear_interpn',
+ 'lines',
+ 'link',
+ 'linmeq',
+ 'list',
+ 'listvar_in_hdf5',
+ 'load',
+ 'loadGui',
+ 'loadScicos',
+ 'loadXcos',
+ 'loadfftwlibrary',
+ 'loadhistory',
+ 'log',
+ 'log1p',
+ 'lsq',
+ 'lsq_splin',
+ 'lsqrsolve',
+ 'lsslist',
+ 'lstcat',
+ 'lstsize',
+ 'ltitr',
+ 'lu',
+ 'ludel',
+ 'lufact',
+ 'luget',
+ 'lusolve',
+ 'macr2lst',
+ 'macr2tree',
+ 'matfile_close',
+ 'matfile_listvar',
+ 'matfile_open',
+ 'matfile_varreadnext',
+ 'matfile_varwrite',
+ 'matrix',
+ 'max',
+ 'maxfiles',
+ 'mclearerr',
+ 'mclose',
+ 'meof',
+ 'merror',
+ 'messagebox',
+ 'mfprintf',
+ 'mfscanf',
+ 'mget',
+ 'mgeti',
+ 'mgetl',
+ 'mgetstr',
+ 'min',
+ 'mlist',
+ 'mode',
+ 'model2blk',
+ 'mopen',
+ 'move',
+ 'movefile',
+ 'mprintf',
+ 'mput',
+ 'mputl',
+ 'mputstr',
+ 'mscanf',
+ 'mseek',
+ 'msprintf',
+ 'msscanf',
+ 'mtell',
+ 'mtlb_mode',
+ 'mtlb_sparse',
+ 'mucomp',
+ 'mulf',
+ 'name2rgb',
+ 'nearfloat',
+ 'newaxes',
+ 'newest',
+ 'newfun',
+ 'nnz',
+ 'norm',
+ 'notify',
+ 'number_properties',
+ 'ode',
+ 'odedc',
+ 'ones',
+ 'openged',
+ 'opentk',
+ 'optim',
+ 'or',
+ 'ordmmd',
+ 'parallel_concurrency',
+ 'parallel_run',
+ 'param3d',
+ 'param3d1',
+ 'part',
+ 'pathconvert',
+ 'pathsep',
+ 'phase_simulation',
+ 'plot2d',
+ 'plot2d1',
+ 'plot2d2',
+ 'plot2d3',
+ 'plot2d4',
+ 'plot3d',
+ 'plot3d1',
+ 'plotbrowser',
+ 'pointer_xproperty',
+ 'poly',
+ 'ppol',
+ 'pppdiv',
+ 'predef',
+ 'preferences',
+ 'print',
+ 'printf',
+ 'printfigure',
+ 'printsetupbox',
+ 'prod',
+ 'progressionbar',
+ 'prompt',
+ 'pwd',
+ 'qld',
+ 'qp_solve',
+ 'qr',
+ 'raise_window',
+ 'rand',
+ 'rankqr',
+ 'rat',
+ 'rcond',
+ 'rdivf',
+ 'read',
+ 'read4b',
+ 'read_csv',
+ 'readb',
+ 'readgateway',
+ 'readmps',
+ 'real',
+ 'realtime',
+ 'realtimeinit',
+ 'regexp',
+ 'relocate_handle',
+ 'remez',
+ 'removeModulePreferences',
+ 'removedir',
+ 'removelinehistory',
+ 'res_with_prec',
+ 'resethistory',
+ 'residu',
+ 'resume',
+ 'return',
+ 'ricc',
+ 'rlist',
+ 'roots',
+ 'rotate_axes',
+ 'round',
+ 'rpem',
+ 'rtitr',
+ 'rubberbox',
+ 'save',
+ 'saveGui',
+ 'saveafterncommands',
+ 'saveconsecutivecommands',
+ 'savehistory',
+ 'schur',
+ 'sci_haltscicos',
+ 'sci_tree2',
+ 'sci_tree3',
+ 'sci_tree4',
+ 'sciargs',
+ 'scicos_debug',
+ 'scicos_debug_count',
+ 'scicos_time',
+ 'scicosim',
+ 'scinotes',
+ 'sctree',
+ 'semidef',
+ 'set',
+ 'set_blockerror',
+ 'set_fftw_wisdom',
+ 'set_xproperty',
+ 'setbpt',
+ 'setdefaultlanguage',
+ 'setenv',
+ 'setfield',
+ 'sethistoryfile',
+ 'setlanguage',
+ 'setlookandfeel',
+ 'setmenu',
+ 'sfact',
+ 'sfinit',
+ 'show_window',
+ 'sident',
+ 'sig2data',
+ 'sign',
+ 'simp',
+ 'simp_mode',
+ 'sin',
+ 'size',
+ 'slash',
+ 'sleep',
+ 'sorder',
+ 'sparse',
+ 'spchol',
+ 'spcompack',
+ 'spec',
+ 'spget',
+ 'splin',
+ 'splin2d',
+ 'splin3d',
+ 'splitURL',
+ 'spones',
+ 'sprintf',
+ 'sqrt',
+ 'stacksize',
+ 'str2code',
+ 'strcat',
+ 'strchr',
+ 'strcmp',
+ 'strcspn',
+ 'strindex',
+ 'string',
+ 'stringbox',
+ 'stripblanks',
+ 'strncpy',
+ 'strrchr',
+ 'strrev',
+ 'strsplit',
+ 'strspn',
+ 'strstr',
+ 'strsubst',
+ 'strtod',
+ 'strtok',
+ 'subf',
+ 'sum',
+ 'svd',
+ 'swap_handles',
+ 'symfcti',
+ 'syredi',
+ 'system_getproperty',
+ 'system_setproperty',
+ 'ta2lpd',
+ 'tan',
+ 'taucs_chdel',
+ 'taucs_chfact',
+ 'taucs_chget',
+ 'taucs_chinfo',
+ 'taucs_chsolve',
+ 'tempname',
+ 'testmatrix',
+ 'timer',
+ 'tlist',
+ 'tohome',
+ 'tokens',
+ 'toolbar',
+ 'toprint',
+ 'tr_zer',
+ 'tril',
+ 'triu',
+ 'type',
+ 'typename',
+ 'uiDisplayTree',
+ 'uicontextmenu',
+ 'uicontrol',
+ 'uigetcolor',
+ 'uigetdir',
+ 'uigetfile',
+ 'uigetfont',
+ 'uimenu',
+ 'uint16',
+ 'uint32',
+ 'uint8',
+ 'uipopup',
+ 'uiputfile',
+ 'uiwait',
+ 'ulink',
+ 'umf_ludel',
+ 'umf_lufact',
+ 'umf_luget',
+ 'umf_luinfo',
+ 'umf_lusolve',
+ 'umfpack',
+ 'unglue',
+ 'unix',
+ 'unsetmenu',
+ 'unzoom',
+ 'updatebrowsevar',
+ 'usecanvas',
+ 'useeditor',
+ 'user',
+ 'var2vec',
+ 'varn',
+ 'vec2var',
+ 'waitbar',
+ 'warnBlockByUID',
+ 'warning',
+ 'what',
+ 'where',
+ 'whereis',
+ 'who',
+ 'winsid',
+ 'with_module',
+ 'writb',
+ 'write',
+ 'write4b',
+ 'write_csv',
+ 'x_choose',
+ 'x_choose_modeless',
+ 'x_dialog',
+ 'x_mdialog',
+ 'xarc',
+ 'xarcs',
+ 'xarrows',
+ 'xchange',
+ 'xchoicesi',
+ 'xclick',
+ 'xcos',
+ 'xcosAddToolsMenu',
+ 'xcosConfigureXmlFile',
+ 'xcosDiagramToScilab',
+ 'xcosPalCategoryAdd',
+ 'xcosPalDelete',
+ 'xcosPalDisable',
+ 'xcosPalEnable',
+ 'xcosPalGenerateIcon',
+ 'xcosPalGet',
+ 'xcosPalLoad',
+ 'xcosPalMove',
+ 'xcosSimulationStarted',
+ 'xcosUpdateBlock',
+ 'xdel',
+ 'xend',
+ 'xfarc',
+ 'xfarcs',
+ 'xfpoly',
+ 'xfpolys',
+ 'xfrect',
+ 'xget',
+ 'xgetmouse',
+ 'xgraduate',
+ 'xgrid',
+ 'xinit',
+ 'xlfont',
+ 'xls_open',
+ 'xls_read',
+ 'xmlAddNs',
+ 'xmlAppend',
+ 'xmlAsNumber',
+ 'xmlAsText',
+ 'xmlDTD',
+ 'xmlDelete',
+ 'xmlDocument',
+ 'xmlDump',
+ 'xmlElement',
+ 'xmlFormat',
+ 'xmlGetNsByHref',
+ 'xmlGetNsByPrefix',
+ 'xmlGetOpenDocs',
+ 'xmlIsValidObject',
+ 'xmlName',
+ 'xmlNs',
+ 'xmlRead',
+ 'xmlReadStr',
+ 'xmlRelaxNG',
+ 'xmlRemove',
+ 'xmlSchema',
+ 'xmlSetAttributes',
+ 'xmlValidate',
+ 'xmlWrite',
+ 'xmlXPath',
+ 'xname',
+ 'xpause',
+ 'xpoly',
+ 'xpolys',
+ 'xrect',
+ 'xrects',
+ 'xs2bmp',
+ 'xs2emf',
+ 'xs2eps',
+ 'xs2gif',
+ 'xs2jpg',
+ 'xs2pdf',
+ 'xs2png',
+ 'xs2ppm',
+ 'xs2ps',
+ 'xs2svg',
+ 'xsegs',
+ 'xset',
+ 'xstring',
+ 'xstringb',
+ 'xtitle',
+ 'zeros',
+ 'znaupd',
+ 'zneupd',
+ 'zoom_rect',
+)
+
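+# Library macros, i.e. functions implemented in the Scilab language itself
+# (Scilab completion category 'macros').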
+macros_kw = (
+ '!_deff_wrapper',
+ '%0_i_st',
+ '%3d_i_h',
+ '%Block_xcosUpdateBlock',
+ '%TNELDER_p',
+ '%TNELDER_string',
+ '%TNMPLOT_p',
+ '%TNMPLOT_string',
+ '%TOPTIM_p',
+ '%TOPTIM_string',
+ '%TSIMPLEX_p',
+ '%TSIMPLEX_string',
+ '%_EVoid_p',
+ '%_gsort',
+ '%_listvarinfile',
+ '%_rlist',
+ '%_save',
+ '%_sodload',
+ '%_strsplit',
+ '%_unwrap',
+ '%ar_p',
+ '%asn',
+ '%b_a_b',
+ '%b_a_s',
+ '%b_c_s',
+ '%b_c_spb',
+ '%b_cumprod',
+ '%b_cumsum',
+ '%b_d_s',
+ '%b_diag',
+ '%b_e',
+ '%b_f_s',
+ '%b_f_spb',
+ '%b_g_s',
+ '%b_g_spb',
+ '%b_grand',
+ '%b_h_s',
+ '%b_h_spb',
+ '%b_i_b',
+ '%b_i_ce',
+ '%b_i_h',
+ '%b_i_hm',
+ '%b_i_s',
+ '%b_i_sp',
+ '%b_i_spb',
+ '%b_i_st',
+ '%b_iconvert',
+ '%b_l_b',
+ '%b_l_s',
+ '%b_m_b',
+ '%b_m_s',
+ '%b_matrix',
+ '%b_n_hm',
+ '%b_o_hm',
+ '%b_p_s',
+ '%b_prod',
+ '%b_r_b',
+ '%b_r_s',
+ '%b_s_b',
+ '%b_s_s',
+ '%b_string',
+ '%b_sum',
+ '%b_tril',
+ '%b_triu',
+ '%b_x_b',
+ '%b_x_s',
+ '%bicg',
+ '%bicgstab',
+ '%c_a_c',
+ '%c_b_c',
+ '%c_b_s',
+ '%c_diag',
+ '%c_dsearch',
+ '%c_e',
+ '%c_eye',
+ '%c_f_s',
+ '%c_grand',
+ '%c_i_c',
+ '%c_i_ce',
+ '%c_i_h',
+ '%c_i_hm',
+ '%c_i_lss',
+ '%c_i_r',
+ '%c_i_s',
+ '%c_i_st',
+ '%c_matrix',
+ '%c_n_l',
+ '%c_n_st',
+ '%c_o_l',
+ '%c_o_st',
+ '%c_ones',
+ '%c_rand',
+ '%c_tril',
+ '%c_triu',
+ '%cblock_c_cblock',
+ '%cblock_c_s',
+ '%cblock_e',
+ '%cblock_f_cblock',
+ '%cblock_p',
+ '%cblock_size',
+ '%ce_6',
+ '%ce_c_ce',
+ '%ce_e',
+ '%ce_f_ce',
+ '%ce_i_ce',
+ '%ce_i_s',
+ '%ce_i_st',
+ '%ce_matrix',
+ '%ce_p',
+ '%ce_size',
+ '%ce_string',
+ '%ce_t',
+ '%cgs',
+ '%champdat_i_h',
+ '%choose',
+ '%diagram_xcos',
+ '%dir_p',
+ '%fptr_i_st',
+ '%grand_perm',
+ '%grayplot_i_h',
+ '%h_i_st',
+ '%hmS_k_hmS_generic',
+ '%hm_1_hm',
+ '%hm_1_s',
+ '%hm_2_hm',
+ '%hm_2_s',
+ '%hm_3_hm',
+ '%hm_3_s',
+ '%hm_4_hm',
+ '%hm_4_s',
+ '%hm_5',
+ '%hm_a_hm',
+ '%hm_a_r',
+ '%hm_a_s',
+ '%hm_abs',
+ '%hm_and',
+ '%hm_bool2s',
+ '%hm_c_hm',
+ '%hm_ceil',
+ '%hm_conj',
+ '%hm_cos',
+ '%hm_cumprod',
+ '%hm_cumsum',
+ '%hm_d_hm',
+ '%hm_d_s',
+ '%hm_degree',
+ '%hm_dsearch',
+ '%hm_e',
+ '%hm_exp',
+ '%hm_eye',
+ '%hm_f_hm',
+ '%hm_find',
+ '%hm_floor',
+ '%hm_g_hm',
+ '%hm_grand',
+ '%hm_gsort',
+ '%hm_h_hm',
+ '%hm_i_b',
+ '%hm_i_ce',
+ '%hm_i_h',
+ '%hm_i_hm',
+ '%hm_i_i',
+ '%hm_i_p',
+ '%hm_i_r',
+ '%hm_i_s',
+ '%hm_i_st',
+ '%hm_iconvert',
+ '%hm_imag',
+ '%hm_int',
+ '%hm_isnan',
+ '%hm_isreal',
+ '%hm_j_hm',
+ '%hm_j_s',
+ '%hm_k_hm',
+ '%hm_k_s',
+ '%hm_log',
+ '%hm_m_p',
+ '%hm_m_r',
+ '%hm_m_s',
+ '%hm_matrix',
+ '%hm_max',
+ '%hm_mean',
+ '%hm_median',
+ '%hm_min',
+ '%hm_n_b',
+ '%hm_n_c',
+ '%hm_n_hm',
+ '%hm_n_i',
+ '%hm_n_p',
+ '%hm_n_s',
+ '%hm_o_b',
+ '%hm_o_c',
+ '%hm_o_hm',
+ '%hm_o_i',
+ '%hm_o_p',
+ '%hm_o_s',
+ '%hm_ones',
+ '%hm_or',
+ '%hm_p',
+ '%hm_prod',
+ '%hm_q_hm',
+ '%hm_r_s',
+ '%hm_rand',
+ '%hm_real',
+ '%hm_round',
+ '%hm_s',
+ '%hm_s_hm',
+ '%hm_s_r',
+ '%hm_s_s',
+ '%hm_sign',
+ '%hm_sin',
+ '%hm_size',
+ '%hm_sqrt',
+ '%hm_stdev',
+ '%hm_string',
+ '%hm_sum',
+ '%hm_x_hm',
+ '%hm_x_p',
+ '%hm_x_s',
+ '%hm_zeros',
+ '%i_1_s',
+ '%i_2_s',
+ '%i_3_s',
+ '%i_4_s',
+ '%i_Matplot',
+ '%i_a_i',
+ '%i_a_s',
+ '%i_and',
+ '%i_ascii',
+ '%i_b_s',
+ '%i_bezout',
+ '%i_champ',
+ '%i_champ1',
+ '%i_contour',
+ '%i_contour2d',
+ '%i_d_i',
+ '%i_d_s',
+ '%i_dsearch',
+ '%i_e',
+ '%i_fft',
+ '%i_g_i',
+ '%i_gcd',
+ '%i_grand',
+ '%i_h_i',
+ '%i_i_ce',
+ '%i_i_h',
+ '%i_i_hm',
+ '%i_i_i',
+ '%i_i_s',
+ '%i_i_st',
+ '%i_j_i',
+ '%i_j_s',
+ '%i_l_s',
+ '%i_lcm',
+ '%i_length',
+ '%i_m_i',
+ '%i_m_s',
+ '%i_mfprintf',
+ '%i_mprintf',
+ '%i_msprintf',
+ '%i_n_s',
+ '%i_o_s',
+ '%i_or',
+ '%i_p_i',
+ '%i_p_s',
+ '%i_plot2d',
+ '%i_plot2d1',
+ '%i_plot2d2',
+ '%i_q_s',
+ '%i_r_i',
+ '%i_r_s',
+ '%i_round',
+ '%i_s_i',
+ '%i_s_s',
+ '%i_sign',
+ '%i_string',
+ '%i_x_i',
+ '%i_x_s',
+ '%ip_a_s',
+ '%ip_i_st',
+ '%ip_m_s',
+ '%ip_n_ip',
+ '%ip_o_ip',
+ '%ip_p',
+ '%ip_part',
+ '%ip_s_s',
+ '%ip_string',
+ '%k',
+ '%l_i_h',
+ '%l_i_s',
+ '%l_i_st',
+ '%l_isequal',
+ '%l_n_c',
+ '%l_n_l',
+ '%l_n_m',
+ '%l_n_p',
+ '%l_n_s',
+ '%l_n_st',
+ '%l_o_c',
+ '%l_o_l',
+ '%l_o_m',
+ '%l_o_p',
+ '%l_o_s',
+ '%l_o_st',
+ '%lss_a_lss',
+ '%lss_a_p',
+ '%lss_a_r',
+ '%lss_a_s',
+ '%lss_c_lss',
+ '%lss_c_p',
+ '%lss_c_r',
+ '%lss_c_s',
+ '%lss_e',
+ '%lss_eye',
+ '%lss_f_lss',
+ '%lss_f_p',
+ '%lss_f_r',
+ '%lss_f_s',
+ '%lss_i_ce',
+ '%lss_i_lss',
+ '%lss_i_p',
+ '%lss_i_r',
+ '%lss_i_s',
+ '%lss_i_st',
+ '%lss_inv',
+ '%lss_l_lss',
+ '%lss_l_p',
+ '%lss_l_r',
+ '%lss_l_s',
+ '%lss_m_lss',
+ '%lss_m_p',
+ '%lss_m_r',
+ '%lss_m_s',
+ '%lss_n_lss',
+ '%lss_n_p',
+ '%lss_n_r',
+ '%lss_n_s',
+ '%lss_norm',
+ '%lss_o_lss',
+ '%lss_o_p',
+ '%lss_o_r',
+ '%lss_o_s',
+ '%lss_ones',
+ '%lss_r_lss',
+ '%lss_r_p',
+ '%lss_r_r',
+ '%lss_r_s',
+ '%lss_rand',
+ '%lss_s',
+ '%lss_s_lss',
+ '%lss_s_p',
+ '%lss_s_r',
+ '%lss_s_s',
+ '%lss_size',
+ '%lss_t',
+ '%lss_v_lss',
+ '%lss_v_p',
+ '%lss_v_r',
+ '%lss_v_s',
+ '%lt_i_s',
+ '%m_n_l',
+ '%m_o_l',
+ '%mc_i_h',
+ '%mc_i_s',
+ '%mc_i_st',
+ '%mc_n_st',
+ '%mc_o_st',
+ '%mc_string',
+ '%mps_p',
+ '%mps_string',
+ '%msp_a_s',
+ '%msp_abs',
+ '%msp_e',
+ '%msp_find',
+ '%msp_i_s',
+ '%msp_i_st',
+ '%msp_length',
+ '%msp_m_s',
+ '%msp_maxi',
+ '%msp_n_msp',
+ '%msp_nnz',
+ '%msp_o_msp',
+ '%msp_p',
+ '%msp_sparse',
+ '%msp_spones',
+ '%msp_t',
+ '%p_a_lss',
+ '%p_a_r',
+ '%p_c_lss',
+ '%p_c_r',
+ '%p_cumprod',
+ '%p_cumsum',
+ '%p_d_p',
+ '%p_d_r',
+ '%p_d_s',
+ '%p_det',
+ '%p_e',
+ '%p_f_lss',
+ '%p_f_r',
+ '%p_grand',
+ '%p_i_ce',
+ '%p_i_h',
+ '%p_i_hm',
+ '%p_i_lss',
+ '%p_i_p',
+ '%p_i_r',
+ '%p_i_s',
+ '%p_i_st',
+ '%p_inv',
+ '%p_j_s',
+ '%p_k_p',
+ '%p_k_r',
+ '%p_k_s',
+ '%p_l_lss',
+ '%p_l_p',
+ '%p_l_r',
+ '%p_l_s',
+ '%p_m_hm',
+ '%p_m_lss',
+ '%p_m_r',
+ '%p_matrix',
+ '%p_n_l',
+ '%p_n_lss',
+ '%p_n_r',
+ '%p_o_l',
+ '%p_o_lss',
+ '%p_o_r',
+ '%p_o_sp',
+ '%p_p_s',
+ '%p_part',
+ '%p_prod',
+ '%p_q_p',
+ '%p_q_r',
+ '%p_q_s',
+ '%p_r_lss',
+ '%p_r_p',
+ '%p_r_r',
+ '%p_r_s',
+ '%p_s_lss',
+ '%p_s_r',
+ '%p_simp',
+ '%p_string',
+ '%p_sum',
+ '%p_v_lss',
+ '%p_v_p',
+ '%p_v_r',
+ '%p_v_s',
+ '%p_x_hm',
+ '%p_x_r',
+ '%p_y_p',
+ '%p_y_r',
+ '%p_y_s',
+ '%p_z_p',
+ '%p_z_r',
+ '%p_z_s',
+ '%pcg',
+ '%plist_p',
+ '%plist_string',
+ '%r_0',
+ '%r_a_hm',
+ '%r_a_lss',
+ '%r_a_p',
+ '%r_a_r',
+ '%r_a_s',
+ '%r_c_lss',
+ '%r_c_p',
+ '%r_c_r',
+ '%r_c_s',
+ '%r_clean',
+ '%r_cumprod',
+ '%r_cumsum',
+ '%r_d_p',
+ '%r_d_r',
+ '%r_d_s',
+ '%r_det',
+ '%r_diag',
+ '%r_e',
+ '%r_eye',
+ '%r_f_lss',
+ '%r_f_p',
+ '%r_f_r',
+ '%r_f_s',
+ '%r_i_ce',
+ '%r_i_hm',
+ '%r_i_lss',
+ '%r_i_p',
+ '%r_i_r',
+ '%r_i_s',
+ '%r_i_st',
+ '%r_inv',
+ '%r_j_s',
+ '%r_k_p',
+ '%r_k_r',
+ '%r_k_s',
+ '%r_l_lss',
+ '%r_l_p',
+ '%r_l_r',
+ '%r_l_s',
+ '%r_m_hm',
+ '%r_m_lss',
+ '%r_m_p',
+ '%r_m_r',
+ '%r_m_s',
+ '%r_matrix',
+ '%r_n_lss',
+ '%r_n_p',
+ '%r_n_r',
+ '%r_n_s',
+ '%r_norm',
+ '%r_o_lss',
+ '%r_o_p',
+ '%r_o_r',
+ '%r_o_s',
+ '%r_ones',
+ '%r_p',
+ '%r_p_s',
+ '%r_prod',
+ '%r_q_p',
+ '%r_q_r',
+ '%r_q_s',
+ '%r_r_lss',
+ '%r_r_p',
+ '%r_r_r',
+ '%r_r_s',
+ '%r_rand',
+ '%r_s',
+ '%r_s_hm',
+ '%r_s_lss',
+ '%r_s_p',
+ '%r_s_r',
+ '%r_s_s',
+ '%r_simp',
+ '%r_size',
+ '%r_string',
+ '%r_sum',
+ '%r_t',
+ '%r_tril',
+ '%r_triu',
+ '%r_v_lss',
+ '%r_v_p',
+ '%r_v_r',
+ '%r_v_s',
+ '%r_varn',
+ '%r_x_p',
+ '%r_x_r',
+ '%r_x_s',
+ '%r_y_p',
+ '%r_y_r',
+ '%r_y_s',
+ '%r_z_p',
+ '%r_z_r',
+ '%r_z_s',
+ '%s_1_hm',
+ '%s_1_i',
+ '%s_2_hm',
+ '%s_2_i',
+ '%s_3_hm',
+ '%s_3_i',
+ '%s_4_hm',
+ '%s_4_i',
+ '%s_5',
+ '%s_a_b',
+ '%s_a_hm',
+ '%s_a_i',
+ '%s_a_ip',
+ '%s_a_lss',
+ '%s_a_msp',
+ '%s_a_r',
+ '%s_a_sp',
+ '%s_and',
+ '%s_b_i',
+ '%s_b_s',
+ '%s_bezout',
+ '%s_c_b',
+ '%s_c_cblock',
+ '%s_c_lss',
+ '%s_c_r',
+ '%s_c_sp',
+ '%s_d_b',
+ '%s_d_i',
+ '%s_d_p',
+ '%s_d_r',
+ '%s_d_sp',
+ '%s_e',
+ '%s_f_b',
+ '%s_f_cblock',
+ '%s_f_lss',
+ '%s_f_r',
+ '%s_f_sp',
+ '%s_g_b',
+ '%s_g_s',
+ '%s_gcd',
+ '%s_grand',
+ '%s_h_b',
+ '%s_h_s',
+ '%s_i_b',
+ '%s_i_c',
+ '%s_i_ce',
+ '%s_i_h',
+ '%s_i_hm',
+ '%s_i_i',
+ '%s_i_lss',
+ '%s_i_p',
+ '%s_i_r',
+ '%s_i_s',
+ '%s_i_sp',
+ '%s_i_spb',
+ '%s_i_st',
+ '%s_j_i',
+ '%s_k_hm',
+ '%s_k_p',
+ '%s_k_r',
+ '%s_k_sp',
+ '%s_l_b',
+ '%s_l_hm',
+ '%s_l_i',
+ '%s_l_lss',
+ '%s_l_p',
+ '%s_l_r',
+ '%s_l_s',
+ '%s_l_sp',
+ '%s_lcm',
+ '%s_m_b',
+ '%s_m_hm',
+ '%s_m_i',
+ '%s_m_ip',
+ '%s_m_lss',
+ '%s_m_msp',
+ '%s_m_r',
+ '%s_matrix',
+ '%s_n_hm',
+ '%s_n_i',
+ '%s_n_l',
+ '%s_n_lss',
+ '%s_n_r',
+ '%s_n_st',
+ '%s_o_hm',
+ '%s_o_i',
+ '%s_o_l',
+ '%s_o_lss',
+ '%s_o_r',
+ '%s_o_st',
+ '%s_or',
+ '%s_p_b',
+ '%s_p_i',
+ '%s_pow',
+ '%s_q_hm',
+ '%s_q_i',
+ '%s_q_p',
+ '%s_q_r',
+ '%s_q_sp',
+ '%s_r_b',
+ '%s_r_i',
+ '%s_r_lss',
+ '%s_r_p',
+ '%s_r_r',
+ '%s_r_s',
+ '%s_r_sp',
+ '%s_s_b',
+ '%s_s_hm',
+ '%s_s_i',
+ '%s_s_ip',
+ '%s_s_lss',
+ '%s_s_r',
+ '%s_s_sp',
+ '%s_simp',
+ '%s_v_lss',
+ '%s_v_p',
+ '%s_v_r',
+ '%s_v_s',
+ '%s_x_b',
+ '%s_x_hm',
+ '%s_x_i',
+ '%s_x_r',
+ '%s_y_p',
+ '%s_y_r',
+ '%s_y_sp',
+ '%s_z_p',
+ '%s_z_r',
+ '%s_z_sp',
+ '%sn',
+ '%sp_a_s',
+ '%sp_a_sp',
+ '%sp_and',
+ '%sp_c_s',
+ '%sp_ceil',
+ '%sp_conj',
+ '%sp_cos',
+ '%sp_cumprod',
+ '%sp_cumsum',
+ '%sp_d_s',
+ '%sp_d_sp',
+ '%sp_det',
+ '%sp_diag',
+ '%sp_e',
+ '%sp_exp',
+ '%sp_f_s',
+ '%sp_floor',
+ '%sp_grand',
+ '%sp_gsort',
+ '%sp_i_ce',
+ '%sp_i_h',
+ '%sp_i_s',
+ '%sp_i_sp',
+ '%sp_i_st',
+ '%sp_int',
+ '%sp_inv',
+ '%sp_k_s',
+ '%sp_k_sp',
+ '%sp_l_s',
+ '%sp_l_sp',
+ '%sp_length',
+ '%sp_max',
+ '%sp_min',
+ '%sp_norm',
+ '%sp_or',
+ '%sp_p_s',
+ '%sp_prod',
+ '%sp_q_s',
+ '%sp_q_sp',
+ '%sp_r_s',
+ '%sp_r_sp',
+ '%sp_round',
+ '%sp_s_s',
+ '%sp_s_sp',
+ '%sp_sin',
+ '%sp_sqrt',
+ '%sp_string',
+ '%sp_sum',
+ '%sp_tril',
+ '%sp_triu',
+ '%sp_y_s',
+ '%sp_y_sp',
+ '%sp_z_s',
+ '%sp_z_sp',
+ '%spb_and',
+ '%spb_c_b',
+ '%spb_cumprod',
+ '%spb_cumsum',
+ '%spb_diag',
+ '%spb_e',
+ '%spb_f_b',
+ '%spb_g_b',
+ '%spb_g_spb',
+ '%spb_h_b',
+ '%spb_h_spb',
+ '%spb_i_b',
+ '%spb_i_ce',
+ '%spb_i_h',
+ '%spb_i_st',
+ '%spb_or',
+ '%spb_prod',
+ '%spb_sum',
+ '%spb_tril',
+ '%spb_triu',
+ '%st_6',
+ '%st_c_st',
+ '%st_e',
+ '%st_f_st',
+ '%st_i_b',
+ '%st_i_c',
+ '%st_i_fptr',
+ '%st_i_h',
+ '%st_i_i',
+ '%st_i_ip',
+ '%st_i_lss',
+ '%st_i_msp',
+ '%st_i_p',
+ '%st_i_r',
+ '%st_i_s',
+ '%st_i_sp',
+ '%st_i_spb',
+ '%st_i_st',
+ '%st_matrix',
+ '%st_n_c',
+ '%st_n_l',
+ '%st_n_mc',
+ '%st_n_p',
+ '%st_n_s',
+ '%st_o_c',
+ '%st_o_l',
+ '%st_o_mc',
+ '%st_o_p',
+ '%st_o_s',
+ '%st_o_tl',
+ '%st_p',
+ '%st_size',
+ '%st_string',
+ '%st_t',
+ '%ticks_i_h',
+ '%xls_e',
+ '%xls_p',
+ '%xlssheet_e',
+ '%xlssheet_p',
+ '%xlssheet_size',
+ '%xlssheet_string',
+ 'DominationRank',
+ 'G_make',
+ 'IsAScalar',
+ 'NDcost',
+ 'OS_Version',
+ 'PlotSparse',
+ 'ReadHBSparse',
+ 'TCL_CreateSlave',
+ 'abcd',
+ 'abinv',
+ 'accept_func_default',
+ 'accept_func_vfsa',
+ 'acf',
+ 'acosd',
+ 'acosh',
+ 'acoshm',
+ 'acosm',
+ 'acot',
+ 'acotd',
+ 'acoth',
+ 'acsc',
+ 'acscd',
+ 'acsch',
+ 'add_demo',
+ 'add_help_chapter',
+ 'add_module_help_chapter',
+ 'add_param',
+ 'add_profiling',
+ 'adj2sp',
+ 'aff2ab',
+ 'ana_style',
+ 'analpf',
+ 'analyze',
+ 'aplat',
+ 'apropos',
+ 'arhnk',
+ 'arl2',
+ 'arma2p',
+ 'arma2ss',
+ 'armac',
+ 'armax',
+ 'armax1',
+ 'arobasestring2strings',
+ 'arsimul',
+ 'ascii2string',
+ 'asciimat',
+ 'asec',
+ 'asecd',
+ 'asech',
+ 'asind',
+ 'asinh',
+ 'asinhm',
+ 'asinm',
+ 'assert_checkalmostequal',
+ 'assert_checkequal',
+ 'assert_checkerror',
+ 'assert_checkfalse',
+ 'assert_checkfilesequal',
+ 'assert_checktrue',
+ 'assert_comparecomplex',
+ 'assert_computedigits',
+ 'assert_cond2reltol',
+ 'assert_cond2reqdigits',
+ 'assert_generror',
+ 'atand',
+ 'atanh',
+ 'atanhm',
+ 'atanm',
+ 'atomsAutoload',
+ 'atomsAutoloadAdd',
+ 'atomsAutoloadDel',
+ 'atomsAutoloadList',
+ 'atomsCategoryList',
+ 'atomsCheckModule',
+ 'atomsDepTreeShow',
+ 'atomsGetConfig',
+ 'atomsGetInstalled',
+ 'atomsGetInstalledPath',
+ 'atomsGetLoaded',
+ 'atomsGetLoadedPath',
+ 'atomsInstall',
+ 'atomsIsInstalled',
+ 'atomsIsLoaded',
+ 'atomsList',
+ 'atomsLoad',
+ 'atomsQuit',
+ 'atomsRemove',
+ 'atomsRepositoryAdd',
+ 'atomsRepositoryDel',
+ 'atomsRepositoryList',
+ 'atomsRestoreConfig',
+ 'atomsSaveConfig',
+ 'atomsSearch',
+ 'atomsSetConfig',
+ 'atomsShow',
+ 'atomsSystemInit',
+ 'atomsSystemUpdate',
+ 'atomsTest',
+ 'atomsUpdate',
+ 'atomsVersion',
+ 'augment',
+ 'auread',
+ 'auwrite',
+ 'balreal',
+ 'bench_run',
+ 'bilin',
+ 'bilt',
+ 'bin2dec',
+ 'binomial',
+ 'bitand',
+ 'bitcmp',
+ 'bitget',
+ 'bitor',
+ 'bitset',
+ 'bitxor',
+ 'black',
+ 'blanks',
+ 'bloc2exp',
+ 'bloc2ss',
+ 'block_parameter_error',
+ 'bode',
+ 'bode_asymp',
+ 'bstap',
+ 'buttmag',
+ 'bvodeS',
+ 'bytecode',
+ 'bytecodewalk',
+ 'cainv',
+ 'calendar',
+ 'calerf',
+ 'calfrq',
+ 'canon',
+ 'casc',
+ 'cat',
+ 'cat_code',
+ 'cb_m2sci_gui',
+ 'ccontrg',
+ 'cell',
+ 'cell2mat',
+ 'cellstr',
+ 'center',
+ 'cepstrum',
+ 'cfspec',
+ 'char',
+ 'chart',
+ 'cheb1mag',
+ 'cheb2mag',
+ 'check_gateways',
+ 'check_modules_xml',
+ 'check_versions',
+ 'chepol',
+ 'chfact',
+ 'chsolve',
+ 'classmarkov',
+ 'clean_help',
+ 'clock',
+ 'cls2dls',
+ 'cmb_lin',
+ 'cmndred',
+ 'cmoment',
+ 'coding_ga_binary',
+ 'coding_ga_identity',
+ 'coff',
+ 'coffg',
+ 'colcomp',
+ 'colcompr',
+ 'colinout',
+ 'colregul',
+ 'companion',
+ 'complex',
+ 'compute_initial_temp',
+ 'cond',
+ 'cond2sp',
+ 'condestsp',
+ 'configure_msifort',
+ 'configure_msvc',
+ 'conjgrad',
+ 'cont_frm',
+ 'cont_mat',
+ 'contrss',
+ 'conv',
+ 'convert_to_float',
+ 'convertindex',
+ 'convol',
+ 'convol2d',
+ 'copfac',
+ 'correl',
+ 'cosd',
+ 'cosh',
+ 'coshm',
+ 'cosm',
+ 'cotd',
+ 'cotg',
+ 'coth',
+ 'cothm',
+ 'cov',
+ 'covar',
+ 'createXConfiguration',
+ 'createfun',
+ 'createstruct',
+ 'cross',
+ 'crossover_ga_binary',
+ 'crossover_ga_default',
+ 'csc',
+ 'cscd',
+ 'csch',
+ 'csgn',
+ 'csim',
+ 'cspect',
+ 'ctr_gram',
+ 'czt',
+ 'dae',
+ 'daeoptions',
+ 'damp',
+ 'datafit',
+ 'date',
+ 'datenum',
+ 'datevec',
+ 'dbphi',
+ 'dcf',
+ 'ddp',
+ 'dec2bin',
+ 'dec2hex',
+ 'dec2oct',
+ 'del_help_chapter',
+ 'del_module_help_chapter',
+ 'demo_begin',
+ 'demo_choose',
+ 'demo_compiler',
+ 'demo_end',
+ 'demo_file_choice',
+ 'demo_folder_choice',
+ 'demo_function_choice',
+ 'demo_gui',
+ 'demo_run',
+ 'demo_viewCode',
+ 'denom',
+ 'derivat',
+ 'derivative',
+ 'des2ss',
+ 'des2tf',
+ 'detectmsifort64tools',
+ 'detectmsvc64tools',
+ 'determ',
+ 'detr',
+ 'detrend',
+ 'devtools_run_builder',
+ 'dhnorm',
+ 'diff',
+ 'diophant',
+ 'dir',
+ 'dirname',
+ 'dispfiles',
+ 'dllinfo',
+ 'dscr',
+ 'dsimul',
+ 'dt_ility',
+ 'dtsi',
+ 'edit',
+ 'edit_error',
+ 'editor',
+ 'eigenmarkov',
+ 'eigs',
+ 'ell1mag',
+ 'enlarge_shape',
+ 'entropy',
+ 'eomday',
+ 'epred',
+ 'eqfir',
+ 'eqiir',
+ 'equil',
+ 'equil1',
+ 'erfinv',
+ 'etime',
+ 'eval',
+ 'evans',
+ 'evstr',
+ 'example_run',
+ 'expression2code',
+ 'extract_help_examples',
+ 'factor',
+ 'factorial',
+ 'factors',
+ 'faurre',
+ 'ffilt',
+ 'fft2',
+ 'fftshift',
+ 'fieldnames',
+ 'filt_sinc',
+ 'filter',
+ 'findABCD',
+ 'findAC',
+ 'findBDK',
+ 'findR',
+ 'find_freq',
+ 'find_links',
+ 'find_scicos_version',
+ 'findm',
+ 'findmsifortcompiler',
+ 'findmsvccompiler',
+ 'findx0BD',
+ 'firstnonsingleton',
+ 'fix',
+ 'fixedpointgcd',
+ 'flipdim',
+ 'flts',
+ 'fminsearch',
+ 'formatBlackTip',
+ 'formatBodeMagTip',
+ 'formatBodePhaseTip',
+ 'formatGainplotTip',
+ 'formatHallModuleTip',
+ 'formatHallPhaseTip',
+ 'formatNicholsGainTip',
+ 'formatNicholsPhaseTip',
+ 'formatNyquistTip',
+ 'formatPhaseplotTip',
+ 'formatSgridDampingTip',
+ 'formatSgridFreqTip',
+ 'formatZgridDampingTip',
+ 'formatZgridFreqTip',
+ 'format_txt',
+ 'fourplan',
+ 'frep2tf',
+ 'freson',
+ 'frfit',
+ 'frmag',
+ 'fseek_origin',
+ 'fsfirlin',
+ 'fspec',
+ 'fspecg',
+ 'fstabst',
+ 'ftest',
+ 'ftuneq',
+ 'fullfile',
+ 'fullrf',
+ 'fullrfk',
+ 'fun2string',
+ 'g_margin',
+ 'gainplot',
+ 'gamitg',
+ 'gcare',
+ 'gcd',
+ 'gencompilationflags_unix',
+ 'generateBlockImage',
+ 'generateBlockImages',
+ 'generic_i_ce',
+ 'generic_i_h',
+ 'generic_i_hm',
+ 'generic_i_s',
+ 'generic_i_st',
+ 'genlib',
+ 'genmarkov',
+ 'geomean',
+ 'getDiagramVersion',
+ 'getModelicaPath',
+ 'getPreferencesValue',
+ 'get_file_path',
+ 'get_function_path',
+ 'get_param',
+ 'get_profile',
+ 'get_scicos_version',
+ 'getd',
+ 'getscilabkeywords',
+ 'getshell',
+ 'gettklib',
+ 'gfare',
+ 'gfrancis',
+ 'givens',
+ 'glever',
+ 'gmres',
+ 'group',
+ 'gschur',
+ 'gspec',
+ 'gtild',
+ 'h2norm',
+ 'h_cl',
+ 'h_inf',
+ 'h_inf_st',
+ 'h_norm',
+ 'hallchart',
+ 'halt',
+ 'hank',
+ 'hankelsv',
+ 'harmean',
+ 'haveacompiler',
+ 'head_comments',
+ 'help',
+ 'help_from_sci',
+ 'help_skeleton',
+ 'hermit',
+ 'hex2dec',
+ 'hilb',
+ 'hilbert',
+ 'histc',
+ 'horner',
+ 'householder',
+ 'hrmt',
+ 'htrianr',
+ 'hypermat',
+ 'idct',
+ 'idst',
+ 'ifft',
+ 'ifftshift',
+ 'iir',
+ 'iirgroup',
+ 'iirlp',
+ 'iirmod',
+ 'ilib_build',
+ 'ilib_build_jar',
+ 'ilib_compile',
+ 'ilib_for_link',
+ 'ilib_gen_Make',
+ 'ilib_gen_Make_unix',
+ 'ilib_gen_cleaner',
+ 'ilib_gen_gateway',
+ 'ilib_gen_loader',
+ 'ilib_include_flag',
+ 'ilib_mex_build',
+ 'im_inv',
+ 'importScicosDiagram',
+ 'importScicosPal',
+ 'importXcosDiagram',
+ 'imrep2ss',
+ 'ind2sub',
+ 'inistate',
+ 'init_ga_default',
+ 'init_param',
+ 'initial_scicos_tables',
+ 'input',
+ 'instruction2code',
+ 'intc',
+ 'intdec',
+ 'integrate',
+ 'interp1',
+ 'interpln',
+ 'intersect',
+ 'intl',
+ 'intsplin',
+ 'inttrap',
+ 'inv_coeff',
+ 'invr',
+ 'invrs',
+ 'invsyslin',
+ 'iqr',
+ 'isLeapYear',
+ 'is_absolute_path',
+ 'is_param',
+ 'iscell',
+ 'iscellstr',
+ 'iscolumn',
+ 'isempty',
+ 'isfield',
+ 'isinf',
+ 'ismatrix',
+ 'isnan',
+ 'isrow',
+ 'isscalar',
+ 'issparse',
+ 'issquare',
+ 'isstruct',
+ 'isvector',
+ 'jmat',
+ 'justify',
+ 'kalm',
+ 'karmarkar',
+ 'kernel',
+ 'kpure',
+ 'krac2',
+ 'kroneck',
+ 'lattn',
+ 'lattp',
+ 'launchtest',
+ 'lcf',
+ 'lcm',
+ 'lcmdiag',
+ 'leastsq',
+ 'leqe',
+ 'leqr',
+ 'lev',
+ 'levin',
+ 'lex_sort',
+ 'lft',
+ 'lin',
+ 'lin2mu',
+ 'lincos',
+ 'lindquist',
+ 'linf',
+ 'linfn',
+ 'linsolve',
+ 'linspace',
+ 'list2vec',
+ 'list_param',
+ 'listfiles',
+ 'listfunctions',
+ 'listvarinfile',
+ 'lmisolver',
+ 'lmitool',
+ 'loadXcosLibs',
+ 'loadmatfile',
+ 'loadwave',
+ 'log10',
+ 'log2',
+ 'logm',
+ 'logspace',
+ 'lqe',
+ 'lqg',
+ 'lqg2stan',
+ 'lqg_ltr',
+ 'lqr',
+ 'ls',
+ 'lyap',
+ 'm2sci_gui',
+ 'm_circle',
+ 'macglov',
+ 'macrovar',
+ 'mad',
+ 'makecell',
+ 'manedit',
+ 'mapsound',
+ 'markp2ss',
+ 'matfile2sci',
+ 'mdelete',
+ 'mean',
+ 'meanf',
+ 'median',
+ 'members',
+ 'mese',
+ 'meshgrid',
+ 'mfft',
+ 'mfile2sci',
+ 'minreal',
+ 'minss',
+ 'mkdir',
+ 'modulo',
+ 'moment',
+ 'mrfit',
+ 'msd',
+ 'mstr2sci',
+ 'mtlb',
+ 'mtlb_0',
+ 'mtlb_a',
+ 'mtlb_all',
+ 'mtlb_any',
+ 'mtlb_axes',
+ 'mtlb_axis',
+ 'mtlb_beta',
+ 'mtlb_box',
+ 'mtlb_choices',
+ 'mtlb_close',
+ 'mtlb_colordef',
+ 'mtlb_cond',
+ 'mtlb_cov',
+ 'mtlb_cumprod',
+ 'mtlb_cumsum',
+ 'mtlb_dec2hex',
+ 'mtlb_delete',
+ 'mtlb_diag',
+ 'mtlb_diff',
+ 'mtlb_dir',
+ 'mtlb_double',
+ 'mtlb_e',
+ 'mtlb_echo',
+ 'mtlb_error',
+ 'mtlb_eval',
+ 'mtlb_exist',
+ 'mtlb_eye',
+ 'mtlb_false',
+ 'mtlb_fft',
+ 'mtlb_fftshift',
+ 'mtlb_filter',
+ 'mtlb_find',
+ 'mtlb_findstr',
+ 'mtlb_fliplr',
+ 'mtlb_fopen',
+ 'mtlb_format',
+ 'mtlb_fprintf',
+ 'mtlb_fread',
+ 'mtlb_fscanf',
+ 'mtlb_full',
+ 'mtlb_fwrite',
+ 'mtlb_get',
+ 'mtlb_grid',
+ 'mtlb_hold',
+ 'mtlb_i',
+ 'mtlb_ifft',
+ 'mtlb_image',
+ 'mtlb_imp',
+ 'mtlb_int16',
+ 'mtlb_int32',
+ 'mtlb_int8',
+ 'mtlb_is',
+ 'mtlb_isa',
+ 'mtlb_isfield',
+ 'mtlb_isletter',
+ 'mtlb_isspace',
+ 'mtlb_l',
+ 'mtlb_legendre',
+ 'mtlb_linspace',
+ 'mtlb_logic',
+ 'mtlb_logical',
+ 'mtlb_loglog',
+ 'mtlb_lower',
+ 'mtlb_max',
+ 'mtlb_mean',
+ 'mtlb_median',
+ 'mtlb_mesh',
+ 'mtlb_meshdom',
+ 'mtlb_min',
+ 'mtlb_more',
+ 'mtlb_num2str',
+ 'mtlb_ones',
+ 'mtlb_pcolor',
+ 'mtlb_plot',
+ 'mtlb_prod',
+ 'mtlb_qr',
+ 'mtlb_qz',
+ 'mtlb_rand',
+ 'mtlb_randn',
+ 'mtlb_rcond',
+ 'mtlb_realmax',
+ 'mtlb_realmin',
+ 'mtlb_s',
+ 'mtlb_semilogx',
+ 'mtlb_semilogy',
+ 'mtlb_setstr',
+ 'mtlb_size',
+ 'mtlb_sort',
+ 'mtlb_sortrows',
+ 'mtlb_sprintf',
+ 'mtlb_sscanf',
+ 'mtlb_std',
+ 'mtlb_strcmp',
+ 'mtlb_strcmpi',
+ 'mtlb_strfind',
+ 'mtlb_strrep',
+ 'mtlb_subplot',
+ 'mtlb_sum',
+ 'mtlb_t',
+ 'mtlb_toeplitz',
+ 'mtlb_tril',
+ 'mtlb_triu',
+ 'mtlb_true',
+ 'mtlb_type',
+ 'mtlb_uint16',
+ 'mtlb_uint32',
+ 'mtlb_uint8',
+ 'mtlb_upper',
+ 'mtlb_var',
+ 'mtlb_zeros',
+ 'mu2lin',
+ 'mutation_ga_binary',
+ 'mutation_ga_default',
+ 'mvcorrel',
+ 'mvvacov',
+ 'nancumsum',
+ 'nand2mean',
+ 'nanmax',
+ 'nanmean',
+ 'nanmeanf',
+ 'nanmedian',
+ 'nanmin',
+ 'nanreglin',
+ 'nanstdev',
+ 'nansum',
+ 'narsimul',
+ 'ndgrid',
+ 'ndims',
+ 'nehari',
+ 'neigh_func_csa',
+ 'neigh_func_default',
+ 'neigh_func_fsa',
+ 'neigh_func_vfsa',
+ 'neldermead_cget',
+ 'neldermead_configure',
+ 'neldermead_costf',
+ 'neldermead_defaultoutput',
+ 'neldermead_destroy',
+ 'neldermead_function',
+ 'neldermead_get',
+ 'neldermead_log',
+ 'neldermead_new',
+ 'neldermead_restart',
+ 'neldermead_search',
+ 'neldermead_updatesimp',
+ 'nextpow2',
+ 'nfreq',
+ 'nicholschart',
+ 'nlev',
+ 'nmplot_cget',
+ 'nmplot_configure',
+ 'nmplot_contour',
+ 'nmplot_destroy',
+ 'nmplot_function',
+ 'nmplot_get',
+ 'nmplot_historyplot',
+ 'nmplot_log',
+ 'nmplot_new',
+ 'nmplot_outputcmd',
+ 'nmplot_restart',
+ 'nmplot_search',
+ 'nmplot_simplexhistory',
+ 'noisegen',
+ 'nonreg_test_run',
+ 'now',
+ 'nthroot',
+ 'null',
+ 'num2cell',
+ 'numderivative',
+ 'numdiff',
+ 'numer',
+ 'nyquist',
+ 'nyquistfrequencybounds',
+ 'obs_gram',
+ 'obscont',
+ 'observer',
+ 'obsv_mat',
+ 'obsvss',
+ 'oct2dec',
+ 'odeoptions',
+ 'optim_ga',
+ 'optim_moga',
+ 'optim_nsga',
+ 'optim_nsga2',
+ 'optim_sa',
+ 'optimbase_cget',
+ 'optimbase_checkbounds',
+ 'optimbase_checkcostfun',
+ 'optimbase_checkx0',
+ 'optimbase_configure',
+ 'optimbase_destroy',
+ 'optimbase_function',
+ 'optimbase_get',
+ 'optimbase_hasbounds',
+ 'optimbase_hasconstraints',
+ 'optimbase_hasnlcons',
+ 'optimbase_histget',
+ 'optimbase_histset',
+ 'optimbase_incriter',
+ 'optimbase_isfeasible',
+ 'optimbase_isinbounds',
+ 'optimbase_isinnonlincons',
+ 'optimbase_log',
+ 'optimbase_logshutdown',
+ 'optimbase_logstartup',
+ 'optimbase_new',
+ 'optimbase_outputcmd',
+ 'optimbase_outstruct',
+ 'optimbase_proj2bnds',
+ 'optimbase_set',
+ 'optimbase_stoplog',
+ 'optimbase_terminate',
+ 'optimget',
+ 'optimplotfunccount',
+ 'optimplotfval',
+ 'optimplotx',
+ 'optimset',
+ 'optimsimplex_center',
+ 'optimsimplex_check',
+ 'optimsimplex_compsomefv',
+ 'optimsimplex_computefv',
+ 'optimsimplex_deltafv',
+ 'optimsimplex_deltafvmax',
+ 'optimsimplex_destroy',
+ 'optimsimplex_dirmat',
+ 'optimsimplex_fvmean',
+ 'optimsimplex_fvstdev',
+ 'optimsimplex_fvvariance',
+ 'optimsimplex_getall',
+ 'optimsimplex_getallfv',
+ 'optimsimplex_getallx',
+ 'optimsimplex_getfv',
+ 'optimsimplex_getn',
+ 'optimsimplex_getnbve',
+ 'optimsimplex_getve',
+ 'optimsimplex_getx',
+ 'optimsimplex_gradientfv',
+ 'optimsimplex_log',
+ 'optimsimplex_new',
+ 'optimsimplex_reflect',
+ 'optimsimplex_setall',
+ 'optimsimplex_setallfv',
+ 'optimsimplex_setallx',
+ 'optimsimplex_setfv',
+ 'optimsimplex_setn',
+ 'optimsimplex_setnbve',
+ 'optimsimplex_setve',
+ 'optimsimplex_setx',
+ 'optimsimplex_shrink',
+ 'optimsimplex_size',
+ 'optimsimplex_sort',
+ 'optimsimplex_xbar',
+ 'orth',
+ 'output_ga_default',
+ 'output_moga_default',
+ 'output_nsga2_default',
+ 'output_nsga_default',
+ 'p_margin',
+ 'pack',
+ 'pareto_filter',
+ 'parrot',
+ 'pbig',
+ 'pca',
+ 'pcg',
+ 'pdiv',
+ 'pen2ea',
+ 'pencan',
+ 'pencost',
+ 'penlaur',
+ 'perctl',
+ 'perl',
+ 'perms',
+ 'permute',
+ 'pertrans',
+ 'pfactors',
+ 'pfss',
+ 'phasemag',
+ 'phaseplot',
+ 'phc',
+ 'pinv',
+ 'playsnd',
+ 'plotprofile',
+ 'plzr',
+ 'pmodulo',
+ 'pol2des',
+ 'pol2str',
+ 'polar',
+ 'polfact',
+ 'prbs_a',
+ 'prettyprint',
+ 'primes',
+ 'princomp',
+ 'profile',
+ 'proj',
+ 'projsl',
+ 'projspec',
+ 'psmall',
+ 'pspect',
+ 'qmr',
+ 'qpsolve',
+ 'quart',
+ 'quaskro',
+ 'rafiter',
+ 'randpencil',
+ 'range',
+ 'rank',
+ 'readxls',
+ 'recompilefunction',
+ 'recons',
+ 'reglin',
+ 'regress',
+ 'remezb',
+ 'remove_param',
+ 'remove_profiling',
+ 'repfreq',
+ 'replace_Ix_by_Fx',
+ 'repmat',
+ 'reset_profiling',
+ 'resize_matrix',
+ 'returntoscilab',
+ 'rhs2code',
+ 'ric_desc',
+ 'riccati',
+ 'rmdir',
+ 'routh_t',
+ 'rowcomp',
+ 'rowcompr',
+ 'rowinout',
+ 'rowregul',
+ 'rowshuff',
+ 'rref',
+ 'sample',
+ 'samplef',
+ 'samwr',
+ 'savematfile',
+ 'savewave',
+ 'scanf',
+ 'sci2exp',
+ 'sciGUI_init',
+ 'sci_sparse',
+ 'scicos_getvalue',
+ 'scicos_simulate',
+ 'scicos_workspace_init',
+ 'scisptdemo',
+ 'scitest',
+ 'sdiff',
+ 'sec',
+ 'secd',
+ 'sech',
+ 'selection_ga_elitist',
+ 'selection_ga_random',
+ 'sensi',
+ 'setPreferencesValue',
+ 'set_param',
+ 'setdiff',
+ 'sgrid',
+ 'show_margins',
+ 'show_pca',
+ 'showprofile',
+ 'signm',
+ 'sinc',
+ 'sincd',
+ 'sind',
+ 'sinh',
+ 'sinhm',
+ 'sinm',
+ 'sm2des',
+ 'sm2ss',
+ 'smga',
+ 'smooth',
+ 'solve',
+ 'sound',
+ 'soundsec',
+ 'sp2adj',
+ 'spaninter',
+ 'spanplus',
+ 'spantwo',
+ 'specfact',
+ 'speye',
+ 'sprand',
+ 'spzeros',
+ 'sqroot',
+ 'sqrtm',
+ 'squarewave',
+ 'squeeze',
+ 'srfaur',
+ 'srkf',
+ 'ss2des',
+ 'ss2ss',
+ 'ss2tf',
+ 'sskf',
+ 'ssprint',
+ 'ssrand',
+ 'st_deviation',
+ 'st_i_generic',
+ 'st_ility',
+ 'stabil',
+ 'statgain',
+ 'stdev',
+ 'stdevf',
+ 'steadycos',
+ 'strange',
+ 'strcmpi',
+ 'struct',
+ 'sub2ind',
+ 'sva',
+ 'svplot',
+ 'sylm',
+ 'sylv',
+ 'sysconv',
+ 'sysdiag',
+ 'sysfact',
+ 'syslin',
+ 'syssize',
+ 'system',
+ 'systmat',
+ 'tabul',
+ 'tand',
+ 'tanh',
+ 'tanhm',
+ 'tanm',
+ 'tbx_build_blocks',
+ 'tbx_build_cleaner',
+ 'tbx_build_gateway',
+ 'tbx_build_gateway_clean',
+ 'tbx_build_gateway_loader',
+ 'tbx_build_help',
+ 'tbx_build_help_loader',
+ 'tbx_build_loader',
+ 'tbx_build_localization',
+ 'tbx_build_macros',
+ 'tbx_build_pal_loader',
+ 'tbx_build_src',
+ 'tbx_builder',
+ 'tbx_builder_gateway',
+ 'tbx_builder_gateway_lang',
+ 'tbx_builder_help',
+ 'tbx_builder_help_lang',
+ 'tbx_builder_macros',
+ 'tbx_builder_src',
+ 'tbx_builder_src_lang',
+ 'tbx_generate_pofile',
+ 'temp_law_csa',
+ 'temp_law_default',
+ 'temp_law_fsa',
+ 'temp_law_huang',
+ 'temp_law_vfsa',
+ 'test_clean',
+ 'test_on_columns',
+ 'test_run',
+ 'test_run_level',
+ 'testexamples',
+ 'tf2des',
+ 'tf2ss',
+ 'thrownan',
+ 'tic',
+ 'time_id',
+ 'toc',
+ 'toeplitz',
+ 'tokenpos',
+ 'toolboxes',
+ 'trace',
+ 'trans',
+ 'translatepaths',
+ 'tree2code',
+ 'trfmod',
+ 'trianfml',
+ 'trimmean',
+ 'trisolve',
+ 'trzeros',
+ 'typeof',
+ 'ui_observer',
+ 'union',
+ 'unique',
+ 'unit_test_run',
+ 'unix_g',
+ 'unix_s',
+ 'unix_w',
+ 'unix_x',
+ 'unobs',
+ 'unpack',
+ 'unwrap',
+ 'variance',
+ 'variancef',
+ 'vec2list',
+ 'vectorfind',
+ 'ver',
+ 'warnobsolete',
+ 'wavread',
+ 'wavwrite',
+ 'wcenter',
+ 'weekday',
+ 'wfir',
+ 'wfir_gui',
+ 'whereami',
+ 'who_user',
+ 'whos',
+ 'wiener',
+ 'wigner',
+ 'window',
+ 'winlist',
+ 'with_javasci',
+ 'with_macros_source',
+ 'with_modelica_compiler',
+ 'with_tk',
+ 'xcorr',
+ 'xcosBlockEval',
+ 'xcosBlockInterface',
+ 'xcosCodeGeneration',
+ 'xcosConfigureModelica',
+ 'xcosPal',
+ 'xcosPalAdd',
+ 'xcosPalAddBlock',
+ 'xcosPalExport',
+ 'xcosPalGenerateAllIcons',
+ 'xcosShowBlockWarning',
+ 'xcosValidateBlockSet',
+ 'xcosValidateCompareBlock',
+ 'xcos_compile',
+ 'xcos_debug_gui',
+ 'xcos_run',
+ 'xcos_simulate',
+ 'xcov',
+ 'xmltochm',
+ 'xmltoformat',
+ 'xmltohtml',
+ 'xmltojar',
+ 'xmltopdf',
+ 'xmltops',
+ 'xmltoweb',
+ 'yulewalk',
+ 'zeropen',
+ 'zgrid',
+ 'zpbutt',
+ 'zpch1',
+ 'zpch2',
+ 'zpell',
+)
+
+variables_kw = (
+ '$',
+ '%F',
+ '%T',
+ '%e',
+ '%eps',
+ '%f',
+ '%fftw',
+ '%gui',
+ '%i',
+ '%inf',
+ '%io',
+ '%modalWarning',
+ '%nan',
+ '%pi',
+ '%s',
+ '%t',
+ '%tk',
+ '%toolboxes',
+ '%toolboxes_dir',
+ '%z',
+ 'PWD',
+ 'SCI',
+ 'SCIHOME',
+ 'TMPDIR',
+ 'arnoldilib',
+ 'assertlib',
+ 'atomslib',
+ 'cacsdlib',
+ 'compatibility_functilib',
+ 'corelib',
+ 'data_structureslib',
+ 'demo_toolslib',
+ 'development_toolslib',
+ 'differential_equationlib',
+ 'dynamic_linklib',
+ 'elementary_functionslib',
+ 'enull',
+ 'evoid',
+ 'external_objectslib',
+ 'fd',
+ 'fileiolib',
+ 'functionslib',
+ 'genetic_algorithmslib',
+ 'helptoolslib',
+ 'home',
+ 'integerlib',
+ 'interpolationlib',
+ 'iolib',
+ 'jnull',
+ 'jvoid',
+ 'linear_algebralib',
+ 'm2scilib',
+ 'matiolib',
+ 'modules_managerlib',
+ 'neldermeadlib',
+ 'optimbaselib',
+ 'optimizationlib',
+ 'optimsimplexlib',
+ 'output_streamlib',
+ 'overloadinglib',
+ 'parameterslib',
+ 'polynomialslib',
+ 'preferenceslib',
+ 'randliblib',
+ 'scicos_autolib',
+ 'scicos_utilslib',
+ 'scinoteslib',
+ 'signal_processinglib',
+ 'simulated_annealinglib',
+ 'soundlib',
+ 'sparselib',
+ 'special_functionslib',
+ 'spreadsheetlib',
+ 'statisticslib',
+ 'stringlib',
+ 'tclscilib',
+ 'timelib',
+ 'umfpacklib',
+ 'xcoslib',
+)
+
+if __name__ == '__main__':
+ import subprocess
+ from pygments.util import format_lines
+
+ mapping = {'variables': 'builtin'}
+
+ def extract_completion(var_type):
+ s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = s.communicate('''\
+fd = mopen("/dev/stderr", "wt");
+mputl(strcat(completion("", "%s"), "||"), fd);
+mclose(fd)\n''' % var_type)
+ if '||' not in output[1]:
+ raise Exception(output[0])
+ return output[1].strip().split('||')
+
+ new_data = {}
+ for t in ('functions', 'commands', 'macros', 'variables'):
+ new_data[t] = extract_completion(t)
+
+ with open(__file__) as f:
+ content = f.read()
+
+ header = content[:content.find('# Autogenerated')]
+ footer = content[content.find("if __name__ == '__main__':"):]
+
+ with open(__file__, 'w') as f:
+ f.write(header)
+ f.write('# Autogenerated\n\n')
+ for k, v in sorted(new_data.iteritems()):
+ f.write(format_lines(k + '_kw', v) + '\n\n')
+ f.write(footer)
diff --git a/pygments/lexers/_sourcemod_builtins.py b/pygments/lexers/_sourcemod_builtins.py
new file mode 100644
index 00000000..40ef5299
--- /dev/null
+++ b/pygments/lexers/_sourcemod_builtins.py
@@ -0,0 +1,1161 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._sourcemod_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file contains the names of SourceMod functions.
+ It is able to re-generate itself.
+
+ Do not edit the FUNCTIONS list by hand.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+FUNCTIONS = (
+ 'OnEntityCreated',
+ 'OnEntityDestroyed',
+ 'OnGetGameDescription',
+ 'OnLevelInit',
+ 'SDKHook',
+ 'SDKHookEx',
+ 'SDKUnhook',
+ 'SDKHooks_TakeDamage',
+ 'SDKHooks_DropWeapon',
+ 'TopMenuHandler',
+ 'CreateTopMenu',
+ 'LoadTopMenuConfig',
+ 'AddToTopMenu',
+ 'GetTopMenuInfoString',
+ 'GetTopMenuObjName',
+ 'RemoveFromTopMenu',
+ 'DisplayTopMenu',
+ 'DisplayTopMenuCategory',
+ 'FindTopMenuCategory',
+ 'SetTopMenuTitleCaching',
+ 'OnAdminMenuCreated',
+ 'OnAdminMenuReady',
+ 'GetAdminTopMenu',
+ 'AddTargetsToMenu',
+ 'AddTargetsToMenu2',
+ 'RedisplayAdminMenu',
+ 'TEHook',
+ 'AddTempEntHook',
+ 'RemoveTempEntHook',
+ 'TE_Start',
+ 'TE_IsValidProp',
+ 'TE_WriteNum',
+ 'TE_ReadNum',
+ 'TE_WriteFloat',
+ 'TE_ReadFloat',
+ 'TE_WriteVector',
+ 'TE_ReadVector',
+ 'TE_WriteAngles',
+ 'TE_WriteFloatArray',
+ 'TE_Send',
+ 'TE_WriteEncodedEnt',
+ 'TE_SendToAll',
+ 'TE_SendToClient',
+ 'CreateKeyValues',
+ 'KvSetString',
+ 'KvSetNum',
+ 'KvSetUInt64',
+ 'KvSetFloat',
+ 'KvSetColor',
+ 'KvSetVector',
+ 'KvGetString',
+ 'KvGetNum',
+ 'KvGetFloat',
+ 'KvGetColor',
+ 'KvGetUInt64',
+ 'KvGetVector',
+ 'KvJumpToKey',
+ 'KvJumpToKeySymbol',
+ 'KvGotoFirstSubKey',
+ 'KvGotoNextKey',
+ 'KvSavePosition',
+ 'KvDeleteKey',
+ 'KvDeleteThis',
+ 'KvGoBack',
+ 'KvRewind',
+ 'KvGetSectionName',
+ 'KvSetSectionName',
+ 'KvGetDataType',
+ 'KeyValuesToFile',
+ 'FileToKeyValues',
+ 'StringToKeyValues',
+ 'KvSetEscapeSequences',
+ 'KvNodesInStack',
+ 'KvCopySubkeys',
+ 'KvFindKeyById',
+ 'KvGetNameSymbol',
+ 'KvGetSectionSymbol',
+ 'TE_SetupSparks',
+ 'TE_SetupSmoke',
+ 'TE_SetupDust',
+ 'TE_SetupMuzzleFlash',
+ 'TE_SetupMetalSparks',
+ 'TE_SetupEnergySplash',
+ 'TE_SetupArmorRicochet',
+ 'TE_SetupGlowSprite',
+ 'TE_SetupExplosion',
+ 'TE_SetupBloodSprite',
+ 'TE_SetupBeamRingPoint',
+ 'TE_SetupBeamPoints',
+ 'TE_SetupBeamLaser',
+ 'TE_SetupBeamRing',
+ 'TE_SetupBeamFollow',
+ 'HookEvent',
+ 'HookEventEx',
+ 'UnhookEvent',
+ 'CreateEvent',
+ 'FireEvent',
+ 'CancelCreatedEvent',
+ 'GetEventBool',
+ 'SetEventBool',
+ 'GetEventInt',
+ 'SetEventInt',
+ 'GetEventFloat',
+ 'SetEventFloat',
+ 'GetEventString',
+ 'SetEventString',
+ 'GetEventName',
+ 'SetEventBroadcast',
+ 'GetUserMessageType',
+ 'GetUserMessageId',
+ 'GetUserMessageName',
+ 'StartMessage',
+ 'StartMessageEx',
+ 'EndMessage',
+ 'MsgHook',
+ 'MsgPostHook',
+ 'HookUserMessage',
+ 'UnhookUserMessage',
+ 'StartMessageAll',
+ 'StartMessageOne',
+ 'InactivateClient',
+ 'ReconnectClient',
+ 'GetMaxEntities',
+ 'GetEntityCount',
+ 'IsValidEntity',
+ 'IsValidEdict',
+ 'IsEntNetworkable',
+ 'CreateEdict',
+ 'RemoveEdict',
+ 'GetEdictFlags',
+ 'SetEdictFlags',
+ 'GetEdictClassname',
+ 'GetEntityNetClass',
+ 'ChangeEdictState',
+ 'GetEntData',
+ 'SetEntData',
+ 'GetEntDataFloat',
+ 'SetEntDataFloat',
+ 'GetEntDataEnt2',
+ 'SetEntDataEnt2',
+ 'GetEntDataVector',
+ 'SetEntDataVector',
+ 'GetEntDataString',
+ 'SetEntDataString',
+ 'FindSendPropOffs',
+ 'FindSendPropInfo',
+ 'FindDataMapOffs',
+ 'FindDataMapInfo',
+ 'GetEntSendPropOffs',
+ 'GetEntProp',
+ 'SetEntProp',
+ 'GetEntPropFloat',
+ 'SetEntPropFloat',
+ 'GetEntPropEnt',
+ 'SetEntPropEnt',
+ 'GetEntPropVector',
+ 'SetEntPropVector',
+ 'GetEntPropString',
+ 'SetEntPropString',
+ 'GetEntPropArraySize',
+ 'GetEntDataArray',
+ 'SetEntDataArray',
+ 'GetEntityAddress',
+ 'GetEntityClassname',
+ 'float',
+ 'FloatMul',
+ 'FloatDiv',
+ 'FloatAdd',
+ 'FloatSub',
+ 'FloatFraction',
+ 'RoundToZero',
+ 'RoundToCeil',
+ 'RoundToFloor',
+ 'RoundToNearest',
+ 'FloatCompare',
+ 'SquareRoot',
+ 'Pow',
+ 'Exponential',
+ 'Logarithm',
+ 'Sine',
+ 'Cosine',
+ 'Tangent',
+ 'FloatAbs',
+ 'ArcTangent',
+ 'ArcCosine',
+ 'ArcSine',
+ 'ArcTangent2',
+ 'RoundFloat',
+ 'operator%',
+ 'DegToRad',
+ 'RadToDeg',
+ 'GetURandomInt',
+ 'GetURandomFloat',
+ 'SetURandomSeed',
+ 'SetURandomSeedSimple',
+ 'RemovePlayerItem',
+ 'GivePlayerItem',
+ 'GetPlayerWeaponSlot',
+ 'IgniteEntity',
+ 'ExtinguishEntity',
+ 'TeleportEntity',
+ 'ForcePlayerSuicide',
+ 'SlapPlayer',
+ 'FindEntityByClassname',
+ 'GetClientEyeAngles',
+ 'CreateEntityByName',
+ 'DispatchSpawn',
+ 'DispatchKeyValue',
+ 'DispatchKeyValueFloat',
+ 'DispatchKeyValueVector',
+ 'GetClientAimTarget',
+ 'GetTeamCount',
+ 'GetTeamName',
+ 'GetTeamScore',
+ 'SetTeamScore',
+ 'GetTeamClientCount',
+ 'SetEntityModel',
+ 'GetPlayerDecalFile',
+ 'GetPlayerJingleFile',
+ 'GetServerNetStats',
+ 'EquipPlayerWeapon',
+ 'ActivateEntity',
+ 'SetClientInfo',
+ 'GivePlayerAmmo',
+ 'SetClientListeningFlags',
+ 'GetClientListeningFlags',
+ 'SetListenOverride',
+ 'GetListenOverride',
+ 'IsClientMuted',
+ 'TR_GetPointContents',
+ 'TR_GetPointContentsEnt',
+ 'TR_TraceRay',
+ 'TR_TraceHull',
+ 'TR_TraceRayFilter',
+ 'TR_TraceHullFilter',
+ 'TR_TraceRayEx',
+ 'TR_TraceHullEx',
+ 'TR_TraceRayFilterEx',
+ 'TR_TraceHullFilterEx',
+ 'TR_GetFraction',
+ 'TR_GetEndPosition',
+ 'TR_GetEntityIndex',
+ 'TR_DidHit',
+ 'TR_GetHitGroup',
+ 'TR_GetPlaneNormal',
+ 'TR_PointOutsideWorld',
+ 'SortIntegers',
+ 'SortFloats',
+ 'SortStrings',
+ 'SortFunc1D',
+ 'SortCustom1D',
+ 'SortCustom2D',
+ 'SortADTArray',
+ 'SortFuncADTArray',
+ 'SortADTArrayCustom',
+ 'CompileRegex',
+ 'MatchRegex',
+ 'GetRegexSubString',
+ 'SimpleRegexMatch',
+ 'TF2_GetPlayerClass',
+ 'TF2_SetPlayerClass',
+ 'TF2_RemoveWeaponSlot',
+ 'TF2_RemoveAllWeapons',
+ 'TF2_IsPlayerInCondition',
+ 'TF2_GetObjectType',
+ 'TF2_GetObjectMode',
+ 'NominateMap',
+ 'RemoveNominationByMap',
+ 'RemoveNominationByOwner',
+ 'GetExcludeMapList',
+ 'GetNominatedMapList',
+ 'CanMapChooserStartVote',
+ 'InitiateMapChooserVote',
+ 'HasEndOfMapVoteFinished',
+ 'EndOfMapVoteEnabled',
+ 'OnNominationRemoved',
+ 'OnMapVoteStarted',
+ 'CreateTimer',
+ 'KillTimer',
+ 'TriggerTimer',
+ 'GetTickedTime',
+ 'GetMapTimeLeft',
+ 'GetMapTimeLimit',
+ 'ExtendMapTimeLimit',
+ 'GetTickInterval',
+ 'OnMapTimeLeftChanged',
+ 'IsServerProcessing',
+ 'CreateDataTimer',
+ 'ByteCountToCells',
+ 'CreateArray',
+ 'ClearArray',
+ 'CloneArray',
+ 'ResizeArray',
+ 'GetArraySize',
+ 'PushArrayCell',
+ 'PushArrayString',
+ 'PushArrayArray',
+ 'GetArrayCell',
+ 'GetArrayString',
+ 'GetArrayArray',
+ 'SetArrayCell',
+ 'SetArrayString',
+ 'SetArrayArray',
+ 'ShiftArrayUp',
+ 'RemoveFromArray',
+ 'SwapArrayItems',
+ 'FindStringInArray',
+ 'FindValueInArray',
+ 'ProcessTargetString',
+ 'ReplyToTargetError',
+ 'MultiTargetFilter',
+ 'AddMultiTargetFilter',
+ 'RemoveMultiTargetFilter',
+ 'OnBanClient',
+ 'OnBanIdentity',
+ 'OnRemoveBan',
+ 'BanClient',
+ 'BanIdentity',
+ 'RemoveBan',
+ 'CreateTrie',
+ 'SetTrieValue',
+ 'SetTrieArray',
+ 'SetTrieString',
+ 'GetTrieValue',
+ 'GetTrieArray',
+ 'GetTrieString',
+ 'RemoveFromTrie',
+ 'ClearTrie',
+ 'GetTrieSize',
+ 'GetFunctionByName',
+ 'CreateGlobalForward',
+ 'CreateForward',
+ 'GetForwardFunctionCount',
+ 'AddToForward',
+ 'RemoveFromForward',
+ 'RemoveAllFromForward',
+ 'Call_StartForward',
+ 'Call_StartFunction',
+ 'Call_PushCell',
+ 'Call_PushCellRef',
+ 'Call_PushFloat',
+ 'Call_PushFloatRef',
+ 'Call_PushArray',
+ 'Call_PushArrayEx',
+ 'Call_PushString',
+ 'Call_PushStringEx',
+ 'Call_Finish',
+ 'Call_Cancel',
+ 'NativeCall',
+ 'CreateNative',
+ 'ThrowNativeError',
+ 'GetNativeStringLength',
+ 'GetNativeString',
+ 'SetNativeString',
+ 'GetNativeCell',
+ 'GetNativeCellRef',
+ 'SetNativeCellRef',
+ 'GetNativeArray',
+ 'SetNativeArray',
+ 'FormatNativeString',
+ 'RequestFrameCallback',
+ 'RequestFrame',
+ 'OnRebuildAdminCache',
+ 'DumpAdminCache',
+ 'AddCommandOverride',
+ 'GetCommandOverride',
+ 'UnsetCommandOverride',
+ 'CreateAdmGroup',
+ 'FindAdmGroup',
+ 'SetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlag',
+ 'GetAdmGroupAddFlags',
+ 'SetAdmGroupImmuneFrom',
+ 'GetAdmGroupImmuneCount',
+ 'GetAdmGroupImmuneFrom',
+ 'AddAdmGroupCmdOverride',
+ 'GetAdmGroupCmdOverride',
+ 'RegisterAuthIdentType',
+ 'CreateAdmin',
+ 'GetAdminUsername',
+ 'BindAdminIdentity',
+ 'SetAdminFlag',
+ 'GetAdminFlag',
+ 'GetAdminFlags',
+ 'AdminInheritGroup',
+ 'GetAdminGroupCount',
+ 'GetAdminGroup',
+ 'SetAdminPassword',
+ 'GetAdminPassword',
+ 'FindAdminByIdentity',
+ 'RemoveAdmin',
+ 'FlagBitsToBitArray',
+ 'FlagBitArrayToBits',
+ 'FlagArrayToBits',
+ 'FlagBitsToArray',
+ 'FindFlagByName',
+ 'FindFlagByChar',
+ 'FindFlagChar',
+ 'ReadFlagString',
+ 'CanAdminTarget',
+ 'CreateAuthMethod',
+ 'SetAdmGroupImmunityLevel',
+ 'GetAdmGroupImmunityLevel',
+ 'SetAdminImmunityLevel',
+ 'GetAdminImmunityLevel',
+ 'FlagToBit',
+ 'BitToFlag',
+ 'ServerCommand',
+ 'ServerCommandEx',
+ 'InsertServerCommand',
+ 'ServerExecute',
+ 'ClientCommand',
+ 'FakeClientCommand',
+ 'FakeClientCommandEx',
+ 'PrintToServer',
+ 'PrintToConsole',
+ 'ReplyToCommand',
+ 'GetCmdReplySource',
+ 'SetCmdReplySource',
+ 'IsChatTrigger',
+ 'ShowActivity2',
+ 'ShowActivity',
+ 'ShowActivityEx',
+ 'FormatActivitySource',
+ 'SrvCmd',
+ 'RegServerCmd',
+ 'ConCmd',
+ 'RegConsoleCmd',
+ 'RegAdminCmd',
+ 'GetCmdArgs',
+ 'GetCmdArg',
+ 'GetCmdArgString',
+ 'CreateConVar',
+ 'FindConVar',
+ 'ConVarChanged',
+ 'HookConVarChange',
+ 'UnhookConVarChange',
+ 'GetConVarBool',
+ 'SetConVarBool',
+ 'GetConVarInt',
+ 'SetConVarInt',
+ 'GetConVarFloat',
+ 'SetConVarFloat',
+ 'GetConVarString',
+ 'SetConVarString',
+ 'ResetConVar',
+ 'GetConVarDefault',
+ 'GetConVarFlags',
+ 'SetConVarFlags',
+ 'GetConVarBounds',
+ 'SetConVarBounds',
+ 'GetConVarName',
+ 'QueryClientConVar',
+ 'GetCommandIterator',
+ 'ReadCommandIterator',
+ 'CheckCommandAccess',
+ 'CheckAccess',
+ 'IsValidConVarChar',
+ 'GetCommandFlags',
+ 'SetCommandFlags',
+ 'FindFirstConCommand',
+ 'FindNextConCommand',
+ 'SendConVarValue',
+ 'AddServerTag',
+ 'RemoveServerTag',
+ 'CommandListener',
+ 'AddCommandListener',
+ 'RemoveCommandListener',
+ 'CommandExists',
+ 'OnClientSayCommand',
+ 'OnClientSayCommand_Post',
+ 'TF2_IgnitePlayer',
+ 'TF2_RespawnPlayer',
+ 'TF2_RegeneratePlayer',
+ 'TF2_AddCondition',
+ 'TF2_RemoveCondition',
+ 'TF2_SetPlayerPowerPlay',
+ 'TF2_DisguisePlayer',
+ 'TF2_RemovePlayerDisguise',
+ 'TF2_StunPlayer',
+ 'TF2_MakeBleed',
+ 'TF2_GetClass',
+ 'TF2_CalcIsAttackCritical',
+ 'TF2_OnIsHolidayActive',
+ 'TF2_IsHolidayActive',
+ 'TF2_IsPlayerInDuel',
+ 'TF2_RemoveWearable',
+ 'TF2_OnConditionAdded',
+ 'TF2_OnConditionRemoved',
+ 'TF2_OnWaitingForPlayersStart',
+ 'TF2_OnWaitingForPlayersEnd',
+ 'TF2_OnPlayerTeleport',
+ 'SQL_Connect',
+ 'SQL_DefConnect',
+ 'SQL_ConnectCustom',
+ 'SQLite_UseDatabase',
+ 'SQL_CheckConfig',
+ 'SQL_GetDriver',
+ 'SQL_ReadDriver',
+ 'SQL_GetDriverIdent',
+ 'SQL_GetDriverProduct',
+ 'SQL_SetCharset',
+ 'SQL_GetAffectedRows',
+ 'SQL_GetInsertId',
+ 'SQL_GetError',
+ 'SQL_EscapeString',
+ 'SQL_QuoteString',
+ 'SQL_FastQuery',
+ 'SQL_Query',
+ 'SQL_PrepareQuery',
+ 'SQL_FetchMoreResults',
+ 'SQL_HasResultSet',
+ 'SQL_GetRowCount',
+ 'SQL_GetFieldCount',
+ 'SQL_FieldNumToName',
+ 'SQL_FieldNameToNum',
+ 'SQL_FetchRow',
+ 'SQL_MoreRows',
+ 'SQL_Rewind',
+ 'SQL_FetchString',
+ 'SQL_FetchFloat',
+ 'SQL_FetchInt',
+ 'SQL_IsFieldNull',
+ 'SQL_FetchSize',
+ 'SQL_BindParamInt',
+ 'SQL_BindParamFloat',
+ 'SQL_BindParamString',
+ 'SQL_Execute',
+ 'SQL_LockDatabase',
+ 'SQL_UnlockDatabase',
+ 'SQLTCallback',
+ 'SQL_IsSameConnection',
+ 'SQL_TConnect',
+ 'SQL_TQuery',
+ 'SQL_CreateTransaction',
+ 'SQL_AddQuery',
+ 'SQLTxnSuccess',
+ 'SQLTxnFailure',
+ 'SQL_ExecuteTransaction',
+ 'CloseHandle',
+ 'CloneHandle',
+ 'MenuHandler',
+ 'CreateMenu',
+ 'DisplayMenu',
+ 'DisplayMenuAtItem',
+ 'AddMenuItem',
+ 'InsertMenuItem',
+ 'RemoveMenuItem',
+ 'RemoveAllMenuItems',
+ 'GetMenuItem',
+ 'GetMenuSelectionPosition',
+ 'GetMenuItemCount',
+ 'SetMenuPagination',
+ 'GetMenuPagination',
+ 'GetMenuStyle',
+ 'SetMenuTitle',
+ 'GetMenuTitle',
+ 'CreatePanelFromMenu',
+ 'GetMenuExitButton',
+ 'SetMenuExitButton',
+ 'GetMenuExitBackButton',
+ 'SetMenuExitBackButton',
+ 'SetMenuNoVoteButton',
+ 'CancelMenu',
+ 'GetMenuOptionFlags',
+ 'SetMenuOptionFlags',
+ 'IsVoteInProgress',
+ 'CancelVote',
+ 'VoteMenu',
+ 'VoteMenuToAll',
+ 'VoteHandler',
+ 'SetVoteResultCallback',
+ 'CheckVoteDelay',
+ 'IsClientInVotePool',
+ 'RedrawClientVoteMenu',
+ 'GetMenuStyleHandle',
+ 'CreatePanel',
+ 'CreateMenuEx',
+ 'GetClientMenu',
+ 'CancelClientMenu',
+ 'GetMaxPageItems',
+ 'GetPanelStyle',
+ 'SetPanelTitle',
+ 'DrawPanelItem',
+ 'DrawPanelText',
+ 'CanPanelDrawFlags',
+ 'SetPanelKeys',
+ 'SendPanelToClient',
+ 'GetPanelTextRemaining',
+ 'GetPanelCurrentKey',
+ 'SetPanelCurrentKey',
+ 'RedrawMenuItem',
+ 'InternalShowMenu',
+ 'GetMenuVoteInfo',
+ 'IsNewVoteAllowed',
+ 'PrefetchSound',
+ 'EmitAmbientSound',
+ 'FadeClientVolume',
+ 'StopSound',
+ 'EmitSound',
+ 'EmitSentence',
+ 'GetDistGainFromSoundLevel',
+ 'AmbientSHook',
+ 'NormalSHook',
+ 'AddAmbientSoundHook',
+ 'AddNormalSoundHook',
+ 'RemoveAmbientSoundHook',
+ 'RemoveNormalSoundHook',
+ 'EmitSoundToClient',
+ 'EmitSoundToAll',
+ 'ATTN_TO_SNDLEVEL',
+ 'GetGameSoundParams',
+ 'EmitGameSound',
+ 'EmitAmbientGameSound',
+ 'EmitGameSoundToClient',
+ 'EmitGameSoundToAll',
+ 'PrecacheScriptSound',
+ 'strlen',
+ 'StrContains',
+ 'strcmp',
+ 'strncmp',
+ 'StrEqual',
+ 'strcopy',
+ 'Format',
+ 'FormatEx',
+ 'VFormat',
+ 'StringToInt',
+ 'StringToIntEx',
+ 'IntToString',
+ 'StringToFloat',
+ 'StringToFloatEx',
+ 'FloatToString',
+ 'BreakString',
+ 'TrimString',
+ 'SplitString',
+ 'ReplaceString',
+ 'ReplaceStringEx',
+ 'GetCharBytes',
+ 'IsCharAlpha',
+ 'IsCharNumeric',
+ 'IsCharSpace',
+ 'IsCharMB',
+ 'IsCharUpper',
+ 'IsCharLower',
+ 'StripQuotes',
+ 'CharToUpper',
+ 'CharToLower',
+ 'FindCharInString',
+ 'StrCat',
+ 'ExplodeString',
+ 'ImplodeStrings',
+ 'GetVectorLength',
+ 'GetVectorDistance',
+ 'GetVectorDotProduct',
+ 'GetVectorCrossProduct',
+ 'NormalizeVector',
+ 'GetAngleVectors',
+ 'GetVectorAngles',
+ 'GetVectorVectors',
+ 'AddVectors',
+ 'SubtractVectors',
+ 'ScaleVector',
+ 'NegateVector',
+ 'MakeVectorFromPoints',
+ 'BaseComm_IsClientGagged',
+ 'BaseComm_IsClientMuted',
+ 'BaseComm_SetClientGag',
+ 'BaseComm_SetClientMute',
+ 'FormatUserLogText',
+ 'FindPluginByFile',
+ 'FindTarget',
+ 'AcceptEntityInput',
+ 'SetVariantBool',
+ 'SetVariantString',
+ 'SetVariantInt',
+ 'SetVariantFloat',
+ 'SetVariantVector3D',
+ 'SetVariantPosVector3D',
+ 'SetVariantColor',
+ 'SetVariantEntity',
+ 'GameRules_GetProp',
+ 'GameRules_SetProp',
+ 'GameRules_GetPropFloat',
+ 'GameRules_SetPropFloat',
+ 'GameRules_GetPropEnt',
+ 'GameRules_SetPropEnt',
+ 'GameRules_GetPropVector',
+ 'GameRules_SetPropVector',
+ 'GameRules_GetPropString',
+ 'GameRules_SetPropString',
+ 'GameRules_GetRoundState',
+ 'OnClientConnect',
+ 'OnClientConnected',
+ 'OnClientPutInServer',
+ 'OnClientDisconnect',
+ 'OnClientDisconnect_Post',
+ 'OnClientCommand',
+ 'OnClientSettingsChanged',
+ 'OnClientAuthorized',
+ 'OnClientPreAdminCheck',
+ 'OnClientPostAdminFilter',
+ 'OnClientPostAdminCheck',
+ 'GetMaxClients',
+ 'GetMaxHumanPlayers',
+ 'GetClientCount',
+ 'GetClientName',
+ 'GetClientIP',
+ 'GetClientAuthString',
+ 'GetClientAuthId',
+ 'GetSteamAccountID',
+ 'GetClientUserId',
+ 'IsClientConnected',
+ 'IsClientInGame',
+ 'IsClientInKickQueue',
+ 'IsClientAuthorized',
+ 'IsFakeClient',
+ 'IsClientSourceTV',
+ 'IsClientReplay',
+ 'IsClientObserver',
+ 'IsPlayerAlive',
+ 'GetClientInfo',
+ 'GetClientTeam',
+ 'SetUserAdmin',
+ 'GetUserAdmin',
+ 'AddUserFlags',
+ 'RemoveUserFlags',
+ 'SetUserFlagBits',
+ 'GetUserFlagBits',
+ 'CanUserTarget',
+ 'RunAdminCacheChecks',
+ 'NotifyPostAdminCheck',
+ 'CreateFakeClient',
+ 'SetFakeClientConVar',
+ 'GetClientHealth',
+ 'GetClientModel',
+ 'GetClientWeapon',
+ 'GetClientMaxs',
+ 'GetClientMins',
+ 'GetClientAbsAngles',
+ 'GetClientAbsOrigin',
+ 'GetClientArmor',
+ 'GetClientDeaths',
+ 'GetClientFrags',
+ 'GetClientDataRate',
+ 'IsClientTimingOut',
+ 'GetClientTime',
+ 'GetClientLatency',
+ 'GetClientAvgLatency',
+ 'GetClientAvgLoss',
+ 'GetClientAvgChoke',
+ 'GetClientAvgData',
+ 'GetClientAvgPackets',
+ 'GetClientOfUserId',
+ 'KickClient',
+ 'KickClientEx',
+ 'ChangeClientTeam',
+ 'GetClientSerial',
+ 'GetClientFromSerial',
+ 'FindStringTable',
+ 'GetNumStringTables',
+ 'GetStringTableNumStrings',
+ 'GetStringTableMaxStrings',
+ 'GetStringTableName',
+ 'FindStringIndex',
+ 'ReadStringTable',
+ 'GetStringTableDataLength',
+ 'GetStringTableData',
+ 'SetStringTableData',
+ 'AddToStringTable',
+ 'LockStringTables',
+ 'AddFileToDownloadsTable',
+ 'GetEntityFlags',
+ 'SetEntityFlags',
+ 'GetEntityMoveType',
+ 'SetEntityMoveType',
+ 'GetEntityRenderMode',
+ 'SetEntityRenderMode',
+ 'GetEntityRenderFx',
+ 'SetEntityRenderFx',
+ 'SetEntityRenderColor',
+ 'GetEntityGravity',
+ 'SetEntityGravity',
+ 'SetEntityHealth',
+ 'GetClientButtons',
+ 'EntityOutput',
+ 'HookEntityOutput',
+ 'UnhookEntityOutput',
+ 'HookSingleEntityOutput',
+ 'UnhookSingleEntityOutput',
+ 'SMC_CreateParser',
+ 'SMC_ParseFile',
+ 'SMC_GetErrorString',
+ 'SMC_ParseStart',
+ 'SMC_SetParseStart',
+ 'SMC_ParseEnd',
+ 'SMC_SetParseEnd',
+ 'SMC_NewSection',
+ 'SMC_KeyValue',
+ 'SMC_EndSection',
+ 'SMC_SetReaders',
+ 'SMC_RawLine',
+ 'SMC_SetRawLine',
+ 'BfWriteBool',
+ 'BfWriteByte',
+ 'BfWriteChar',
+ 'BfWriteShort',
+ 'BfWriteWord',
+ 'BfWriteNum',
+ 'BfWriteFloat',
+ 'BfWriteString',
+ 'BfWriteEntity',
+ 'BfWriteAngle',
+ 'BfWriteCoord',
+ 'BfWriteVecCoord',
+ 'BfWriteVecNormal',
+ 'BfWriteAngles',
+ 'BfReadBool',
+ 'BfReadByte',
+ 'BfReadChar',
+ 'BfReadShort',
+ 'BfReadWord',
+ 'BfReadNum',
+ 'BfReadFloat',
+ 'BfReadString',
+ 'BfReadEntity',
+ 'BfReadAngle',
+ 'BfReadCoord',
+ 'BfReadVecCoord',
+ 'BfReadVecNormal',
+ 'BfReadAngles',
+ 'BfGetNumBytesLeft',
+ 'CreateProfiler',
+ 'StartProfiling',
+ 'StopProfiling',
+ 'GetProfilerTime',
+ 'OnPluginStart',
+ 'AskPluginLoad2',
+ 'OnPluginEnd',
+ 'OnPluginPauseChange',
+ 'OnGameFrame',
+ 'OnMapStart',
+ 'OnMapEnd',
+ 'OnConfigsExecuted',
+ 'OnAutoConfigsBuffered',
+ 'OnAllPluginsLoaded',
+ 'GetMyHandle',
+ 'GetPluginIterator',
+ 'MorePlugins',
+ 'ReadPlugin',
+ 'GetPluginStatus',
+ 'GetPluginFilename',
+ 'IsPluginDebugging',
+ 'GetPluginInfo',
+ 'FindPluginByNumber',
+ 'SetFailState',
+ 'ThrowError',
+ 'GetTime',
+ 'FormatTime',
+ 'LoadGameConfigFile',
+ 'GameConfGetOffset',
+ 'GameConfGetKeyValue',
+ 'GameConfGetAddress',
+ 'GetSysTickCount',
+ 'AutoExecConfig',
+ 'RegPluginLibrary',
+ 'LibraryExists',
+ 'GetExtensionFileStatus',
+ 'OnLibraryAdded',
+ 'OnLibraryRemoved',
+ 'ReadMapList',
+ 'SetMapListCompatBind',
+ 'OnClientFloodCheck',
+ 'OnClientFloodResult',
+ 'CanTestFeatures',
+ 'GetFeatureStatus',
+ 'RequireFeature',
+ 'LoadFromAddress',
+ 'StoreToAddress',
+ 'CreateStack',
+ 'PushStackCell',
+ 'PushStackString',
+ 'PushStackArray',
+ 'PopStackCell',
+ 'PopStackString',
+ 'PopStackArray',
+ 'IsStackEmpty',
+ 'PopStack',
+ 'OnPlayerRunCmd',
+ 'BuildPath',
+ 'OpenDirectory',
+ 'ReadDirEntry',
+ 'OpenFile',
+ 'DeleteFile',
+ 'ReadFileLine',
+ 'ReadFile',
+ 'ReadFileString',
+ 'WriteFile',
+ 'WriteFileString',
+ 'WriteFileLine',
+ 'ReadFileCell',
+ 'WriteFileCell',
+ 'IsEndOfFile',
+ 'FileSeek',
+ 'FilePosition',
+ 'FileExists',
+ 'RenameFile',
+ 'DirExists',
+ 'FileSize',
+ 'FlushFile',
+ 'RemoveDir',
+ 'CreateDirectory',
+ 'GetFileTime',
+ 'LogToOpenFile',
+ 'LogToOpenFileEx',
+ 'PbReadInt',
+ 'PbReadFloat',
+ 'PbReadBool',
+ 'PbReadString',
+ 'PbReadColor',
+ 'PbReadAngle',
+ 'PbReadVector',
+ 'PbReadVector2D',
+ 'PbGetRepeatedFieldCount',
+ 'PbSetInt',
+ 'PbSetFloat',
+ 'PbSetBool',
+ 'PbSetString',
+ 'PbSetColor',
+ 'PbSetAngle',
+ 'PbSetVector',
+ 'PbSetVector2D',
+ 'PbAddInt',
+ 'PbAddFloat',
+ 'PbAddBool',
+ 'PbAddString',
+ 'PbAddColor',
+ 'PbAddAngle',
+ 'PbAddVector',
+ 'PbAddVector2D',
+ 'PbRemoveRepeatedFieldValue',
+ 'PbReadMessage',
+ 'PbReadRepeatedMessage',
+ 'PbAddMessage',
+ 'SetNextMap',
+ 'GetNextMap',
+ 'ForceChangeLevel',
+ 'GetMapHistorySize',
+ 'GetMapHistory',
+ 'GeoipCode2',
+ 'GeoipCode3',
+ 'GeoipCountry',
+ 'MarkNativeAsOptional',
+ 'RegClientCookie',
+ 'FindClientCookie',
+ 'SetClientCookie',
+ 'GetClientCookie',
+ 'SetAuthIdCookie',
+ 'AreClientCookiesCached',
+ 'OnClientCookiesCached',
+ 'CookieMenuHandler',
+ 'SetCookiePrefabMenu',
+ 'SetCookieMenuItem',
+ 'ShowCookieMenu',
+ 'GetCookieIterator',
+ 'ReadCookieIterator',
+ 'GetCookieAccess',
+ 'GetClientCookieTime',
+ 'LoadTranslations',
+ 'SetGlobalTransTarget',
+ 'GetClientLanguage',
+ 'GetServerLanguage',
+ 'GetLanguageCount',
+ 'GetLanguageInfo',
+ 'SetClientLanguage',
+ 'GetLanguageByCode',
+ 'GetLanguageByName',
+ 'CS_OnBuyCommand',
+ 'CS_OnCSWeaponDrop',
+ 'CS_OnGetWeaponPrice',
+ 'CS_OnTerminateRound',
+ 'CS_RespawnPlayer',
+ 'CS_SwitchTeam',
+ 'CS_DropWeapon',
+ 'CS_TerminateRound',
+ 'CS_GetTranslatedWeaponAlias',
+ 'CS_GetWeaponPrice',
+ 'CS_GetClientClanTag',
+ 'CS_SetClientClanTag',
+ 'CS_GetTeamScore',
+ 'CS_SetTeamScore',
+ 'CS_GetMVPCount',
+ 'CS_SetMVPCount',
+ 'CS_GetClientContributionScore',
+ 'CS_SetClientContributionScore',
+ 'CS_GetClientAssists',
+ 'CS_SetClientAssists',
+ 'CS_AliasToWeaponID',
+ 'CS_WeaponIDToAlias',
+ 'CS_IsValidWeaponID',
+ 'CS_UpdateClientModel',
+ 'LogToGame',
+ 'SetRandomSeed',
+ 'GetRandomFloat',
+ 'GetRandomInt',
+ 'IsMapValid',
+ 'IsDedicatedServer',
+ 'GetEngineTime',
+ 'GetGameTime',
+ 'GetGameTickCount',
+ 'GetGameDescription',
+ 'GetGameFolderName',
+ 'GetCurrentMap',
+ 'PrecacheModel',
+ 'PrecacheSentenceFile',
+ 'PrecacheDecal',
+ 'PrecacheGeneric',
+ 'IsModelPrecached',
+ 'IsDecalPrecached',
+ 'IsGenericPrecached',
+ 'PrecacheSound',
+ 'IsSoundPrecached',
+ 'CreateDialog',
+ 'GetEngineVersion',
+ 'PrintToChat',
+ 'PrintToChatAll',
+ 'PrintCenterText',
+ 'PrintCenterTextAll',
+ 'PrintHintText',
+ 'PrintHintTextToAll',
+ 'ShowVGUIPanel',
+ 'CreateHudSynchronizer',
+ 'SetHudTextParams',
+ 'SetHudTextParamsEx',
+ 'ShowSyncHudText',
+ 'ClearSyncHud',
+ 'ShowHudText',
+ 'ShowMOTDPanel',
+ 'DisplayAskConnectBox',
+ 'EntIndexToEntRef',
+ 'EntRefToEntIndex',
+ 'MakeCompatEntRef',
+ 'SetClientViewEntity',
+ 'SetLightStyle',
+ 'GetClientEyePosition',
+ 'CreateDataPack',
+ 'WritePackCell',
+ 'WritePackFloat',
+ 'WritePackString',
+ 'ReadPackCell',
+ 'ReadPackFloat',
+ 'ReadPackString',
+ 'ResetPack',
+ 'GetPackPosition',
+ 'SetPackPosition',
+ 'IsPackReadable',
+ 'LogMessage',
+ 'LogToFile',
+ 'LogToFileEx',
+ 'LogAction',
+ 'LogError',
+ 'OnLogAction',
+ 'GameLogHook',
+ 'AddGameLogHook',
+ 'RemoveGameLogHook',
+ 'FindTeamByName',
+ 'StartPrepSDKCall',
+ 'PrepSDKCall_SetVirtual',
+ 'PrepSDKCall_SetSignature',
+ 'PrepSDKCall_SetAddress',
+ 'PrepSDKCall_SetFromConf',
+ 'PrepSDKCall_SetReturnInfo',
+ 'PrepSDKCall_AddParameter',
+ 'EndPrepSDKCall',
+ 'SDKCall',
+ 'GetPlayerResourceEntity',
+)
+if __name__ == '__main__':
+ import re
+ import sys
+ try:
+ from urllib import FancyURLopener
+ except ImportError:
+ from urllib.request import FancyURLopener
+
+ from pygments.util import format_lines
+
+ # urllib ends up wanting to import a module called 'math' -- if
+ # pygments/lexers is in the path, this ends badly.
+ for i in range(len(sys.path)-1, -1, -1):
+ if sys.path[i].endswith('/lexers'):
+ del sys.path[i]
+
+ class Opener(FancyURLopener):
+ version = 'Mozilla/5.0 (Pygments Sourcemod Builtins Update)'
+
+ opener = Opener()
+
+ def get_version():
+ f = opener.open('http://docs.sourcemod.net/api/index.php')
+ r = re.compile(r'SourceMod v\.<b>([\d\.]+(?:-\w+)?)</td>')
+ for line in f:
+ m = r.search(line)
+ if m is not None:
+ return m.groups()[0]
+ raise ValueError('No version in api docs')
+
+ def get_sm_functions():
+ f = opener.open('http://docs.sourcemod.net/api/SMfuncs.js')
+ r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
+ functions = []
+ for line in f:
+ m = r.match(line)
+ if m is not None:
+ functions.append(m.groups()[0])
+ return functions
+
+ def regenerate(filename, natives):
+ with open(filename) as fp:
+ content = fp.read()
+
+ header = content[:content.find('FUNCTIONS = (')]
+ footer = content[content.find("if __name__ == '__main__':")-1:]
+
+
+ with open(filename, 'w') as fp:
+ fp.write(header)
+ fp.write(format_lines('FUNCTIONS', natives))
+ fp.write(footer)
+
+ def run():
+ version = get_version()
+ print('> Downloading function index for SourceMod %s' % version)
+ functions = get_sm_functions()
+ print('> %d functions found:' % len(functions))
+
+ functionlist = []
+ for full_function_name in functions:
+ print('>> %s' % full_function_name)
+ functionlist.append(full_function_name)
+
+ regenerate(__file__, functionlist)
+
+
+ run()
diff --git a/pygments/lexers/_sourcemodbuiltins.py b/pygments/lexers/_sourcemodbuiltins.py
deleted file mode 100644
index eee84d0b..00000000
--- a/pygments/lexers/_sourcemodbuiltins.py
+++ /dev/null
@@ -1,1077 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers._sourcemodbuiltins
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- This file contains the names of SourceMod functions.
- It is able to re-generate itself.
-
- Do not edit the FUNCTIONS list by hand.
-
- :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-FUNCTIONS = ['TopMenuHandler',
- 'CreateTopMenu',
- 'LoadTopMenuConfig',
- 'AddToTopMenu',
- 'GetTopMenuInfoString',
- 'GetTopMenuObjName',
- 'RemoveFromTopMenu',
- 'DisplayTopMenu',
- 'FindTopMenuCategory',
- 'OnAdminMenuCreated',
- 'OnAdminMenuReady',
- 'GetAdminTopMenu',
- 'AddTargetsToMenu',
- 'AddTargetsToMenu2',
- 'RedisplayAdminMenu',
- 'TEHook',
- 'AddTempEntHook',
- 'RemoveTempEntHook',
- 'TE_Start',
- 'TE_IsValidProp',
- 'TE_WriteNum',
- 'TE_ReadNum',
- 'TE_WriteFloat',
- 'TE_ReadFloat',
- 'TE_WriteVector',
- 'TE_ReadVector',
- 'TE_WriteAngles',
- 'TE_WriteFloatArray',
- 'TE_Send',
- 'TE_WriteEncodedEnt',
- 'TE_SendToAll',
- 'TE_SendToClient',
- 'CreateKeyValues',
- 'KvSetString',
- 'KvSetNum',
- 'KvSetUInt64',
- 'KvSetFloat',
- 'KvSetColor',
- 'KvSetVector',
- 'KvGetString',
- 'KvGetNum',
- 'KvGetFloat',
- 'KvGetColor',
- 'KvGetUInt64',
- 'KvGetVector',
- 'KvJumpToKey',
- 'KvJumpToKeySymbol',
- 'KvGotoFirstSubKey',
- 'KvGotoNextKey',
- 'KvSavePosition',
- 'KvDeleteKey',
- 'KvDeleteThis',
- 'KvGoBack',
- 'KvRewind',
- 'KvGetSectionName',
- 'KvSetSectionName',
- 'KvGetDataType',
- 'KeyValuesToFile',
- 'FileToKeyValues',
- 'KvSetEscapeSequences',
- 'KvNodesInStack',
- 'KvCopySubkeys',
- 'KvFindKeyById',
- 'KvGetNameSymbol',
- 'KvGetSectionSymbol',
- 'TE_SetupSparks',
- 'TE_SetupSmoke',
- 'TE_SetupDust',
- 'TE_SetupMuzzleFlash',
- 'TE_SetupMetalSparks',
- 'TE_SetupEnergySplash',
- 'TE_SetupArmorRicochet',
- 'TE_SetupGlowSprite',
- 'TE_SetupExplosion',
- 'TE_SetupBloodSprite',
- 'TE_SetupBeamRingPoint',
- 'TE_SetupBeamPoints',
- 'TE_SetupBeamLaser',
- 'TE_SetupBeamRing',
- 'TE_SetupBeamFollow',
- 'HookEvent',
- 'HookEventEx',
- 'UnhookEvent',
- 'CreateEvent',
- 'FireEvent',
- 'CancelCreatedEvent',
- 'GetEventBool',
- 'SetEventBool',
- 'GetEventInt',
- 'SetEventInt',
- 'GetEventFloat',
- 'SetEventFloat',
- 'GetEventString',
- 'SetEventString',
- 'GetEventName',
- 'SetEventBroadcast',
- 'GetUserMessageId',
- 'GetUserMessageName',
- 'StartMessage',
- 'StartMessageEx',
- 'EndMessage',
- 'MsgHook',
- 'MsgPostHook',
- 'HookUserMessage',
- 'UnhookUserMessage',
- 'StartMessageAll',
- 'StartMessageOne',
- 'InactivateClient',
- 'ReconnectClient',
- 'GetMaxEntities',
- 'GetEntityCount',
- 'IsValidEntity',
- 'IsValidEdict',
- 'IsEntNetworkable',
- 'CreateEdict',
- 'RemoveEdict',
- 'GetEdictFlags',
- 'SetEdictFlags',
- 'GetEdictClassname',
- 'GetEntityNetClass',
- 'ChangeEdictState',
- 'GetEntData',
- 'SetEntData',
- 'GetEntDataFloat',
- 'SetEntDataFloat',
- 'GetEntDataEnt2',
- 'SetEntDataEnt2',
- 'GetEntDataVector',
- 'SetEntDataVector',
- 'GetEntDataString',
- 'SetEntDataString',
- 'FindSendPropOffs',
- 'FindSendPropInfo',
- 'FindDataMapOffs',
- 'GetEntSendPropOffs',
- 'GetEntProp',
- 'SetEntProp',
- 'GetEntPropFloat',
- 'SetEntPropFloat',
- 'GetEntPropEnt',
- 'SetEntPropEnt',
- 'GetEntPropVector',
- 'SetEntPropVector',
- 'GetEntPropString',
- 'SetEntPropString',
- 'GetEntPropArraySize',
- 'GetEntDataArray',
- 'SetEntDataArray',
- 'GetEntityClassname',
- 'float',
- 'FloatMul',
- 'FloatDiv',
- 'FloatAdd',
- 'FloatSub',
- 'FloatFraction',
- 'RoundToZero',
- 'RoundToCeil',
- 'RoundToFloor',
- 'RoundToNearest',
- 'FloatCompare',
- 'SquareRoot',
- 'Pow',
- 'Exponential',
- 'Logarithm',
- 'Sine',
- 'Cosine',
- 'Tangent',
- 'FloatAbs',
- 'ArcTangent',
- 'ArcCosine',
- 'ArcSine',
- 'ArcTangent2',
- 'RoundFloat',
- 'operator%',
- 'DegToRad',
- 'RadToDeg',
- 'GetURandomInt',
- 'GetURandomFloat',
- 'SetURandomSeed',
- 'SetURandomSeedSimple',
- 'RemovePlayerItem',
- 'GivePlayerItem',
- 'GetPlayerWeaponSlot',
- 'IgniteEntity',
- 'ExtinguishEntity',
- 'TeleportEntity',
- 'ForcePlayerSuicide',
- 'SlapPlayer',
- 'FindEntityByClassname',
- 'GetClientEyeAngles',
- 'CreateEntityByName',
- 'DispatchSpawn',
- 'DispatchKeyValue',
- 'DispatchKeyValueFloat',
- 'DispatchKeyValueVector',
- 'GetClientAimTarget',
- 'GetTeamCount',
- 'GetTeamName',
- 'GetTeamScore',
- 'SetTeamScore',
- 'GetTeamClientCount',
- 'SetEntityModel',
- 'GetPlayerDecalFile',
- 'GetServerNetStats',
- 'EquipPlayerWeapon',
- 'ActivateEntity',
- 'SetClientInfo',
- 'SetClientListeningFlags',
- 'GetClientListeningFlags',
- 'SetListenOverride',
- 'GetListenOverride',
- 'IsClientMuted',
- 'TR_GetPointContents',
- 'TR_GetPointContentsEnt',
- 'TR_TraceRay',
- 'TR_TraceHull',
- 'TR_TraceRayFilter',
- 'TR_TraceHullFilter',
- 'TR_TraceRayEx',
- 'TR_TraceHullEx',
- 'TR_TraceRayFilterEx',
- 'TR_TraceHullFilterEx',
- 'TR_GetFraction',
- 'TR_GetEndPosition',
- 'TR_GetEntityIndex',
- 'TR_DidHit',
- 'TR_GetHitGroup',
- 'TR_GetPlaneNormal',
- 'TR_PointOutsideWorld',
- 'SortIntegers',
- 'SortFloats',
- 'SortStrings',
- 'SortFunc1D',
- 'SortCustom1D',
- 'SortCustom2D',
- 'SortADTArray',
- 'SortFuncADTArray',
- 'SortADTArrayCustom',
- 'CompileRegex',
- 'MatchRegex',
- 'GetRegexSubString',
- 'SimpleRegexMatch',
- 'TF2_GetPlayerClass',
- 'TF2_SetPlayerClass',
- 'TF2_GetPlayerResourceData',
- 'TF2_SetPlayerResourceData',
- 'TF2_RemoveWeaponSlot',
- 'TF2_RemoveAllWeapons',
- 'TF2_IsPlayerInCondition',
- 'TF2_GetObjectType',
- 'TF2_GetObjectMode',
- 'NominateMap',
- 'RemoveNominationByMap',
- 'RemoveNominationByOwner',
- 'GetExcludeMapList',
- 'GetNominatedMapList',
- 'CanMapChooserStartVote',
- 'InitiateMapChooserVote',
- 'HasEndOfMapVoteFinished',
- 'EndOfMapVoteEnabled',
- 'OnNominationRemoved',
- 'OnMapVoteStarted',
- 'CreateTimer',
- 'KillTimer',
- 'TriggerTimer',
- 'GetTickedTime',
- 'GetMapTimeLeft',
- 'GetMapTimeLimit',
- 'ExtendMapTimeLimit',
- 'GetTickInterval',
- 'OnMapTimeLeftChanged',
- 'IsServerProcessing',
- 'CreateDataTimer',
- 'ByteCountToCells',
- 'CreateArray',
- 'ClearArray',
- 'CloneArray',
- 'ResizeArray',
- 'GetArraySize',
- 'PushArrayCell',
- 'PushArrayString',
- 'PushArrayArray',
- 'GetArrayCell',
- 'GetArrayString',
- 'GetArrayArray',
- 'SetArrayCell',
- 'SetArrayString',
- 'SetArrayArray',
- 'ShiftArrayUp',
- 'RemoveFromArray',
- 'SwapArrayItems',
- 'FindStringInArray',
- 'FindValueInArray',
- 'ProcessTargetString',
- 'ReplyToTargetError',
- 'MultiTargetFilter',
- 'AddMultiTargetFilter',
- 'RemoveMultiTargetFilter',
- 'OnBanClient',
- 'OnBanIdentity',
- 'OnRemoveBan',
- 'BanClient',
- 'BanIdentity',
- 'RemoveBan',
- 'CreateTrie',
- 'SetTrieValue',
- 'SetTrieArray',
- 'SetTrieString',
- 'GetTrieValue',
- 'GetTrieArray',
- 'GetTrieString',
- 'RemoveFromTrie',
- 'ClearTrie',
- 'GetTrieSize',
- 'GetFunctionByName',
- 'CreateGlobalForward',
- 'CreateForward',
- 'GetForwardFunctionCount',
- 'AddToForward',
- 'RemoveFromForward',
- 'RemoveAllFromForward',
- 'Call_StartForward',
- 'Call_StartFunction',
- 'Call_PushCell',
- 'Call_PushCellRef',
- 'Call_PushFloat',
- 'Call_PushFloatRef',
- 'Call_PushArray',
- 'Call_PushArrayEx',
- 'Call_PushString',
- 'Call_PushStringEx',
- 'Call_Finish',
- 'Call_Cancel',
- 'NativeCall',
- 'CreateNative',
- 'ThrowNativeError',
- 'GetNativeStringLength',
- 'GetNativeString',
- 'SetNativeString',
- 'GetNativeCell',
- 'GetNativeCellRef',
- 'SetNativeCellRef',
- 'GetNativeArray',
- 'SetNativeArray',
- 'FormatNativeString',
- 'OnRebuildAdminCache',
- 'DumpAdminCache',
- 'AddCommandOverride',
- 'GetCommandOverride',
- 'UnsetCommandOverride',
- 'CreateAdmGroup',
- 'FindAdmGroup',
- 'SetAdmGroupAddFlag',
- 'GetAdmGroupAddFlag',
- 'GetAdmGroupAddFlags',
- 'SetAdmGroupImmuneFrom',
- 'GetAdmGroupImmuneCount',
- 'GetAdmGroupImmuneFrom',
- 'AddAdmGroupCmdOverride',
- 'GetAdmGroupCmdOverride',
- 'RegisterAuthIdentType',
- 'CreateAdmin',
- 'GetAdminUsername',
- 'BindAdminIdentity',
- 'SetAdminFlag',
- 'GetAdminFlag',
- 'GetAdminFlags',
- 'AdminInheritGroup',
- 'GetAdminGroupCount',
- 'GetAdminGroup',
- 'SetAdminPassword',
- 'GetAdminPassword',
- 'FindAdminByIdentity',
- 'RemoveAdmin',
- 'FlagBitsToBitArray',
- 'FlagBitArrayToBits',
- 'FlagArrayToBits',
- 'FlagBitsToArray',
- 'FindFlagByName',
- 'FindFlagByChar',
- 'FindFlagChar',
- 'ReadFlagString',
- 'CanAdminTarget',
- 'CreateAuthMethod',
- 'SetAdmGroupImmunityLevel',
- 'GetAdmGroupImmunityLevel',
- 'SetAdminImmunityLevel',
- 'GetAdminImmunityLevel',
- 'FlagToBit',
- 'BitToFlag',
- 'ServerCommand',
- 'ServerCommandEx',
- 'InsertServerCommand',
- 'ServerExecute',
- 'ClientCommand',
- 'FakeClientCommand',
- 'FakeClientCommandEx',
- 'PrintToServer',
- 'PrintToConsole',
- 'ReplyToCommand',
- 'GetCmdReplySource',
- 'SetCmdReplySource',
- 'IsChatTrigger',
- 'ShowActivity2',
- 'ShowActivity',
- 'ShowActivityEx',
- 'FormatActivitySource',
- 'SrvCmd',
- 'RegServerCmd',
- 'ConCmd',
- 'RegConsoleCmd',
- 'RegAdminCmd',
- 'GetCmdArgs',
- 'GetCmdArg',
- 'GetCmdArgString',
- 'CreateConVar',
- 'FindConVar',
- 'ConVarChanged',
- 'HookConVarChange',
- 'UnhookConVarChange',
- 'GetConVarBool',
- 'SetConVarBool',
- 'GetConVarInt',
- 'SetConVarInt',
- 'GetConVarFloat',
- 'SetConVarFloat',
- 'GetConVarString',
- 'SetConVarString',
- 'ResetConVar',
- 'GetConVarDefault',
- 'GetConVarFlags',
- 'SetConVarFlags',
- 'GetConVarBounds',
- 'SetConVarBounds',
- 'GetConVarName',
- 'QueryClientConVar',
- 'GetCommandIterator',
- 'ReadCommandIterator',
- 'CheckCommandAccess',
- 'CheckAccess',
- 'IsValidConVarChar',
- 'GetCommandFlags',
- 'SetCommandFlags',
- 'FindFirstConCommand',
- 'FindNextConCommand',
- 'SendConVarValue',
- 'AddServerTag',
- 'RemoveServerTag',
- 'CommandListener',
- 'AddCommandListener',
- 'RemoveCommandListener',
- 'TF2_IgnitePlayer',
- 'TF2_RespawnPlayer',
- 'TF2_RegeneratePlayer',
- 'TF2_AddCondition',
- 'TF2_RemoveCondition',
- 'TF2_SetPlayerPowerPlay',
- 'TF2_DisguisePlayer',
- 'TF2_RemovePlayerDisguise',
- 'TF2_StunPlayer',
- 'TF2_MakeBleed',
- 'TF2_GetResourceEntity',
- 'TF2_GetClass',
- 'TF2_CalcIsAttackCritical',
- 'TF2_OnIsHolidayActive',
- 'TF2_IsPlayerInDuel',
- 'TF2_OnConditionAdded',
- 'TF2_OnConditionRemoved',
- 'TF2_OnWaitingForPlayersStart',
- 'TF2_OnWaitingForPlayersEnd',
- 'SQL_Connect',
- 'SQL_DefConnect',
- 'SQL_ConnectCustom',
- 'SQLite_UseDatabase',
- 'SQL_CheckConfig',
- 'SQL_GetDriver',
- 'SQL_ReadDriver',
- 'SQL_GetDriverIdent',
- 'SQL_GetDriverProduct',
- 'SQL_GetAffectedRows',
- 'SQL_GetInsertId',
- 'SQL_GetError',
- 'SQL_EscapeString',
- 'SQL_QuoteString',
- 'SQL_FastQuery',
- 'SQL_Query',
- 'SQL_PrepareQuery',
- 'SQL_FetchMoreResults',
- 'SQL_HasResultSet',
- 'SQL_GetRowCount',
- 'SQL_GetFieldCount',
- 'SQL_FieldNumToName',
- 'SQL_FieldNameToNum',
- 'SQL_FetchRow',
- 'SQL_MoreRows',
- 'SQL_Rewind',
- 'SQL_FetchString',
- 'SQL_FetchFloat',
- 'SQL_FetchInt',
- 'SQL_IsFieldNull',
- 'SQL_FetchSize',
- 'SQL_BindParamInt',
- 'SQL_BindParamFloat',
- 'SQL_BindParamString',
- 'SQL_Execute',
- 'SQL_LockDatabase',
- 'SQL_UnlockDatabase',
- 'SQLTCallback',
- 'SQL_IsSameConnection',
- 'SQL_TConnect',
- 'SQL_TQuery',
- 'CloseHandle',
- 'CloneHandle',
- 'MenuHandler',
- 'CreateMenu',
- 'DisplayMenu',
- 'DisplayMenuAtItem',
- 'AddMenuItem',
- 'InsertMenuItem',
- 'RemoveMenuItem',
- 'RemoveAllMenuItems',
- 'GetMenuItem',
- 'GetMenuSelectionPosition',
- 'GetMenuItemCount',
- 'SetMenuPagination',
- 'GetMenuPagination',
- 'GetMenuStyle',
- 'SetMenuTitle',
- 'GetMenuTitle',
- 'CreatePanelFromMenu',
- 'GetMenuExitButton',
- 'SetMenuExitButton',
- 'GetMenuExitBackButton',
- 'SetMenuExitBackButton',
- 'SetMenuNoVoteButton',
- 'CancelMenu',
- 'GetMenuOptionFlags',
- 'SetMenuOptionFlags',
- 'IsVoteInProgress',
- 'CancelVote',
- 'VoteMenu',
- 'VoteMenuToAll',
- 'VoteHandler',
- 'SetVoteResultCallback',
- 'CheckVoteDelay',
- 'IsClientInVotePool',
- 'RedrawClientVoteMenu',
- 'GetMenuStyleHandle',
- 'CreatePanel',
- 'CreateMenuEx',
- 'GetClientMenu',
- 'CancelClientMenu',
- 'GetMaxPageItems',
- 'GetPanelStyle',
- 'SetPanelTitle',
- 'DrawPanelItem',
- 'DrawPanelText',
- 'CanPanelDrawFlags',
- 'SetPanelKeys',
- 'SendPanelToClient',
- 'GetPanelTextRemaining',
- 'GetPanelCurrentKey',
- 'SetPanelCurrentKey',
- 'RedrawMenuItem',
- 'InternalShowMenu',
- 'GetMenuVoteInfo',
- 'IsNewVoteAllowed',
- 'PrefetchSound',
- 'EmitAmbientSound',
- 'FadeClientVolume',
- 'StopSound',
- 'EmitSound',
- 'EmitSentence',
- 'GetDistGainFromSoundLevel',
- 'AmbientSHook',
- 'NormalSHook',
- 'AddAmbientSoundHook',
- 'AddNormalSoundHook',
- 'RemoveAmbientSoundHook',
- 'RemoveNormalSoundHook',
- 'EmitSoundToClient',
- 'EmitSoundToAll',
- 'ATTN_TO_SNDLEVEL',
- 'strlen',
- 'StrContains',
- 'strcmp',
- 'strncmp',
- 'StrEqual',
- 'strcopy',
- 'Format',
- 'FormatEx',
- 'VFormat',
- 'StringToInt',
- 'StringToIntEx',
- 'IntToString',
- 'StringToFloat',
- 'StringToFloatEx',
- 'FloatToString',
- 'BreakString',
- 'TrimString',
- 'SplitString',
- 'ReplaceString',
- 'ReplaceStringEx',
- 'GetCharBytes',
- 'IsCharAlpha',
- 'IsCharNumeric',
- 'IsCharSpace',
- 'IsCharMB',
- 'IsCharUpper',
- 'IsCharLower',
- 'StripQuotes',
- 'CharToUpper',
- 'CharToLower',
- 'FindCharInString',
- 'StrCat',
- 'ExplodeString',
- 'ImplodeStrings',
- 'GetVectorLength',
- 'GetVectorDistance',
- 'GetVectorDotProduct',
- 'GetVectorCrossProduct',
- 'NormalizeVector',
- 'GetAngleVectors',
- 'GetVectorAngles',
- 'GetVectorVectors',
- 'AddVectors',
- 'SubtractVectors',
- 'ScaleVector',
- 'NegateVector',
- 'MakeVectorFromPoints',
- 'BaseComm_IsClientGagged',
- 'BaseComm_IsClientMuted',
- 'BaseComm_SetClientGag',
- 'BaseComm_SetClientMute',
- 'FormatUserLogText',
- 'FindPluginByFile',
- 'FindTarget',
- 'AcceptEntityInput',
- 'SetVariantBool',
- 'SetVariantString',
- 'SetVariantInt',
- 'SetVariantFloat',
- 'SetVariantVector3D',
- 'SetVariantPosVector3D',
- 'SetVariantColor',
- 'SetVariantEntity',
- 'GameRules_GetProp',
- 'GameRules_SetProp',
- 'GameRules_GetPropFloat',
- 'GameRules_SetPropFloat',
- 'GameRules_GetPropEnt',
- 'GameRules_SetPropEnt',
- 'GameRules_GetPropVector',
- 'GameRules_SetPropVector',
- 'GameRules_GetPropString',
- 'GameRules_SetPropString',
- 'GameRules_GetRoundState',
- 'OnClientConnect',
- 'OnClientConnected',
- 'OnClientPutInServer',
- 'OnClientDisconnect',
- 'OnClientDisconnect_Post',
- 'OnClientCommand',
- 'OnClientSettingsChanged',
- 'OnClientAuthorized',
- 'OnClientPreAdminCheck',
- 'OnClientPostAdminFilter',
- 'OnClientPostAdminCheck',
- 'GetMaxClients',
- 'GetClientCount',
- 'GetClientName',
- 'GetClientIP',
- 'GetClientAuthString',
- 'GetClientUserId',
- 'IsClientConnected',
- 'IsClientInGame',
- 'IsClientInKickQueue',
- 'IsClientAuthorized',
- 'IsFakeClient',
- 'IsClientSourceTV',
- 'IsClientReplay',
- 'IsClientObserver',
- 'IsPlayerAlive',
- 'GetClientInfo',
- 'GetClientTeam',
- 'SetUserAdmin',
- 'GetUserAdmin',
- 'AddUserFlags',
- 'RemoveUserFlags',
- 'SetUserFlagBits',
- 'GetUserFlagBits',
- 'CanUserTarget',
- 'RunAdminCacheChecks',
- 'NotifyPostAdminCheck',
- 'CreateFakeClient',
- 'SetFakeClientConVar',
- 'GetClientHealth',
- 'GetClientModel',
- 'GetClientWeapon',
- 'GetClientMaxs',
- 'GetClientMins',
- 'GetClientAbsAngles',
- 'GetClientAbsOrigin',
- 'GetClientArmor',
- 'GetClientDeaths',
- 'GetClientFrags',
- 'GetClientDataRate',
- 'IsClientTimingOut',
- 'GetClientTime',
- 'GetClientLatency',
- 'GetClientAvgLatency',
- 'GetClientAvgLoss',
- 'GetClientAvgChoke',
- 'GetClientAvgData',
- 'GetClientAvgPackets',
- 'GetClientOfUserId',
- 'KickClient',
- 'KickClientEx',
- 'ChangeClientTeam',
- 'GetClientSerial',
- 'GetClientFromSerial',
- 'FindStringTable',
- 'GetNumStringTables',
- 'GetStringTableNumStrings',
- 'GetStringTableMaxStrings',
- 'GetStringTableName',
- 'FindStringIndex',
- 'ReadStringTable',
- 'GetStringTableDataLength',
- 'GetStringTableData',
- 'SetStringTableData',
- 'AddToStringTable',
- 'LockStringTables',
- 'AddFileToDownloadsTable',
- 'GetEntityFlags',
- 'SetEntityFlags',
- 'GetEntityMoveType',
- 'SetEntityMoveType',
- 'GetEntityRenderMode',
- 'SetEntityRenderMode',
- 'GetEntityRenderFx',
- 'SetEntityRenderFx',
- 'SetEntityRenderColor',
- 'GetEntityGravity',
- 'SetEntityGravity',
- 'SetEntityHealth',
- 'GetClientButtons',
- 'EntityOutput',
- 'HookEntityOutput',
- 'UnhookEntityOutput',
- 'HookSingleEntityOutput',
- 'UnhookSingleEntityOutput',
- 'SMC_CreateParser',
- 'SMC_ParseFile',
- 'SMC_GetErrorString',
- 'SMC_ParseStart',
- 'SMC_SetParseStart',
- 'SMC_ParseEnd',
- 'SMC_SetParseEnd',
- 'SMC_NewSection',
- 'SMC_KeyValue',
- 'SMC_EndSection',
- 'SMC_SetReaders',
- 'SMC_RawLine',
- 'SMC_SetRawLine',
- 'BfWriteBool',
- 'BfWriteByte',
- 'BfWriteChar',
- 'BfWriteShort',
- 'BfWriteWord',
- 'BfWriteNum',
- 'BfWriteFloat',
- 'BfWriteString',
- 'BfWriteEntity',
- 'BfWriteAngle',
- 'BfWriteCoord',
- 'BfWriteVecCoord',
- 'BfWriteVecNormal',
- 'BfWriteAngles',
- 'BfReadBool',
- 'BfReadByte',
- 'BfReadChar',
- 'BfReadShort',
- 'BfReadWord',
- 'BfReadNum',
- 'BfReadFloat',
- 'BfReadString',
- 'BfReadEntity',
- 'BfReadAngle',
- 'BfReadCoord',
- 'BfReadVecCoord',
- 'BfReadVecNormal',
- 'BfReadAngles',
- 'BfGetNumBytesLeft',
- 'CreateProfiler',
- 'StartProfiling',
- 'StopProfiling',
- 'GetProfilerTime',
- 'OnPluginStart',
- 'AskPluginLoad2',
- 'OnPluginEnd',
- 'OnPluginPauseChange',
- 'OnGameFrame',
- 'OnMapStart',
- 'OnMapEnd',
- 'OnConfigsExecuted',
- 'OnAutoConfigsBuffered',
- 'OnAllPluginsLoaded',
- 'GetMyHandle',
- 'GetPluginIterator',
- 'MorePlugins',
- 'ReadPlugin',
- 'GetPluginStatus',
- 'GetPluginFilename',
- 'IsPluginDebugging',
- 'GetPluginInfo',
- 'FindPluginByNumber',
- 'SetFailState',
- 'ThrowError',
- 'GetTime',
- 'FormatTime',
- 'LoadGameConfigFile',
- 'GameConfGetOffset',
- 'GameConfGetKeyValue',
- 'GetSysTickCount',
- 'AutoExecConfig',
- 'RegPluginLibrary',
- 'LibraryExists',
- 'GetExtensionFileStatus',
- 'OnLibraryAdded',
- 'OnLibraryRemoved',
- 'ReadMapList',
- 'SetMapListCompatBind',
- 'OnClientFloodCheck',
- 'OnClientFloodResult',
- 'CanTestFeatures',
- 'GetFeatureStatus',
- 'RequireFeature',
- 'LoadFromAddress',
- 'StoreToAddress',
- 'CreateStack',
- 'PushStackCell',
- 'PushStackString',
- 'PushStackArray',
- 'PopStackCell',
- 'PopStackString',
- 'PopStackArray',
- 'IsStackEmpty',
- 'PopStack',
- 'OnPlayerRunCmd',
- 'BuildPath',
- 'OpenDirectory',
- 'ReadDirEntry',
- 'OpenFile',
- 'DeleteFile',
- 'ReadFileLine',
- 'ReadFile',
- 'ReadFileString',
- 'WriteFile',
- 'WriteFileString',
- 'WriteFileLine',
- 'ReadFileCell',
- 'WriteFileCell',
- 'IsEndOfFile',
- 'FileSeek',
- 'FilePosition',
- 'FileExists',
- 'RenameFile',
- 'DirExists',
- 'FileSize',
- 'FlushFile',
- 'RemoveDir',
- 'CreateDirectory',
- 'GetFileTime',
- 'LogToOpenFile',
- 'LogToOpenFileEx',
- 'SetNextMap',
- 'GetNextMap',
- 'ForceChangeLevel',
- 'GetMapHistorySize',
- 'GetMapHistory',
- 'GeoipCode2',
- 'GeoipCode3',
- 'GeoipCountry',
- 'MarkNativeAsOptional',
- 'RegClientCookie',
- 'FindClientCookie',
- 'SetClientCookie',
- 'GetClientCookie',
- 'SetAuthIdCookie',
- 'AreClientCookiesCached',
- 'OnClientCookiesCached',
- 'CookieMenuHandler',
- 'SetCookiePrefabMenu',
- 'SetCookieMenuItem',
- 'ShowCookieMenu',
- 'GetCookieIterator',
- 'ReadCookieIterator',
- 'GetCookieAccess',
- 'GetClientCookieTime',
- 'LoadTranslations',
- 'SetGlobalTransTarget',
- 'GetClientLanguage',
- 'GetServerLanguage',
- 'GetLanguageCount',
- 'GetLanguageInfo',
- 'SetClientLanguage',
- 'GetLanguageByCode',
- 'GetLanguageByName',
- 'CS_OnBuyCommand',
- 'CS_OnCSWeaponDrop',
- 'CS_OnGetWeaponPrice',
- 'CS_OnTerminateRound',
- 'CS_RespawnPlayer',
- 'CS_SwitchTeam',
- 'CS_DropWeapon',
- 'CS_TerminateRound',
- 'CS_GetTranslatedWeaponAlias',
- 'CS_GetWeaponPrice',
- 'CS_GetClientClanTag',
- 'CS_SetClientClanTag',
- 'LogToGame',
- 'SetRandomSeed',
- 'GetRandomFloat',
- 'GetRandomInt',
- 'IsMapValid',
- 'IsDedicatedServer',
- 'GetEngineTime',
- 'GetGameTime',
- 'GetGameTickCount',
- 'GetGameDescription',
- 'GetGameFolderName',
- 'GetCurrentMap',
- 'PrecacheModel',
- 'PrecacheSentenceFile',
- 'PrecacheDecal',
- 'PrecacheGeneric',
- 'IsModelPrecached',
- 'IsDecalPrecached',
- 'IsGenericPrecached',
- 'PrecacheSound',
- 'IsSoundPrecached',
- 'CreateDialog',
- 'GuessSDKVersion',
- 'PrintToChat',
- 'PrintToChatAll',
- 'PrintCenterText',
- 'PrintCenterTextAll',
- 'PrintHintText',
- 'PrintHintTextToAll',
- 'ShowVGUIPanel',
- 'CreateHudSynchronizer',
- 'SetHudTextParams',
- 'SetHudTextParamsEx',
- 'ShowSyncHudText',
- 'ClearSyncHud',
- 'ShowHudText',
- 'ShowMOTDPanel',
- 'DisplayAskConnectBox',
- 'EntIndexToEntRef',
- 'EntRefToEntIndex',
- 'MakeCompatEntRef',
- 'SetClientViewEntity',
- 'SetLightStyle',
- 'GetClientEyePosition',
- 'CreateDataPack',
- 'WritePackCell',
- 'WritePackFloat',
- 'WritePackString',
- 'ReadPackCell',
- 'ReadPackFloat',
- 'ReadPackString',
- 'ResetPack',
- 'GetPackPosition',
- 'SetPackPosition',
- 'IsPackReadable',
- 'LogMessage',
- 'LogMessageEx',
- 'LogToFile',
- 'LogToFileEx',
- 'LogAction',
- 'LogError',
- 'OnLogAction',
- 'GameLogHook',
- 'AddGameLogHook',
- 'RemoveGameLogHook',
- 'FindTeamByName',
- 'StartPrepSDKCall',
- 'PrepSDKCall_SetVirtual',
- 'PrepSDKCall_SetSignature',
- 'PrepSDKCall_SetFromConf',
- 'PrepSDKCall_SetReturnInfo',
- 'PrepSDKCall_AddParameter',
- 'EndPrepSDKCall',
- 'SDKCall']
-
-if __name__ == '__main__':
- import pprint
- import re
- import sys
- try:
- from urllib import urlopen
- except ImportError:
- from urllib.request import urlopen
-
- # urllib ends up wanting to import a module called 'math' -- if
- # pygments/lexers is in the path, this ends badly.
- for i in range(len(sys.path)-1, -1, -1):
- if sys.path[i].endswith('/lexers'):
- del sys.path[i]
-
- def get_version():
- f = urlopen('http://docs.sourcemod.net/api/index.php')
- r = re.compile(r'SourceMod v\.<b>([\d\.]+)</td>')
- for line in f:
- m = r.search(line)
- if m is not None:
- return m.groups()[0]
-
- def get_sm_functions():
- f = urlopen('http://docs.sourcemod.net/api/SMfuncs.js')
- r = re.compile(r'SMfunctions\[\d+\] = Array \("(?:public )?([^,]+)",".+"\);')
- functions = []
- for line in f:
- m = r.match(line)
- if m is not None:
- functions.append(m.groups()[0])
- return functions
-
- def regenerate(filename, natives):
- f = open(filename)
- try:
- content = f.read()
- finally:
- f.close()
-
- header = content[:content.find('FUNCTIONS = [')]
- footer = content[content.find("if __name__ == '__main__':"):]
-
-
- f = open(filename, 'w')
- f.write(header)
- f.write('FUNCTIONS = %s\n\n' % pprint.pformat(natives))
- f.write(footer)
- f.close()
-
- def run():
- version = get_version()
- print('> Downloading function index for SourceMod %s' % version)
- functions = get_sm_functions()
- print('> %d functions found:' % len(functions))
-
- functionlist = []
- for full_function_name in functions:
- print('>> %s' % full_function_name)
- functionlist.append(full_function_name)
-
- regenerate(__file__, functionlist)
-
-
- run()
diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py
index 583b44a8..246cdd88 100644
--- a/pygments/lexers/_stan_builtins.py
+++ b/pygments/lexers/_stan_builtins.py
@@ -4,15 +4,27 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names of functions for Stan used by
- ``pygments.lexers.math.StanLexer``. These builtins are from Stan language v2.2.0.
+ ``pygments.lexers.math.StanLexer``. This is for Stan language version 2.4.0.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-KEYWORDS = ['else', 'for', 'if', 'in', 'lp__', 'print', 'while']
+KEYWORDS = (
+ 'else',
+ 'for',
+ 'if',
+ 'in',
+ 'increment_log_prob',
+ 'lp__',
+ 'print',
+ 'return',
+ 'while',
+)
-TYPES = [ 'cholesky_factor_cov',
+TYPES = (
+ 'cholesky_factor_corr',
+ 'cholesky_factor_cov',
'corr_matrix',
'cov_matrix',
'int',
@@ -23,9 +35,12 @@ TYPES = [ 'cholesky_factor_cov',
'row_vector',
'simplex',
'unit_vector',
- 'vector']
+ 'vector',
+ 'void',
+)
-FUNCTIONS = [ 'Phi',
+FUNCTIONS = (
+ 'Phi',
'Phi_approx',
'abs',
'acos',
@@ -147,7 +162,6 @@ FUNCTIONS = [ 'Phi',
'hypergeometric_rng',
'hypot',
'if_else',
- 'increment_log_prob',
'int_step',
'inv',
'inv_chi_square_ccdf_log',
@@ -170,8 +184,11 @@ FUNCTIONS = [ 'Phi',
'inverse_spd',
'lbeta',
'lgamma',
+ 'lkj_corr_cholesky_log',
+ 'lkj_corr_cholesky_rng',
'lkj_corr_log',
'lkj_corr_rng',
+ 'lkj_cov_log',
'lmgamma',
'log',
'log10',
@@ -207,7 +224,9 @@ FUNCTIONS = [ 'Phi',
'min',
'modified_bessel_first_kind',
'modified_bessel_second_kind',
+ 'multi_gp_log',
'multi_normal_cholesky_log',
+ 'multi_normal_cholesky_rng',
'multi_normal_log',
'multi_normal_prec_log',
'multi_normal_rng',
@@ -217,6 +236,10 @@ FUNCTIONS = [ 'Phi',
'multinomial_rng',
'multiply_log',
'multiply_lower_tri_self_transpose',
+ 'neg_binomial_2_log',
+ 'neg_binomial_2_log_log',
+ 'neg_binomial_2_log_rng',
+ 'neg_binomial_2_rng',
'neg_binomial_ccdf_log',
'neg_binomial_cdf',
'neg_binomial_cdf_log',
@@ -247,7 +270,11 @@ FUNCTIONS = [ 'Phi',
'positive_infinity',
'pow',
'prod',
+ 'qr_Q',
+ 'qr_R',
'quad_form',
+ 'quad_form_diag',
+ 'quad_form_sym',
'rank',
'rayleigh_ccdf_log',
'rayleigh_cdf',
@@ -283,6 +310,8 @@ FUNCTIONS = [ 'Phi',
'softmax',
'sort_asc',
'sort_desc',
+ 'sort_indices_asc',
+ 'sort_indices_desc',
'sqrt',
'sqrt2',
'square',
@@ -301,6 +330,10 @@ FUNCTIONS = [ 'Phi',
'tanh',
'tcrossprod',
'tgamma',
+ 'to_array_1d',
+ 'to_array_2d',
+ 'to_matrix',
+ 'to_row_vector',
'to_vector',
'trace',
'trace_gen_quad_form',
@@ -314,15 +347,18 @@ FUNCTIONS = [ 'Phi',
'uniform_rng',
'variance',
'von_mises_log',
+ 'von_mises_rng',
'weibull_ccdf_log',
'weibull_cdf',
'weibull_cdf_log',
'weibull_log',
'weibull_rng',
'wishart_log',
- 'wishart_rng']
+ 'wishart_rng',
+)
-DISTRIBUTIONS = [ 'bernoulli',
+DISTRIBUTIONS = (
+ 'bernoulli',
'bernoulli_logit',
'beta',
'beta_binomial',
@@ -344,14 +380,19 @@ DISTRIBUTIONS = [ 'bernoulli',
'inv_gamma',
'inv_wishart',
'lkj_corr',
+ 'lkj_corr_cholesky',
+ 'lkj_cov',
'logistic',
'lognormal',
+ 'multi_gp',
'multi_normal',
'multi_normal_cholesky',
'multi_normal_prec',
'multi_student_t',
'multinomial',
'neg_binomial',
+ 'neg_binomial_2',
+ 'neg_binomial_2_log',
'normal',
'ordered_logistic',
'pareto',
@@ -364,9 +405,11 @@ DISTRIBUTIONS = [ 'bernoulli',
'uniform',
'von_mises',
'weibull',
- 'wishart']
+ 'wishart',
+)
-RESERVED = [ 'alignas',
+RESERVED = (
+ 'alignas',
'alignof',
'and',
'and_eq',
@@ -401,6 +444,7 @@ RESERVED = [ 'alignas',
'false',
'float',
'friend',
+ 'fvar',
'goto',
'inline',
'int',
@@ -421,7 +465,6 @@ RESERVED = [ 'alignas',
'register',
'reinterpret_cast',
'repeat',
- 'return',
'short',
'signed',
'sizeof',
@@ -445,10 +488,11 @@ RESERVED = [ 'alignas',
'unsigned',
'until',
'using',
+ 'var',
'virtual',
'void',
'volatile',
'wchar_t',
'xor',
- 'xor_eq']
-
+ 'xor_eq',
+)
diff --git a/pygments/lexers/_vim_builtins.py b/pygments/lexers/_vim_builtins.py
new file mode 100644
index 00000000..e8e2b248
--- /dev/null
+++ b/pygments/lexers/_vim_builtins.py
@@ -0,0 +1,1939 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up in multiple functions so it's importable by jython, which has a
+# per-method size limit.
+
+def _getauto():
+ var = (
+ ('BufAdd','BufAdd'),
+ ('BufCreate','BufCreate'),
+ ('BufDelete','BufDelete'),
+ ('BufEnter','BufEnter'),
+ ('BufFilePost','BufFilePost'),
+ ('BufFilePre','BufFilePre'),
+ ('BufHidden','BufHidden'),
+ ('BufLeave','BufLeave'),
+ ('BufNew','BufNew'),
+ ('BufNewFile','BufNewFile'),
+ ('BufRead','BufRead'),
+ ('BufReadCmd','BufReadCmd'),
+ ('BufReadPost','BufReadPost'),
+ ('BufReadPre','BufReadPre'),
+ ('BufUnload','BufUnload'),
+ ('BufWinEnter','BufWinEnter'),
+ ('BufWinLeave','BufWinLeave'),
+ ('BufWipeout','BufWipeout'),
+ ('BufWrite','BufWrite'),
+ ('BufWriteCmd','BufWriteCmd'),
+ ('BufWritePost','BufWritePost'),
+ ('BufWritePre','BufWritePre'),
+ ('Cmd','Cmd'),
+ ('CmdwinEnter','CmdwinEnter'),
+ ('CmdwinLeave','CmdwinLeave'),
+ ('ColorScheme','ColorScheme'),
+ ('CompleteDone','CompleteDone'),
+ ('CursorHold','CursorHold'),
+ ('CursorHoldI','CursorHoldI'),
+ ('CursorMoved','CursorMoved'),
+ ('CursorMovedI','CursorMovedI'),
+ ('EncodingChanged','EncodingChanged'),
+ ('FileAppendCmd','FileAppendCmd'),
+ ('FileAppendPost','FileAppendPost'),
+ ('FileAppendPre','FileAppendPre'),
+ ('FileChangedRO','FileChangedRO'),
+ ('FileChangedShell','FileChangedShell'),
+ ('FileChangedShellPost','FileChangedShellPost'),
+ ('FileEncoding','FileEncoding'),
+ ('FileReadCmd','FileReadCmd'),
+ ('FileReadPost','FileReadPost'),
+ ('FileReadPre','FileReadPre'),
+ ('FileType','FileType'),
+ ('FileWriteCmd','FileWriteCmd'),
+ ('FileWritePost','FileWritePost'),
+ ('FileWritePre','FileWritePre'),
+ ('FilterReadPost','FilterReadPost'),
+ ('FilterReadPre','FilterReadPre'),
+ ('FilterWritePost','FilterWritePost'),
+ ('FilterWritePre','FilterWritePre'),
+ ('FocusGained','FocusGained'),
+ ('FocusLost','FocusLost'),
+ ('FuncUndefined','FuncUndefined'),
+ ('GUIEnter','GUIEnter'),
+ ('GUIFailed','GUIFailed'),
+ ('InsertChange','InsertChange'),
+ ('InsertCharPre','InsertCharPre'),
+ ('InsertEnter','InsertEnter'),
+ ('InsertLeave','InsertLeave'),
+ ('MenuPopup','MenuPopup'),
+ ('QuickFixCmdPost','QuickFixCmdPost'),
+ ('QuickFixCmdPre','QuickFixCmdPre'),
+ ('QuitPre','QuitPre'),
+ ('RemoteReply','RemoteReply'),
+ ('SessionLoadPost','SessionLoadPost'),
+ ('ShellCmdPost','ShellCmdPost'),
+ ('ShellFilterPost','ShellFilterPost'),
+ ('SourceCmd','SourceCmd'),
+ ('SourcePre','SourcePre'),
+ ('SpellFileMissing','SpellFileMissing'),
+ ('StdinReadPost','StdinReadPost'),
+ ('StdinReadPre','StdinReadPre'),
+ ('SwapExists','SwapExists'),
+ ('Syntax','Syntax'),
+ ('TabEnter','TabEnter'),
+ ('TabLeave','TabLeave'),
+ ('TermChanged','TermChanged'),
+ ('TermResponse','TermResponse'),
+ ('TextChanged','TextChanged'),
+ ('TextChangedI','TextChangedI'),
+ ('User','User'),
+ ('UserGettingBored','UserGettingBored'),
+ ('VimEnter','VimEnter'),
+ ('VimLeave','VimLeave'),
+ ('VimLeavePre','VimLeavePre'),
+ ('VimResized','VimResized'),
+ ('WinEnter','WinEnter'),
+ ('WinLeave','WinLeave'),
+ ('event','event'),
+ )
+ return var
+auto = _getauto()
+
+def _getcommand():
+ var = (
+ ('a','a'),
+ ('ab','ab'),
+ ('abc','abclear'),
+ ('abo','aboveleft'),
+ ('al','all'),
+ ('ar','ar'),
+ ('ar','args'),
+ ('arga','argadd'),
+ ('argd','argdelete'),
+ ('argdo','argdo'),
+ ('arge','argedit'),
+ ('argg','argglobal'),
+ ('argl','arglocal'),
+ ('argu','argument'),
+ ('as','ascii'),
+ ('au','au'),
+ ('b','buffer'),
+ ('bN','bNext'),
+ ('ba','ball'),
+ ('bad','badd'),
+ ('bd','bdelete'),
+ ('bel','belowright'),
+ ('bf','bfirst'),
+ ('bl','blast'),
+ ('bm','bmodified'),
+ ('bn','bnext'),
+ ('bo','botright'),
+ ('bp','bprevious'),
+ ('br','br'),
+ ('br','brewind'),
+ ('brea','break'),
+ ('breaka','breakadd'),
+ ('breakd','breakdel'),
+ ('breakl','breaklist'),
+ ('bro','browse'),
+ ('bu','bu'),
+ ('buf','buf'),
+ ('bufdo','bufdo'),
+ ('buffers','buffers'),
+ ('bun','bunload'),
+ ('bw','bwipeout'),
+ ('c','c'),
+ ('c','change'),
+ ('cN','cN'),
+ ('cN','cNext'),
+ ('cNf','cNf'),
+ ('cNf','cNfile'),
+ ('cabc','cabclear'),
+ ('cad','cad'),
+ ('cad','caddexpr'),
+ ('caddb','caddbuffer'),
+ ('caddf','caddfile'),
+ ('cal','call'),
+ ('cat','catch'),
+ ('cb','cbuffer'),
+ ('cc','cc'),
+ ('ccl','cclose'),
+ ('cd','cd'),
+ ('ce','center'),
+ ('cex','cexpr'),
+ ('cf','cfile'),
+ ('cfir','cfirst'),
+ ('cg','cgetfile'),
+ ('cgetb','cgetbuffer'),
+ ('cgete','cgetexpr'),
+ ('changes','changes'),
+ ('chd','chdir'),
+ ('che','checkpath'),
+ ('checkt','checktime'),
+ ('cl','cl'),
+ ('cl','clist'),
+ ('cla','clast'),
+ ('clo','close'),
+ ('cmapc','cmapclear'),
+ ('cn','cn'),
+ ('cn','cnext'),
+ ('cnew','cnewer'),
+ ('cnf','cnf'),
+ ('cnf','cnfile'),
+ ('co','copy'),
+ ('col','colder'),
+ ('colo','colorscheme'),
+ ('com','com'),
+ ('comc','comclear'),
+ ('comp','compiler'),
+ ('con','con'),
+ ('con','continue'),
+ ('conf','confirm'),
+ ('cope','copen'),
+ ('cp','cprevious'),
+ ('cpf','cpfile'),
+ ('cq','cquit'),
+ ('cr','crewind'),
+ ('cs','cs'),
+ ('cscope','cscope'),
+ ('cstag','cstag'),
+ ('cuna','cunabbrev'),
+ ('cw','cwindow'),
+ ('d','d'),
+ ('d','delete'),
+ ('de','de'),
+ ('debug','debug'),
+ ('debugg','debuggreedy'),
+ ('del','del'),
+ ('delc','delcommand'),
+ ('delel','delel'),
+ ('delep','delep'),
+ ('deletel','deletel'),
+ ('deletep','deletep'),
+ ('deletl','deletl'),
+ ('deletp','deletp'),
+ ('delf','delf'),
+ ('delf','delfunction'),
+ ('dell','dell'),
+ ('delm','delmarks'),
+ ('delp','delp'),
+ ('dep','dep'),
+ ('di','di'),
+ ('di','display'),
+ ('diffg','diffget'),
+ ('diffo','diffoff'),
+ ('diffp','diffpatch'),
+ ('diffpu','diffput'),
+ ('diffs','diffsplit'),
+ ('difft','diffthis'),
+ ('diffu','diffupdate'),
+ ('dig','dig'),
+ ('dig','digraphs'),
+ ('dir','dir'),
+ ('dj','djump'),
+ ('dl','dl'),
+ ('dli','dlist'),
+ ('do','do'),
+ ('doau','doau'),
+ ('dp','dp'),
+ ('dr','drop'),
+ ('ds','dsearch'),
+ ('dsp','dsplit'),
+ ('e','e'),
+ ('e','edit'),
+ ('ea','ea'),
+ ('earlier','earlier'),
+ ('ec','ec'),
+ ('echoe','echoerr'),
+ ('echom','echomsg'),
+ ('echon','echon'),
+ ('el','else'),
+ ('elsei','elseif'),
+ ('em','emenu'),
+ ('en','en'),
+ ('en','endif'),
+ ('endf','endf'),
+ ('endf','endfunction'),
+ ('endfo','endfor'),
+ ('endfun','endfun'),
+ ('endt','endtry'),
+ ('endw','endwhile'),
+ ('ene','enew'),
+ ('ex','ex'),
+ ('exi','exit'),
+ ('exu','exusage'),
+ ('f','f'),
+ ('f','file'),
+ ('files','files'),
+ ('filet','filet'),
+ ('filetype','filetype'),
+ ('fin','fin'),
+ ('fin','find'),
+ ('fina','finally'),
+ ('fini','finish'),
+ ('fir','first'),
+ ('fix','fixdel'),
+ ('fo','fold'),
+ ('foldc','foldclose'),
+ ('foldd','folddoopen'),
+ ('folddoc','folddoclosed'),
+ ('foldo','foldopen'),
+ ('for','for'),
+ ('fu','fu'),
+ ('fu','function'),
+ ('fun','fun'),
+ ('g','g'),
+ ('go','goto'),
+ ('gr','grep'),
+ ('grepa','grepadd'),
+ ('gui','gui'),
+ ('gvim','gvim'),
+ ('h','h'),
+ ('h','help'),
+ ('ha','hardcopy'),
+ ('helpf','helpfind'),
+ ('helpg','helpgrep'),
+ ('helpt','helptags'),
+ ('hi','hi'),
+ ('hid','hide'),
+ ('his','history'),
+ ('i','i'),
+ ('ia','ia'),
+ ('iabc','iabclear'),
+ ('if','if'),
+ ('ij','ijump'),
+ ('il','ilist'),
+ ('imapc','imapclear'),
+ ('in','in'),
+ ('intro','intro'),
+ ('is','isearch'),
+ ('isp','isplit'),
+ ('iuna','iunabbrev'),
+ ('j','join'),
+ ('ju','jumps'),
+ ('k','k'),
+ ('kee','keepmarks'),
+ ('keepa','keepa'),
+ ('keepalt','keepalt'),
+ ('keepj','keepjumps'),
+ ('keepp','keeppatterns'),
+ ('l','l'),
+ ('l','list'),
+ ('lN','lN'),
+ ('lN','lNext'),
+ ('lNf','lNf'),
+ ('lNf','lNfile'),
+ ('la','la'),
+ ('la','last'),
+ ('lad','lad'),
+ ('lad','laddexpr'),
+ ('laddb','laddbuffer'),
+ ('laddf','laddfile'),
+ ('lan','lan'),
+ ('lan','language'),
+ ('lat','lat'),
+ ('later','later'),
+ ('lb','lbuffer'),
+ ('lc','lcd'),
+ ('lch','lchdir'),
+ ('lcl','lclose'),
+ ('lcs','lcs'),
+ ('lcscope','lcscope'),
+ ('le','left'),
+ ('lefta','leftabove'),
+ ('lex','lexpr'),
+ ('lf','lfile'),
+ ('lfir','lfirst'),
+ ('lg','lgetfile'),
+ ('lgetb','lgetbuffer'),
+ ('lgete','lgetexpr'),
+ ('lgr','lgrep'),
+ ('lgrepa','lgrepadd'),
+ ('lh','lhelpgrep'),
+ ('ll','ll'),
+ ('lla','llast'),
+ ('lli','llist'),
+ ('lmak','lmake'),
+ ('lmapc','lmapclear'),
+ ('lne','lne'),
+ ('lne','lnext'),
+ ('lnew','lnewer'),
+ ('lnf','lnf'),
+ ('lnf','lnfile'),
+ ('lo','lo'),
+ ('lo','loadview'),
+ ('loadk','loadk'),
+ ('loadkeymap','loadkeymap'),
+ ('loc','lockmarks'),
+ ('lockv','lockvar'),
+ ('lol','lolder'),
+ ('lop','lopen'),
+ ('lp','lprevious'),
+ ('lpf','lpfile'),
+ ('lr','lrewind'),
+ ('ls','ls'),
+ ('lt','ltag'),
+ ('lua','lua'),
+ ('luado','luado'),
+ ('luafile','luafile'),
+ ('lv','lvimgrep'),
+ ('lvimgrepa','lvimgrepadd'),
+ ('lw','lwindow'),
+ ('m','move'),
+ ('ma','ma'),
+ ('ma','mark'),
+ ('mak','make'),
+ ('marks','marks'),
+ ('mat','match'),
+ ('menut','menut'),
+ ('menut','menutranslate'),
+ ('mes','mes'),
+ ('messages','messages'),
+ ('mk','mk'),
+ ('mk','mkexrc'),
+ ('mks','mksession'),
+ ('mksp','mkspell'),
+ ('mkv','mkv'),
+ ('mkv','mkvimrc'),
+ ('mkvie','mkview'),
+ ('mo','mo'),
+ ('mod','mode'),
+ ('mz','mz'),
+ ('mz','mzscheme'),
+ ('mzf','mzfile'),
+ ('n','n'),
+ ('n','next'),
+ ('nb','nbkey'),
+ ('nbc','nbclose'),
+ ('nbs','nbstart'),
+ ('ne','ne'),
+ ('new','new'),
+ ('nmapc','nmapclear'),
+ ('noa','noa'),
+ ('noautocmd','noautocmd'),
+ ('noh','nohlsearch'),
+ ('nu','number'),
+ ('o','o'),
+ ('o','open'),
+ ('ol','oldfiles'),
+ ('omapc','omapclear'),
+ ('on','only'),
+ ('opt','options'),
+ ('ownsyntax','ownsyntax'),
+ ('p','p'),
+ ('p','print'),
+ ('pc','pclose'),
+ ('pe','pe'),
+ ('pe','perl'),
+ ('ped','pedit'),
+ ('perld','perldo'),
+ ('po','pop'),
+ ('popu','popu'),
+ ('popu','popup'),
+ ('pp','ppop'),
+ ('pr','pr'),
+ ('pre','preserve'),
+ ('prev','previous'),
+ ('pro','pro'),
+ ('prof','profile'),
+ ('profd','profdel'),
+ ('promptf','promptfind'),
+ ('promptr','promptrepl'),
+ ('ps','psearch'),
+ ('ptN','ptN'),
+ ('ptN','ptNext'),
+ ('pta','ptag'),
+ ('ptf','ptfirst'),
+ ('ptj','ptjump'),
+ ('ptl','ptlast'),
+ ('ptn','ptn'),
+ ('ptn','ptnext'),
+ ('ptp','ptprevious'),
+ ('ptr','ptrewind'),
+ ('pts','ptselect'),
+ ('pu','put'),
+ ('pw','pwd'),
+ ('py','py'),
+ ('py','python'),
+ ('py3','py3'),
+ ('py3','py3'),
+ ('py3do','py3do'),
+ ('pydo','pydo'),
+ ('pyf','pyfile'),
+ ('python3','python3'),
+ ('q','q'),
+ ('q','quit'),
+ ('qa','qall'),
+ ('quita','quitall'),
+ ('r','r'),
+ ('r','read'),
+ ('re','re'),
+ ('rec','recover'),
+ ('red','red'),
+ ('red','redo'),
+ ('redi','redir'),
+ ('redr','redraw'),
+ ('redraws','redrawstatus'),
+ ('reg','registers'),
+ ('res','resize'),
+ ('ret','retab'),
+ ('retu','return'),
+ ('rew','rewind'),
+ ('ri','right'),
+ ('rightb','rightbelow'),
+ ('ru','ru'),
+ ('ru','runtime'),
+ ('rub','ruby'),
+ ('rubyd','rubydo'),
+ ('rubyf','rubyfile'),
+ ('rundo','rundo'),
+ ('rv','rviminfo'),
+ ('sN','sNext'),
+ ('sa','sargument'),
+ ('sal','sall'),
+ ('san','sandbox'),
+ ('sav','saveas'),
+ ('sb','sbuffer'),
+ ('sbN','sbNext'),
+ ('sba','sball'),
+ ('sbf','sbfirst'),
+ ('sbl','sblast'),
+ ('sbm','sbmodified'),
+ ('sbn','sbnext'),
+ ('sbp','sbprevious'),
+ ('sbr','sbrewind'),
+ ('scrip','scrip'),
+ ('scrip','scriptnames'),
+ ('scripte','scriptencoding'),
+ ('scs','scs'),
+ ('scscope','scscope'),
+ ('se','set'),
+ ('setf','setfiletype'),
+ ('setg','setglobal'),
+ ('setl','setlocal'),
+ ('sf','sfind'),
+ ('sfir','sfirst'),
+ ('sh','shell'),
+ ('si','si'),
+ ('sig','sig'),
+ ('sign','sign'),
+ ('sil','silent'),
+ ('sim','simalt'),
+ ('sl','sl'),
+ ('sl','sleep'),
+ ('sla','slast'),
+ ('sm','smagic'),
+ ('sm','smap'),
+ ('sme','sme'),
+ ('smenu','smenu'),
+ ('sn','snext'),
+ ('sni','sniff'),
+ ('sno','snomagic'),
+ ('snoreme','snoreme'),
+ ('snoremenu','snoremenu'),
+ ('so','so'),
+ ('so','source'),
+ ('sor','sort'),
+ ('sp','split'),
+ ('spe','spe'),
+ ('spe','spellgood'),
+ ('spelld','spelldump'),
+ ('spelli','spellinfo'),
+ ('spellr','spellrepall'),
+ ('spellu','spellundo'),
+ ('spellw','spellwrong'),
+ ('spr','sprevious'),
+ ('sre','srewind'),
+ ('st','st'),
+ ('st','stop'),
+ ('sta','stag'),
+ ('star','star'),
+ ('star','startinsert'),
+ ('start','start'),
+ ('startg','startgreplace'),
+ ('startr','startreplace'),
+ ('stj','stjump'),
+ ('stopi','stopinsert'),
+ ('sts','stselect'),
+ ('sun','sunhide'),
+ ('sunme','sunme'),
+ ('sunmenu','sunmenu'),
+ ('sus','suspend'),
+ ('sv','sview'),
+ ('sw','swapname'),
+ ('sy','sy'),
+ ('syn','syn'),
+ ('sync','sync'),
+ ('syncbind','syncbind'),
+ ('syntime','syntime'),
+ ('t','t'),
+ ('tN','tN'),
+ ('tN','tNext'),
+ ('ta','ta'),
+ ('ta','tag'),
+ ('tab','tab'),
+ ('tabN','tabN'),
+ ('tabN','tabNext'),
+ ('tabc','tabclose'),
+ ('tabd','tabdo'),
+ ('tabe','tabedit'),
+ ('tabf','tabfind'),
+ ('tabfir','tabfirst'),
+ ('tabl','tablast'),
+ ('tabm','tabmove'),
+ ('tabn','tabnext'),
+ ('tabnew','tabnew'),
+ ('tabo','tabonly'),
+ ('tabp','tabprevious'),
+ ('tabr','tabrewind'),
+ ('tabs','tabs'),
+ ('tags','tags'),
+ ('tc','tcl'),
+ ('tcld','tcldo'),
+ ('tclf','tclfile'),
+ ('te','tearoff'),
+ ('tf','tfirst'),
+ ('th','throw'),
+ ('tj','tjump'),
+ ('tl','tlast'),
+ ('tm','tm'),
+ ('tm','tmenu'),
+ ('tn','tn'),
+ ('tn','tnext'),
+ ('to','topleft'),
+ ('tp','tprevious'),
+ ('tr','tr'),
+ ('tr','trewind'),
+ ('try','try'),
+ ('ts','tselect'),
+ ('tu','tu'),
+ ('tu','tunmenu'),
+ ('u','u'),
+ ('u','undo'),
+ ('un','un'),
+ ('una','unabbreviate'),
+ ('undoj','undojoin'),
+ ('undol','undolist'),
+ ('unh','unhide'),
+ ('unl','unl'),
+ ('unlo','unlockvar'),
+ ('uns','unsilent'),
+ ('up','update'),
+ ('v','v'),
+ ('ve','ve'),
+ ('ve','version'),
+ ('verb','verbose'),
+ ('vert','vertical'),
+ ('vi','vi'),
+ ('vi','visual'),
+ ('vie','view'),
+ ('vim','vimgrep'),
+ ('vimgrepa','vimgrepadd'),
+ ('viu','viusage'),
+ ('vmapc','vmapclear'),
+ ('vne','vnew'),
+ ('vs','vsplit'),
+ ('w','w'),
+ ('w','write'),
+ ('wN','wNext'),
+ ('wa','wall'),
+ ('wh','while'),
+ ('win','win'),
+ ('win','winsize'),
+ ('winc','wincmd'),
+ ('windo','windo'),
+ ('winp','winpos'),
+ ('wn','wnext'),
+ ('wp','wprevious'),
+ ('wq','wq'),
+ ('wqa','wqall'),
+ ('ws','wsverb'),
+ ('wundo','wundo'),
+ ('wv','wviminfo'),
+ ('x','x'),
+ ('x','xit'),
+ ('xa','xall'),
+ ('xmapc','xmapclear'),
+ ('xme','xme'),
+ ('xmenu','xmenu'),
+ ('xnoreme','xnoreme'),
+ ('xnoremenu','xnoremenu'),
+ ('xunme','xunme'),
+ ('xunmenu','xunmenu'),
+ ('xwininfo','xwininfo'),
+ ('y','yank'),
+ )
+ return var
+command = _getcommand()
+
+def _getoption():
+ var = (
+ ('acd','acd'),
+ ('ai','ai'),
+ ('akm','akm'),
+ ('al','al'),
+ ('aleph','aleph'),
+ ('allowrevins','allowrevins'),
+ ('altkeymap','altkeymap'),
+ ('ambiwidth','ambiwidth'),
+ ('ambw','ambw'),
+ ('anti','anti'),
+ ('antialias','antialias'),
+ ('ar','ar'),
+ ('arab','arab'),
+ ('arabic','arabic'),
+ ('arabicshape','arabicshape'),
+ ('ari','ari'),
+ ('arshape','arshape'),
+ ('autochdir','autochdir'),
+ ('autoindent','autoindent'),
+ ('autoread','autoread'),
+ ('autowrite','autowrite'),
+ ('autowriteall','autowriteall'),
+ ('aw','aw'),
+ ('awa','awa'),
+ ('background','background'),
+ ('backspace','backspace'),
+ ('backup','backup'),
+ ('backupcopy','backupcopy'),
+ ('backupdir','backupdir'),
+ ('backupext','backupext'),
+ ('backupskip','backupskip'),
+ ('balloondelay','balloondelay'),
+ ('ballooneval','ballooneval'),
+ ('balloonexpr','balloonexpr'),
+ ('bdir','bdir'),
+ ('bdlay','bdlay'),
+ ('beval','beval'),
+ ('bex','bex'),
+ ('bexpr','bexpr'),
+ ('bg','bg'),
+ ('bh','bh'),
+ ('bin','bin'),
+ ('binary','binary'),
+ ('biosk','biosk'),
+ ('bioskey','bioskey'),
+ ('bk','bk'),
+ ('bkc','bkc'),
+ ('bl','bl'),
+ ('bomb','bomb'),
+ ('breakat','breakat'),
+ ('brk','brk'),
+ ('browsedir','browsedir'),
+ ('bs','bs'),
+ ('bsdir','bsdir'),
+ ('bsk','bsk'),
+ ('bt','bt'),
+ ('bufhidden','bufhidden'),
+ ('buflisted','buflisted'),
+ ('buftype','buftype'),
+ ('casemap','casemap'),
+ ('cb','cb'),
+ ('cc','cc'),
+ ('ccv','ccv'),
+ ('cd','cd'),
+ ('cdpath','cdpath'),
+ ('cedit','cedit'),
+ ('cf','cf'),
+ ('cfu','cfu'),
+ ('ch','ch'),
+ ('charconvert','charconvert'),
+ ('ci','ci'),
+ ('cin','cin'),
+ ('cindent','cindent'),
+ ('cink','cink'),
+ ('cinkeys','cinkeys'),
+ ('cino','cino'),
+ ('cinoptions','cinoptions'),
+ ('cinw','cinw'),
+ ('cinwords','cinwords'),
+ ('clipboard','clipboard'),
+ ('cmdheight','cmdheight'),
+ ('cmdwinheight','cmdwinheight'),
+ ('cmp','cmp'),
+ ('cms','cms'),
+ ('co','co'),
+ ('cocu','cocu'),
+ ('cole','cole'),
+ ('colorcolumn','colorcolumn'),
+ ('columns','columns'),
+ ('com','com'),
+ ('comments','comments'),
+ ('commentstring','commentstring'),
+ ('compatible','compatible'),
+ ('complete','complete'),
+ ('completefunc','completefunc'),
+ ('completeopt','completeopt'),
+ ('concealcursor','concealcursor'),
+ ('conceallevel','conceallevel'),
+ ('confirm','confirm'),
+ ('consk','consk'),
+ ('conskey','conskey'),
+ ('copyindent','copyindent'),
+ ('cot','cot'),
+ ('cp','cp'),
+ ('cpo','cpo'),
+ ('cpoptions','cpoptions'),
+ ('cpt','cpt'),
+ ('crb','crb'),
+ ('cryptmethod','cryptmethod'),
+ ('cscopepathcomp','cscopepathcomp'),
+ ('cscopeprg','cscopeprg'),
+ ('cscopequickfix','cscopequickfix'),
+ ('cscoperelative','cscoperelative'),
+ ('cscopetag','cscopetag'),
+ ('cscopetagorder','cscopetagorder'),
+ ('cscopeverbose','cscopeverbose'),
+ ('cspc','cspc'),
+ ('csprg','csprg'),
+ ('csqf','csqf'),
+ ('csre','csre'),
+ ('cst','cst'),
+ ('csto','csto'),
+ ('csverb','csverb'),
+ ('cuc','cuc'),
+ ('cul','cul'),
+ ('cursorbind','cursorbind'),
+ ('cursorcolumn','cursorcolumn'),
+ ('cursorline','cursorline'),
+ ('cwh','cwh'),
+ ('debug','debug'),
+ ('deco','deco'),
+ ('def','def'),
+ ('define','define'),
+ ('delcombine','delcombine'),
+ ('dex','dex'),
+ ('dg','dg'),
+ ('dict','dict'),
+ ('dictionary','dictionary'),
+ ('diff','diff'),
+ ('diffexpr','diffexpr'),
+ ('diffopt','diffopt'),
+ ('digraph','digraph'),
+ ('dip','dip'),
+ ('dir','dir'),
+ ('directory','directory'),
+ ('display','display'),
+ ('dy','dy'),
+ ('ea','ea'),
+ ('ead','ead'),
+ ('eadirection','eadirection'),
+ ('eb','eb'),
+ ('ed','ed'),
+ ('edcompatible','edcompatible'),
+ ('ef','ef'),
+ ('efm','efm'),
+ ('ei','ei'),
+ ('ek','ek'),
+ ('enc','enc'),
+ ('encoding','encoding'),
+ ('endofline','endofline'),
+ ('eol','eol'),
+ ('ep','ep'),
+ ('equalalways','equalalways'),
+ ('equalprg','equalprg'),
+ ('errorbells','errorbells'),
+ ('errorfile','errorfile'),
+ ('errorformat','errorformat'),
+ ('esckeys','esckeys'),
+ ('et','et'),
+ ('eventignore','eventignore'),
+ ('ex','ex'),
+ ('expandtab','expandtab'),
+ ('exrc','exrc'),
+ ('fcl','fcl'),
+ ('fcs','fcs'),
+ ('fdc','fdc'),
+ ('fde','fde'),
+ ('fdi','fdi'),
+ ('fdl','fdl'),
+ ('fdls','fdls'),
+ ('fdm','fdm'),
+ ('fdn','fdn'),
+ ('fdo','fdo'),
+ ('fdt','fdt'),
+ ('fen','fen'),
+ ('fenc','fenc'),
+ ('fencs','fencs'),
+ ('fex','fex'),
+ ('ff','ff'),
+ ('ffs','ffs'),
+ ('fic','fic'),
+ ('fileencoding','fileencoding'),
+ ('fileencodings','fileencodings'),
+ ('fileformat','fileformat'),
+ ('fileformats','fileformats'),
+ ('fileignorecase','fileignorecase'),
+ ('filetype','filetype'),
+ ('fillchars','fillchars'),
+ ('fk','fk'),
+ ('fkmap','fkmap'),
+ ('flp','flp'),
+ ('fml','fml'),
+ ('fmr','fmr'),
+ ('fo','fo'),
+ ('foldclose','foldclose'),
+ ('foldcolumn','foldcolumn'),
+ ('foldenable','foldenable'),
+ ('foldexpr','foldexpr'),
+ ('foldignore','foldignore'),
+ ('foldlevel','foldlevel'),
+ ('foldlevelstart','foldlevelstart'),
+ ('foldmarker','foldmarker'),
+ ('foldmethod','foldmethod'),
+ ('foldminlines','foldminlines'),
+ ('foldnestmax','foldnestmax'),
+ ('foldopen','foldopen'),
+ ('foldtext','foldtext'),
+ ('formatexpr','formatexpr'),
+ ('formatlistpat','formatlistpat'),
+ ('formatoptions','formatoptions'),
+ ('formatprg','formatprg'),
+ ('fp','fp'),
+ ('fs','fs'),
+ ('fsync','fsync'),
+ ('ft','ft'),
+ ('gcr','gcr'),
+ ('gd','gd'),
+ ('gdefault','gdefault'),
+ ('gfm','gfm'),
+ ('gfn','gfn'),
+ ('gfs','gfs'),
+ ('gfw','gfw'),
+ ('ghr','ghr'),
+ ('go','go'),
+ ('gp','gp'),
+ ('grepformat','grepformat'),
+ ('grepprg','grepprg'),
+ ('gtl','gtl'),
+ ('gtt','gtt'),
+ ('guicursor','guicursor'),
+ ('guifont','guifont'),
+ ('guifontset','guifontset'),
+ ('guifontwide','guifontwide'),
+ ('guiheadroom','guiheadroom'),
+ ('guioptions','guioptions'),
+ ('guipty','guipty'),
+ ('guitablabel','guitablabel'),
+ ('guitabtooltip','guitabtooltip'),
+ ('helpfile','helpfile'),
+ ('helpheight','helpheight'),
+ ('helplang','helplang'),
+ ('hf','hf'),
+ ('hh','hh'),
+ ('hi','hi'),
+ ('hid','hid'),
+ ('hidden','hidden'),
+ ('highlight','highlight'),
+ ('history','history'),
+ ('hk','hk'),
+ ('hkmap','hkmap'),
+ ('hkmapp','hkmapp'),
+ ('hkp','hkp'),
+ ('hl','hl'),
+ ('hlg','hlg'),
+ ('hls','hls'),
+ ('hlsearch','hlsearch'),
+ ('ic','ic'),
+ ('icon','icon'),
+ ('iconstring','iconstring'),
+ ('ignorecase','ignorecase'),
+ ('im','im'),
+ ('imactivatefunc','imactivatefunc'),
+ ('imactivatekey','imactivatekey'),
+ ('imaf','imaf'),
+ ('imak','imak'),
+ ('imc','imc'),
+ ('imcmdline','imcmdline'),
+ ('imd','imd'),
+ ('imdisable','imdisable'),
+ ('imi','imi'),
+ ('iminsert','iminsert'),
+ ('ims','ims'),
+ ('imsearch','imsearch'),
+ ('imsf','imsf'),
+ ('imstatusfunc','imstatusfunc'),
+ ('inc','inc'),
+ ('include','include'),
+ ('includeexpr','includeexpr'),
+ ('incsearch','incsearch'),
+ ('inde','inde'),
+ ('indentexpr','indentexpr'),
+ ('indentkeys','indentkeys'),
+ ('indk','indk'),
+ ('inex','inex'),
+ ('inf','inf'),
+ ('infercase','infercase'),
+ ('inoremap','inoremap'),
+ ('insertmode','insertmode'),
+ ('invacd','invacd'),
+ ('invai','invai'),
+ ('invakm','invakm'),
+ ('invallowrevins','invallowrevins'),
+ ('invaltkeymap','invaltkeymap'),
+ ('invanti','invanti'),
+ ('invantialias','invantialias'),
+ ('invar','invar'),
+ ('invarab','invarab'),
+ ('invarabic','invarabic'),
+ ('invarabicshape','invarabicshape'),
+ ('invari','invari'),
+ ('invarshape','invarshape'),
+ ('invautochdir','invautochdir'),
+ ('invautoindent','invautoindent'),
+ ('invautoread','invautoread'),
+ ('invautowrite','invautowrite'),
+ ('invautowriteall','invautowriteall'),
+ ('invaw','invaw'),
+ ('invawa','invawa'),
+ ('invbackup','invbackup'),
+ ('invballooneval','invballooneval'),
+ ('invbeval','invbeval'),
+ ('invbin','invbin'),
+ ('invbinary','invbinary'),
+ ('invbiosk','invbiosk'),
+ ('invbioskey','invbioskey'),
+ ('invbk','invbk'),
+ ('invbl','invbl'),
+ ('invbomb','invbomb'),
+ ('invbuflisted','invbuflisted'),
+ ('invcf','invcf'),
+ ('invci','invci'),
+ ('invcin','invcin'),
+ ('invcindent','invcindent'),
+ ('invcompatible','invcompatible'),
+ ('invconfirm','invconfirm'),
+ ('invconsk','invconsk'),
+ ('invconskey','invconskey'),
+ ('invcopyindent','invcopyindent'),
+ ('invcp','invcp'),
+ ('invcrb','invcrb'),
+ ('invcscoperelative','invcscoperelative'),
+ ('invcscopetag','invcscopetag'),
+ ('invcscopeverbose','invcscopeverbose'),
+ ('invcsre','invcsre'),
+ ('invcst','invcst'),
+ ('invcsverb','invcsverb'),
+ ('invcuc','invcuc'),
+ ('invcul','invcul'),
+ ('invcursorbind','invcursorbind'),
+ ('invcursorcolumn','invcursorcolumn'),
+ ('invcursorline','invcursorline'),
+ ('invdeco','invdeco'),
+ ('invdelcombine','invdelcombine'),
+ ('invdg','invdg'),
+ ('invdiff','invdiff'),
+ ('invdigraph','invdigraph'),
+ ('invea','invea'),
+ ('inveb','inveb'),
+ ('inved','inved'),
+ ('invedcompatible','invedcompatible'),
+ ('invek','invek'),
+ ('invendofline','invendofline'),
+ ('inveol','inveol'),
+ ('invequalalways','invequalalways'),
+ ('inverrorbells','inverrorbells'),
+ ('invesckeys','invesckeys'),
+ ('invet','invet'),
+ ('invex','invex'),
+ ('invexpandtab','invexpandtab'),
+ ('invexrc','invexrc'),
+ ('invfen','invfen'),
+ ('invfic','invfic'),
+ ('invfileignorecase','invfileignorecase'),
+ ('invfk','invfk'),
+ ('invfkmap','invfkmap'),
+ ('invfoldenable','invfoldenable'),
+ ('invgd','invgd'),
+ ('invgdefault','invgdefault'),
+ ('invguipty','invguipty'),
+ ('invhid','invhid'),
+ ('invhidden','invhidden'),
+ ('invhk','invhk'),
+ ('invhkmap','invhkmap'),
+ ('invhkmapp','invhkmapp'),
+ ('invhkp','invhkp'),
+ ('invhls','invhls'),
+ ('invhlsearch','invhlsearch'),
+ ('invic','invic'),
+ ('invicon','invicon'),
+ ('invignorecase','invignorecase'),
+ ('invim','invim'),
+ ('invimc','invimc'),
+ ('invimcmdline','invimcmdline'),
+ ('invimd','invimd'),
+ ('invimdisable','invimdisable'),
+ ('invincsearch','invincsearch'),
+ ('invinf','invinf'),
+ ('invinfercase','invinfercase'),
+ ('invinsertmode','invinsertmode'),
+ ('invis','invis'),
+ ('invjoinspaces','invjoinspaces'),
+ ('invjs','invjs'),
+ ('invlazyredraw','invlazyredraw'),
+ ('invlbr','invlbr'),
+ ('invlinebreak','invlinebreak'),
+ ('invlisp','invlisp'),
+ ('invlist','invlist'),
+ ('invloadplugins','invloadplugins'),
+ ('invlpl','invlpl'),
+ ('invlz','invlz'),
+ ('invma','invma'),
+ ('invmacatsui','invmacatsui'),
+ ('invmagic','invmagic'),
+ ('invmh','invmh'),
+ ('invml','invml'),
+ ('invmod','invmod'),
+ ('invmodeline','invmodeline'),
+ ('invmodifiable','invmodifiable'),
+ ('invmodified','invmodified'),
+ ('invmore','invmore'),
+ ('invmousef','invmousef'),
+ ('invmousefocus','invmousefocus'),
+ ('invmousehide','invmousehide'),
+ ('invnu','invnu'),
+ ('invnumber','invnumber'),
+ ('invodev','invodev'),
+ ('invopendevice','invopendevice'),
+ ('invpaste','invpaste'),
+ ('invpi','invpi'),
+ ('invpreserveindent','invpreserveindent'),
+ ('invpreviewwindow','invpreviewwindow'),
+ ('invprompt','invprompt'),
+ ('invpvw','invpvw'),
+ ('invreadonly','invreadonly'),
+ ('invrelativenumber','invrelativenumber'),
+ ('invremap','invremap'),
+ ('invrestorescreen','invrestorescreen'),
+ ('invrevins','invrevins'),
+ ('invri','invri'),
+ ('invrightleft','invrightleft'),
+ ('invrl','invrl'),
+ ('invrnu','invrnu'),
+ ('invro','invro'),
+ ('invrs','invrs'),
+ ('invru','invru'),
+ ('invruler','invruler'),
+ ('invsb','invsb'),
+ ('invsc','invsc'),
+ ('invscb','invscb'),
+ ('invscrollbind','invscrollbind'),
+ ('invscs','invscs'),
+ ('invsecure','invsecure'),
+ ('invsft','invsft'),
+ ('invshellslash','invshellslash'),
+ ('invshelltemp','invshelltemp'),
+ ('invshiftround','invshiftround'),
+ ('invshortname','invshortname'),
+ ('invshowcmd','invshowcmd'),
+ ('invshowfulltag','invshowfulltag'),
+ ('invshowmatch','invshowmatch'),
+ ('invshowmode','invshowmode'),
+ ('invsi','invsi'),
+ ('invsm','invsm'),
+ ('invsmartcase','invsmartcase'),
+ ('invsmartindent','invsmartindent'),
+ ('invsmarttab','invsmarttab'),
+ ('invsmd','invsmd'),
+ ('invsn','invsn'),
+ ('invsol','invsol'),
+ ('invspell','invspell'),
+ ('invsplitbelow','invsplitbelow'),
+ ('invsplitright','invsplitright'),
+ ('invspr','invspr'),
+ ('invsr','invsr'),
+ ('invssl','invssl'),
+ ('invsta','invsta'),
+ ('invstartofline','invstartofline'),
+ ('invstmp','invstmp'),
+ ('invswapfile','invswapfile'),
+ ('invswf','invswf'),
+ ('invta','invta'),
+ ('invtagbsearch','invtagbsearch'),
+ ('invtagrelative','invtagrelative'),
+ ('invtagstack','invtagstack'),
+ ('invtbi','invtbi'),
+ ('invtbidi','invtbidi'),
+ ('invtbs','invtbs'),
+ ('invtermbidi','invtermbidi'),
+ ('invterse','invterse'),
+ ('invtextauto','invtextauto'),
+ ('invtextmode','invtextmode'),
+ ('invtf','invtf'),
+ ('invtgst','invtgst'),
+ ('invtildeop','invtildeop'),
+ ('invtimeout','invtimeout'),
+ ('invtitle','invtitle'),
+ ('invto','invto'),
+ ('invtop','invtop'),
+ ('invtr','invtr'),
+ ('invttimeout','invttimeout'),
+ ('invttybuiltin','invttybuiltin'),
+ ('invttyfast','invttyfast'),
+ ('invtx','invtx'),
+ ('invudf','invudf'),
+ ('invundofile','invundofile'),
+ ('invvb','invvb'),
+ ('invvisualbell','invvisualbell'),
+ ('invwa','invwa'),
+ ('invwarn','invwarn'),
+ ('invwb','invwb'),
+ ('invweirdinvert','invweirdinvert'),
+ ('invwfh','invwfh'),
+ ('invwfw','invwfw'),
+ ('invwic','invwic'),
+ ('invwildignorecase','invwildignorecase'),
+ ('invwildmenu','invwildmenu'),
+ ('invwinfixheight','invwinfixheight'),
+ ('invwinfixwidth','invwinfixwidth'),
+ ('invwiv','invwiv'),
+ ('invwmnu','invwmnu'),
+ ('invwrap','invwrap'),
+ ('invwrapscan','invwrapscan'),
+ ('invwrite','invwrite'),
+ ('invwriteany','invwriteany'),
+ ('invwritebackup','invwritebackup'),
+ ('invws','invws'),
+ ('is','is'),
+ ('isf','isf'),
+ ('isfname','isfname'),
+ ('isi','isi'),
+ ('isident','isident'),
+ ('isk','isk'),
+ ('iskeyword','iskeyword'),
+ ('isp','isp'),
+ ('isprint','isprint'),
+ ('joinspaces','joinspaces'),
+ ('js','js'),
+ ('key','key'),
+ ('keymap','keymap'),
+ ('keymodel','keymodel'),
+ ('keywordprg','keywordprg'),
+ ('km','km'),
+ ('kmp','kmp'),
+ ('kp','kp'),
+ ('langmap','langmap'),
+ ('langmenu','langmenu'),
+ ('laststatus','laststatus'),
+ ('lazyredraw','lazyredraw'),
+ ('lbr','lbr'),
+ ('lcs','lcs'),
+ ('linebreak','linebreak'),
+ ('lines','lines'),
+ ('linespace','linespace'),
+ ('lisp','lisp'),
+ ('lispwords','lispwords'),
+ ('list','list'),
+ ('listchars','listchars'),
+ ('lm','lm'),
+ ('lmap','lmap'),
+ ('loadplugins','loadplugins'),
+ ('lpl','lpl'),
+ ('ls','ls'),
+ ('lsp','lsp'),
+ ('lw','lw'),
+ ('lz','lz'),
+ ('ma','ma'),
+ ('macatsui','macatsui'),
+ ('magic','magic'),
+ ('makeef','makeef'),
+ ('makeprg','makeprg'),
+ ('mat','mat'),
+ ('matchpairs','matchpairs'),
+ ('matchtime','matchtime'),
+ ('maxcombine','maxcombine'),
+ ('maxfuncdepth','maxfuncdepth'),
+ ('maxmapdepth','maxmapdepth'),
+ ('maxmem','maxmem'),
+ ('maxmempattern','maxmempattern'),
+ ('maxmemtot','maxmemtot'),
+ ('mco','mco'),
+ ('mef','mef'),
+ ('menuitems','menuitems'),
+ ('mfd','mfd'),
+ ('mh','mh'),
+ ('mis','mis'),
+ ('mkspellmem','mkspellmem'),
+ ('ml','ml'),
+ ('mls','mls'),
+ ('mm','mm'),
+ ('mmd','mmd'),
+ ('mmp','mmp'),
+ ('mmt','mmt'),
+ ('mod','mod'),
+ ('modeline','modeline'),
+ ('modelines','modelines'),
+ ('modifiable','modifiable'),
+ ('modified','modified'),
+ ('more','more'),
+ ('mouse','mouse'),
+ ('mousef','mousef'),
+ ('mousefocus','mousefocus'),
+ ('mousehide','mousehide'),
+ ('mousem','mousem'),
+ ('mousemodel','mousemodel'),
+ ('mouses','mouses'),
+ ('mouseshape','mouseshape'),
+ ('mouset','mouset'),
+ ('mousetime','mousetime'),
+ ('mp','mp'),
+ ('mps','mps'),
+ ('msm','msm'),
+ ('mzq','mzq'),
+ ('mzquantum','mzquantum'),
+ ('nf','nf'),
+ ('nnoremap','nnoremap'),
+ ('noacd','noacd'),
+ ('noai','noai'),
+ ('noakm','noakm'),
+ ('noallowrevins','noallowrevins'),
+ ('noaltkeymap','noaltkeymap'),
+ ('noanti','noanti'),
+ ('noantialias','noantialias'),
+ ('noar','noar'),
+ ('noarab','noarab'),
+ ('noarabic','noarabic'),
+ ('noarabicshape','noarabicshape'),
+ ('noari','noari'),
+ ('noarshape','noarshape'),
+ ('noautochdir','noautochdir'),
+ ('noautoindent','noautoindent'),
+ ('noautoread','noautoread'),
+ ('noautowrite','noautowrite'),
+ ('noautowriteall','noautowriteall'),
+ ('noaw','noaw'),
+ ('noawa','noawa'),
+ ('nobackup','nobackup'),
+ ('noballooneval','noballooneval'),
+ ('nobeval','nobeval'),
+ ('nobin','nobin'),
+ ('nobinary','nobinary'),
+ ('nobiosk','nobiosk'),
+ ('nobioskey','nobioskey'),
+ ('nobk','nobk'),
+ ('nobl','nobl'),
+ ('nobomb','nobomb'),
+ ('nobuflisted','nobuflisted'),
+ ('nocf','nocf'),
+ ('noci','noci'),
+ ('nocin','nocin'),
+ ('nocindent','nocindent'),
+ ('nocompatible','nocompatible'),
+ ('noconfirm','noconfirm'),
+ ('noconsk','noconsk'),
+ ('noconskey','noconskey'),
+ ('nocopyindent','nocopyindent'),
+ ('nocp','nocp'),
+ ('nocrb','nocrb'),
+ ('nocscoperelative','nocscoperelative'),
+ ('nocscopetag','nocscopetag'),
+ ('nocscopeverbose','nocscopeverbose'),
+ ('nocsre','nocsre'),
+ ('nocst','nocst'),
+ ('nocsverb','nocsverb'),
+ ('nocuc','nocuc'),
+ ('nocul','nocul'),
+ ('nocursorbind','nocursorbind'),
+ ('nocursorcolumn','nocursorcolumn'),
+ ('nocursorline','nocursorline'),
+ ('nodeco','nodeco'),
+ ('nodelcombine','nodelcombine'),
+ ('nodg','nodg'),
+ ('nodiff','nodiff'),
+ ('nodigraph','nodigraph'),
+ ('noea','noea'),
+ ('noeb','noeb'),
+ ('noed','noed'),
+ ('noedcompatible','noedcompatible'),
+ ('noek','noek'),
+ ('noendofline','noendofline'),
+ ('noeol','noeol'),
+ ('noequalalways','noequalalways'),
+ ('noerrorbells','noerrorbells'),
+ ('noesckeys','noesckeys'),
+ ('noet','noet'),
+ ('noex','noex'),
+ ('noexpandtab','noexpandtab'),
+ ('noexrc','noexrc'),
+ ('nofen','nofen'),
+ ('nofic','nofic'),
+ ('nofileignorecase','nofileignorecase'),
+ ('nofk','nofk'),
+ ('nofkmap','nofkmap'),
+ ('nofoldenable','nofoldenable'),
+ ('nogd','nogd'),
+ ('nogdefault','nogdefault'),
+ ('noguipty','noguipty'),
+ ('nohid','nohid'),
+ ('nohidden','nohidden'),
+ ('nohk','nohk'),
+ ('nohkmap','nohkmap'),
+ ('nohkmapp','nohkmapp'),
+ ('nohkp','nohkp'),
+ ('nohls','nohls'),
+ ('nohlsearch','nohlsearch'),
+ ('noic','noic'),
+ ('noicon','noicon'),
+ ('noignorecase','noignorecase'),
+ ('noim','noim'),
+ ('noimc','noimc'),
+ ('noimcmdline','noimcmdline'),
+ ('noimd','noimd'),
+ ('noimdisable','noimdisable'),
+ ('noincsearch','noincsearch'),
+ ('noinf','noinf'),
+ ('noinfercase','noinfercase'),
+ ('noinsertmode','noinsertmode'),
+ ('nois','nois'),
+ ('nojoinspaces','nojoinspaces'),
+ ('nojs','nojs'),
+ ('nolazyredraw','nolazyredraw'),
+ ('nolbr','nolbr'),
+ ('nolinebreak','nolinebreak'),
+ ('nolisp','nolisp'),
+ ('nolist','nolist'),
+ ('noloadplugins','noloadplugins'),
+ ('nolpl','nolpl'),
+ ('nolz','nolz'),
+ ('noma','noma'),
+ ('nomacatsui','nomacatsui'),
+ ('nomagic','nomagic'),
+ ('nomh','nomh'),
+ ('noml','noml'),
+ ('nomod','nomod'),
+ ('nomodeline','nomodeline'),
+ ('nomodifiable','nomodifiable'),
+ ('nomodified','nomodified'),
+ ('nomore','nomore'),
+ ('nomousef','nomousef'),
+ ('nomousefocus','nomousefocus'),
+ ('nomousehide','nomousehide'),
+ ('nonu','nonu'),
+ ('nonumber','nonumber'),
+ ('noodev','noodev'),
+ ('noopendevice','noopendevice'),
+ ('nopaste','nopaste'),
+ ('nopi','nopi'),
+ ('nopreserveindent','nopreserveindent'),
+ ('nopreviewwindow','nopreviewwindow'),
+ ('noprompt','noprompt'),
+ ('nopvw','nopvw'),
+ ('noreadonly','noreadonly'),
+ ('norelativenumber','norelativenumber'),
+ ('noremap','noremap'),
+ ('norestorescreen','norestorescreen'),
+ ('norevins','norevins'),
+ ('nori','nori'),
+ ('norightleft','norightleft'),
+ ('norl','norl'),
+ ('nornu','nornu'),
+ ('noro','noro'),
+ ('nors','nors'),
+ ('noru','noru'),
+ ('noruler','noruler'),
+ ('nosb','nosb'),
+ ('nosc','nosc'),
+ ('noscb','noscb'),
+ ('noscrollbind','noscrollbind'),
+ ('noscs','noscs'),
+ ('nosecure','nosecure'),
+ ('nosft','nosft'),
+ ('noshellslash','noshellslash'),
+ ('noshelltemp','noshelltemp'),
+ ('noshiftround','noshiftround'),
+ ('noshortname','noshortname'),
+ ('noshowcmd','noshowcmd'),
+ ('noshowfulltag','noshowfulltag'),
+ ('noshowmatch','noshowmatch'),
+ ('noshowmode','noshowmode'),
+ ('nosi','nosi'),
+ ('nosm','nosm'),
+ ('nosmartcase','nosmartcase'),
+ ('nosmartindent','nosmartindent'),
+ ('nosmarttab','nosmarttab'),
+ ('nosmd','nosmd'),
+ ('nosn','nosn'),
+ ('nosol','nosol'),
+ ('nospell','nospell'),
+ ('nosplitbelow','nosplitbelow'),
+ ('nosplitright','nosplitright'),
+ ('nospr','nospr'),
+ ('nosr','nosr'),
+ ('nossl','nossl'),
+ ('nosta','nosta'),
+ ('nostartofline','nostartofline'),
+ ('nostmp','nostmp'),
+ ('noswapfile','noswapfile'),
+ ('noswf','noswf'),
+ ('nota','nota'),
+ ('notagbsearch','notagbsearch'),
+ ('notagrelative','notagrelative'),
+ ('notagstack','notagstack'),
+ ('notbi','notbi'),
+ ('notbidi','notbidi'),
+ ('notbs','notbs'),
+ ('notermbidi','notermbidi'),
+ ('noterse','noterse'),
+ ('notextauto','notextauto'),
+ ('notextmode','notextmode'),
+ ('notf','notf'),
+ ('notgst','notgst'),
+ ('notildeop','notildeop'),
+ ('notimeout','notimeout'),
+ ('notitle','notitle'),
+ ('noto','noto'),
+ ('notop','notop'),
+ ('notr','notr'),
+ ('nottimeout','nottimeout'),
+ ('nottybuiltin','nottybuiltin'),
+ ('nottyfast','nottyfast'),
+ ('notx','notx'),
+ ('noudf','noudf'),
+ ('noundofile','noundofile'),
+ ('novb','novb'),
+ ('novisualbell','novisualbell'),
+ ('nowa','nowa'),
+ ('nowarn','nowarn'),
+ ('nowb','nowb'),
+ ('noweirdinvert','noweirdinvert'),
+ ('nowfh','nowfh'),
+ ('nowfw','nowfw'),
+ ('nowic','nowic'),
+ ('nowildignorecase','nowildignorecase'),
+ ('nowildmenu','nowildmenu'),
+ ('nowinfixheight','nowinfixheight'),
+ ('nowinfixwidth','nowinfixwidth'),
+ ('nowiv','nowiv'),
+ ('nowmnu','nowmnu'),
+ ('nowrap','nowrap'),
+ ('nowrapscan','nowrapscan'),
+ ('nowrite','nowrite'),
+ ('nowriteany','nowriteany'),
+ ('nowritebackup','nowritebackup'),
+ ('nows','nows'),
+ ('nrformats','nrformats'),
+ ('nu','nu'),
+ ('number','number'),
+ ('numberwidth','numberwidth'),
+ ('nuw','nuw'),
+ ('odev','odev'),
+ ('oft','oft'),
+ ('ofu','ofu'),
+ ('omnifunc','omnifunc'),
+ ('opendevice','opendevice'),
+ ('operatorfunc','operatorfunc'),
+ ('opfunc','opfunc'),
+ ('osfiletype','osfiletype'),
+ ('pa','pa'),
+ ('para','para'),
+ ('paragraphs','paragraphs'),
+ ('paste','paste'),
+ ('pastetoggle','pastetoggle'),
+ ('patchexpr','patchexpr'),
+ ('patchmode','patchmode'),
+ ('path','path'),
+ ('pdev','pdev'),
+ ('penc','penc'),
+ ('pex','pex'),
+ ('pexpr','pexpr'),
+ ('pfn','pfn'),
+ ('ph','ph'),
+ ('pheader','pheader'),
+ ('pi','pi'),
+ ('pm','pm'),
+ ('pmbcs','pmbcs'),
+ ('pmbfn','pmbfn'),
+ ('popt','popt'),
+ ('preserveindent','preserveindent'),
+ ('previewheight','previewheight'),
+ ('previewwindow','previewwindow'),
+ ('printdevice','printdevice'),
+ ('printencoding','printencoding'),
+ ('printexpr','printexpr'),
+ ('printfont','printfont'),
+ ('printheader','printheader'),
+ ('printmbcharset','printmbcharset'),
+ ('printmbfont','printmbfont'),
+ ('printoptions','printoptions'),
+ ('prompt','prompt'),
+ ('pt','pt'),
+ ('pumheight','pumheight'),
+ ('pvh','pvh'),
+ ('pvw','pvw'),
+ ('qe','qe'),
+ ('quoteescape','quoteescape'),
+ ('rdt','rdt'),
+ ('re','re'),
+ ('readonly','readonly'),
+ ('redrawtime','redrawtime'),
+ ('regexpengine','regexpengine'),
+ ('relativenumber','relativenumber'),
+ ('remap','remap'),
+ ('report','report'),
+ ('restorescreen','restorescreen'),
+ ('revins','revins'),
+ ('ri','ri'),
+ ('rightleft','rightleft'),
+ ('rightleftcmd','rightleftcmd'),
+ ('rl','rl'),
+ ('rlc','rlc'),
+ ('rnu','rnu'),
+ ('ro','ro'),
+ ('rs','rs'),
+ ('rtp','rtp'),
+ ('ru','ru'),
+ ('ruf','ruf'),
+ ('ruler','ruler'),
+ ('rulerformat','rulerformat'),
+ ('runtimepath','runtimepath'),
+ ('sb','sb'),
+ ('sbo','sbo'),
+ ('sbr','sbr'),
+ ('sc','sc'),
+ ('scb','scb'),
+ ('scr','scr'),
+ ('scroll','scroll'),
+ ('scrollbind','scrollbind'),
+ ('scrolljump','scrolljump'),
+ ('scrolloff','scrolloff'),
+ ('scrollopt','scrollopt'),
+ ('scs','scs'),
+ ('sect','sect'),
+ ('sections','sections'),
+ ('secure','secure'),
+ ('sel','sel'),
+ ('selection','selection'),
+ ('selectmode','selectmode'),
+ ('sessionoptions','sessionoptions'),
+ ('sft','sft'),
+ ('sh','sh'),
+ ('shcf','shcf'),
+ ('shell','shell'),
+ ('shellcmdflag','shellcmdflag'),
+ ('shellpipe','shellpipe'),
+ ('shellquote','shellquote'),
+ ('shellredir','shellredir'),
+ ('shellslash','shellslash'),
+ ('shelltemp','shelltemp'),
+ ('shelltype','shelltype'),
+ ('shellxescape','shellxescape'),
+ ('shellxquote','shellxquote'),
+ ('shiftround','shiftround'),
+ ('shiftwidth','shiftwidth'),
+ ('shm','shm'),
+ ('shortmess','shortmess'),
+ ('shortname','shortname'),
+ ('showbreak','showbreak'),
+ ('showcmd','showcmd'),
+ ('showfulltag','showfulltag'),
+ ('showmatch','showmatch'),
+ ('showmode','showmode'),
+ ('showtabline','showtabline'),
+ ('shq','shq'),
+ ('si','si'),
+ ('sidescroll','sidescroll'),
+ ('sidescrolloff','sidescrolloff'),
+ ('siso','siso'),
+ ('sj','sj'),
+ ('slm','slm'),
+ ('sm','sm'),
+ ('smartcase','smartcase'),
+ ('smartindent','smartindent'),
+ ('smarttab','smarttab'),
+ ('smc','smc'),
+ ('smd','smd'),
+ ('sn','sn'),
+ ('so','so'),
+ ('softtabstop','softtabstop'),
+ ('sol','sol'),
+ ('sp','sp'),
+ ('spc','spc'),
+ ('spell','spell'),
+ ('spellcapcheck','spellcapcheck'),
+ ('spellfile','spellfile'),
+ ('spelllang','spelllang'),
+ ('spellsuggest','spellsuggest'),
+ ('spf','spf'),
+ ('spl','spl'),
+ ('splitbelow','splitbelow'),
+ ('splitright','splitright'),
+ ('spr','spr'),
+ ('sps','sps'),
+ ('sr','sr'),
+ ('srr','srr'),
+ ('ss','ss'),
+ ('ssl','ssl'),
+ ('ssop','ssop'),
+ ('st','st'),
+ ('sta','sta'),
+ ('stal','stal'),
+ ('startofline','startofline'),
+ ('statusline','statusline'),
+ ('stl','stl'),
+ ('stmp','stmp'),
+ ('sts','sts'),
+ ('su','su'),
+ ('sua','sua'),
+ ('suffixes','suffixes'),
+ ('suffixesadd','suffixesadd'),
+ ('sw','sw'),
+ ('swapfile','swapfile'),
+ ('swapsync','swapsync'),
+ ('swb','swb'),
+ ('swf','swf'),
+ ('switchbuf','switchbuf'),
+ ('sws','sws'),
+ ('sxe','sxe'),
+ ('sxq','sxq'),
+ ('syn','syn'),
+ ('synmaxcol','synmaxcol'),
+ ('syntax','syntax'),
+ ('t_AB','t_AB'),
+ ('t_AF','t_AF'),
+ ('t_AL','t_AL'),
+ ('t_CS','t_CS'),
+ ('t_CV','t_CV'),
+ ('t_Ce','t_Ce'),
+ ('t_Co','t_Co'),
+ ('t_Cs','t_Cs'),
+ ('t_DL','t_DL'),
+ ('t_EI','t_EI'),
+ ('t_F1','t_F1'),
+ ('t_F2','t_F2'),
+ ('t_F3','t_F3'),
+ ('t_F4','t_F4'),
+ ('t_F5','t_F5'),
+ ('t_F6','t_F6'),
+ ('t_F7','t_F7'),
+ ('t_F8','t_F8'),
+ ('t_F9','t_F9'),
+ ('t_IE','t_IE'),
+ ('t_IS','t_IS'),
+ ('t_K1','t_K1'),
+ ('t_K3','t_K3'),
+ ('t_K4','t_K4'),
+ ('t_K5','t_K5'),
+ ('t_K6','t_K6'),
+ ('t_K7','t_K7'),
+ ('t_K8','t_K8'),
+ ('t_K9','t_K9'),
+ ('t_KA','t_KA'),
+ ('t_KB','t_KB'),
+ ('t_KC','t_KC'),
+ ('t_KD','t_KD'),
+ ('t_KE','t_KE'),
+ ('t_KF','t_KF'),
+ ('t_KG','t_KG'),
+ ('t_KH','t_KH'),
+ ('t_KI','t_KI'),
+ ('t_KJ','t_KJ'),
+ ('t_KK','t_KK'),
+ ('t_KL','t_KL'),
+ ('t_RI','t_RI'),
+ ('t_RV','t_RV'),
+ ('t_SI','t_SI'),
+ ('t_Sb','t_Sb'),
+ ('t_Sf','t_Sf'),
+ ('t_WP','t_WP'),
+ ('t_WS','t_WS'),
+ ('t_ZH','t_ZH'),
+ ('t_ZR','t_ZR'),
+ ('t_al','t_al'),
+ ('t_bc','t_bc'),
+ ('t_cd','t_cd'),
+ ('t_ce','t_ce'),
+ ('t_cl','t_cl'),
+ ('t_cm','t_cm'),
+ ('t_cs','t_cs'),
+ ('t_da','t_da'),
+ ('t_db','t_db'),
+ ('t_dl','t_dl'),
+ ('t_fs','t_fs'),
+ ('t_k1','t_k1'),
+ ('t_k2','t_k2'),
+ ('t_k3','t_k3'),
+ ('t_k4','t_k4'),
+ ('t_k5','t_k5'),
+ ('t_k6','t_k6'),
+ ('t_k7','t_k7'),
+ ('t_k8','t_k8'),
+ ('t_k9','t_k9'),
+ ('t_kB','t_kB'),
+ ('t_kD','t_kD'),
+ ('t_kI','t_kI'),
+ ('t_kN','t_kN'),
+ ('t_kP','t_kP'),
+ ('t_kb','t_kb'),
+ ('t_kd','t_kd'),
+ ('t_ke','t_ke'),
+ ('t_kh','t_kh'),
+ ('t_kl','t_kl'),
+ ('t_kr','t_kr'),
+ ('t_ks','t_ks'),
+ ('t_ku','t_ku'),
+ ('t_le','t_le'),
+ ('t_mb','t_mb'),
+ ('t_md','t_md'),
+ ('t_me','t_me'),
+ ('t_mr','t_mr'),
+ ('t_ms','t_ms'),
+ ('t_nd','t_nd'),
+ ('t_op','t_op'),
+ ('t_se','t_se'),
+ ('t_so','t_so'),
+ ('t_sr','t_sr'),
+ ('t_te','t_te'),
+ ('t_ti','t_ti'),
+ ('t_ts','t_ts'),
+ ('t_u7','t_u7'),
+ ('t_ue','t_ue'),
+ ('t_us','t_us'),
+ ('t_ut','t_ut'),
+ ('t_vb','t_vb'),
+ ('t_ve','t_ve'),
+ ('t_vi','t_vi'),
+ ('t_vs','t_vs'),
+ ('t_xs','t_xs'),
+ ('ta','ta'),
+ ('tabline','tabline'),
+ ('tabpagemax','tabpagemax'),
+ ('tabstop','tabstop'),
+ ('tag','tag'),
+ ('tagbsearch','tagbsearch'),
+ ('taglength','taglength'),
+ ('tagrelative','tagrelative'),
+ ('tags','tags'),
+ ('tagstack','tagstack'),
+ ('tal','tal'),
+ ('tb','tb'),
+ ('tbi','tbi'),
+ ('tbidi','tbidi'),
+ ('tbis','tbis'),
+ ('tbs','tbs'),
+ ('tenc','tenc'),
+ ('term','term'),
+ ('termbidi','termbidi'),
+ ('termencoding','termencoding'),
+ ('terse','terse'),
+ ('textauto','textauto'),
+ ('textmode','textmode'),
+ ('textwidth','textwidth'),
+ ('tf','tf'),
+ ('tgst','tgst'),
+ ('thesaurus','thesaurus'),
+ ('tildeop','tildeop'),
+ ('timeout','timeout'),
+ ('timeoutlen','timeoutlen'),
+ ('title','title'),
+ ('titlelen','titlelen'),
+ ('titleold','titleold'),
+ ('titlestring','titlestring'),
+ ('tl','tl'),
+ ('tm','tm'),
+ ('to','to'),
+ ('toolbar','toolbar'),
+ ('toolbariconsize','toolbariconsize'),
+ ('top','top'),
+ ('tpm','tpm'),
+ ('tr','tr'),
+ ('ts','ts'),
+ ('tsl','tsl'),
+ ('tsr','tsr'),
+ ('ttimeout','ttimeout'),
+ ('ttimeoutlen','ttimeoutlen'),
+ ('ttm','ttm'),
+ ('tty','tty'),
+ ('ttybuiltin','ttybuiltin'),
+ ('ttyfast','ttyfast'),
+ ('ttym','ttym'),
+ ('ttymouse','ttymouse'),
+ ('ttyscroll','ttyscroll'),
+ ('ttytype','ttytype'),
+ ('tw','tw'),
+ ('tx','tx'),
+ ('uc','uc'),
+ ('udf','udf'),
+ ('udir','udir'),
+ ('ul','ul'),
+ ('undodir','undodir'),
+ ('undofile','undofile'),
+ ('undolevels','undolevels'),
+ ('undoreload','undoreload'),
+ ('updatecount','updatecount'),
+ ('updatetime','updatetime'),
+ ('ur','ur'),
+ ('ut','ut'),
+ ('vb','vb'),
+ ('vbs','vbs'),
+ ('vdir','vdir'),
+ ('ve','ve'),
+ ('verbose','verbose'),
+ ('verbosefile','verbosefile'),
+ ('vfile','vfile'),
+ ('vi','vi'),
+ ('viewdir','viewdir'),
+ ('viewoptions','viewoptions'),
+ ('viminfo','viminfo'),
+ ('virtualedit','virtualedit'),
+ ('visualbell','visualbell'),
+ ('vnoremap','vnoremap'),
+ ('vop','vop'),
+ ('wa','wa'),
+ ('wak','wak'),
+ ('warn','warn'),
+ ('wb','wb'),
+ ('wc','wc'),
+ ('wcm','wcm'),
+ ('wd','wd'),
+ ('weirdinvert','weirdinvert'),
+ ('wfh','wfh'),
+ ('wfw','wfw'),
+ ('wh','wh'),
+ ('whichwrap','whichwrap'),
+ ('wi','wi'),
+ ('wic','wic'),
+ ('wig','wig'),
+ ('wildchar','wildchar'),
+ ('wildcharm','wildcharm'),
+ ('wildignore','wildignore'),
+ ('wildignorecase','wildignorecase'),
+ ('wildmenu','wildmenu'),
+ ('wildmode','wildmode'),
+ ('wildoptions','wildoptions'),
+ ('wim','wim'),
+ ('winaltkeys','winaltkeys'),
+ ('window','window'),
+ ('winfixheight','winfixheight'),
+ ('winfixwidth','winfixwidth'),
+ ('winheight','winheight'),
+ ('winminheight','winminheight'),
+ ('winminwidth','winminwidth'),
+ ('winwidth','winwidth'),
+ ('wiv','wiv'),
+ ('wiw','wiw'),
+ ('wm','wm'),
+ ('wmh','wmh'),
+ ('wmnu','wmnu'),
+ ('wmw','wmw'),
+ ('wop','wop'),
+ ('wrap','wrap'),
+ ('wrapmargin','wrapmargin'),
+ ('wrapscan','wrapscan'),
+ ('write','write'),
+ ('writeany','writeany'),
+ ('writebackup','writebackup'),
+ ('writedelay','writedelay'),
+ ('ws','ws'),
+ ('ww','ww'),
+ )
+ return var
+option = _getoption()
+
diff --git a/pygments/lexers/_vimbuiltins.py b/pygments/lexers/_vimbuiltins.py
deleted file mode 100644
index e95a8ec5..00000000
--- a/pygments/lexers/_vimbuiltins.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Split up in multiple functions so it's importable by jython, which has a
-# per-method size limit.
-
-def _getauto():
- return [('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','SourceCmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermResponse','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')]
-def _getcommand():
- return [('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','autocmd'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),
('gs','gs'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('re
t','retab'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu','smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')]
-def _getoption():
- return [('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','completefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','conceallevel'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl')
,('fcs','fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignorecase'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbos
e'),('invcst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invhk','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocus','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfulltag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtb
i','invtbi'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombine','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobinary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocind
ent'),('nocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifiable'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),(
'notbs','notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','notitle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt'),('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase'
),('smartindent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffixes','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop','tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm')
,('tty','tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wildoptions','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')]
-
-option = _getoption()
-command = _getcommand()
-auto = _getauto()
diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py
new file mode 100644
index 00000000..9a33dc18
--- /dev/null
+++ b/pygments/lexers/actionscript.py
@@ -0,0 +1,238 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.actionscript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for ActionScript and MXML.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, using, this, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
+
+
+class ActionScriptLexer(RegexLexer):
+ """
+ For ActionScript source code.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'ActionScript'
+ aliases = ['as', 'actionscript']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript', 'text/x-actionscript',
+ 'text/actionscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
+ (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
+ (r'[{}\[\]();.]+', Punctuation),
+ (words((
+ 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
+ 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
+ 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
+ 'switch'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'class', 'public', 'final', 'internal', 'native', 'override', 'private',
+ 'protected', 'static', 'import', 'extends', 'implements', 'interface',
+ 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
+ 'namespace', 'package', 'set'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
+ Keyword.Constant),
+ (words((
+ 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
+ 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
+ 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
+ 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
+ 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
+ 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
+ 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
+ 'ConvolutionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
+ 'DeleteObjectSample', 'Dictionary', 'DisplacementMapFilter', 'DisplayObject',
+ 'DisplacementMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
+ 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
+ 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
+ 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
+ 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
+ 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
+ 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
+ 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
+ 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
+ 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
+ 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
+ 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
+ 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
+ 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
+ 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
+ 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
+ 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
+ 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
+ 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
+ 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
+ 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
+ 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
+ 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
+ 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
+ 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
+ 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
+ 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
+ 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariables', 'VerifyError',
+ 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
+ 'XMLUI'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
+ 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
+ 'isFinite', 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
+ 'unescape'), suffix=r'\b'),
+ Name.Function),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class ActionScript3Lexer(RegexLexer):
+ """
+ For ActionScript 3 source code.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'ActionScript 3'
+ aliases = ['as3', 'actionscript3']
+ filenames = ['*.as']
+ mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
+ 'text/actionscript3']
+
+ identifier = r'[$a-zA-Z_]\w*'
+ typeidentifier = identifier + r'(?:\.<\w+>)?'
+
+ flags = re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(function\s+)(' + identifier + r')(\s*)(\()',
+ bygroups(Keyword.Declaration, Name.Function, Text, Operator),
+ 'funcparams'),
+ (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r')',
+ bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
+ Keyword.Type)),
+ (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
+ bygroups(Keyword, Text, Name.Namespace, Text)),
+ (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
+ bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
+ (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
+ (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
+ r'switch|import|include|as|is)\b',
+ Keyword),
+ (r'(class|public|final|internal|native|override|private|protected|'
+ r'static|import|extends|implements|interface|intrinsic|return|super|'
+ r'dynamic|function|const|get|namespace|package|set)\b',
+ Keyword.Declaration),
+ (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
+ r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
+ r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
+ r'unescape)\b', Name.Function),
+ (identifier, Name),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator),
+ ],
+ 'funcparams': [
+ (r'\s+', Text),
+ (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
+ typeidentifier + r'|\*)(\s*)',
+ bygroups(Text, Punctuation, Name, Text, Operator, Text,
+ Keyword.Type, Text), 'defval'),
+ (r'\)', Operator, 'type')
+ ],
+ 'type': [
+ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
+ bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
+ (r'\s*', Text, '#pop:2')
+ ],
+ 'defval': [
+ (r'(=)(\s*)([^(),]+)(\s*)(,?)',
+ bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
+ (r',?', Operator, '#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+ return 0
+
+
+class MxmlLexer(RegexLexer):
+ """
+ For MXML markup.
+ Nested AS3 in <script> tags is highlighted by the appropriate lexer.
+
+ .. versionadded:: 1.1
+ """
+ flags = re.MULTILINE | re.DOTALL
+ name = 'MXML'
+ aliases = ['mxml']
+ filenames = ['*.mxml']
+ mimetypes = ['text/xml', 'application/xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
+ bygroups(String, using(ActionScript3Lexer), String)),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('\s+', Text),
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
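A minimal usage sketch for the lexers added in pygments/lexers/actionscript.py above, assuming a standard Pygments installation; the sample source strings and the HtmlFormatter choice are illustrative only and are not part of the change set:

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers.actionscript import ActionScript3Lexer, MxmlLexer

    # Any ActionScript 3 source works here; this snippet is purely illustrative.
    as3_code = '''
    package demo {
        public class Greeter {
            public function greet(name:String):String {
                return "Hello, " + name;
            }
        }
    }
    '''

    # Highlight standalone ActionScript 3 source as HTML.
    print(highlight(as3_code, ActionScript3Lexer(), HtmlFormatter()))

    # MXML markup; AS3 inside a CDATA section is delegated to ActionScript3Lexer
    # by the MxmlLexer 'root' state shown in the diff above.
    mxml_code = '<mx:Script><![CDATA[ var n:int = 1; ]]></mx:Script>'
    print(highlight(mxml_code, MxmlLexer(), HtmlFormatter()))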
diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py
index 1ae369b9..7ad60c83 100644
--- a/pygments/lexers/agile.py
+++ b/pygments/lexers/agile.py
@@ -3,2550 +3,22 @@
pygments.lexers.agile
~~~~~~~~~~~~~~~~~~~~~
- Lexers for agile languages.
+ Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \
- LexerContext, include, combined, do_insertions, bygroups, using, this, default
-from pygments.token import Error, Text, Other, \
- Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches, iteritems
-from pygments import unistring as uni
-
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
- 'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer',
- 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer',
- 'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer',
- 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer',
- 'ChaiscriptLexer']
-
-# b/w compatibility
-from pygments.lexers.functional import SchemeLexer
+from pygments.lexers.lisp import SchemeLexer
from pygments.lexers.jvm import IokeLexer, ClojureLexer
-
-line_re = re.compile('.*?\n')
-
-
-class PythonLexer(RegexLexer):
- """
- For `Python <http://www.python.org>`_ source code.
- """
-
- name = 'Python'
- aliases = ['python', 'py', 'sage']
- filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
- mimetypes = ['text/x-python', 'application/x-python']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
- include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
- 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (r'(assert|break|continue|del|elif|else|except|exec|'
- r'finally|for|global|if|lambda|pass|print|raise|'
- r'return|try|while|yield(\s+from)?|as|with)\b', Keyword),
- ],
- 'builtins': [
- (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
- r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
- r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
- r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
- r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
- r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
- r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
- r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
- r'vars|xrange|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
- r')\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
- r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
- r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplemented|NotImplementedError|OSError|OverflowError|'
- r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
- r'WindowsError|ZeroDivisionError)\b', Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@[\w.]+', Name.Decorator),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'classname': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'as\b', Keyword.Namespace),
- (r',', Operator),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(?:[ \t]|\\\n)+', Text),
- (r'import\b', Keyword.Namespace, '#pop'),
- # if None occurs here, it's "raise x from None", since None can
- # never be a module name
- (r'None\b', Name.Builtin.Pseudo, '#pop'),
- # sadly, in "raise x from y" y will be highlighted as namespace too
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # anything else here also means "raise x from y" and is therefore
- # not an error
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?(2(\.\d)?)?') or \
- 'import ' in text[:1000]
-
-
-class Python3Lexer(RegexLexer):
- """
- For `Python <http://www.python.org>`_ source code (version 3.0).
-
- .. versionadded:: 0.10
- """
-
- name = 'Python 3'
- aliases = ['python3', 'py3']
- filenames = [] # Nothing until Python 3 gets widespread
- mimetypes = ['text/x-python3', 'application/x-python3']
-
- flags = re.MULTILINE | re.UNICODE
-
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
- tokens = PythonLexer.tokens.copy()
- tokens['keywords'] = [
- (r'(assert|break|continue|del|elif|else|except|'
- r'finally|for|global|if|lambda|pass|raise|nonlocal|'
- r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b',
- Keyword),
- ]
- tokens['builtins'] = [
- (r'(?<!\.)(__import__|abs|all|any|bin|bool|bytearray|bytes|'
- r'chr|classmethod|cmp|compile|complex|delattr|dict|dir|'
- r'divmod|enumerate|eval|filter|float|format|frozenset|getattr|'
- r'globals|hasattr|hash|hex|id|input|int|isinstance|issubclass|'
- r'iter|len|list|locals|map|max|memoryview|min|next|object|oct|'
- r'open|ord|pow|print|property|range|repr|reversed|round|'
- r'set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|'
- r'vars|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|BufferError|BytesWarning|DeprecationWarning|'
- r'EOFError|EnvironmentError|Exception|FloatingPointError|'
- r'FutureWarning|GeneratorExit|IOError|ImportError|'
- r'ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplementedError|OSError|OverflowError|'
- r'PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|VMSError|Warning|'
- r'WindowsError|ZeroDivisionError|'
- # new builtin exceptions from PEP 3151
- r'BlockingIOError|ChildProcessError|ConnectionError|'
- r'BrokenPipeError|ConnectionAbortedError|ConnectionRefusedError|'
- r'ConnectionResetError|FileExistsError|FileNotFoundError|'
- r'InterruptedError|IsADirectoryError|NotADirectoryError|'
- r'PermissionError|ProcessLookupError|TimeoutError)\b',
- Name.Exception),
- ]
- tokens['numbers'] = [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+', Number.Integer)
- ]
- tokens['backtick'] = []
- tokens['name'] = [
- (r'@\w+', Name.Decorator),
- (uni_name, Name),
- ]
- tokens['funcname'] = [
- (uni_name, Name.Function, '#pop')
- ]
- tokens['classname'] = [
- (uni_name, Name.Class, '#pop')
- ]
- tokens['import'] = [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ]
- tokens['fromimport'] = [
- (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
- (r'\.', Name.Namespace),
- (uni_name, Name.Namespace),
- default('#pop'),
- ]
- # don't highlight "%s" substitutions
- tokens['strings'] = [
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ]
-
- def analyse_text(text):
- return shebang_matches(text, r'pythonw?3(\.\d)?')
-
-
-class PythonConsoleLexer(Lexer):
- """
- For Python console output or doctests, such as:
-
- .. sourcecode:: pycon
-
- >>> a = 'foo'
- >>> print a
- foo
- >>> 1 / 0
- Traceback (most recent call last):
- File "<stdin>", line 1, in <module>
- ZeroDivisionError: integer division or modulo by zero
-
- Additional options:
-
- `python3`
- Use Python 3 lexer for code. Default is ``False``.
-
- .. versionadded:: 1.0
- """
- name = 'Python console session'
- aliases = ['pycon']
- mimetypes = ['text/x-python-doctest']
-
- def __init__(self, **options):
- self.python3 = get_bool_opt(options, 'python3', False)
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- if self.python3:
- pylexer = Python3Lexer(**self.options)
- tblexer = Python3TracebackLexer(**self.options)
- else:
- pylexer = PythonLexer(**self.options)
- tblexer = PythonTracebackLexer(**self.options)
-
- curcode = ''
- insertions = []
- curtb = ''
- tbindex = 0
- tb = 0
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'>>> ') or line.startswith(u'... '):
- tb = 0
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:4])]))
- curcode += line[4:]
- elif line.rstrip() == u'...' and not tb:
- # only a new >>> prompt can end an exception block
- # otherwise an ellipsis in place of the traceback frames
- # will be mishandled
- insertions.append((len(curcode),
- [(0, Generic.Prompt, u'...')]))
- curcode += line[3:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if (line.startswith(u'Traceback (most recent call last):') or
- re.match(u' File "[^"]+", line \\d+\\n$', line)):
- tb = 1
- curtb = line
- tbindex = match.start()
- elif line == 'KeyboardInterrupt\n':
- yield match.start(), Name.Class, line
- elif tb:
- curtb += line
- if not (line.startswith(' ') or line.strip() == u'...'):
- tb = 0
- for i, t, v in tblexer.get_tokens_unprocessed(curtb):
- yield tbindex+i, t, v
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- pylexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class PythonTracebackLexer(RegexLexer):
- """
- For Python tracebacks.
-
- .. versionadded:: 0.7
- """
-
- name = 'Python Traceback'
- aliases = ['pytb']
- filenames = ['*.pytb']
- mimetypes = ['text/x-python-traceback']
-
- tokens = {
- 'root': [
- (r'^Traceback \(most recent call last\):\n',
- Generic.Traceback, 'intb'),
- # SyntaxError starts with this.
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- (r'^.*\n', Other),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(PythonLexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
-class Python3TracebackLexer(RegexLexer):
- """
- For Python 3.0 tracebacks, with support for chained exceptions.
-
- .. versionadded:: 1.0
- """
-
- name = 'Python 3.0 Traceback'
- aliases = ['py3tb']
- filenames = ['*.py3tb']
- mimetypes = ['text/x-python3-traceback']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
- (r'^During handling of the above exception, another '
- r'exception occurred:\n\n', Generic.Traceback),
- (r'^The above exception was the direct cause of the '
- r'following exception:\n\n', Generic.Traceback),
- (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
- ],
- 'intb': [
- (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
- (r'^( File )("[^"]+")(, line )(\d+)(\n)',
- bygroups(Text, Name.Builtin, Text, Number, Text)),
- (r'^( )(.+)(\n)',
- bygroups(Text, using(Python3Lexer), Text)),
- (r'^([ \t]*)(\.\.\.)(\n)',
- bygroups(Text, Comment, Text)), # for doctests...
- (r'^([^:]+)(: )(.+)(\n)',
- bygroups(Generic.Error, Text, Name, Text), '#pop'),
- (r'^([a-zA-Z_]\w*)(:?\n)',
- bygroups(Generic.Error, Text), '#pop')
- ],
- }
-
-
-class RubyLexer(ExtendedRegexLexer):
- """
- For `Ruby <http://www.ruby-lang.org>`_ source code.
- """
-
- name = 'Ruby'
- aliases = ['rb', 'ruby', 'duby']
- filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
- '*.rbx', '*.duby']
- mimetypes = ['text/x-ruby', 'application/x-ruby']
-
- flags = re.DOTALL | re.MULTILINE
-
- def heredoc_callback(self, match, ctx):
- # okay, this is the hardest part of parsing Ruby...
- # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
- start = match.start(1)
- yield start, Operator, match.group(1) # <<-?
- yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
- yield match.start(3), Name.Constant, match.group(3) # heredoc name
- yield match.start(4), String.Heredoc, match.group(4) # quote again
-
- heredocstack = ctx.__dict__.setdefault('heredocstack', [])
- outermost = not bool(heredocstack)
- heredocstack.append((match.group(1) == '<<-', match.group(3)))
-
- ctx.pos = match.start(5)
- ctx.end = match.end(5)
- # this may find other heredocs
- for i, t, v in self.get_tokens_unprocessed(context=ctx):
- yield i, t, v
- ctx.pos = match.end()
-
- if outermost:
- # this is the outer heredoc again, now we can process them all
- for tolerant, hdname in heredocstack:
- lines = []
- for match in line_re.finditer(ctx.text, ctx.pos):
- if tolerant:
- check = match.group().strip()
- else:
- check = match.group().rstrip()
- if check == hdname:
- for amatch in lines:
- yield amatch.start(), String.Heredoc, amatch.group()
- yield match.start(), Name.Constant, match.group()
- ctx.pos = match.end()
- break
- else:
- lines.append(match)
- else:
- # end of heredoc not found -- error!
- for amatch in lines:
- yield amatch.start(), Error, amatch.group()
- ctx.end = len(ctx.text)
- del heredocstack[:]
-
-
- def gen_rubystrings_rules():
- def intp_regex_callback(self, match, ctx):
- yield match.start(1), String.Regex, match.group(1) # begin
- nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
- ctx.pos = match.end()
-
- def intp_string_callback(self, match, ctx):
- yield match.start(1), String.Other, match.group(1)
- nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
- for i, t, v in self.get_tokens_unprocessed(context=nctx):
- yield match.start(3)+i, t, v
- yield match.start(4), String.Other, match.group(4) # end
- ctx.pos = match.end()
-
- states = {}
- states['strings'] = [
- # easy ones
- (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol),
- (r":'(\\\\|\\'|[^'])*'", String.Symbol),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r':"', String.Symbol, 'simple-sym'),
- (r'([a-zA-Z_]\w*)(:)(?!:)',
- bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
- (r'"', String.Double, 'simple-string'),
- (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
- ]
-
- # double-quoted string and symbol
- for name, ttype, end in ('string', String.Double, '"'), \
- ('sym', String.Symbol, '"'), \
- ('backtick', String.Backtick, '`'):
- states['simple-'+name] = [
- include('string-intp-escaped'),
- (r'[^\\%s#]+' % end, ttype),
- (r'[\\#]', ttype),
- (end, ttype, '#pop'),
- ]
-
- # braced quoted strings
- for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
- ('\\[', '\\]', 'sb'), \
- ('\\(', '\\)', 'pa'), \
- ('<', '>', 'ab'):
- states[name+'-intp-string'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Other),
- (r'(?<!\\)' + lbrace, String.Other, '#push'),
- (r'(?<!\\)' + rbrace, String.Other, '#pop'),
- include('string-intp-escaped'),
- (r'[\\#' + lbrace + rbrace + ']', String.Other),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
- ]
- states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
- name+'-intp-string'))
- states[name+'-string'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Other),
- (r'(?<!\\)' + lbrace, String.Other, '#push'),
- (r'(?<!\\)' + rbrace, String.Other, '#pop'),
- (r'[\\#' + lbrace + rbrace + ']', String.Other),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
- ]
- states['strings'].append((r'%[qsw]' + lbrace, String.Other,
- name+'-string'))
- states[name+'-regex'] = [
- (r'\\[\\' + lbrace + rbrace + ']', String.Regex),
- (r'(?<!\\)' + lbrace, String.Regex, '#push'),
- (r'(?<!\\)' + rbrace + '[mixounse]*', String.Regex, '#pop'),
- include('string-intp'),
- (r'[\\#' + lbrace + rbrace + ']', String.Regex),
- (r'[^\\#' + lbrace + rbrace + ']+', String.Regex),
- ]
- states['strings'].append((r'%r' + lbrace, String.Regex,
- name+'-regex'))
-
- # these must come after %<brace>!
- states['strings'] += [
- # %r regex
- (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
- intp_regex_callback),
- # regular fancy strings with qsw
- (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other),
- (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- # special forms of fancy strings after operators or
- # in method calls with braces
- (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # and because of fixed width lookbehinds the whole thing a
- # second time for line startings...
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
- bygroups(Text, String.Other, None)),
- # all regular fancy strings without qsw
- (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
- intp_string_callback),
- ]
-
- return states
-
- tokens = {
- 'root': [
- (r'#.*?$', Comment.Single),
- (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
- # keywords
- (r'(BEGIN|END|alias|begin|break|case|defined\?|'
- r'do|else|elsif|end|ensure|for|if|in|next|redo|'
- r'rescue|raise|retry|return|super|then|undef|unless|until|when|'
- r'while|yield)\b', Keyword),
- # start of function, class and module names
- (r'(module)(\s+)([a-zA-Z_]\w*'
- r'(?:::[a-zA-Z_]\w*)*)',
- bygroups(Keyword, Text, Name.Namespace)),
- (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- # special methods
- (r'(initialize|new|loop|include|extend|raise|attr_reader|'
- r'attr_writer|attr_accessor|attr|catch|throw|private|'
- r'module_function|public|protected|true|false|nil)\b',
- Keyword.Pseudo),
- (r'(not|and|or)\b', Operator.Word),
- (r'(autoload|block_given|const_defined|eql|equal|frozen|include|'
- r'instance_of|is_a|iterator|kind_of|method_defined|nil|'
- r'private_method_defined|protected_method_defined|'
- r'public_method_defined|respond_to|tainted)\?', Name.Builtin),
- (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
- (r'(?<!\.)(Array|Float|Integer|String|__id__|__send__|abort|'
- r'ancestors|at_exit|autoload|binding|callcc|caller|'
- r'catch|chomp|chop|class_eval|class_variables|'
- r'clone|const_defined\?|const_get|const_missing|const_set|'
- r'constants|display|dup|eval|exec|exit|extend|fail|fork|'
- r'format|freeze|getc|gets|global_variables|gsub|'
- r'hash|id|included_modules|inspect|instance_eval|'
- r'instance_method|instance_methods|'
- r'instance_variable_get|instance_variable_set|instance_variables|'
- r'lambda|load|local_variables|loop|'
- r'method|method_missing|methods|module_eval|name|'
- r'object_id|open|p|print|printf|private_class_method|'
- r'private_instance_methods|'
- r'private_methods|proc|protected_instance_methods|'
- r'protected_methods|public_class_method|'
- r'public_instance_methods|public_methods|'
- r'putc|puts|raise|rand|readline|readlines|require|'
- r'scan|select|self|send|set_trace_func|singleton_methods|sleep|'
- r'split|sprintf|srand|sub|syscall|system|taint|'
- r'test|throw|to_a|to_s|trace_var|trap|untaint|untrace_var|'
- r'warn)\b', Name.Builtin),
- (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
- # normal heredocs
- (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
- heredoc_callback),
- # empty string heredocs
- (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
- (r'__END__', Comment.Preproc, 'end-part'),
- # multiline regex (after keywords or assignments)
- (r'(?:^|(?<=[=<>~!:])|'
- r'(?<=(?:\s|;)when\s)|'
- r'(?<=(?:\s|;)or\s)|'
- r'(?<=(?:\s|;)and\s)|'
- r'(?<=(?:\s|;|\.)index\s)|'
- r'(?<=(?:\s|;|\.)scan\s)|'
- r'(?<=(?:\s|;|\.)sub\s)|'
- r'(?<=(?:\s|;|\.)sub!\s)|'
- r'(?<=(?:\s|;|\.)gsub\s)|'
- r'(?<=(?:\s|;|\.)gsub!\s)|'
- r'(?<=(?:\s|;|\.)match\s)|'
- r'(?<=(?:\s|;)if\s)|'
- r'(?<=(?:\s|;)elsif\s)|'
- r'(?<=^when\s)|'
- r'(?<=^index\s)|'
- r'(?<=^scan\s)|'
- r'(?<=^sub\s)|'
- r'(?<=^gsub\s)|'
- r'(?<=^sub!\s)|'
- r'(?<=^gsub!\s)|'
- r'(?<=^match\s)|'
- r'(?<=^if\s)|'
- r'(?<=^elsif\s)'
- r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
- # multiline regex (in method calls or subscripts)
- (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
- # multiline regex (this time the funny no whitespace rule)
- (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
- 'multiline-regex'),
-            # lex numbers and swallow a following "/", which after a number
-            # is really a division operator, not the start of a regex
-            # (grrrr. i hate that. any better ideas?)
-            # since pygments 0.7 we also eat a "?" operator after numbers
-            # so that the char operator does not kick in: char literals are
-            # not allowed there, which keeps the ternary operator usable.
- # stupid example:
- # x>=0?n[x]:""
- (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- # Names
- (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
- (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
- (r'\$\w+', Name.Variable.Global),
- (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global),
- (r'\$-[0adFiIlpvw]', Name.Variable.Global),
- (r'::', Operator),
- include('strings'),
- # chars
- (r'\?(\\[MC]-)*' # modifiers
- r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
- r'(?!\w)',
- String.Char),
- (r'[A-Z]\w+', Name.Constant),
-            # this is needed because ruby attributes can look
-            # like keywords (class) or like operator characters (` ? !)
- (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])',
- bygroups(Operator, Name)),
- (r'[a-zA-Z_]\w*[\!\?]?', Name),
- (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&!^|~]=?', Operator),
- (r'[(){};,/?:\\]', Punctuation),
- (r'\s+', Text)
- ],
- 'funcname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'(?:([a-zA-Z_]\w*)(\.))?'
- r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
- r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
- bygroups(Name.Class, Operator, Name.Function), '#pop'),
- default('#pop')
- ],
- 'classname': [
- (r'\(', Punctuation, 'defexpr'),
- (r'<<', Operator, '#pop'),
- (r'[A-Z_]\w*', Name.Class, '#pop'),
- default('#pop')
- ],
- 'defexpr': [
- (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
- (r'\(', Operator, '#push'),
- include('root')
- ],
- 'in-intp': [
- ('}', String.Interpol, '#pop'),
- include('root'),
- ],
- 'string-intp': [
- (r'#{', String.Interpol, 'in-intp'),
- (r'#@@?[a-zA-Z_]\w*', String.Interpol),
- (r'#\$[a-zA-Z_]\w*', String.Interpol)
- ],
- 'string-intp-escaped': [
- include('string-intp'),
- (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
- String.Escape)
- ],
- 'interpolated-regex': [
- include('string-intp'),
- (r'[\\#]', String.Regex),
- (r'[^\\#]+', String.Regex),
- ],
- 'interpolated-string': [
- include('string-intp'),
- (r'[\\#]', String.Other),
- (r'[^\\#]+', String.Other),
- ],
- 'multiline-regex': [
- include('string-intp'),
- (r'\\\\', String.Regex),
- (r'\\/', String.Regex),
- (r'[\\#]', String.Regex),
- (r'[^\\/#]+', String.Regex),
- (r'/[mixounse]*', String.Regex, '#pop'),
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
- tokens.update(gen_rubystrings_rules())
-
- def analyse_text(text):
- return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
- """
- For Ruby interactive console (**irb**) output like:
-
- .. sourcecode:: rbcon
-
- irb(main):001:0> a = 1
- => 1
- irb(main):002:0> puts a
- 1
- => nil
- """
- name = 'Ruby irb session'
- aliases = ['rbcon', 'irb']
- mimetypes = ['text/x-ruby-shellsession']
-
- _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
- '|>> |\?> ')
-
- def get_tokens_unprocessed(self, text):
- rblexer = RubyLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- rblexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- rblexer.get_tokens_unprocessed(curcode)):
- yield item
-
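A minimal usage sketch for the console lexer above, assuming only the public pygments.highlight entry point; the transcript is the irb sample from the docstring:

    # hedged sketch: feed an irb transcript through RubyConsoleLexer
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import RubyConsoleLexer

    session = (
        'irb(main):001:0> a = 1\n'
        '=> 1\n'
        'irb(main):002:0> puts a\n'
        '1\n'
        '=> nil\n'
    )
    print(highlight(session, RubyConsoleLexer(), TerminalFormatter()))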
-
-class PerlLexer(RegexLexer):
- """
- For `Perl <http://www.perl.org>`_ source code.
- """
-
- name = 'Perl'
- aliases = ['perl', 'pl']
- filenames = ['*.pl', '*.pm', '*.t']
- mimetypes = ['text/x-perl', 'application/x-perl']
-
- flags = re.DOTALL | re.MULTILINE
- # TODO: give this to a perl guy who knows how to parse perl...
- tokens = {
- 'balanced-regex': [
- (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\#.*?$', Comment.Single),
- (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
- (r'(case|continue|do|else|elsif|for|foreach|if|last|my|'
- r'next|our|redo|reset|then|unless|until|while|use|'
- r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword),
- (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
- bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
- (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
- # common delimiters
- (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
- String.Regex),
- (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
- (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
- (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
- String.Regex),
- (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
- String.Regex),
- # balanced delimiters
- (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
- 'balanced-regex'),
- (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex,
- 'balanced-regex'),
-
- (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
- (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
- String.Regex),
- (r'\s+', Text),
- (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|'
- r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|'
- r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|'
- r'dump|each|endgrent|endhostent|endnetent|endprotoent|'
- r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|'
- r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|'
- r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|'
- r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|'
- r'getppid|getpriority|getprotobyname|getprotobynumber|'
- r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|'
- r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|'
- r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|'
- r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|'
- r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|'
- r'opendir|ord|our|pack|package|pipe|pop|pos|printf|'
- r'prototype|push|quotemeta|rand|read|readdir|'
- r'readline|readlink|readpipe|recv|redo|ref|rename|require|'
- r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|'
- r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|'
- r'setpgrp|setpriority|setprotoent|setpwent|setservent|'
- r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|'
- r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|'
- r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|'
- r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|'
- r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|'
- r'utime|values|vec|wait|waitpid|wantarray|warn|write'
- r')\b', Name.Builtin),
- (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
- (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String),
- (r'__END__', Comment.Preproc, 'end-part'),
- (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
- (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
- (r'[$@%#]+', Name.Variable, 'varname'),
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
- (r'<([^\s>]+)>', String.Regex),
- (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
- (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
- (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
- (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
- (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other),
- (r'package\s+', Keyword, 'modulename'),
- (r'sub\s+', Keyword, 'funcname'),
- (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
- r'!~|&&?|\|\||\.{1,3})', Operator),
- (r'[-+/*%=<>&^|!\\~]=?', Operator),
- (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage
- # of punctuation in Perl!
- (r'(?=\w)', Name, 'name'),
- ],
- 'format': [
- (r'\.\n', String.Interpol, '#pop'),
- (r'[^\n]*\n', String.Interpol),
- ],
- 'varname': [
- (r'\s+', Text),
- (r'\{', Punctuation, '#pop'), # hash syntax?
- (r'\)|,', Punctuation, '#pop'), # argument specifier
- (r'\w+::', Name.Namespace),
- (r'[\w:]+', Name.Variable, '#pop'),
- ],
- 'name': [
- (r'\w+::', Name.Namespace),
- (r'[\w:]+', Name, '#pop'),
- (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
- (r'(?=\W)', Text, '#pop'),
- ],
- 'modulename': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
- ],
- 'funcname': [
- (r'[a-zA-Z_]\w*[\!\?]?', Name.Function),
- (r'\s+', Text),
- # argument declaration
- (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
- (r'.*?{', Punctuation, '#pop'),
- (r';', Punctuation, '#pop'),
- ],
- 'cb-string': [
- (r'\\[\{\}\\]', String.Other),
- (r'\\', String.Other),
- (r'\{', String.Other, 'cb-string'),
- (r'\}', String.Other, '#pop'),
- (r'[^\{\}\\]+', String.Other)
- ],
- 'rb-string': [
- (r'\\[\(\)\\]', String.Other),
- (r'\\', String.Other),
- (r'\(', String.Other, 'rb-string'),
- (r'\)', String.Other, '#pop'),
- (r'[^\(\)]+', String.Other)
- ],
- 'sb-string': [
- (r'\\[\[\]\\]', String.Other),
- (r'\\', String.Other),
- (r'\[', String.Other, 'sb-string'),
- (r'\]', String.Other, '#pop'),
- (r'[^\[\]]+', String.Other)
- ],
- 'lt-string': [
- (r'\\[\<\>\\]', String.Other),
- (r'\\', String.Other),
- (r'\<', String.Other, 'lt-string'),
- (r'\>', String.Other, '#pop'),
- (r'[^\<\>]+', String.Other)
- ],
- 'end-part': [
- (r'.+', Comment.Preproc, '#pop')
- ]
- }
-
- def analyse_text(text):
- if shebang_matches(text, r'perl'):
- return True
- if re.search('(?:my|our)\s+[$@%(]', text):
- return 0.9
-
-
-class LuaLexer(RegexLexer):
- """
- For `Lua <http://www.lua.org>`_ source code.
-
- Additional options accepted:
-
- `func_name_highlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabled_modules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted.
-
-        To get a list of allowed modules, have a look into the
- `_luabuiltins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._luabuiltins import MODULES
- >>> MODULES.keys()
- ['string', 'coroutine', 'modules', 'io', 'basic', ...]
- """
-
- name = 'Lua'
- aliases = ['lua']
- filenames = ['*.lua', '*.wlua']
- mimetypes = ['text/x-lua', 'application/x-lua']
-
- tokens = {
- 'root': [
- # lua allows a file to start with a shebang
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
- ('--.*$', Comment.Single),
-
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- ('(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
-
- (r'\n', Text),
- (r'[^\S\n]', Text),
- # multiline strings
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
- (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
- (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
-
- ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
- r'while)\b', Keyword),
- (r'(local)\b', Keyword.Declaration),
- (r'(true|false|nil)\b', Keyword.Constant),
-
- (r'(function)\b', Keyword, 'funcname'),
-
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
-
- 'funcname': [
- (r'\s+', Text),
- ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)',
- bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
- # inline function
- ('\(', Punctuation, '#pop'),
- ],
-
- # if I understand correctly, every character is valid in a lua string,
- # so this state is only for later corrections
- 'string': [
- ('.', String)
- ],
-
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
-
- 'sqs': [
- ("'", String, '#pop'),
- include('string')
- ],
-
- 'dqs': [
- ('"', String, '#pop'),
- include('string')
- ]
- }
-
- def __init__(self, **options):
- self.func_name_highlighting = get_bool_opt(
- options, 'func_name_highlighting', True)
- self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
- self._functions = set()
- if self.func_name_highlighting:
- from pygments.lexers._luabuiltins import MODULES
- for mod, func in iteritems(MODULES):
- if mod not in self.disabled_modules:
- self._functions.update(func)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- elif '.' in value:
- a, b = value.split('.')
- yield index, Name, a
- yield index + len(a), Punctuation, u'.'
- yield index + len(a) + 1, Name, b
- continue
- yield index, token, value
-
-
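The two options documented in the docstring above are ordinary constructor keyword arguments; a minimal sketch of passing them, again assuming only the public pygments.highlight API:

    # hedged sketch: turn off builtin-name highlighting for the "io" module only
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import LuaLexer

    code = 'io.write("hi")\nprint("hi")\n'
    lexer = LuaLexer(disabled_modules=['io'])  # print stays Name.Builtin, io.write does not
    print(highlight(code, lexer, TerminalFormatter()))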
-class MoonScriptLexer(LuaLexer):
- """
-    For `MoonScript <http://moonscript.org>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = "MoonScript"
- aliases = ["moon", "moonscript"]
- filenames = ["*.moon"]
- mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
- tokens = {
- 'root': [
- (r'#!(.*?)$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- ('--.*$', Comment.Single),
- (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
- (r'(?i)\d+e[+-]?\d+', Number.Float),
- (r'(?i)0x[0-9a-f]*', Number.Hex),
- (r'\d+', Number.Integer),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'(?s)\[(=*)\[.*?\]\1\]', String),
- (r'(->|=>)', Name.Function),
- (r':[a-zA-Z_]\w*', Name.Variable),
- (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
- (r'[;,]', Punctuation),
- (r'[\[\]\{\}\(\)]', Keyword.Type),
- (r'[a-zA-Z_]\w*:', Name.Variable),
- (r"(class|extends|if|then|super|do|with|import|export|"
- r"while|elseif|return|for|in|from|when|using|else|"
- r"and|or|not|switch|break)\b", Keyword),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(and|or|not)\b', Operator.Word),
- (r'(self)\b', Name.Builtin.Pseudo),
- (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
- (r'[A-Z]\w*', Name.Class), # proper name
- (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
- ("'", String.Single, combined('stringescape', 'sqs')),
- ('"', String.Double, combined('stringescape', 'dqs'))
- ],
- 'stringescape': [
- (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
- ],
- 'sqs': [
- ("'", String.Single, '#pop'),
- (".", String)
- ],
- 'dqs': [
- ('"', String.Double, '#pop'),
- (".", String)
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # set . as Operator instead of Punctuation
- for index, token, value in \
- LuaLexer.get_tokens_unprocessed(self, text):
- if token == Punctuation and value == ".":
- token = Operator
- yield index, token, value
-
-
-class CrocLexer(RegexLexer):
- """
- For `Croc <http://jfbillingsley.com/croc>`_ source.
- """
- name = 'Croc'
- filenames = ['*.croc']
- aliases = ['croc']
- mimetypes = ['text/x-crocsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'nestedcomment'),
- # Keywords
- (r'(as|assert|break|case|catch|class|continue|default'
- r'|do|else|finally|for|foreach|function|global|namespace'
- r'|if|import|in|is|local|module|return|scope|super|switch'
- r'|this|throw|try|vararg|while|with|yield)\b', Keyword),
- (r'(false|true|null)\b', Keyword.Constant),
- # FloatLiteral
- (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
- Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[bB][01][01_]*', Number.Bin),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
- # -- Decimal
- (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char
- ),
- # StringLiteral
- # -- WysiwygString
- (r'@"(""|[^"])*"', String),
- (r'@`(``|[^`])*`', String),
- (r"@'(''|[^'])*'", String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (
- r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
- r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
- r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation
- ),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'nestedcomment': [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
-
-class MiniDLexer(CrocLexer):
- """
- For MiniD source. MiniD is now known as Croc.
- """
- name = 'MiniD'
- filenames = ['*.md']
- aliases = ['minid']
- mimetypes = ['text/x-minidsrc']
-
-
-class IoLexer(RegexLexer):
- """
- For `Io <http://iolanguage.com/>`_ (a small, prototype-based
- programming language) source.
-
- .. versionadded:: 0.10
- """
- name = 'Io'
- filenames = ['*.io']
- aliases = ['io']
- mimetypes = ['text/x-iosrc']
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'#(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nestedcomment'),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # Operators
- (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
- Operator),
- # keywords
- (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
- Keyword),
- # constants
- (r'(nil|false|true)\b', Name.Constant),
- # names
- (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
- Name.Builtin),
- ('[a-zA-Z_]\w*', Name),
- # numbers
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'nestedcomment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ]
- }
-
-
-class TclLexer(RegexLexer):
- """
- For Tcl source code.
-
- .. versionadded:: 0.10
- """
-
- keyword_cmds_re = (
- r'\b(after|apply|array|break|catch|continue|elseif|else|error|'
- r'eval|expr|for|foreach|global|if|namespace|proc|rename|return|'
- r'set|switch|then|trace|unset|update|uplevel|upvar|variable|'
- r'vwait|while)\b'
- )
-
- builtin_cmds_re = (
- r'\b(append|bgerror|binary|cd|chan|clock|close|concat|dde|dict|'
- r'encoding|eof|exec|exit|fblocked|fconfigure|fcopy|file|'
- r'fileevent|flush|format|gets|glob|history|http|incr|info|interp|'
- r'join|lappend|lassign|lindex|linsert|list|llength|load|loadTk|'
- r'lrange|lrepeat|lreplace|lreverse|lsearch|lset|lsort|mathfunc|'
- r'mathop|memory|msgcat|open|package|pid|pkg::create|pkg_mkIndex|'
- r'platform|platform::shell|puts|pwd|re_syntax|read|refchan|'
- r'regexp|registry|regsub|scan|seek|socket|source|split|string|'
- r'subst|tell|time|tm|unknown|unload)\b'
- )
-
- name = 'Tcl'
- aliases = ['tcl']
- filenames = ['*.tcl']
- mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
-
- def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
- return [
- (keyword_cmds_re, Keyword, 'params' + context),
- (builtin_cmds_re, Name.Builtin, 'params' + context),
- (r'([\w\.\-]+)', Name.Variable, 'params' + context),
- (r'#', Comment, 'comment'),
- ]
-
- tokens = {
- 'root': [
- include('command'),
- include('basic'),
- include('data'),
- (r'}', Keyword), # HACK: somehow we miscounted our braces
- ],
- 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
- 'command-in-brace': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-brace"),
- 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-bracket"),
- 'command-in-paren': _gen_command_rules(keyword_cmds_re,
- builtin_cmds_re,
- "-in-paren"),
- 'basic': [
- (r'\(', Keyword, 'paren'),
- (r'\[', Keyword, 'bracket'),
- (r'\{', Keyword, 'brace'),
- (r'"', String.Double, 'string'),
- (r'(eq|ne|in|ni)\b', Operator.Word),
- (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
- ],
- 'data': [
- (r'\s+', Text),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'0[0-7]+', Number.Oct),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\$([\w\.\-\:]+)', Name.Variable),
- (r'([\w\.\-\:]+)', Text),
- ],
- 'params': [
- (r';', Keyword, '#pop'),
- (r'\n', Text, '#pop'),
- (r'(else|elseif|then)\b', Keyword),
- include('basic'),
- include('data'),
- ],
- 'params-in-brace': [
- (r'}', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-paren': [
- (r'\)', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'params-in-bracket': [
- (r'\]', Keyword, ('#pop', '#pop')),
- include('params')
- ],
- 'string': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
- (r'"', String.Double, '#pop')
- ],
- 'string-square': [
- (r'\[', String.Double, 'string-square'),
- (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
- (r'\]', String.Double, '#pop')
- ],
- 'brace': [
- (r'}', Keyword, '#pop'),
- include('command-in-brace'),
- include('basic'),
- include('data'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('command-in-paren'),
- include('basic'),
- include('data'),
- ],
- 'bracket': [
- (r'\]', Keyword, '#pop'),
- include('command-in-bracket'),
- include('basic'),
- include('data'),
- ],
- 'comment': [
- (r'.*[^\\]\n', Comment, '#pop'),
- (r'.*\\\n', Comment),
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'(tcl)')
-
-
-class FactorLexer(RegexLexer):
- """
- Lexer for the `Factor <http://factorcode.org>`_ language.
-
- .. versionadded:: 1.4
- """
- name = 'Factor'
- aliases = ['factor']
- filenames = ['*.factor']
- mimetypes = ['text/x-factor']
-
- flags = re.MULTILINE | re.UNICODE
-
- builtin_kernel = (
- r'(?:-rot|2bi|2bi@|2bi\*|2curry|2dip|2drop|2dup|2keep|2nip|'
- r'2over|2tri|2tri@|2tri\*|3bi|3curry|3dip|3drop|3dup|3keep|'
- r'3tri|4dip|4drop|4dup|4keep|<wrapper>|=|>boolean|\(clone\)|'
- r'\?|\?execute|\?if|and|assert|assert=|assert\?|bi|bi-curry|'
- r'bi-curry@|bi-curry\*|bi@|bi\*|boa|boolean|boolean\?|both\?|'
- r'build|call|callstack|callstack>array|callstack\?|clear|clone|'
- r'compose|compose\?|curry|curry\?|datastack|die|dip|do|drop|'
- r'dup|dupd|either\?|eq\?|equal\?|execute|hashcode|hashcode\*|'
- r'identity-hashcode|identity-tuple|identity-tuple\?|if|if\*|'
- r'keep|loop|most|new|nip|not|null|object|or|over|pick|prepose|'
- r'retainstack|rot|same\?|swap|swapd|throw|tri|tri-curry|'
- r'tri-curry@|tri-curry\*|tri@|tri\*|tuple|tuple\?|unless|'
- r'unless\*|until|when|when\*|while|with|wrapper|wrapper\?|xor)\s'
- )
-
- builtin_assocs = (
- r'(?:2cache|<enum>|>alist|\?at|\?of|assoc|assoc-all\?|'
- r'assoc-any\?|assoc-clone-like|assoc-combine|assoc-diff|'
- r'assoc-diff!|assoc-differ|assoc-each|assoc-empty\?|'
- r'assoc-filter|assoc-filter!|assoc-filter-as|assoc-find|'
- r'assoc-hashcode|assoc-intersect|assoc-like|assoc-map|'
- r'assoc-map-as|assoc-partition|assoc-refine|assoc-size|'
- r'assoc-stack|assoc-subset\?|assoc-union|assoc-union!|'
- r'assoc=|assoc>map|assoc\?|at|at+|at\*|cache|change-at|'
- r'clear-assoc|delete-at|delete-at\*|enum|enum\?|extract-keys|'
- r'inc-at|key\?|keys|map>assoc|maybe-set-at|new-assoc|of|'
- r'push-at|rename-at|set-at|sift-keys|sift-values|substitute|'
- r'unzip|value-at|value-at\*|value\?|values|zip)\s'
- )
-
- builtin_combinators = (
- r'(?:2cleave|2cleave>quot|3cleave|3cleave>quot|4cleave|'
- r'4cleave>quot|alist>quot|call-effect|case|case-find|'
- r'case>quot|cleave|cleave>quot|cond|cond>quot|deep-spread>quot|'
- r'execute-effect|linear-case-quot|no-case|no-case\?|no-cond|'
- r'no-cond\?|recursive-hashcode|shallow-spread>quot|spread|'
- r'to-fixed-point|wrong-values|wrong-values\?)\s'
- )
-
- builtin_math = (
- r'(?:-|/|/f|/i|/mod|2/|2\^|<|<=|<fp-nan>|>|>=|>bignum|'
- r'>fixnum|>float|>integer|\(all-integers\?\)|'
- r'\(each-integer\)|\(find-integer\)|\*|\+|\?1\+|'
- r'abs|align|all-integers\?|bignum|bignum\?|bit\?|bitand|'
- r'bitnot|bitor|bits>double|bits>float|bitxor|complex|'
- r'complex\?|denominator|double>bits|each-integer|even\?|'
- r'find-integer|find-last-integer|fixnum|fixnum\?|float|'
- r'float>bits|float\?|fp-bitwise=|fp-infinity\?|fp-nan-payload|'
- r'fp-nan\?|fp-qnan\?|fp-sign|fp-snan\?|fp-special\?|'
- r'if-zero|imaginary-part|integer|integer>fixnum|'
- r'integer>fixnum-strict|integer\?|log2|log2-expects-positive|'
- r'log2-expects-positive\?|mod|neg|neg\?|next-float|'
- r'next-power-of-2|number|number=|number\?|numerator|odd\?|'
- r'out-of-fixnum-range|out-of-fixnum-range\?|power-of-2\?|'
- r'prev-float|ratio|ratio\?|rational|rational\?|real|'
- r'real-part|real\?|recip|rem|sgn|shift|sq|times|u<|u<=|u>|'
- r'u>=|unless-zero|unordered\?|when-zero|zero\?)\s'
- )
-
- builtin_sequences = (
- r'(?:1sequence|2all\?|2each|2map|2map-as|2map-reduce|2reduce|'
- r'2selector|2sequence|3append|3append-as|3each|3map|3map-as|'
- r'3sequence|4sequence|<repetition>|<reversed>|<slice>|\?first|'
- r'\?last|\?nth|\?second|\?set-nth|accumulate|accumulate!|'
- r'accumulate-as|all\?|any\?|append|append!|append-as|'
- r'assert-sequence|assert-sequence=|assert-sequence\?|'
- r'binary-reduce|bounds-check|bounds-check\?|bounds-error|'
- r'bounds-error\?|but-last|but-last-slice|cartesian-each|'
- r'cartesian-map|cartesian-product|change-nth|check-slice|'
- r'check-slice-error|clone-like|collapse-slice|collector|'
- r'collector-for|concat|concat-as|copy|count|cut|cut-slice|'
- r'cut\*|delete-all|delete-slice|drop-prefix|each|each-from|'
- r'each-index|empty\?|exchange|filter|filter!|filter-as|find|'
- r'find-from|find-index|find-index-from|find-last|find-last-from|'
- r'first|first2|first3|first4|flip|follow|fourth|glue|halves|'
- r'harvest|head|head-slice|head-slice\*|head\*|head\?|'
- r'if-empty|immutable|immutable-sequence|immutable-sequence\?|'
- r'immutable\?|index|index-from|indices|infimum|infimum-by|'
- r'insert-nth|interleave|iota|iota-tuple|iota-tuple\?|join|'
- r'join-as|last|last-index|last-index-from|length|lengthen|'
- r'like|longer|longer\?|longest|map|map!|map-as|map-find|'
- r'map-find-last|map-index|map-integers|map-reduce|map-sum|'
- r'max-length|member-eq\?|member\?|midpoint@|min-length|'
- r'mismatch|move|new-like|new-resizable|new-sequence|'
- r'non-negative-integer-expected|non-negative-integer-expected\?|'
- r'nth|nths|pad-head|pad-tail|padding|partition|pop|pop\*|'
- r'prefix|prepend|prepend-as|produce|produce-as|product|push|'
- r'push-all|push-either|push-if|reduce|reduce-index|remove|'
- r'remove!|remove-eq|remove-eq!|remove-nth|remove-nth!|repetition|'
- r'repetition\?|replace-slice|replicate|replicate-as|rest|'
- r'rest-slice|reverse|reverse!|reversed|reversed\?|second|'
- r'selector|selector-for|sequence|sequence-hashcode|sequence=|'
- r'sequence\?|set-first|set-fourth|set-last|set-length|set-nth|'
- r'set-second|set-third|short|shorten|shorter|shorter\?|'
- r'shortest|sift|slice|slice-error|slice-error\?|slice\?|'
- r'snip|snip-slice|start|start\*|subseq|subseq\?|suffix|'
- r'suffix!|sum|sum-lengths|supremum|supremum-by|surround|tail|'
- r'tail-slice|tail-slice\*|tail\*|tail\?|third|trim|'
- r'trim-head|trim-head-slice|trim-slice|trim-tail|trim-tail-slice|'
- r'unclip|unclip-last|unclip-last-slice|unclip-slice|unless-empty|'
- r'virtual-exemplar|virtual-sequence|virtual-sequence\?|virtual@|'
- r'when-empty)\s'
- )
-
- builtin_namespaces = (
- r'(?:\+@|change|change-global|counter|dec|get|get-global|'
- r'global|inc|init-namespaces|initialize|is-global|make-assoc|'
- r'namespace|namestack|off|on|set|set-global|set-namestack|'
- r'toggle|with-global|with-scope|with-variable|with-variables)\s'
- )
-
- builtin_arrays = (
- r'(?:1array|2array|3array|4array|<array>|>array|array|array\?|'
- r'pair|pair\?|resize-array)\s'
- )
-
- builtin_io = (
- r'(?:\(each-stream-block-slice\)|\(each-stream-block\)|'
- r'\(stream-contents-by-block\)|\(stream-contents-by-element\)|'
- r'\(stream-contents-by-length-or-block\)|'
- r'\(stream-contents-by-length\)|\+byte\+|\+character\+|'
- r'bad-seek-type|bad-seek-type\?|bl|contents|each-block|'
- r'each-block-size|each-block-slice|each-line|each-morsel|'
- r'each-stream-block|each-stream-block-slice|each-stream-line|'
- r'error-stream|flush|input-stream|input-stream\?|'
- r'invalid-read-buffer|invalid-read-buffer\?|lines|nl|'
- r'output-stream|output-stream\?|print|read|read-into|'
- r'read-partial|read-partial-into|read-until|read1|readln|'
- r'seek-absolute|seek-absolute\?|seek-end|seek-end\?|'
- r'seek-input|seek-output|seek-relative|seek-relative\?|'
- r'stream-bl|stream-contents|stream-contents\*|stream-copy|'
- r'stream-copy\*|stream-element-type|stream-flush|'
- r'stream-length|stream-lines|stream-nl|stream-print|'
- r'stream-read|stream-read-into|stream-read-partial|'
- r'stream-read-partial-into|stream-read-partial-unsafe|'
- r'stream-read-unsafe|stream-read-until|stream-read1|'
- r'stream-readln|stream-seek|stream-seekable\?|stream-tell|'
- r'stream-write|stream-write1|tell-input|tell-output|'
- r'with-error-stream|with-error-stream\*|with-error>output|'
- r'with-input-output\+error-streams|'
- r'with-input-output\+error-streams\*|with-input-stream|'
- r'with-input-stream\*|with-output-stream|with-output-stream\*|'
- r'with-output>error|with-output\+error-stream|'
- r'with-output\+error-stream\*|with-streams|with-streams\*|'
- r'write|write1)\s'
- )
-
- builtin_strings = (
- r'(?:1string|<string>|>string|resize-string|string|string\?)\s'
- )
-
- builtin_vectors = (
- r'(?:1vector|<vector>|>vector|\?push|vector|vector\?)\s'
- )
-
- builtin_continuations = (
- r'(?:<condition>|<continuation>|<restart>|attempt-all|'
- r'attempt-all-error|attempt-all-error\?|callback-error-hook|'
- r'callcc0|callcc1|cleanup|compute-restarts|condition|'
- r'condition\?|continuation|continuation\?|continue|'
- r'continue-restart|continue-with|current-continuation|'
- r'error|error-continuation|error-in-thread|error-thread|'
- r'ifcc|ignore-errors|in-callback\?|original-error|recover|'
- r'restart|restart\?|restarts|rethrow|rethrow-restarts|'
- r'return|return-continuation|thread-error-hook|throw-continue|'
- r'throw-restarts|with-datastack|with-return)\s'
- )
-
- tokens = {
- 'root': [
- # factor allows a file to start with a shebang
- (r'#!.*$', Comment.Preproc),
- default('base'),
- ],
- 'base': [
- (r'\s+', Text),
-
- # defining words
- (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Function)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
- (r'(GENERIC:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Function)),
- (r'\(\s', Name.Function, 'stackeffect'),
- (r';\s', Keyword),
-
- # imports and namespaces
- (r'(USING:)(\s+)',
- bygroups(Keyword.Namespace, Text), 'vocabs'),
- (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace)),
- (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)),
- (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'),
- (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)),
- (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)),
- (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
- bygroups(Keyword.Namespace, Text, Name.Function)),
-
- # tuples and classes
- (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
- (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class), 'slots'),
- (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
- (r'(C:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
- (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
- (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
- (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'SINGLETONS:', Keyword, 'classes'),
-
- # other syntax
- (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'SYMBOLS:\s', Keyword, 'words'),
- (r'SYNTAX:\s', Keyword),
- (r'ALIEN:\s', Keyword),
- (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
- (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text)),
- (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)',
- bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)),
-
- # vocab.private
- (r'(?:<PRIVATE|PRIVATE>)\s', Keyword.Namespace),
-
- # strings
- (r'"""\s+(?:.|\n)*?\s+"""', String),
- (r'"(?:\\\\|\\"|[^"])*"', String),
- (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String),
- (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char),
-
- # comments
- (r'!\s+.*$', Comment),
- (r'#!\s+.*$', Comment),
- (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment),
-
- # boolean constants
- (r'[tf]\s', Name.Constant),
-
- # symbols and literals
- (r'[\\$]\s+\S+', Name.Constant),
- (r'M\\\s+\S+\s+\S+', Name.Constant),
-
- # numbers
- (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
- (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
- (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
- (r'0b[01]+\s', Number.Bin),
- (r'0o[0-7]+\s', Number.Oct),
- (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
- (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
-
- # keywords
- (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
- Keyword),
-
- # builtins
- (builtin_kernel, Name.Builtin),
- (builtin_assocs, Name.Builtin),
- (builtin_combinators, Name.Builtin),
- (builtin_math, Name.Builtin),
- (builtin_sequences, Name.Builtin),
- (builtin_namespaces, Name.Builtin),
- (builtin_arrays, Name.Builtin),
- (builtin_io, Name.Builtin),
- (builtin_strings, Name.Builtin),
- (builtin_vectors, Name.Builtin),
- (builtin_continuations, Name.Builtin),
-
- # everything else is text
- (r'\S+', Text),
- ],
- 'stackeffect': [
- (r'\s+', Text),
- (r'\(\s+', Name.Function, 'stackeffect'),
- (r'\)\s', Name.Function, '#pop'),
- (r'--\s', Name.Function),
- (r'\S+', Name.Variable),
- ],
- 'slots': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'({\s+)(\S+)(\s+[^}]+\s+}\s)',
- bygroups(Text, Name.Variable, Text)),
- (r'\S+', Name.Variable),
- ],
- 'vocabs': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Namespace),
- ],
- 'classes': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Class),
- ],
- 'words': [
- (r'\s+', Text),
- (r';\s', Keyword, '#pop'),
- (r'\S+', Name.Function),
- ],
- }
-
-
-class FancyLexer(RegexLexer):
- """
- Pygments Lexer For `Fancy <http://www.fancy-lang.org/>`_.
-
- Fancy is a self-hosted, pure object-oriented, dynamic,
- class-based, concurrent general-purpose programming language
- running on Rubinius, the Ruby VM.
-
- .. versionadded:: 1.5
- """
- name = 'Fancy'
- filenames = ['*.fy', '*.fancypack']
- aliases = ['fancy', 'fy']
- mimetypes = ['text/x-fancysrc']
-
- tokens = {
- # copied from PerlLexer:
- 'balanced-regex': [
- (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
- (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
- (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
- (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
- (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
- (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
- (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
- (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
- (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
- (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
- ],
- 'root': [
- (r'\s+', Text),
-
- # balanced delimiters (copied from PerlLexer):
- (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
- (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
- (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
- (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
- (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
- (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
-
- # Comments
- (r'#(.*?)\n', Comment.Single),
- # Symbols
- (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol),
- # Multi-line DoubleQuotedString
- (r'"""(\\\\|\\"|[^"])*"""', String),
- # DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"', String),
- # keywords
- (r'(def|class|try|catch|finally|retry|return|return_local|match|'
- r'case|->|=>)\b', Keyword),
- # constants
- (r'(self|super|nil|false|true)\b', Name.Constant),
- (r'[(){};,/?\|:\\]', Punctuation),
- # names
- (r'(Object|Array|Hash|Directory|File|Class|String|Number|'
- r'Enumerable|FancyEnumerable|Block|TrueClass|NilClass|'
- r'FalseClass|Tuple|Symbol|Stack|Set|FancySpec|Method|Package|'
- r'Range)\b', Name.Builtin),
- # functions
- (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
- # operators, must be below functions
- (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator),
- ('[A-Z]\w*', Name.Constant),
- ('@[a-zA-Z_]\w*', Name.Variable.Instance),
- ('@@[a-zA-Z_]\w*', Name.Variable.Class),
- ('@@?', Operator),
- ('[a-zA-Z_]\w*', Name),
- # numbers - / checks are necessary to avoid mismarking regexes,
- # see comment in RubyLexer
- (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
- bygroups(Number.Oct, Text, Operator)),
- (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
- bygroups(Number.Hex, Text, Operator)),
- (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
- bygroups(Number.Bin, Text, Operator)),
- (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
- bygroups(Number.Integer, Text, Operator)),
- (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ]
- }
-
-
-class DgLexer(RegexLexer):
- """
- Lexer for `dg <http://pyos.github.com/dg>`_,
- a functional and object-oriented programming language
- running on the CPython 3 VM.
-
- .. versionadded:: 1.6
- """
- name = 'dg'
- aliases = ['dg']
- filenames = ['*.dg']
- mimetypes = ['text/x-dg']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment.Single),
-
- (r'(?i)0b[01]+', Number.Bin),
- (r'(?i)0o[0-7]+', Number.Oct),
- (r'(?i)0x[0-9a-f]+', Number.Hex),
- (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
- (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
- (r'(?i)[+-]?[0-9]+j?', Number.Integer),
-
- (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
- (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
- (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
- (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
-
- (r"`\w+'*`", Operator),
- (r'\b(and|in|is|or|where)\b', Operator.Word),
- (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
-
- (r"(?<!\.)(bool|bytearray|bytes|classmethod|complex|dict'?|"
- r"float|frozenset|int|list'?|memoryview|object|property|range|"
- r"set'?|slice|staticmethod|str|super|tuple'?|type)"
- r"(?!['\w])", Name.Builtin),
- (r'(?<!\.)(__import__|abs|all|any|bin|bind|chr|cmp|compile|complex|'
- r'delattr|dir|divmod|drop|dropwhile|enumerate|eval|exhaust|'
- r'filter|flip|foldl1?|format|fst|getattr|globals|hasattr|hash|'
- r'head|hex|id|init|input|isinstance|issubclass|iter|iterate|last|'
- r'len|locals|map|max|min|next|oct|open|ord|pow|print|repr|'
- r'reversed|round|setattr|scanl1?|snd|sorted|sum|tail|take|'
- r"takewhile|vars|zip)(?!['\w])", Name.Builtin),
- (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
- Name.Builtin.Pseudo),
-
- (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
- Name.Exception),
- (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
- r"SystemExit)(?!['\w])", Name.Exception),
-
- (r"(?<![\.\w])(except|finally|for|if|import|not|otherwise|raise|"
- r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
-
- (r"[A-Z_]+'*(?!['\w])", Name),
- (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
- (r"\w+'*", Name),
-
- (r'[()]', Punctuation),
- (r'.', Error),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'string': [
- (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String),
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop')
- ],
- 'sqs': [
- (r"'", String, '#pop')
- ],
- 'tdqs': [
- (r'"""', String, '#pop')
- ],
- 'tsqs': [
- (r"'''", String, '#pop')
- ],
- }
-
-
-class Perl6Lexer(ExtendedRegexLexer):
- """
- For `Perl 6 <http://www.perl6.org>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Perl6'
- aliases = ['perl6', 'pl6']
- filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
- '*.6pm', '*.p6m', '*.pm6', '*.t']
- mimetypes = ['text/x-perl6', 'application/x-perl6']
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- PERL6_IDENTIFIER_RANGE = "['\w:-]"
-
- PERL6_KEYWORDS = (
- 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
- 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
- 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
- 'cached', 'category', 'class', 'constant', 'contend', 'continue',
- 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
- 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
- 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
- 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
- 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
- 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
- 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
- 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
- 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
- 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
- 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
- )
-
- PERL6_BUILTINS = (
- 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
- 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
- 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
- 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
- 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
- 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
- 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
- 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
- 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
- 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
- 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
- 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
- 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
- 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
- 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
- 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
- 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
- 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
- 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
- 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
- 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
- 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
- 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
- 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
- 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
- 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
- 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
- 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
- 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
- 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
- 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
- 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
- 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
- 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
- )
-
- PERL6_BUILTIN_CLASSES = (
- 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
- 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
- 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
- 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
- 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
- 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
- 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
- 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
- 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
- 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
- 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
- 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
- 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
- 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
- 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
- )
-
- PERL6_OPERATORS = (
- 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
- 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
- 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
- '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
- '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
- 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
- '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
- '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
- '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
- 'not', '<==', '==>', '<<==', '==>>',
- )
-
- # Perl 6 has a *lot* of possible bracketing characters
- # this list was lifted from STD.pm6 (https://github.com/perl6/std)
- PERL6_BRACKETS = {
- u'\u0028' : u'\u0029', u'\u003c' : u'\u003e', u'\u005b' : u'\u005d',
- u'\u007b' : u'\u007d', u'\u00ab' : u'\u00bb', u'\u0f3a' : u'\u0f3b',
- u'\u0f3c' : u'\u0f3d', u'\u169b' : u'\u169c', u'\u2018' : u'\u2019',
- u'\u201a' : u'\u2019', u'\u201b' : u'\u2019', u'\u201c' : u'\u201d',
- u'\u201e' : u'\u201d', u'\u201f' : u'\u201d', u'\u2039' : u'\u203a',
- u'\u2045' : u'\u2046', u'\u207d' : u'\u207e', u'\u208d' : u'\u208e',
- u'\u2208' : u'\u220b', u'\u2209' : u'\u220c', u'\u220a' : u'\u220d',
- u'\u2215' : u'\u29f5', u'\u223c' : u'\u223d', u'\u2243' : u'\u22cd',
- u'\u2252' : u'\u2253', u'\u2254' : u'\u2255', u'\u2264' : u'\u2265',
- u'\u2266' : u'\u2267', u'\u2268' : u'\u2269', u'\u226a' : u'\u226b',
- u'\u226e' : u'\u226f', u'\u2270' : u'\u2271', u'\u2272' : u'\u2273',
- u'\u2274' : u'\u2275', u'\u2276' : u'\u2277', u'\u2278' : u'\u2279',
- u'\u227a' : u'\u227b', u'\u227c' : u'\u227d', u'\u227e' : u'\u227f',
- u'\u2280' : u'\u2281', u'\u2282' : u'\u2283', u'\u2284' : u'\u2285',
- u'\u2286' : u'\u2287', u'\u2288' : u'\u2289', u'\u228a' : u'\u228b',
- u'\u228f' : u'\u2290', u'\u2291' : u'\u2292', u'\u2298' : u'\u29b8',
- u'\u22a2' : u'\u22a3', u'\u22a6' : u'\u2ade', u'\u22a8' : u'\u2ae4',
- u'\u22a9' : u'\u2ae3', u'\u22ab' : u'\u2ae5', u'\u22b0' : u'\u22b1',
- u'\u22b2' : u'\u22b3', u'\u22b4' : u'\u22b5', u'\u22b6' : u'\u22b7',
- u'\u22c9' : u'\u22ca', u'\u22cb' : u'\u22cc', u'\u22d0' : u'\u22d1',
- u'\u22d6' : u'\u22d7', u'\u22d8' : u'\u22d9', u'\u22da' : u'\u22db',
- u'\u22dc' : u'\u22dd', u'\u22de' : u'\u22df', u'\u22e0' : u'\u22e1',
- u'\u22e2' : u'\u22e3', u'\u22e4' : u'\u22e5', u'\u22e6' : u'\u22e7',
- u'\u22e8' : u'\u22e9', u'\u22ea' : u'\u22eb', u'\u22ec' : u'\u22ed',
- u'\u22f0' : u'\u22f1', u'\u22f2' : u'\u22fa', u'\u22f3' : u'\u22fb',
- u'\u22f4' : u'\u22fc', u'\u22f6' : u'\u22fd', u'\u22f7' : u'\u22fe',
- u'\u2308' : u'\u2309', u'\u230a' : u'\u230b', u'\u2329' : u'\u232a',
- u'\u23b4' : u'\u23b5', u'\u2768' : u'\u2769', u'\u276a' : u'\u276b',
- u'\u276c' : u'\u276d', u'\u276e' : u'\u276f', u'\u2770' : u'\u2771',
- u'\u2772' : u'\u2773', u'\u2774' : u'\u2775', u'\u27c3' : u'\u27c4',
- u'\u27c5' : u'\u27c6', u'\u27d5' : u'\u27d6', u'\u27dd' : u'\u27de',
- u'\u27e2' : u'\u27e3', u'\u27e4' : u'\u27e5', u'\u27e6' : u'\u27e7',
- u'\u27e8' : u'\u27e9', u'\u27ea' : u'\u27eb', u'\u2983' : u'\u2984',
- u'\u2985' : u'\u2986', u'\u2987' : u'\u2988', u'\u2989' : u'\u298a',
- u'\u298b' : u'\u298c', u'\u298d' : u'\u298e', u'\u298f' : u'\u2990',
- u'\u2991' : u'\u2992', u'\u2993' : u'\u2994', u'\u2995' : u'\u2996',
- u'\u2997' : u'\u2998', u'\u29c0' : u'\u29c1', u'\u29c4' : u'\u29c5',
- u'\u29cf' : u'\u29d0', u'\u29d1' : u'\u29d2', u'\u29d4' : u'\u29d5',
- u'\u29d8' : u'\u29d9', u'\u29da' : u'\u29db', u'\u29f8' : u'\u29f9',
- u'\u29fc' : u'\u29fd', u'\u2a2b' : u'\u2a2c', u'\u2a2d' : u'\u2a2e',
- u'\u2a34' : u'\u2a35', u'\u2a3c' : u'\u2a3d', u'\u2a64' : u'\u2a65',
- u'\u2a79' : u'\u2a7a', u'\u2a7d' : u'\u2a7e', u'\u2a7f' : u'\u2a80',
- u'\u2a81' : u'\u2a82', u'\u2a83' : u'\u2a84', u'\u2a8b' : u'\u2a8c',
- u'\u2a91' : u'\u2a92', u'\u2a93' : u'\u2a94', u'\u2a95' : u'\u2a96',
- u'\u2a97' : u'\u2a98', u'\u2a99' : u'\u2a9a', u'\u2a9b' : u'\u2a9c',
- u'\u2aa1' : u'\u2aa2', u'\u2aa6' : u'\u2aa7', u'\u2aa8' : u'\u2aa9',
- u'\u2aaa' : u'\u2aab', u'\u2aac' : u'\u2aad', u'\u2aaf' : u'\u2ab0',
- u'\u2ab3' : u'\u2ab4', u'\u2abb' : u'\u2abc', u'\u2abd' : u'\u2abe',
- u'\u2abf' : u'\u2ac0', u'\u2ac1' : u'\u2ac2', u'\u2ac3' : u'\u2ac4',
- u'\u2ac5' : u'\u2ac6', u'\u2acd' : u'\u2ace', u'\u2acf' : u'\u2ad0',
- u'\u2ad1' : u'\u2ad2', u'\u2ad3' : u'\u2ad4', u'\u2ad5' : u'\u2ad6',
- u'\u2aec' : u'\u2aed', u'\u2af7' : u'\u2af8', u'\u2af9' : u'\u2afa',
- u'\u2e02' : u'\u2e03', u'\u2e04' : u'\u2e05', u'\u2e09' : u'\u2e0a',
- u'\u2e0c' : u'\u2e0d', u'\u2e1c' : u'\u2e1d', u'\u2e20' : u'\u2e21',
- u'\u3008' : u'\u3009', u'\u300a' : u'\u300b', u'\u300c' : u'\u300d',
- u'\u300e' : u'\u300f', u'\u3010' : u'\u3011', u'\u3014' : u'\u3015',
- u'\u3016' : u'\u3017', u'\u3018' : u'\u3019', u'\u301a' : u'\u301b',
- u'\u301d' : u'\u301e', u'\ufd3e' : u'\ufd3f', u'\ufe17' : u'\ufe18',
- u'\ufe35' : u'\ufe36', u'\ufe37' : u'\ufe38', u'\ufe39' : u'\ufe3a',
- u'\ufe3b' : u'\ufe3c', u'\ufe3d' : u'\ufe3e', u'\ufe3f' : u'\ufe40',
- u'\ufe41' : u'\ufe42', u'\ufe43' : u'\ufe44', u'\ufe47' : u'\ufe48',
- u'\ufe59' : u'\ufe5a', u'\ufe5b' : u'\ufe5c', u'\ufe5d' : u'\ufe5e',
- u'\uff08' : u'\uff09', u'\uff1c' : u'\uff1e', u'\uff3b' : u'\uff3d',
- u'\uff5b' : u'\uff5d', u'\uff5f' : u'\uff60', u'\uff62' : u'\uff63',
- }
-
- def _build_word_match(words, boundary_regex_fragment = None, prefix = '', suffix = ''):
- if boundary_regex_fragment is None:
- return r'\b(' + prefix + r'|'.join([ re.escape(x) for x in words]) + \
- suffix + r')\b'
- else:
- return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
- r'|'.join([ re.escape(x) for x in words]) + r')' + suffix + r'(?!' + \
- boundary_regex_fragment + r')'
-
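_build_word_match above swaps \b word boundaries for lookarounds built from PERL6_IDENTIFIER_RANGE, so hyphens and apostrophes count as part of an identifier; a standalone sketch of the same construction (the helper name here is illustrative, not from the source):

    import re

    def build_word_match(words, boundary):
        # same shape as the method above: negative lookarounds instead of \b
        return (r'(?<!' + boundary + r')(' +
                '|'.join(re.escape(w) for w in words) +
                r')(?!' + boundary + r')')

    pattern = build_word_match(('if', 'given'), r"['\w:-]")
    print(bool(re.search(pattern, 'given $x')))    # True
    print(bool(re.search(pattern, 'self-given')))  # False: "-" counts as identifier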
- def brackets_callback(token_class):
- def callback(lexer, match, context):
- groups = match.groupdict()
- opening_chars = groups['delimiter']
- n_chars = len(opening_chars)
- adverbs = groups.get('adverbs')
-
- closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
- text = context.text
-
- if closer is None: # it's not a mirrored character, which means we
- # just need to look for the next occurrence
-
- end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
- else: # we need to look for the corresponding closing character,
- # keep nesting in mind
- closing_chars = closer * n_chars
- nesting_level = 1
-
- search_pos = match.start('delimiter')
-
- while nesting_level > 0:
- next_open_pos = text.find(opening_chars, search_pos + n_chars)
- next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
- if next_close_pos == -1:
- next_close_pos = len(text)
- nesting_level = 0
- elif next_open_pos != -1 and next_open_pos < next_close_pos:
- nesting_level += 1
- search_pos = next_open_pos
- else: # next_close_pos < next_open_pos
- nesting_level -= 1
- search_pos = next_close_pos
-
- end_pos = next_close_pos
-
- if end_pos < 0: # if we didn't find a closer, just highlight the
- # rest of the text in this class
- end_pos = len(text)
-
- if adverbs is not None and re.search(r':to\b', adverbs):
- heredoc_terminator = text[match.start('delimiter') + n_chars : end_pos]
- end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) + r'\s*$', text[ end_pos : ], re.MULTILINE)
-
- if end_heredoc:
- end_pos += end_heredoc.end()
- else:
- end_pos = len(text)
-
- yield match.start(), token_class, text[match.start() : end_pos + n_chars]
- context.pos = end_pos + n_chars
-
- return callback
-
- def opening_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
-
- # if we encounter an opening brace and we're one level
- # below a token state, it means we need to increment
- # the nesting level for braces so we know later when
- # we should return to the token rules.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level += 1
-
- def closing_brace_callback(lexer, match, context):
- stack = context.stack
-
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
-
- # if we encounter a free closing brace and we're one level
- # below a token state, it means we need to check the nesting
- # level to see if we need to return to the token state.
- if len(stack) > 2 and stack[-2] == 'token':
- context.perl6_token_nesting_level -= 1
- if context.perl6_token_nesting_level == 0:
- stack.pop()
-
- def embedded_perl6_callback(lexer, match, context):
- context.perl6_token_nesting_level = 1
- yield match.start(), Text, context.text[match.start() : match.end()]
- context.pos = match.end()
- context.stack.append('root')
-
- # If you're modifying these rules, be careful if you need to process '{' or '}'
- # characters. We have special logic for processing these characters (due to the fact
- # that you can nest Perl 6 code in regex blocks), so if you need to process one of
- # them, make sure you also process the corresponding one!
- tokens = {
- 'common' : [
- (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)', brackets_callback(Comment.Multiline)),
- (r'#[^\n]*$', Comment.Singleline),
- (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
- (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
- (r'^=.*?\n\s*?\n', Comment.Multiline),
- (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
- bygroups(Keyword, Name), 'token-sym-brackets'),
- (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', bygroups(Keyword, Name), 'pre-token'),
- # deal with a special case in the Perl 6 grammar (role q { ... })
- (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
- (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
- (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix = '(?::[UD])?'), Name.Builtin),
- (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
- # copied from PerlLexer
- (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable),
- (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
- (r'::\?\w+', Name.Variable.Global),
- (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
- Name.Variable.Global),
- (r'\$(?:<.*?>)+', Name.Variable),
- (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])(?P=first_char)*)', brackets_callback(String)),
- # copied from PerlLexer
- (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
- (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
- (r'0b[01]+(_[01]+)*', Number.Bin),
- (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
- Number.Float),
- (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
- (r'\d+(_\d+)*', Number.Integer),
- (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
- (r'm\w+(?=\()', Name),
- (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z_:\s])(?P=first_char)*)', brackets_callback(String.Regex)),
- (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
- String.Regex),
- (r'<[^\s=].*?\S>', String),
- (_build_word_match(PERL6_OPERATORS), Operator),
- (r'[0-9a-zA-Z_]' + PERL6_IDENTIFIER_RANGE + '*', Name),
- (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
- (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
- ],
- 'root' : [
- include('common'),
- (r'\{', opening_brace_callback),
- (r'\}', closing_brace_callback),
- (r'.+?', Text),
- ],
- 'pre-token' : [
- include('common'),
- (r'\{', Text, ('#pop', 'token')),
- (r'.+?', Text),
- ],
- 'token-sym-brackets' : [
- (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)', brackets_callback(Name), ('#pop', 'pre-token')),
- default(('#pop', 'pre-token')),
- ],
- 'token': [
- (r'}', Text, '#pop'),
- (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
- # make sure that quotes in character classes aren't treated as strings
- (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
- # make sure that '#' characters in quotes aren't treated as comments
- (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
- (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
- (r'#.*?$', Comment.Singleline),
- (r'\{', embedded_perl6_callback),
- ('.+?', String.Regex),
- ],
- }
-
- def analyse_text(text):
- def strip_pod(lines):
- in_pod = False
- stripped_lines = []
-
- for line in lines:
- if re.match(r'^=(?:end|cut)', line):
- in_pod = False
- elif re.match(r'^=\w+', line):
- in_pod = True
- elif not in_pod:
- stripped_lines.append(line)
-
- return stripped_lines
-
- # XXX handle block comments
- lines = text.splitlines()
- lines = strip_pod(lines)
- text = '\n'.join(lines)
-
- if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
- return True
-
- saw_perl_decl = False
- rating = False
-
- # check for my/our/has declarations
- if re.search("(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE + \
- "+\s+)?[$@%&(]", text):
- rating = 0.8
- saw_perl_decl = True
-
- for line in lines:
- line = re.sub('#.*', '', line)
- if re.match('^\s*$', line):
- continue
-
- # match v6; use v6; use v6.0; use v6.0.0;
- if re.match('^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
- return True
- # match class, module, role, enum, grammar declarations
- class_decl = re.match('^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
- if class_decl:
- if saw_perl_decl or class_decl.group('scope') is not None:
- return True
- rating = 0.05
- continue
- break
-
- return rating
-
- def __init__(self, **options):
- super(Perl6Lexer, self).__init__(**options)
- self.encoding = options.get('encoding', 'utf-8')
-
-
-class HyLexer(RegexLexer):
- """
- Lexer for `Hy <http://hylang.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Hy'
- aliases = ['hylang']
- filenames = ['*.hy']
- mimetypes = ['text/x-hy', 'application/x-hy']
-
- special_forms = [
- 'cond', 'for', '->', '->>', 'car',
- 'cdr', 'first', 'rest', 'let', 'when', 'unless',
- 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
- ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
- 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
- 'foreach', 'while',
- 'eval-and-compile', 'eval-when-compile'
- ]
-
- declarations = [
- 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
- ]
-
- hy_builtins = []
-
- hy_core = [
- 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
- 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
- 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
- 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
- ]
-
- builtins = hy_builtins + hy_core
-
- # valid names for identifiers
- # well, names can only not consist fully of numbers
- # but this should be good enough for now
- valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
-
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
-
- tokens = {
- 'root': [
- # the comments - always starting with semicolon
- # and going to the end of the line
- (r';.*$', Comment.Single),
-
- # whitespaces - usually not relevant
- (r'[,\s]+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"\\(.|[a-z]+)", String.Char),
- (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
-
- # keywords
- (r'::?' + valid_name, String.Symbol),
-
- # special operators
- (r'~@|[`\'#^~&@]', Operator),
-
- include('py-keywords'),
- include('py-builtins'),
-
- # highlight the special forms
- (_multi_escape(special_forms), Keyword),
-
- # Technically, only the special forms are 'keywords'. The problem
- # is that only treating them as keywords means that things like
- # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
- # and weird for most styles. So, as a compromise we're going to
- # highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
-
- # highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
-
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # Hy accepts vector notation
- (r'(\[|\])', Punctuation),
-
- # Hy accepts map notation
- (r'(\{|\})', Punctuation),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
-
- ],
- 'py-keywords': PythonLexer.tokens['keywords'],
- 'py-builtins': PythonLexer.tokens['builtins'],
- }
-
- def analyse_text(text):
- if '(import ' in text or '(defn ' in text:
- return 0.9
-
-
-class ChaiscriptLexer(RegexLexer):
- """
- For `ChaiScript <http://chaiscript.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'ChaiScript'
- aliases = ['chai', 'chaiscript']
- filenames = ['*.chai']
- mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'^\#.*?\n', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- ('\n', Text, '#pop')
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'[=+\-*/]', Operator),
- (r'(for|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch'
- r')\b', Keyword, 'slashstartsregex'),
- (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(attr|def|fun)\b', Keyword.Reserved),
- (r'(true|false)\b', Keyword.Constant),
- (r'(eval|throw)\b', Name.Builtin),
- (r'`\S+`', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"', String.Double, 'dqstring'),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ],
- 'dqstring': [
- (r'\${[^"}]+?}', String.Iterpol),
- (r'\$', String.Double),
- (r'\\\\', String.Double),
- (r'\\"', String.Double),
- (r'[^\\\\\\"$]+', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- }
+from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \
+ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer
+from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer
+from pygments.lexers.perl import PerlLexer, Perl6Lexer
+from pygments.lexers.d import CrocLexer, MiniDLexer
+from pygments.lexers.iolang import IoLexer
+from pygments.lexers.tcl import TclLexer
+from pygments.lexers.factor import FactorLexer
+from pygments.lexers.scripting import LuaLexer, MoonScriptLexer
+
+__all__ = []
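
With the old monolithic module split apart, the hunk above leaves pygments/lexers/agile.py as a thin compatibility shim: each class is re-imported from its new home, and __all__ stays empty so the shim contributes nothing of its own to star imports. A minimal sketch of what this preserves, assuming a Pygments checkout with the split modules in place:

    # the old, pre-split import path still resolves to the same class object
    from pygments.lexers.agile import Perl6Lexer
    from pygments.lexers.perl import Perl6Lexer as RelocatedPerl6Lexer

    assert Perl6Lexer is RelocatedPerl6Lexer
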
diff --git a/pygments/lexers/algebra.py b/pygments/lexers/algebra.py
new file mode 100644
index 00000000..dd5bfea4
--- /dev/null
+++ b/pygments/lexers/algebra.py
@@ -0,0 +1,187 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.algebra
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer algebra systems.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GAPLexer', 'MathematicaLexer', 'MuPADLexer']
+
+
+class GAPLexer(RegexLexer):
+ """
+ For `GAP <http://www.gap-system.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'GAP'
+ aliases = ['gap']
+ filenames = ['*.g', '*.gd', '*.gi', '*.gap']
+
+ tokens = {
+ 'root': [
+ (r'#.*$', Comment.Single),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ if|then|elif|else|fi|
+ for|while|do|od|
+ repeat|until|
+ break|continue|
+ function|local|return|end|
+ rec|
+ quit|QUIT|
+ IsBound|Unbind|
+ TryNextMethod|
+ Info|Assert
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ true|false|fail|infinity
+ )\b''',
+ Name.Constant),
+ (r'''(?x)\b(?:
+ (Declare|Install)([A-Z][A-Za-z]+)|
+ BindGlobal|BIND_GLOBAL
+ )\b''',
+ Name.Builtin),
+ (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|mod|in
+ )\b''',
+ Operator.Word),
+ (r'''(?x)
+ (?:[a-zA-Z_0-9]+|`[^`]*`)
+ (?:::[a-zA-Z_0-9]+|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ]
+ }
+
+
+class MathematicaLexer(RegexLexer):
+ """
+ Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mathematica'
+ aliases = ['mathematica', 'mma', 'nb']
+ filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
+ mimetypes = ['application/mathematica',
+ 'application/vnd.wolfram.mathematica',
+ 'application/vnd.wolfram.mathematica.package',
+ 'application/vnd.wolfram.cdf']
+
+ # http://reference.wolfram.com/mathematica/guide/Syntax.html
+ operators = (
+ ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
+ "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
+ "@@@", "~~", "===", "&", "<", ">", "<=", ">=",
+ )
+
+ punctuation = (",", ";", "(", ")", "[", "]", "{", "}")
+
+ def _multi_escape(entries):
+ return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
+
+ tokens = {
+ 'root': [
+ (r'(?s)\(\*.*?\*\)', Comment),
+
+ (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
+ (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
+ (r'#\d*', Name.Variable),
+ (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
+
+ (r'-?[0-9]+\.[0-9]*', Number.Float),
+ (r'-?[0-9]*\.[0-9]+', Number.Float),
+ (r'-?[0-9]+', Number.Integer),
+
+ (words(operators), Operator),
+ (words(punctuation), Punctuation),
+ (r'".*?"', String),
+ (r'\s+', Text.Whitespace),
+ ],
+ }
+
+
+class MuPADLexer(RegexLexer):
+ """
+ A `MuPAD <http://www.mupad.com>`_ lexer.
+ Contributed by Christopher Creutzig <christopher@creutzig.de>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'MuPAD'
+ aliases = ['mupad']
+ filenames = ['*.mu']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"(?:[^"\\]|\\.)*"', String),
+ (r'\(|\)|\[|\]|\{|\}', Punctuation),
+ (r'''(?x)\b(?:
+ next|break|end|
+ axiom|end_axiom|category|end_category|domain|end_domain|inherits|
+ if|%if|then|elif|else|end_if|
+ case|of|do|otherwise|end_case|
+ while|end_while|
+ repeat|until|end_repeat|
+ for|from|to|downto|step|end_for|
+ proc|local|option|save|begin|end_proc|
+ delete|frame
+ )\b''', Keyword),
+ (r'''(?x)\b(?:
+ DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
+ DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
+ DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
+ DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
+ )\b''', Name.Class),
+ (r'''(?x)\b(?:
+ PI|EULER|E|CATALAN|
+ NIL|FAIL|undefined|infinity|
+ TRUE|FALSE|UNKNOWN
+ )\b''',
+ Name.Constant),
+ (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
+ (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
+ (r'''(?x)\b(?:
+ and|or|not|xor|
+ assuming|
+ div|mod|
+ union|minus|intersect|in|subset
+ )\b''',
+ Operator.Word),
+ (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
+ # (r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
+ (r'''(?x)
+ ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
+ (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'''(?x)
+ (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
+ (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
+ (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
+ (r'\.[0-9]+(?:e[0-9]+)?', Number),
+ (r'.', Text)
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ]
+ }
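
The new algebra module defines ordinary RegexLexer subclasses, so once the alias table in pygments/lexers/_mapping.py is regenerated they are reachable through the usual lookup API. A hedged usage sketch, with the alias taken from the MathematicaLexer definition above and an invented input snippet:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    snippet = 'Integrate[Sin[x]^2, {x, 0, Pi}]'
    print(highlight(snippet, get_lexer_by_name('mathematica'), TerminalFormatter()))
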
diff --git a/pygments/lexers/ambient.py b/pygments/lexers/ambient.py
new file mode 100644
index 00000000..22ee0f9c
--- /dev/null
+++ b/pygments/lexers/ambient.py
@@ -0,0 +1,76 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ambient
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the AmbientTalk language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['AmbientTalkLexer']
+
+
+class AmbientTalkLexer(RegexLexer):
+ """
+ Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'AmbientTalk'
+ filenames = ['*.at']
+ aliases = ['at', 'ambienttalk', 'ambienttalk/2']
+ mimetypes = ['text/x-ambienttalk']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
+ 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
+ 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
+ 'mirroredBy:', 'is:'))
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(def|deftype|import|alias|exclude)\b', Keyword),
+ (builtin, Name.Builtin),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\|', Punctuation, 'arglist'),
+ (r'<:|[\^\*!%&<>+=,./?-]|:=', Operator),
+ (r"`[a-zA-Z_]\w*", String.Symbol),
+ (r"[a-zA-Z_]\w*:", Name.Function),
+ (r"[\{\}()\[\];`]", Punctuation),
+ (r'(self|super)\b', Name.Variable.Instance),
+ (r"[a-zA-Z_]\w*", Name.Variable),
+ (r"@[a-zA-Z_]\w*", Name.Class),
+ (r"@\[", Name.Class, 'annotations'),
+ include('numbers'),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_]\w*\.', Name.Namespace),
+ (r'[a-zA-Z_]\w*:', Name.Function, '#pop'),
+ (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop')
+ ],
+ 'annotations': [
+ (r"(.*?)\]", Name.Class, '#pop')
+ ],
+ 'arglist': [
+ (r'\|', Punctuation, '#pop'),
+ (r'\s*(,)\s*', Punctuation),
+ (r'[a-zA-Z_]\w*', Name.Variable),
+ ],
+ }
diff --git a/pygments/lexers/apl.py b/pygments/lexers/apl.py
new file mode 100644
index 00000000..d29133ee
--- /dev/null
+++ b/pygments/lexers/apl.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.apl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for APL.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['APLLexer']
+
+
+class APLLexer(RegexLexer):
+ """
+ A simple APL lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'APL'
+ aliases = ['apl']
+ filenames = ['*.apl']
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ # ==========
+ (r'\s+', Text),
+ #
+ # Comment
+ # =======
+ # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
+ (u'[⍝#].*$', Comment.Single),
+ #
+ # Strings
+ # =======
+ (r'\'((\'\')|[^\'])*\'', String.Single),
+ (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
+ #
+ # Punctuation
+ # ===========
+ # This token type is used for diamond and parenthesis
+ # but not for bracket and ; (see below)
+ (u'[⋄◇()]', Punctuation),
+ #
+ # Array indexing
+ # ==============
+ # Since this token type is very important in APL, it is not included in
+ # the punctuation token type but rather in the following one
+ (r'[\[\];]', String.Regex),
+ #
+ # Distinguished names
+ # ===================
+ # following IBM APL2 standard
+ (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
+ #
+ # Labels
+ # ======
+ # following IBM APL2 standard
+ # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
+ #
+ # Variables
+ # =========
+ # following IBM APL2 standard
+ (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
+ #
+ # Numbers
+ # =======
+ (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
+ u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
+ Number),
+ #
+ # Operators
+ # ==========
+ (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
+ (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
+ Operator),
+ #
+ # Constant
+ # ========
+ (u'⍬', Name.Constant),
+ #
+ # Quad symbol
+ # ===========
+ (u'[⎕⍞]', Name.Variable.Global),
+ #
+ # Arrows left/right
+ # =================
+ (u'[←→]', Keyword.Declaration),
+ #
+ # D-Fn
+ # ====
+ (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
+ (r'[{}]', Keyword.Type),
+ ],
+ }
diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py
index f995264c..f16dbadd 100644
--- a/pygments/lexers/asm.py
+++ b/pygments/lexers/asm.py
@@ -12,12 +12,14 @@
import re
from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer
-from pygments.lexers.compiled import DLexer, CppLexer, CLexer
+from pygments.lexers.c_cpp import CppLexer, CLexer
+from pygments.lexers.d import DLexer
from pygments.token import Text, Name, Number, String, Comment, Punctuation, \
- Other, Keyword, Operator
+ Other, Keyword, Operator
-__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer',
- 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer', 'Ca65Lexer']
+__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer',
+ 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer',
+ 'Ca65Lexer']
class GasLexer(RegexLexer):
@@ -154,11 +156,9 @@ class ObjdumpLexer(RegexLexer):
filenames = ['*.objdump']
mimetypes = ['text/x-objdump']
-
tokens = _objdump_lexer_tokens(GasLexer)
-
class DObjdumpLexer(DelegatingLexer):
"""
For the output of 'objdump -Sr on compiled D files'
@@ -220,11 +220,11 @@ class LlvmLexer(RegexLexer):
include('keyword'),
- (r'%' + identifier, Name.Variable),#Name.Identifier.Local),
- (r'@' + identifier, Name.Variable.Global),#Name.Identifier.Global),
- (r'%\d+', Name.Variable.Anonymous),#Name.Identifier.Anonymous),
- (r'@\d+', Name.Variable.Global),#Name.Identifier.Anonymous),
- (r'#\d+', Name.Variable.Global),#Name.Identifier.Global),
+ (r'%' + identifier, Name.Variable),
+ (r'@' + identifier, Name.Variable.Global),
+ (r'%\d+', Name.Variable.Anonymous),
+ (r'@\d+', Name.Variable.Global),
+ (r'#\d+', Name.Variable.Global),
(r'!' + identifier, Name.Variable),
(r'!\d+', Name.Variable.Anonymous),
(r'c?' + string, String),
@@ -402,7 +402,7 @@ class Ca65Lexer(RegexLexer):
.. versionadded:: 1.6
"""
- name = 'ca65'
+ name = 'ca65 assembler'
aliases = ['ca65']
filenames = ['*.s']
@@ -424,7 +424,7 @@ class Ca65Lexer(RegexLexer):
(r'\$[0-9a-f]+|[0-9a-f]+h\b', Number.Hex),
(r'\d+', Number.Integer),
(r'%[01]+', Number.Bin),
- (r'[#,.:()=]', Punctuation),
+ (r'[#,.:()=\[\]]', Punctuation),
(r'[a-z_.@$][\w.@$]*', Name),
]
}
diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py
new file mode 100644
index 00000000..e012aae4
--- /dev/null
+++ b/pygments/lexers/automation.py
@@ -0,0 +1,373 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.automation
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for automation scripting languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, combined
+from pygments.token import Text, Comment, Operator, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['AutohotkeyLexer', 'AutoItLexer']
+
+
+class AutohotkeyLexer(RegexLexer):
+ """
+ For `autohotkey <http://www.autohotkey.com/>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'autohotkey'
+ aliases = ['ahk', 'autohotkey']
+ filenames = ['*.ahk', '*.ahkl']
+ mimetypes = ['text/x-autohotkey']
+
+ tokens = {
+ 'root': [
+ (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'),
+ (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
+ (r'\s+;.*?$', Comment.Singleline),
+ (r'^;.*?$', Comment.Singleline),
+ (r'[]{}(),;[]', Punctuation),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+ include('builtInVariables'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
+ include('garbage'),
+ ],
+ 'incomment': [
+ (r'^\s*\*/', Comment.Multiline, '#pop'),
+ (r'[^*/]', Comment.Multiline),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'incontinuation': [
+ (r'^\s*\)', Generic, '#pop'),
+ (r'[^)]', Generic),
+ (r'[)]', Generic),
+ ],
+ 'commands': [
+ (r'(?i)^(\s*)(global|local|static|'
+ r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
+ r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
+ r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
+ r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
+ r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
+ r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
+ r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
+ r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
+ r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
+ r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
+ r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
+ r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
+ r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
+ r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
+ r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
+ r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
+ r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
+ r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
+ r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
+ r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
+ r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
+ r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
+ r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
+ r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
+ r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
+ r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
+ r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
+ r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
+ r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
+ r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
+ r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
+ r'SetBatchLines|SetCapslockState|SetControlDelay|'
+ r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
+ r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
+ r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
+ r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
+ r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
+ r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
+ r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
+ r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
+ r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
+ r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
+ r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
+ r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
+ r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
+ r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
+ r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
+ r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
+ r'WinWait)\b', bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
+ r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
+ r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
+ r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
+ r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
+ r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
+ r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
+ r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
+ r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
+ r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
+ r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
+ r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
+ r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
+ r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
+ r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
+ r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
+ Name.Function),
+ ],
+ 'builtInVariables': [
+ (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
+ r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
+ r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
+ r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
+ r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
+ r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
+ r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
+ r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
+ r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
+ r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
+ r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
+ r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
+ r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
+ r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
+ r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
+ r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
+ r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
+ r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
+ r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
+ r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
+ r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
+ r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
+ r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
+ r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
+ r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
+ r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
+ r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
+ r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
+ r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
+ r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
+ Name.Variable),
+ ],
+ 'labels': [
+ # hotkeys and labels
+ # technically, hotkey names are limited to named keys and buttons
+ (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)),
+ (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ # (r'.', Text), # no cheating
+ ],
+ }
+
+
+class AutoItLexer(RegexLexer):
+ """
+ For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
+
+ AutoIt is a freeware BASIC-like scripting language
+    designed for automating the Windows GUI and general scripting.
+
+ .. versionadded:: 1.6
+ """
+ name = 'AutoIt'
+ aliases = ['autoit']
+ filenames = ['*.au3']
+ mimetypes = ['text/x-autoit']
+
+ # Keywords, functions, macros from au3.keywords.properties
+    # which can be found in the AutoIt installation directory, e.g.
+ # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
+
+ keywords = """\
+ #include-once #include #endregion #forcedef #forceref #region
+ and byref case continueloop dim do else elseif endfunc endif
+ endselect exit exitloop for func global
+ if local next not or return select step
+ then to until wend while exit""".split()
+
+ functions = """\
+ abs acos adlibregister adlibunregister asc ascw asin assign atan
+ autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
+ binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
+ blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
+ consolewrite consolewriteerror controlclick controlcommand controldisable
+ controlenable controlfocus controlgetfocus controlgethandle controlgetpos
+ controlgettext controlhide controllistview controlmove controlsend
+ controlsettext controlshow controltreeview cos dec dircopy dircreate
+ dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
+ dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
+ dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
+ drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
+ drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
+ drivespacetotal drivestatus envget envset envupdate eval execute exp
+ filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
+ filedelete fileexists filefindfirstfile filefindnextfile fileflush
+ filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
+ filegetshortname filegetsize filegettime filegetversion fileinstall filemove
+ fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
+ filesavedialog fileselectfolder filesetattrib filesetpos filesettime
+ filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
+ guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
+ guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
+ guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
+ guictrlcreateinput guictrlcreatelabel guictrlcreatelist
+ guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
+ guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
+ guictrlcreatepic guictrlcreateprogress guictrlcreateradio
+ guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
+ guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
+ guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
+ guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
+ guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
+ guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
+ guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
+ guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
+ guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
+ guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
+ guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
+ httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
+ inetread inidelete iniread inireadsection inireadsectionnames
+ inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
+ isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
+ isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
+ mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
+ number objcreate objcreateinterface objevent objevent objget objname
+ onautoitexitregister onautoitexitunregister opt ping pixelchecksum
+ pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
+ processgetstats processlist processsetpriority processwait processwaitclose
+ progressoff progresson progressset ptr random regdelete regenumkey
+ regenumval regread regwrite round run runas runaswait runwait send
+ sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
+ sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
+ sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
+ string stringaddcr stringcompare stringformat stringfromasciiarray
+ stringinstr stringisalnum stringisalpha stringisascii stringisdigit
+ stringisfloat stringisint stringislower stringisspace stringisupper
+ stringisxdigit stringleft stringlen stringlower stringmid stringregexp
+ stringregexpreplace stringreplace stringright stringsplit stringstripcr
+ stringstripws stringtoasciiarray stringtobinary stringtrimleft
+ stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
+ tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
+ timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
+ trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
+ trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
+ traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
+ udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
+ winactivate winactive winclose winexists winflash wingetcaretpos
+ wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
+ wingetstate wingettext wingettitle winkill winlist winmenuselectitem
+ winminimizeall winminimizeallundo winmove winsetontop winsetstate
+ winsettitle winsettrans winwait winwaitactive winwaitclose
+ winwaitnotactive""".split()
+
+ macros = """\
+ @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
+ @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
+ @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
+ @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
+ @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
+ @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
+ @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
+ @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
+ @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
+ @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
+ @osversion @programfilesdir @programscommondir @programsdir @scriptdir
+ @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
+ @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
+ @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
+ @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
+ @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
+ @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
+ @windowsdir @workingdir @yday @year""".split()
+
+ tokens = {
+ 'root': [
+ (r';.*\n', Comment.Single),
+ (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'[\$|@][a-zA-Z_]\w*', Name.Variable),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
+ include('commands'),
+ include('labels'),
+ include('builtInFunctions'),
+            include('builtInMacros'),
+ (r'"', String, combined('stringescape', 'dqs')),
+ include('numbers'),
+ (r'[a-zA-Z_#@$][\w#@$]*', Name),
+ (r'\\|\'', Text),
+ (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
+ (r'_\n', Text), # Line continuation
+ include('garbage'),
+ ],
+ 'commands': [
+ (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
+ bygroups(Text, Name.Builtin)),
+ ],
+ 'builtInFunctions': [
+ (r'(?i)(%s)\b' % '|'.join(functions),
+ Name.Function),
+ ],
+        'builtInMacros': [
+ (r'(?i)(%s)\b' % '|'.join(macros),
+ Name.Variable.Global),
+ ],
+ 'labels': [
+ # sendkeys
+ (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'stringescape': [
+ (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
+ ],
+ 'strings': [
+ (r'[^"\n]+', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'garbage': [
+ (r'[^\S\n]', Text),
+ ],
+ }
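
AutoItLexer assembles its keyword, function and macro rules by joining the word lists above into a single case-insensitive alternation terminated by \b. A small standalone sketch of that pattern-building step (the word list here is a made-up subset, not the real one):

    import re

    # same construction as (r'(?i)(%s)\b' % '|'.join(functions)) in the lexer above
    keywords = ['if', 'else', 'endif', 'func', 'endfunc']
    pattern = re.compile(r'(?i)(%s)\b' % '|'.join(keywords))
    print(pattern.findall('Func Hello()\n  If $x Then ...\nEndFunc'))
    # -> ['Func', 'If', 'EndFunc']
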
diff --git a/pygments/lexers/basic.py b/pygments/lexers/basic.py
new file mode 100644
index 00000000..a5e79e5e
--- /dev/null
+++ b/pygments/lexers/basic.py
@@ -0,0 +1,500 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.basic
+ ~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for BASIC-like languages (other than VB.net).
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer',
+ 'QBasicLexer']
+
+
+class BlitzMaxLexer(RegexLexer):
+ """
+ For `BlitzMax <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'BlitzMax'
+ aliases = ['blitzmax', 'bmax']
+ filenames = ['*.bmx']
+ mimetypes = ['text/x-bmx']
+
+ bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
+ bmax_sktypes = r'@{1,2}|[!#$%]'
+ bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
+ bmax_name = r'[a-z_]\w*'
+ bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
+ r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
+ (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
+ bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ (r'\.\.\n', Text), # Line continuation
+ # Comments
+ (r"'.*?\n", Comment.Single),
+ (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]*(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
+ (bmax_vopwords), Operator),
+ (r'[(),.:\[\]]', Punctuation),
+ (r'(?:#[\w \t]*)', Name.Label),
+ (r'(?:\?[\w \t]*)', Comment.Preproc),
+ # Identifiers
+ (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
+ (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
+ (bmax_name, bmax_name),
+ bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
+ (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
+ Operator, Text, Punctuation, Text,
+ Keyword.Type, Name.Class, Text,
+ Keyword.Type, Text, Punctuation)),
+ (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
+ Text, Punctuation, Text, Keyword.Type,
+ Name.Class, Text, Keyword.Type)),
+ (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Ptr)\b', Keyword.Type),
+ (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
+ (words((
+ 'TNullMethodException', 'TNullFunctionException',
+ 'TNullObjectException', 'TArrayBoundsException',
+ 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception),
+ (words((
+ 'Strict', 'SuperStrict', 'Module', 'ModuleInfo',
+ 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private',
+ 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max',
+ 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen',
+ 'Framework', 'Include', 'Import', 'Extern', 'EndExtern',
+ 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod',
+ 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect',
+ 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData',
+ 'RestoreData'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ (r'(%s)' % (bmax_name), Name.Variable),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class BlitzBasicLexer(RegexLexer):
+ """
+ For `BlitzBasic <http://blitzbasic.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'BlitzBasic'
+ aliases = ['blitzbasic', 'b3d', 'bplus']
+ filenames = ['*.bb', '*.decls']
+ mimetypes = ['text/x-bb']
+
+ bb_sktypes = r'@{1,2}|[#$%]'
+ bb_name = r'[a-z]\w*'
+ bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
+ (bb_name, bb_sktypes, bb_name)
+
+ flags = re.MULTILINE | re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Text
+ (r'[ \t]+', Text),
+ # Comments
+ (r";.*?\n", Comment.Single),
+ # Data types
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-f]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Other
+ (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not',
+ 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str',
+ 'First', 'Last', 'Before', 'After'),
+ prefix=r'\b', suffix=r'\b'),
+ Operator),
+ (r'([+\-*/~=<>^])', Operator),
+ (r'[(),:\[\]\\]', Punctuation),
+ (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
+ # Identifiers
+ (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Label)),
+ (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
+ bygroups(Operator, Text, Punctuation, Text, Name.Class)),
+ (r'\b%s\b([ \t]*)(\()' % bb_var,
+ bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation,
+ Text, Name.Class, Text, Punctuation)),
+ (r'\b(Function)\b([ \t]+)%s' % bb_var,
+ bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ (r'\b(Type)([ \t]+)(%s)' % (bb_name),
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ # Keywords
+ (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
+ (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
+ (words((
+ 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert',
+ 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf',
+ 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend',
+ 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default',
+ 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ # Final resolve (for variable names and such)
+ # (r'(%s)' % (bb_name), Name.Variable),
+ (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
+ Text, Punctuation, Text, Name.Class)),
+ ],
+ 'string': [
+ (r'""', String.Double),
+ (r'"C?', String.Double, '#pop'),
+ (r'[^"]+', String.Double),
+ ],
+ }
+
+
+class MonkeyLexer(RegexLexer):
+ """
+ For
+ `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Monkey'
+ aliases = ['monkey']
+ filenames = ['*.monkey']
+ mimetypes = ['text/x-monkey']
+
+ name_variable = r'[a-z_]\w*'
+ name_function = r'[A-Z]\w*'
+ name_constant = r'[A-Z_][A-Z0-9_]*'
+ name_class = r'[A-Z]\w*'
+ name_module = r'[a-z0-9_]*'
+
+ keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
+ # ? == Bool // % == Int // # == Float // $ == String
+ keyword_type_special = r'[?%#$]'
+
+ flags = re.MULTILINE
+
+ tokens = {
+ 'root': [
+ # Text
+ (r'\s+', Text),
+ # Comments
+ (r"'.*", Comment),
+ (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
+ # preprocessor directives
+ (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
+ # preprocessor variable (any line starting with '#' that is not a directive)
+ (r'^#', Comment.Preproc, 'variables'),
+ # String
+ ('"', String.Double, 'string'),
+ # Numbers
+ (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
+ (r'\.[0-9]+(?!\.)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\$[0-9a-fA-Z]+', Number.Hex),
+ (r'\%[10]+', Number.Bin),
+ # Native data types
+ (r'\b%s\b' % keyword_type, Keyword.Type),
+ # Exception handling
+ (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
+ (r'Throwable', Name.Exception),
+ # Builtins
+ (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
+ (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
+ (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
+ # Keywords
+ (r'(?i)^(Import)(\s+)(.*)(\n)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
+ (r'(?i)^Strict\b.*\n', Keyword.Reserved),
+ (r'(?i)(Const|Local|Global|Field)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'variables'),
+ (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'classname'),
+ (r'(?i)(Function|Method)(\s+)',
+ bygroups(Keyword.Reserved, Text), 'funcname'),
+ (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
+ r'Final|Abstract)\b', Keyword.Reserved),
+ # Flow Control stuff
+ (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
+ r'Select|Case|Default|'
+ r'While|Wend|'
+ r'Repeat|Until|Forever|'
+ r'For|To|Until|Step|EachIn|Next|'
+ r'Exit|Continue)\s+', Keyword.Reserved),
+ # not used yet
+ (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
+ # Array
+ (r'[\[\]]', Punctuation),
+ # Other
+ (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
+ (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
+ (r'[\(\){}!#,.:]', Punctuation),
+ # catch the rest
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_function, Name.Function),
+ (r'%s\b' % name_variable, Name.Variable),
+ ],
+ 'funcname': [
+ (r'(?i)%s\b' % name_function, Name.Function),
+ (r':', Punctuation, 'classname'),
+ (r'\s+', Text),
+ (r'\(', Punctuation, 'variables'),
+ (r'\)', Punctuation, '#pop')
+ ],
+ 'classname': [
+ (r'%s\.' % name_module, Name.Namespace),
+ (r'%s\b' % keyword_type, Keyword.Type),
+ (r'%s\b' % name_class, Name.Class),
+ # array (of given size)
+ (r'(\[)(\s*)(\d*)(\s*)(\])',
+ bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
+ # generics
+ (r'\s+(?!<)', Text, '#pop'),
+ (r'<', Punctuation, '#push'),
+ (r'>', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ default('#pop')
+ ],
+ 'variables': [
+ (r'%s\b' % name_constant, Name.Constant),
+ (r'%s\b' % name_variable, Name.Variable),
+ (r'%s' % keyword_type_special, Keyword.Type),
+ (r'\s+', Text),
+ (r':', Punctuation, 'classname'),
+ (r',', Punctuation, '#push'),
+ default('#pop')
+ ],
+ 'string': [
+ (r'[^"~]+', String.Double),
+ (r'~q|~n|~r|~t|~z|~~', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'comment': [
+ (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
+ (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
+ (r'\n', Comment.Multiline),
+ (r'.+', Comment.Multiline),
+ ],
+ }
+
+
+class CbmBasicV2Lexer(RegexLexer):
+ """
+ For CBM BASIC V2 sources.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CBM BASIC V2'
+ aliases = ['cbmbas']
+ filenames = ['*.bas']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'rem.*\n', Comment.Single),
+ (r'\s+', Text),
+ (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
+ r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
+ r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
+ (r'data|restore|dim|let|def|fn', Keyword.Declaration),
+ (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
+ r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
+ (r'[-+*/^<>=]', Operator),
+ (r'not|and|or', Operator.Word),
+ (r'"[^"\n]*.', String),
+ (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
+ (r'[\(\),:;]', Punctuation),
+ (r'\w+[$%]?', Name),
+ ]
+ }
+
+ def analyse_text(self, text):
+ # if it starts with a line number, it shouldn't be a "modern" Basic
+ # like VB.net
+ if re.match(r'\d+', text):
+ return 0.2
+
+
+class QBasicLexer(RegexLexer):
+ """
+ For
+ `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
+ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'QBasic'
+ aliases = ['qbasic', 'basic']
+ filenames = ['*.BAS', '*.bas']
+ mimetypes = ['text/basic']
+
+ declarations = ('DATA', 'LET')
+
+ functions = (
+ 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
+ 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
+ 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
+ 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
+ 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
+ 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
+ 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
+ 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
+ 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
+ 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
+ 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
+ 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
+ 'VARPTR$', 'VARSEG'
+ )
+
+ metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC')
+
+ operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR')
+
+ statements = (
+ 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
+ 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
+ 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
+ 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
+ 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
+ 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
+ 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
+ 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
+ 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
+ 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
+ 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
+ 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
+ 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
+ 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
+ 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
+ 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
+ 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
+ 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
+ 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
+ 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
+ )
+
+ keywords = (
+ 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
+ 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
+ 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
+ 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
+ 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\n+', Text),
+ (r'\s+', Text.Whitespace),
+ (r'^(\s*)(\d*)(\s*)(REM .*)$',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
+ Comment.Single)),
+ (r'^(\s*)(\d+)(\s*)',
+ bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
+ (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
+ (r'(?=[^"]*)\'.*$', Comment.Single),
+ (r'"[^\n\"]*"', String.Double),
+ (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
+ (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name)),
+ (r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
+ Text.Whitespace, Name.Variable.Global)),
+ (r'(DIM)(\s+)([^\s\(]+)',
+ bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
+ (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
+ bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
+ Operator)),
+ (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ (r'(SUB)(\s+)(\w+\:?)',
+ bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
+ include('declarations'),
+ include('functions'),
+ include('metacommands'),
+ include('operators'),
+ include('statements'),
+ include('keywords'),
+ (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global),
+ (r'[a-zA-Z_]\w*\:', Name.Label),
+ (r'\-?\d*\.\d+[@|#]?', Number.Float),
+ (r'\-?\d+[@|#]', Number.Float),
+ (r'\-?\d+#?', Number.Integer.Long),
+ (r'\-?\d+#?', Number.Integer),
+ (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
+ (r'[\[\]{}(),;]', Punctuation),
+ (r'[\w]+', Name.Variable.Global),
+ ],
+ # can't use regular \b because of X$()
+ # XXX: use words() here
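+        # (a possible words()-based sketch, equivalent to the 'declarations'
+        #  rule below but compiled into one optimized alternation:
+        #      (words(declarations, prefix=r'\b', suffix=r'(?=\(|\b)'),
+        #       Keyword.Declaration)
+        #  untested here, hence the XXX above)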
+ 'declarations': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
+ Keyword.Declaration),
+ ],
+ 'functions': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
+ Keyword.Reserved),
+ ],
+ 'metacommands': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
+ Keyword.Constant),
+ ],
+ 'operators': [
+ (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
+ ],
+ 'statements': [
+ (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
+ Keyword.Reserved),
+ ],
+ 'keywords': [
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ ],
+ }
+
+ def analyse_text(text):
+ if '$DYNAMIC' in text or '$STATIC' in text:
+ return 0.9
diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py
new file mode 100644
index 00000000..e2806664
--- /dev/null
+++ b/pygments/lexers/business.py
@@ -0,0 +1,592 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.business
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for "business-oriented" languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+from pygments.lexers._openedge_builtins import OPENEDGEKEYWORDS
+
+__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer',
+ 'GoodDataCLLexer', 'MaqlLexer']
+
+
+class CobolLexer(RegexLexer):
+ """
+ Lexer for OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOL'
+ aliases = ['cobol']
+ filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
+ mimetypes = ['text/x-cobol']
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # Data Types: by PICTURE and USAGE
+ # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
+ # Logical (?): NOT, AND, OR
+
+ # Reserved words:
+ # http://opencobol.add1tocobol.com/#reserved-words
+ # Intrinsics:
+ # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
+
+ tokens = {
+ 'root': [
+ include('comment'),
+ include('strings'),
+ include('core'),
+ include('nums'),
+ (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable),
+ # (r'[\s]+', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'comment': [
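+            # fixed-format source: columns 1-6 form the sequence area, a '*'
+            # or '/' in column 7 marks a comment line, and '*>' introduces an
+            # inline comment.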
+ (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
+ ],
+ 'core': [
+ # Figurative constants
+ (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
+ r'\s*($|(?=[^0-9a-z_\-]))',
+ Name.Constant),
+
+ # Reserved words STATEMENTS and other bolds
+ (words((
+ 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE',
+ 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE',
+ 'DIVISION', 'ELSE', 'END', 'END-ACCEPT',
+ 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY',
+ 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE',
+ 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH',
+ 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE',
+ 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER',
+ 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE',
+ 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE',
+ 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM',
+ 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME',
+ 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET',
+ 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS',
+ 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE',
+ 'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^0-9a-z_\-]))',
+ suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Keyword.Reserved),
+
+ # Reserved words
+ (words((
+ 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL',
+ 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER',
+                'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE',
+ 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS',
+ 'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC', 'AUTOTERMINATE',
+ 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL',
+ 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING',
+ 'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL', 'COLLATING',
+ 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE', 'COMMIT', 'COMMON',
+ 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL',
+ 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT',
+ 'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE', 'DEBUGGING',
+ 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED',
+ 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK',
+ 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC',
+ 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP',
+ 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION',
+ 'EXCLUSIVE', 'EXTEND', 'EXTERNAL',
+ 'FILE-ID', 'FILLER', 'FINAL', 'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT',
+ 'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL', 'FUNCTION',
+ 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP',
+ 'HEADING', 'HIGHLIGHT', 'I-O', 'ID',
+ 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE',
+ 'INITIAL', 'INITIALIZED', 'INPUT',
+ 'INTO', 'INTRINSIC', 'INVALID', 'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL',
+ 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE',
+ 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK',
+ 'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE',
+ 'MULTIPLE', 'NATIONAL', 'NATIONAL-EDITED', 'NATIVE',
+ 'NEGATIVE', 'NEXT', 'NO', 'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC',
+ 'NUMERIC-EDITED', 'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY',
+ 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW',
+ 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH',
+ 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS',
+ 'PRINTER', 'PRINTING', 'PROCEDURE-POINTER', 'PROCEDURES',
+ 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE',
+ 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE',
+ 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL',
+ 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY',
+ 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND',
+ 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL',
+ 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE',
+ 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT',
+ 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE',
+ 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD',
+ 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM',
+ 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE',
+ 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING',
+ 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED',
+ 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP',
+ 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING',
+ 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'),
+ prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Keyword.Pseudo),
+
+ # inactive reserved words
+ (words((
+ 'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE', 'B-AND',
+ 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER', 'CF', 'CH', 'CHAIN', 'CLASS-ID',
+ 'CLASSIFICATION', 'COMMUNICATION', 'CONDITION', 'DATA-POINTER',
+ 'DESTINATION', 'DISABLE', 'EC', 'EGI', 'EMI', 'ENABLE', 'END-RECEIVE',
+ 'ENTRY-CONVENTION', 'EO', 'ESI', 'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY',
+ 'FLOAT-BINARY-16', 'FLOAT-BINARY-34', 'FLOAT-BINARY-7',
+ 'FLOAT-DECIMAL-16', 'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT',
+ 'FUNCTION-POINTER', 'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY',
+ 'INHERITS', 'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE',
+ 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME',
+ 'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE', 'NORMAL',
+ 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE', 'PAGE-COUNTER', 'PF', 'PH',
+ 'PROPERTY', 'PROTOTYPE', 'PURGE', 'QUEUE', 'RAISE', 'RAISING', 'RECEIVE',
+ 'RELATION', 'REPLACE', 'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY',
+ 'RF', 'RH', 'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT', 'STEP',
+ 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3', 'SUPER', 'SYMBOL',
+ 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT', 'TYPEDEF', 'UCS-4', 'UNIVERSAL',
+ 'USER-DEFAULT', 'UTF-16', 'UTF-8', 'VAL-STATUS', 'VALID', 'VALIDATE',
+ 'VALIDATE-STATUS'),
+ prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'),
+ Error),
+
+ # Data Types
+ (r'(^|(?<=[^0-9a-z_\-]))'
+ r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
+ r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
+ r'BINARY-C-LONG|'
+ r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
+ r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
+
+ # (r'(::)', Keyword.Declaration),
+
+ (r'([(),;:&%.])', Punctuation),
+
+ # Intrinsics
+ (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
+ r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
+ r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
+ r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
+ r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
+ r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|'
+ r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
+ r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
+ r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
+ r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
+ r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
+ r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
+ r'($|(?=[^0-9a-z_\-]))', Name.Function),
+
+ # Booleans
+ (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
+ # Comparing Operators
+ (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
+ r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
+ ],
+
+ # \"[^\"\n]*\"|\'[^\'\n]*\'
+ 'strings': [
+ # apparently strings can be delimited by EOL if they are continued
+ # in the next line
+ (r'"[^"\n]*("|\n)', String.Double),
+ (r"'[^'\n]*('|\n)", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(\s*|\.$|$)', Number.Integer),
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
+ ],
+ }
+
+
+class CobolFreeformatLexer(CobolLexer):
+ """
+ Lexer for Free format OpenCOBOL code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'COBOLFree'
+ aliases = ['cobolfree']
+ filenames = ['*.cbl', '*.CBL']
+ mimetypes = []
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'comment': [
+ (r'(\*>.*\n|^\w*\*.*$)', Comment),
+ ],
+ }
+
+
+class ABAPLexer(RegexLexer):
+ """
+ Lexer for ABAP, SAP's integrated language.
+
+ .. versionadded:: 1.1
+ """
+ name = 'ABAP'
+ aliases = ['abap']
+ filenames = ['*.abap']
+ mimetypes = ['text/x-abap']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'common': [
+ (r'\s+', Text),
+ (r'^\*.*$', Comment.Single),
+ (r'\".*?\n', Comment.Single),
+ ],
+ 'variable-names': [
+ (r'<\S+>', Name.Variable),
+ (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
+ ],
+ 'root': [
+ include('common'),
+ # function calls
+ (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
+ r'TRANSACTION|TRANSFORMATION))\b',
+ Keyword),
+ (r'(FORM|PERFORM)(\s+)(\w+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(PERFORM)(\s+)(\()(\w+)(\))',
+ bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation)),
+ (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
+ bygroups(Keyword, Text, Name.Function, Text, Keyword)),
+
+ # method implementation
+ (r'(METHOD)(\s+)([\w~]+)',
+ bygroups(Keyword, Text, Name.Function)),
+ # method calls
+ (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
+ bygroups(Text, Name.Variable, Operator, Name.Function)),
+ # call methodnames returning style
+ (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
+
+ # keywords with dashes in them.
+ # these need to be first, because for instance the -ID part
+ # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
+ # first in the list of keywords.
+ (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
+ r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
+ r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
+ r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
+ r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
+ r'INTERFACE-POOL|INVERTED-DATE|'
+ r'LOAD-OF-PROGRAM|LOG-POINT|'
+ r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
+ r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
+ r'OUTPUT-LENGTH|PRINT-CONTROL|'
+ r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
+ r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
+ r'TYPE-POOL|TYPE-POOLS'
+ r')\b', Keyword),
+
+            # keyword combinations
+ (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
+ r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
+ r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
+ r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
+ r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
+ r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
+ r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
+ r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
+ r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
+ r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
+ r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
+ r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
+ r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
+             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
+ r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
+ r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
+ r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
+ r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
+ r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
+ r'FREE\s(MEMORY|OBJECT)?|'
+ r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
+ r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
+ r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
+ r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
+ r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
+ r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
+ r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
+ r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
+ r'SKIP|ULINE)|'
+ r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
+             r'TO LIST-PROCESSING|TO TRANSACTION)|'
+ r'(ENDING|STARTING)\s+AT|'
+ r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
+ r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
+ r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
+ r'(BEGIN|END)\s+OF|'
+ r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
+ r'COMPARING(\s+ALL\s+FIELDS)?|'
+ r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
+ r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
+ r'END-OF-(DEFINITION|PAGE|SELECTION)|'
+ r'WITH\s+FRAME(\s+TITLE)|'
+
+            # simple combinations
+ r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
+ r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
+ r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
+ r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
+ r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
+ r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
+ r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
+
+ # single word keywords.
+ (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
+ r'ASSIGN(ING)?|AT(\s+FIRST)?|'
+ r'BACK|BLOCK|BREAK-POINT|'
+ r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
+ r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
+ r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
+ r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
+ r'DETAIL|DIRECTORY|DIVIDE|DO|'
+ r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
+ r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
+ r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
+ r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
+ r'HIDE|'
+ r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
+ r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
+ r'LENGTH|LINES|LOAD|LOCAL|'
+ r'JOIN|'
+ r'KEY|'
+ r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
+ r'NODES|'
+ r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
+ r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
+ r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
+ r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
+ r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
+ r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
+ r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
+ r'ULINE|UNDER|UNPACK|UPDATE|USING|'
+ r'VALUE|VALUES|VIA|'
+ r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
+
+ # builtins
+ (r'(abs|acos|asin|atan|'
+ r'boolc|boolx|bit_set|'
+ r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
+ r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
+ r'count|count_any_of|count_any_not_of|'
+ r'dbmaxlen|distance|'
+ r'escape|exp|'
+ r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
+ r'insert|'
+ r'lines|log|log10|'
+ r'match|matches|'
+ r'nmax|nmin|numofchar|'
+ r'repeat|replace|rescale|reverse|round|'
+ r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
+ r'substring|substring_after|substring_from|substring_before|substring_to|'
+ r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
+ r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
+
+ (r'&[0-9]', Name),
+ (r'[0-9]+', Number.Integer),
+
+            # operators which look like variable names, matched before
+            # variable names are parsed.
+ (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
+ r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
+ r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
+
+ include('variable-names'),
+
+            # standard operators after variable names,
+ # because < and > are part of field symbols.
+ (r'[?*<>=\-+]', Operator),
+ (r"'(''|[^'])*'", String.Single),
+ (r"`([^`])*`", String.Single),
+ (r'[/;:()\[\],\.]', Punctuation)
+ ],
+ }
+
+
+class OpenEdgeLexer(RegexLexer):
+ """
+ Lexer for `OpenEdge ABL (formerly Progress)
+ <http://web.progress.com/en/openedge/abl.html>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'OpenEdge ABL'
+ aliases = ['openedge', 'abl', 'progress']
+ filenames = ['*.p', '*.cls']
+ mimetypes = ['text/x-openedge', 'application/x-openedge']
+
+ types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
+ r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
+ r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
+ r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
+ r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
+
+ keywords = words(OPENEDGEKEYWORDS,
+ prefix=r'(?i)(^|(?<=[^0-9a-z_\-]))',
+ suffix=r'\s*($|(?=[^0-9a-z_\-]))')
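+    # (words() wraps the large OPENEDGEKEYWORDS list so one optimized
+    #  alternation is compiled instead of a huge hand-written regex; the
+    #  prefix/suffix mirror the word-boundary checks used for the types above.)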
+
+ tokens = {
+ 'root': [
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'\{', Comment.Preproc, 'preprocessor'),
+ (r'\s*&.*', Comment.Preproc),
+ (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
+ (types, Keyword.Type),
+ (keywords, Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'\s+', Text),
+ (r'[+*/=-]', Operator),
+ (r'[.:()]', Punctuation),
+ (r'.', Name.Variable), # Lazy catch-all
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'preprocessor': [
+ (r'[^{}]', Comment.Preproc),
+ (r'{', Comment.Preproc, '#push'),
+ (r'}', Comment.Preproc, '#pop'),
+ ],
+ }
+
+
+class GoodDataCLLexer(RegexLexer):
+ """
+ Lexer for `GoodData-CL
+ <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/\
+com/gooddata/processor/COMMANDS.txt>`_
+ script files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'GoodData-CL'
+ aliases = ['gooddata-cl']
+ filenames = ['*.gdc']
+ mimetypes = ['text/x-gooddata-cl']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # Comments
+ (r'#.*', Comment.Single),
+ # Function call
+ (r'[a-z]\w*', Name.Function),
+ # Argument list
+ (r'\(', Punctuation, 'args-list'),
+ # Punctuation
+ (r';', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'args-list': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'[a-z]\w*', Name.Variable),
+ (r'=', Operator),
+ (r'"', String, 'string-literal'),
+ (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Number),
+ # Space is not significant
+ (r'\s', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ]
+ }
+
+
+class MaqlLexer(RegexLexer):
+ """
+ Lexer for `GoodData MAQL
+ <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
+ scripts.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'MAQL'
+ aliases = ['maql']
+ filenames = ['*.maql']
+ mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ # IDENTITY
+ (r'IDENTIFIER\b', Name.Builtin),
+ # IDENTIFIER
+ (r'\{[^}]+\}', Name.Variable),
+ # NUMBER
+ (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Number),
+ # STRING
+ (r'"', String, 'string-literal'),
+ # RELATION
+ (r'\<\>|\!\=', Operator),
+ (r'\=|\>\=|\>|\<\=|\<', Operator),
+ # :=
+ (r'\:\=', Operator),
+ # OBJECT
+ (r'\[[^]]+\]', Name.Variable.Class),
+ # keywords
+ (words((
+ 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER',
+ 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT',
+ 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN',
+ 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND',
+ 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE',
+ 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'ORDER', 'FACT',
+ 'WHEN', 'NOT', 'ON', 'KEYS', 'KEY', 'FULLSET', 'PRIMARY',
+ 'LABELS', 'LABEL', 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER',
+ 'ALTER', 'DROP', 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT',
+ 'DOUBLE', 'DATE', 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE',
+ 'DEFAULT', 'ORDER', 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE',
+ 'TEMPLATE', 'MODIFY'), suffix=r'\b'),
+ Keyword),
+ # FUNCNAME
+ (r'[a-z]\w*\b', Name.Function),
+ # Comments
+ (r'#.*', Comment.Single),
+ # Punctuation
+ (r'[,;\(\)]', Punctuation),
+ # Space is not significant
+ (r'\s+', Text)
+ ],
+ 'string-literal': [
+ (r'\\[tnrfbae"\\]', String.Escape),
+ (r'"', String, '#pop'),
+ (r'[^\\"]+', String)
+ ],
+ }
diff --git a/pygments/lexers/c_cpp.py b/pygments/lexers/c_cpp.py
new file mode 100644
index 00000000..ac9cc7b8
--- /dev/null
+++ b/pygments/lexers/c_cpp.py
@@ -0,0 +1,231 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_cpp
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for C/C++ languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, inherit, default, words
+from pygments.util import get_bool_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['CLexer', 'CppLexer']
+
+
+class CFamilyLexer(RegexLexer):
+ """
+ For C family source code. This is used as a base class to avoid repetitious
+ definitions.
+ """
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'whitespace': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^(' + _ws1 + r')(#if\s+0)',
+ bygroups(using(this), Comment.Preproc), 'if0'),
+ ('^(' + _ws1 + ')(#)',
+ bygroups(using(this), Comment.Preproc), 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do',
+ 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register',
+ 'restricted', 'return', 'sizeof', 'static', 'struct',
+ 'switch', 'typedef', 'union', 'volatile', 'while'),
+ suffix=r'\b'), Keyword),
+ (r'(bool|int|long|float|short|double|char|unsigned|signed|void|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
+ (words(('inline', '_inline', '__inline', 'naked', 'restrict',
+ 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved),
+ # Vector intrinsics
+ (r'(__m(128i|128d|128|64))\b', Keyword.Reserved),
+ # Microsoft-isms
+ (words((
+ 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl',
+ 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try',
+ 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop',
+ 'identifier', 'forceinline', 'assume'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ (r'(true|false|NULL)\b', Name.Builtin),
+ (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')?({)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')?(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ default('statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ ('{', Punctuation, '#push'),
+ ('}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
+ r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
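+        # '#if 0' blocks are swallowed as comments; a nested '#if' pushes this
+        # state again, and '#else'/'#elif' or '#endif' pops back out.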
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
+ 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
+ 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
+ 'wctrans_t', 'wint_t', 'wctype_t']
+ c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
+ 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
+ 'int_least16_t', 'int_least32_t', 'int_least64_t',
+ 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
+ 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
+ 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
+ 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t',
+ 'uintmax_t']
+
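+    # Both token-set extensions are ordinary lexer options, so they can be
+    # disabled per instance -- e.g. CLexer(stdlibhighlighting=False) -- a
+    # usage sketch, not something exercised in this module.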
+ def __init__(self, **options):
+ self.stdlibhighlighting = get_bool_opt(options, 'stdlibhighlighting', True)
+ self.c99highlighting = get_bool_opt(options, 'c99highlighting', True)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.stdlibhighlighting and value in self.stdlib_types:
+ token = Keyword.Type
+ elif self.c99highlighting and value in self.c99_types:
+ token = Keyword.Type
+ yield index, token, value
+
+
+class CLexer(CFamilyLexer):
+ """
+ For C source code with preprocessor directives.
+ """
+ name = 'C'
+ aliases = ['c']
+ filenames = ['*.c', '*.h', '*.idc']
+ mimetypes = ['text/x-chdr', 'text/x-csrc']
+ priority = 0.1
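+    # (priority is presumably used as a tie-breaker when several lexers claim
+    #  the same filename or analyse_text score during lexer guessing.)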
+
+ def analyse_text(text):
+ if re.search('#include [<"]', text):
+ return 0.1
+
+
+class CppLexer(CFamilyLexer):
+ """
+ For C++ source code with preprocessor directives.
+ """
+ name = 'C++'
+ aliases = ['cpp', 'c++']
+ filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
+ '*.cc', '*.hh', '*.cxx', '*.hxx',
+ '*.C', '*.H', '*.cp', '*.CPP']
+ mimetypes = ['text/x-c++hdr', 'text/x-c++src']
+ priority = 0.1
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'asm', 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit',
+ 'export', 'friend', 'mutable', 'namespace', 'new', 'operator',
+ 'private', 'protected', 'public', 'reinterpret_cast',
+ 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws',
+ 'typeid', 'typename', 'using', 'virtual',
+ 'constexpr', 'nullptr', 'decltype', 'thread_local',
+ 'alignas', 'alignof', 'static_assert', 'noexcept', 'override',
+ 'final'), suffix=r'\b'), Keyword),
+ (r'char(16_t|32_t)\b', Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ inherit,
+ ],
+ 'root': [
+ inherit,
+ # C++ Microsoft-isms
+ (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance',
+ 'multiple_inheritance', 'interface', 'event'),
+ prefix=r'__', suffix=r'\b'), Keyword.Reserved),
+ # Offload C++ extensions, http://offload.codeplay.com/
+ (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search('#include <[a-z]+>', text):
+ return 0.2
+ if re.search('using namespace ', text):
+ return 0.4
diff --git a/pygments/lexers/c_like.py b/pygments/lexers/c_like.py
new file mode 100644
index 00000000..622fa869
--- /dev/null
+++ b/pygments/lexers/c_like.py
@@ -0,0 +1,840 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.c_like
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for other C-like languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, inherit, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer',
+ 'CudaLexer', 'SwigLexer', 'MqlLexer']
+
+
+class PikeLexer(CppLexer):
+ """
+ For `Pike <http://pike.lysator.liu.se/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Pike'
+ aliases = ['pike']
+ filenames = ['*.pike', '*.pmod']
+ mimetypes = ['text/x-pike']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'catch', 'new', 'private', 'protected', 'public', 'gauge',
+ 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'new', 'constant', 'final', 'static', 'import', 'use', 'extern',
+ 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for',
+ 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null',
+ '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__',
+ '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__',
+ '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__',
+ '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'),
+ Keyword),
+ (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
+ r'array|multiset|program|function|lambda|mixed|'
+ r'[a-z_][a-z0-9_]*_t)\b',
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+            (r'[~!%^&*+=|?:<>/@-]', Operator),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class NesCLexer(CLexer):
+ """
+ For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
+ directives.
+
+ .. versionadded:: 2.0
+ """
+ name = 'nesC'
+ aliases = ['nesc']
+ filenames = ['*.nc']
+ mimetypes = ['text/x-nescsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component',
+ 'components', 'configuration', 'event', 'extends', 'generic',
+ 'implementation', 'includes', 'interface', 'module', 'new', 'norace',
+ 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'),
+ Keyword),
+ (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t',
+ 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t',
+ 'nx_uint64_t'), suffix=r'\b'),
+ Keyword.Type),
+ inherit,
+ ],
+ }
+
+
+class ClayLexer(RegexLexer):
+ """
+ For `Clay <http://claylabs.com/clay/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Clay'
+ filenames = ['*.clay']
+ aliases = ['clay']
+ mimetypes = ['text/x-clay']
+ tokens = {
+ 'root': [
+ (r'\s', Text),
+            (r'//.*?$', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\b(public|private|import|as|record|variant|instance'
+ r'|define|overload|default|external|alias'
+ r'|rvalue|ref|forward|inline|noinline|forceinline'
+ r'|enum|var|and|or|not|if|else|goto|return|while'
+ r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
+ r'|finally|onerror|staticassert|eval|when|newtype'
+ r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
+ r')\b', Keyword),
+ (r'[~!%^&*+=|:<>/-]', Operator),
+ (r'[#(){}\[\],;.]', Punctuation),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\b(true|false)\b', Name.Builtin),
+ (r'(?i)[a-z_?][a-z_?0-9]*', Name),
+ (r'"""', String, 'tdqs'),
+ (r'"', String, 'dqs'),
+ ],
+ 'strings': [
+ (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
+ (r'.', String),
+ ],
+ 'nl': [
+ (r'\n', String),
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ include('strings'),
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl'),
+ ],
+ }
+
+
+class ECLexer(CLexer):
+ """
+ For eC source code with preprocessor directives.
+
+ .. versionadded:: 1.5
+ """
+ name = 'eC'
+ aliases = ['ec']
+ filenames = ['*.ec', '*.eh']
+ mimetypes = ['text/x-echdr', 'text/x-ecsrc']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'virtual', 'class', 'private', 'public', 'property', 'import',
+ 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get',
+ 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass',
+ '__on_register_module', 'namespace', 'using', 'typed_object',
+ 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers',
+ 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset',
+ 'class_default_property', 'property_category', 'class_data',
+ 'class_property', 'virtual', 'thisclass', 'dbtable', 'dbindex',
+ 'database_open', 'dbfield'), suffix=r'\b'), Keyword),
+ (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte',
+ 'unichar', 'int64'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(null|value|this)\b', Name.Builtin),
+ inherit,
+ ],
+ 'classname': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # template specification
+ (r'\s*(?=>)', Text, '#pop'),
+ ],
+ }
+
+
+class ValaLexer(RegexLexer):
+ """
+ For Vala source code with preprocessor directives.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Vala'
+ aliases = ['vala', 'vapi']
+ filenames = ['*.vala', '*.vapi']
+ mimetypes = ['text/x-vala']
+
+ tokens = {
+ 'whitespace': [
+ (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ (r'[L@]?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'(?s)""".*?"""', String), # verbatim strings
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
+ bygroups(Punctuation, Name.Decorator, Punctuation)),
+ # TODO: "correctly" parse complex code attributes
+ (r'(\[)(CCode|(?:Integer|Floating)Type)',
+ bygroups(Punctuation, Name.Decorator)),
+ (r'[()\[\],.]', Punctuation),
+ (words((
+ 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue',
+ 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for',
+ 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params',
+ 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try',
+ 'typeof', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern',
+ 'inline', 'internal', 'override', 'owned', 'private', 'protected',
+ 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned',
+ 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'),
+ Keyword.Declaration),
+ (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'namespace'),
+ (r'(class|errordomain|interface|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ # void is an actual keyword, others are in glib-2.0.vapi
+ (words((
+ 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16',
+ 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string',
+ 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'ulong', 'unichar', 'ushort'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false|null)\b', Name.Builtin),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ ('', Text, 'statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'namespace': [
+ (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
+ ],
+ }
+
+
+class CudaLexer(CLexer):
+ """
+ For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
+ source.
+
+ .. versionadded:: 1.6
+ """
+ name = 'CUDA'
+ filenames = ['*.cu', '*.cuh']
+ aliases = ['cuda', 'cu']
+ mimetypes = ['text/x-cuda']
+
+ function_qualifiers = set(('__device__', '__global__', '__host__',
+ '__noinline__', '__forceinline__'))
+ variable_qualifiers = set(('__device__', '__constant__', '__shared__',
+ '__restrict__'))
+ vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
+ 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
+ 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
+ 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
+ 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
+ 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
+ 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
+ 'double1', 'double2', 'dim3'))
+ variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'))
+ functions = set(('__threadfence_block', '__threadfence', '__threadfence_system',
+ '__syncthreads', '__syncthreads_count', '__syncthreads_and',
+ '__syncthreads_or'))
+ execution_confs = set(('<<<', '>>>'))
+
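+    # The override below only refines tokens the inherited C lexer already
+    # emitted as plain Name (e.g. __global__, threadIdx); everything else is
+    # passed through unchanged.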
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in CLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.variable_qualifiers:
+ token = Keyword.Type
+ elif value in self.vector_types:
+ token = Keyword.Type
+ elif value in self.variables:
+ token = Name.Builtin
+ elif value in self.execution_confs:
+ token = Keyword.Pseudo
+ elif value in self.function_qualifiers:
+ token = Keyword.Reserved
+ elif value in self.functions:
+ token = Name.Function
+ yield index, token, value
+
+
+class SwigLexer(CppLexer):
+ """
+ For `SWIG <http://www.swig.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'SWIG'
+ aliases = ['swig']
+ filenames = ['*.swg', '*.i']
+ mimetypes = ['text/swig']
+ priority = 0.04 # Lower than C/C++ and Objective C/C++
+
+ tokens = {
+ 'statements': [
+ # SWIG directives
+ (r'(%[a-z_][a-z0-9_]*)', Name.Function),
+ # Special variables
+ ('\$\**\&?\w+', Name),
+ # Stringification / additional preprocessor directives
+ (r'##*[a-zA-Z_]\w*', Comment.Preproc),
+ inherit,
+ ],
+ }
+
+ # This is a far from complete set of SWIG directives
+ swig_directives = set((
+ # Most common directives
+ '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
+ '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
+ '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
+ '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
+ # Less common directives
+ '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
+ '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
+ '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
+ '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
+ '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
+ '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
+ '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
+ '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
+ '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
+ '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
+ '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
+ '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn',
+ '%warnfilter'))
+
+ def analyse_text(text):
+ rv = 0
+ # Search for SWIG directives, which are conventionally at the beginning of
+ # a line. The probability of them being within a line is low, so let another
+ # lexer win in this case.
+ matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
+ for m in matches:
+ if m in SwigLexer.swig_directives:
+ rv = 0.98
+ break
+ else:
+ rv = 0.91 # Fraction higher than MatlabLexer
+ return rv
+
+
+class MqlLexer(CppLexer):
+ """
+ For `MQL4 <http://docs.mql4.com/>`_ and
+ `MQL5 <http://www.mql5.com/en/docs>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'MQL'
+ aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
+ filenames = ['*.mq4', '*.mq5', '*.mqh']
+ mimetypes = ['text/x-mql']
+
+ tokens = {
+ 'statements': [
+ (words((
+ 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed',
+ '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid',
+ 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time',
+ 'Volume'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint',
+ 'color', 'long', 'ulong', 'datetime', 'float', 'double',
+ 'string'), suffix=r'\b'),
+ Keyword.Type),
+ (words((
+ 'Alert', 'CheckPointer', 'Comment', 'DebugBreak', 'ExpertRemove',
+ 'GetPointer', 'GetTickCount', 'MessageBox', 'PeriodSeconds', 'PlaySound',
+ 'Print', 'PrintFormat', 'ResetLastError', 'ResourceCreate', 'ResourceFree',
+ 'ResourceReadImage', 'ResourceSave', 'SendFTP', 'SendMail', 'SendNotification',
+ 'Sleep', 'TerminalClose', 'TesterStatistics', 'ZeroMemory',
+ 'ArrayBsearch', 'ArrayCopy', 'ArrayCompare', 'ArrayFree', 'ArrayGetAsSeries',
+ 'ArrayInitialize', 'ArrayFill', 'ArrayIsSeries', 'ArrayIsDynamic',
+ 'ArrayMaximum', 'ArrayMinimum', 'ArrayRange', 'ArrayResize',
+ 'ArraySetAsSeries', 'ArraySize', 'ArraySort', 'ArrayCopyRates',
+ 'ArrayCopySeries', 'ArrayDimension',
+ 'CharToString', 'DoubleToString', 'EnumToString', 'NormalizeDouble',
+ 'StringToDouble', 'StringToInteger', 'StringToTime', 'TimeToString',
+ 'IntegerToString', 'ShortToString', 'ShortArrayToString',
+ 'StringToShortArray', 'CharArrayToString', 'StringToCharArray',
+ 'ColorToARGB', 'ColorToString', 'StringToColor', 'StringFormat',
+ 'CharToStr', 'DoubleToStr', 'StrToDouble', 'StrToInteger', 'StrToTime', 'TimeToStr',
+ 'MathAbs', 'MathArccos', 'MathArcsin', 'MathArctan', 'MathCeil', 'MathCos', 'MathExp',
+ 'MathFloor', 'MathLog', 'MathMax', 'MathMin', 'MathMod', 'MathPow', 'MathRand',
+ 'MathRound', 'MathSin', 'MathSqrt', 'MathSrand', 'MathTan', 'MathIsValidNumber',
+ 'StringAdd', 'StringBufferLen', 'StringCompare', 'StringConcatenate', 'StringFill',
+ 'StringFind', 'StringGetCharacter', 'StringInit', 'StringLen', 'StringReplace',
+ 'StringSetCharacter', 'StringSplit', 'StringSubstr', 'StringToLower', 'StringToUpper',
+ 'StringTrimLeft', 'StringTrimRight', 'StringGetChar', 'StringSetChar',
+ 'TimeCurrent', 'TimeTradeServer', 'TimeLocal', 'TimeGMT', 'TimeDaylightSavings',
+ 'TimeGMTOffset', 'TimeToStruct', 'StructToTime', 'Day', 'DayOfWeek', 'DayOfYear',
+ 'Hour', 'Minute', 'Month', 'Seconds', 'TimeDay', 'TimeDayOfWeek', 'TimeDayOfYear',
+ 'TimeHour', 'TimeMinute', 'TimeMonth', 'TimeSeconds', 'TimeYear', 'Year',
+ 'AccountInfoDouble', 'AccountInfoInteger', 'AccountInfoString', 'AccountBalance',
+ 'AccountCredit', 'AccountCompany', 'AccountCurrency', 'AccountEquity',
+ 'AccountFreeMargin', 'AccountFreeMarginCheck', 'AccountFreeMarginMode',
+ 'AccountLeverage', 'AccountMargin', 'AccountName', 'AccountNumber', 'AccountProfit',
+ 'AccountServer', 'AccountStopoutLevel', 'AccountStopoutMode',
+ 'GetLastError', 'IsStopped', 'UninitializeReason', 'MQLInfoInteger', 'MQLInfoString',
+ 'Symbol', 'Period', 'Digits', 'Point', 'IsConnected', 'IsDemo', 'IsDllsAllowed',
+ 'IsExpertEnabled', 'IsLibrariesAllowed', 'IsOptimization', 'IsTesting',
+ 'IsTradeAllowed',
+ 'IsTradeContextBusy', 'IsVisualMode', 'TerminalCompany', 'TerminalName',
+ 'TerminalPath',
+ 'SymbolsTotal', 'SymbolName', 'SymbolSelect', 'SymbolIsSynchronized',
+ 'SymbolInfoDouble',
+ 'SymbolInfoInteger', 'SymbolInfoString', 'SymbolInfoTick',
+ 'SymbolInfoSessionQuote',
+ 'SymbolInfoSessionTrade', 'MarketInfo',
+ 'SeriesInfoInteger', 'CopyRates', 'CopyTime', 'CopyOpen',
+ 'CopyHigh', 'CopyLow', 'CopyClose',
+ 'CopyTickVolume', 'CopyRealVolume', 'CopySpread', 'iBars', 'iBarShift', 'iClose',
+ 'iHigh', 'iHighest', 'iLow', 'iLowest', 'iOpen', 'iTime', 'iVolume',
+ 'HideTestIndicators', 'Period', 'RefreshRates', 'Symbol', 'WindowBarsPerChart',
+ 'WindowExpertName', 'WindowFind', 'WindowFirstVisibleBar', 'WindowHandle',
+ 'WindowIsVisible', 'WindowOnDropped', 'WindowPriceMax', 'WindowPriceMin',
+ 'WindowPriceOnDropped', 'WindowRedraw', 'WindowScreenShot',
+ 'WindowTimeOnDropped', 'WindowsTotal', 'WindowXOnDropped', 'WindowYOnDropped',
+ 'OrderClose', 'OrderCloseBy', 'OrderClosePrice', 'OrderCloseTime', 'OrderComment',
+ 'OrderCommission', 'OrderDelete', 'OrderExpiration', 'OrderLots', 'OrderMagicNumber',
+ 'OrderModify', 'OrderOpenPrice', 'OrderOpenTime', 'OrderPrint', 'OrderProfit',
+ 'OrderSelect', 'OrderSend', 'OrdersHistoryTotal', 'OrderStopLoss', 'OrdersTotal',
+ 'OrderSwap', 'OrderSymbol', 'OrderTakeProfit', 'OrderTicket', 'OrderType',
+ 'GlobalVariableCheck', 'GlobalVariableTime',
+ 'GlobalVariableDel', 'GlobalVariableGet', 'GlobalVariableName',
+ 'GlobalVariableSet', 'GlobalVariablesFlush', 'GlobalVariableTemp',
+ 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll',
+ 'GlobalVariablesTotal', 'GlobalVariableCheck', 'GlobalVariableTime',
+ 'GlobalVariableDel', 'GlobalVariableGet',
+ 'GlobalVariableName', 'GlobalVariableSet', 'GlobalVariablesFlush',
+ 'GlobalVariableTemp', 'GlobalVariableSetOnCondition',
+ 'GlobalVariablesDeleteAll', 'GlobalVariablesTotal',
+ 'GlobalVariableCheck', 'GlobalVariableTime', 'GlobalVariableDel',
+ 'GlobalVariableGet', 'GlobalVariableName', 'GlobalVariableSet',
+ 'GlobalVariablesFlush', 'GlobalVariableTemp',
+ 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll',
+ 'GlobalVariablesTotal',
+ 'FileFindFirst', 'FileFindNext', 'FileFindClose', 'FileOpen', 'FileDelete',
+ 'FileFlush', 'FileGetInteger', 'FileIsEnding', 'FileIsLineEnding',
+ 'FileClose', 'FileIsExist', 'FileCopy', 'FileMove', 'FileReadArray',
+ 'FileReadBool', 'FileReadDatetime', 'FileReadDouble', 'FileReadFloat',
+ 'FileReadInteger', 'FileReadLong', 'FileReadNumber', 'FileReadString',
+ 'FileReadStruct', 'FileSeek', 'FileSize', 'FileTell', 'FileWrite',
+ 'FileWriteArray', 'FileWriteDouble', 'FileWriteFloat', 'FileWriteInteger',
+ 'FileWriteLong', 'FileWriteString', 'FileWriteStruct', 'FolderCreate',
+ 'FolderDelete', 'FolderClean', 'FileOpenHistory',
+ 'IndicatorSetDouble', 'IndicatorSetInteger', 'IndicatorSetString',
+ 'SetIndexBuffer', 'IndicatorBuffers', 'IndicatorCounted', 'IndicatorDigits',
+ 'IndicatorShortName', 'SetIndexArrow', 'SetIndexDrawBegin',
+ 'SetIndexEmptyValue', 'SetIndexLabel', 'SetIndexShift',
+ 'SetIndexStyle', 'SetLevelStyle', 'SetLevelValue',
+ 'ObjectCreate', 'ObjectName', 'ObjectDelete', 'ObjectsDeleteAll',
+ 'ObjectFind', 'ObjectGetTimeByValue', 'ObjectGetValueByTime',
+ 'ObjectMove', 'ObjectsTotal', 'ObjectGetDouble', 'ObjectGetInteger',
+ 'ObjectGetString', 'ObjectSetDouble', 'ObjectSetInteger',
+ 'ObjectSetString', 'TextSetFont', 'TextOut', 'TextGetSize',
+ 'ObjectDescription', 'ObjectGet', 'ObjectGetFiboDescription',
+ 'ObjectGetShiftByValue', 'ObjectGetValueByShift', 'ObjectSet',
+ 'ObjectSetFiboDescription', 'ObjectSetText', 'ObjectType',
+ 'iAC', 'iAD', 'iADX', 'iAlligator', 'iAO', 'iATR', 'iBearsPower',
+ 'iBands', 'iBandsOnArray', 'iBullsPower', 'iCCI', 'iCCIOnArray',
+ 'iCustom', 'iDeMarker', 'iEnvelopes', 'iEnvelopesOnArray',
+ 'iForce', 'iFractals', 'iGator', 'iIchimoku', 'iBWMFI', 'iMomentum',
+ 'iMomentumOnArray', 'iMFI', 'iMA', 'iMAOnArray', 'iOsMA', 'iMACD',
+ 'iOBV', 'iSAR', 'iRSI', 'iRSIOnArray', 'iRVI', 'iStdDev', 'iStdDevOnArray',
+ 'iStochastic', 'iWPR',
+ 'EventSetMillisecondTimer', 'EventSetTimer',
+ 'EventKillTimer', 'EventChartCustom'), suffix=r'\b'),
+ Name.Function),
+ (words((
+ 'CHARTEVENT_KEYDOWN', 'CHARTEVENT_MOUSE_MOVE',
+ 'CHARTEVENT_OBJECT_CREATE',
+ 'CHARTEVENT_OBJECT_CHANGE', 'CHARTEVENT_OBJECT_DELETE',
+ 'CHARTEVENT_CLICK',
+ 'CHARTEVENT_OBJECT_CLICK', 'CHARTEVENT_OBJECT_DRAG',
+ 'CHARTEVENT_OBJECT_ENDEDIT',
+ 'CHARTEVENT_CHART_CHANGE', 'CHARTEVENT_CUSTOM',
+ 'CHARTEVENT_CUSTOM_LAST',
+ 'PERIOD_CURRENT', 'PERIOD_M1', 'PERIOD_M2', 'PERIOD_M3',
+ 'PERIOD_M4', 'PERIOD_M5',
+ 'PERIOD_M6', 'PERIOD_M10', 'PERIOD_M12', 'PERIOD_M15',
+ 'PERIOD_M20', 'PERIOD_M30',
+ 'PERIOD_H1', 'PERIOD_H2', 'PERIOD_H3', 'PERIOD_H4',
+ 'PERIOD_H6', 'PERIOD_H8',
+ 'PERIOD_H12', 'PERIOD_D1', 'PERIOD_W1', 'PERIOD_MN1',
+ 'CHART_IS_OBJECT', 'CHART_BRING_TO_TOP',
+ 'CHART_MOUSE_SCROLL', 'CHART_EVENT_MOUSE_MOVE',
+ 'CHART_EVENT_OBJECT_CREATE',
+ 'CHART_EVENT_OBJECT_DELETE', 'CHART_MODE', 'CHART_FOREGROUND',
+ 'CHART_SHIFT',
+ 'CHART_AUTOSCROLL', 'CHART_SCALE', 'CHART_SCALEFIX',
+ 'CHART_SCALEFIX_11',
+ 'CHART_SCALE_PT_PER_BAR', 'CHART_SHOW_OHLC',
+ 'CHART_SHOW_BID_LINE',
+ 'CHART_SHOW_ASK_LINE', 'CHART_SHOW_LAST_LINE',
+ 'CHART_SHOW_PERIOD_SEP',
+ 'CHART_SHOW_GRID', 'CHART_SHOW_VOLUMES',
+ 'CHART_SHOW_OBJECT_DESCR',
+ 'CHART_VISIBLE_BARS', 'CHART_WINDOWS_TOTAL',
+ 'CHART_WINDOW_IS_VISIBLE',
+ 'CHART_WINDOW_HANDLE', 'CHART_WINDOW_YDISTANCE',
+ 'CHART_FIRST_VISIBLE_BAR',
+ 'CHART_WIDTH_IN_BARS', 'CHART_WIDTH_IN_PIXELS',
+ 'CHART_HEIGHT_IN_PIXELS',
+ 'CHART_COLOR_BACKGROUND', 'CHART_COLOR_FOREGROUND',
+ 'CHART_COLOR_GRID',
+ 'CHART_COLOR_VOLUME', 'CHART_COLOR_CHART_UP',
+ 'CHART_COLOR_CHART_DOWN',
+ 'CHART_COLOR_CHART_LINE', 'CHART_COLOR_CANDLE_BULL',
+ 'CHART_COLOR_CANDLE_BEAR',
+ 'CHART_COLOR_BID', 'CHART_COLOR_ASK', 'CHART_COLOR_LAST',
+ 'CHART_COLOR_STOP_LEVEL',
+ 'CHART_SHOW_TRADE_LEVELS', 'CHART_DRAG_TRADE_LEVELS',
+ 'CHART_SHOW_DATE_SCALE',
+ 'CHART_SHOW_PRICE_SCALE', 'CHART_SHIFT_SIZE',
+ 'CHART_FIXED_POSITION',
+ 'CHART_FIXED_MAX', 'CHART_FIXED_MIN', 'CHART_POINTS_PER_BAR',
+ 'CHART_PRICE_MIN',
+ 'CHART_PRICE_MAX', 'CHART_COMMENT', 'CHART_BEGIN',
+ 'CHART_CURRENT_POS', 'CHART_END',
+ 'CHART_BARS', 'CHART_CANDLES', 'CHART_LINE', 'CHART_VOLUME_HIDE',
+ 'CHART_VOLUME_TICK', 'CHART_VOLUME_REAL',
+ 'OBJ_VLINE', 'OBJ_HLINE', 'OBJ_TREND', 'OBJ_TRENDBYANGLE', 'OBJ_CYCLES',
+ 'OBJ_CHANNEL', 'OBJ_STDDEVCHANNEL', 'OBJ_REGRESSION', 'OBJ_PITCHFORK',
+ 'OBJ_GANNLINE', 'OBJ_GANNFAN', 'OBJ_GANNGRID', 'OBJ_FIBO',
+ 'OBJ_FIBOTIMES', 'OBJ_FIBOFAN', 'OBJ_FIBOARC', 'OBJ_FIBOCHANNEL',
+ 'OBJ_EXPANSION', 'OBJ_RECTANGLE', 'OBJ_TRIANGLE', 'OBJ_ELLIPSE',
+ 'OBJ_ARROW_THUMB_UP', 'OBJ_ARROW_THUMB_DOWN',
+ 'OBJ_ARROW_UP', 'OBJ_ARROW_DOWN',
+ 'OBJ_ARROW_STOP', 'OBJ_ARROW_CHECK', 'OBJ_ARROW_LEFT_PRICE',
+ 'OBJ_ARROW_RIGHT_PRICE', 'OBJ_ARROW_BUY', 'OBJ_ARROW_SELL',
+ 'OBJ_ARROW',
+ 'OBJ_TEXT', 'OBJ_LABEL', 'OBJ_BUTTON', 'OBJ_BITMAP',
+ 'OBJ_BITMAP_LABEL',
+ 'OBJ_EDIT', 'OBJ_EVENT', 'OBJ_RECTANGLE_LABEL',
+ 'OBJPROP_TIME1', 'OBJPROP_PRICE1', 'OBJPROP_TIME2',
+ 'OBJPROP_PRICE2', 'OBJPROP_TIME3',
+ 'OBJPROP_PRICE3', 'OBJPROP_COLOR', 'OBJPROP_STYLE',
+ 'OBJPROP_WIDTH',
+ 'OBJPROP_BACK', 'OBJPROP_RAY', 'OBJPROP_ELLIPSE',
+ 'OBJPROP_SCALE',
+ 'OBJPROP_ANGLE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES',
+ 'OBJPROP_DEVIATION', 'OBJPROP_FONTSIZE', 'OBJPROP_CORNER',
+ 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_FIBOLEVELS',
+ 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH',
+ 'OBJPROP_FIRSTLEVEL', 'OBJPROP_COLOR', 'OBJPROP_STYLE', 'OBJPROP_WIDTH',
+ 'OBJPROP_BACK', 'OBJPROP_ZORDER', 'OBJPROP_FILL', 'OBJPROP_HIDDEN',
+ 'OBJPROP_SELECTED', 'OBJPROP_READONLY', 'OBJPROP_TYPE', 'OBJPROP_TIME',
+ 'OBJPROP_SELECTABLE', 'OBJPROP_CREATETIME', 'OBJPROP_LEVELS',
+ 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH',
+ 'OBJPROP_ALIGN', 'OBJPROP_FONTSIZE', 'OBJPROP_RAY_RIGHT', 'OBJPROP_RAY',
+ 'OBJPROP_ELLIPSE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', 'OBJPROP_ANCHOR',
+ 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_DRAWLINES', 'OBJPROP_STATE',
+ 'OBJPROP_CHART_ID', 'OBJPROP_XSIZE', 'OBJPROP_YSIZE', 'OBJPROP_XOFFSET',
+ 'OBJPROP_YOFFSET', 'OBJPROP_PERIOD', 'OBJPROP_DATE_SCALE', 'OBJPROP_PRICE_SCALE',
+ 'OBJPROP_CHART_SCALE', 'OBJPROP_BGCOLOR', 'OBJPROP_CORNER', 'OBJPROP_BORDER_TYPE',
+ 'OBJPROP_BORDER_COLOR', 'OBJPROP_PRICE', 'OBJPROP_LEVELVALUE', 'OBJPROP_SCALE',
+ 'OBJPROP_ANGLE', 'OBJPROP_DEVIATION',
+ 'OBJPROP_NAME', 'OBJPROP_TEXT', 'OBJPROP_TOOLTIP', 'OBJPROP_LEVELTEXT',
+ 'OBJPROP_FONT', 'OBJPROP_BMPFILE', 'OBJPROP_SYMBOL',
+ 'BORDER_FLAT', 'BORDER_RAISED', 'BORDER_SUNKEN', 'ALIGN_LEFT', 'ALIGN_CENTER',
+ 'ALIGN_RIGHT', 'ANCHOR_LEFT_UPPER', 'ANCHOR_LEFT', 'ANCHOR_LEFT_LOWER',
+ 'ANCHOR_LOWER', 'ANCHOR_RIGHT_LOWER', 'ANCHOR_RIGHT', 'ANCHOR_RIGHT_UPPER',
+ 'ANCHOR_UPPER', 'ANCHOR_CENTER', 'ANCHOR_TOP', 'ANCHOR_BOTTOM',
+ 'CORNER_LEFT_UPPER', 'CORNER_LEFT_LOWER', 'CORNER_RIGHT_LOWER',
+ 'CORNER_RIGHT_UPPER',
+ 'OBJ_NO_PERIODS', 'EMPTY', 'OBJ_PERIOD_M1', 'OBJ_PERIOD_M5', 'OBJ_PERIOD_M15',
+ 'OBJ_PERIOD_M30', 'OBJ_PERIOD_H1', 'OBJ_PERIOD_H4', 'OBJ_PERIOD_D1',
+ 'OBJ_PERIOD_W1', 'OBJ_PERIOD_MN1', 'OBJ_ALL_PERIODS',
+ 'GANN_UP_TREND', 'GANN_DOWN_TREND',
+ 'SYMBOL_THUMBSUP', 'SYMBOL_THUMBSDOWN',
+ 'SYMBOL_ARROWUP', 'SYMBOL_ARROWDOWN',
+ 'SYMBOL_STOPSIGN', 'SYMBOL_CHECKSIGN',
+ 'SYMBOL_LEFTPRICE', 'SYMBOL_RIGHTPRICE',
+ 'PRICE_CLOSE', 'PRICE_OPEN', 'PRICE_HIGH', 'PRICE_LOW',
+ 'PRICE_MEDIAN', 'PRICE_TYPICAL', 'PRICE_WEIGHTED',
+ 'VOLUME_TICK', 'VOLUME_REAL',
+ 'STO_LOWHIGH', 'STO_CLOSECLOSE',
+ 'MODE_OPEN', 'MODE_LOW', 'MODE_HIGH', 'MODE_CLOSE', 'MODE_VOLUME', 'MODE_TIME',
+ 'MODE_SMA', 'MODE_EMA', 'MODE_SMMA', 'MODE_LWMA',
+ 'MODE_MAIN', 'MODE_SIGNAL', 'MODE_MAIN',
+ 'MODE_PLUSDI', 'MODE_MINUSDI', 'MODE_UPPER',
+ 'MODE_LOWER', 'MODE_GATORJAW', 'MODE_GATORTEETH',
+ 'MODE_GATORLIPS', 'MODE_TENKANSEN',
+ 'MODE_KIJUNSEN', 'MODE_SENKOUSPANA',
+ 'MODE_SENKOUSPANB', 'MODE_CHINKOUSPAN',
+ 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM',
+ 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_NONE',
+ 'STYLE_SOLID', 'STYLE_DASH', 'STYLE_DOT',
+ 'STYLE_DASHDOT', 'STYLE_DASHDOTDOT',
+ 'DRAW_NONE', 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM',
+ 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_FILLING',
+ 'INDICATOR_DATA', 'INDICATOR_COLOR_INDEX',
+ 'INDICATOR_CALCULATIONS', 'INDICATOR_DIGITS',
+ 'INDICATOR_HEIGHT', 'INDICATOR_LEVELS',
+ 'INDICATOR_LEVELCOLOR', 'INDICATOR_LEVELSTYLE',
+ 'INDICATOR_LEVELWIDTH', 'INDICATOR_MINIMUM',
+ 'INDICATOR_MAXIMUM', 'INDICATOR_LEVELVALUE',
+ 'INDICATOR_SHORTNAME', 'INDICATOR_LEVELTEXT',
+ 'TERMINAL_BUILD', 'TERMINAL_CONNECTED',
+ 'TERMINAL_DLLS_ALLOWED', 'TERMINAL_TRADE_ALLOWED',
+ 'TERMINAL_EMAIL_ENABLED',
+ 'TERMINAL_FTP_ENABLED', 'TERMINAL_MAXBARS',
+ 'TERMINAL_CODEPAGE', 'TERMINAL_CPU_CORES',
+ 'TERMINAL_DISK_SPACE', 'TERMINAL_MEMORY_PHYSICAL',
+ 'TERMINAL_MEMORY_TOTAL',
+ 'TERMINAL_MEMORY_AVAILABLE', 'TERMINAL_MEMORY_USED',
+ 'TERMINAL_X64',
+ 'TERMINAL_OPENCL_SUPPORT', 'TERMINAL_LANGUAGE',
+ 'TERMINAL_COMPANY', 'TERMINAL_NAME',
+ 'TERMINAL_PATH', 'TERMINAL_DATA_PATH',
+ 'TERMINAL_COMMONDATA_PATH',
+ 'MQL_PROGRAM_TYPE', 'MQL_DLLS_ALLOWED',
+ 'MQL_TRADE_ALLOWED', 'MQL_DEBUG',
+ 'MQL_PROFILER', 'MQL_TESTER', 'MQL_OPTIMIZATION',
+ 'MQL_VISUAL_MODE',
+ 'MQL_FRAME_MODE', 'MQL_LICENSE_TYPE', 'MQL_PROGRAM_NAME',
+ 'MQL_PROGRAM_PATH',
+ 'PROGRAM_SCRIPT', 'PROGRAM_EXPERT',
+ 'PROGRAM_INDICATOR', 'LICENSE_FREE',
+ 'LICENSE_DEMO', 'LICENSE_FULL', 'LICENSE_TIME',
+ 'MODE_LOW', 'MODE_HIGH', 'MODE_TIME', 'MODE_BID',
+ 'MODE_ASK', 'MODE_POINT',
+ 'MODE_DIGITS', 'MODE_SPREAD', 'MODE_STOPLEVEL',
+ 'MODE_LOTSIZE', 'MODE_TICKVALUE',
+ 'MODE_TICKSIZE', 'MODE_SWAPLONG',
+ 'MODE_SWAPSHORT', 'MODE_STARTING',
+ 'MODE_EXPIRATION', 'MODE_TRADEALLOWED',
+ 'MODE_MINLOT', 'MODE_LOTSTEP', 'MODE_MAXLOT',
+ 'MODE_SWAPTYPE', 'MODE_PROFITCALCMODE',
+ 'MODE_MARGINCALCMODE', 'MODE_MARGININIT',
+ 'MODE_MARGINMAINTENANCE', 'MODE_MARGINHEDGED',
+ 'MODE_MARGINREQUIRED', 'MODE_FREEZELEVEL',
+ 'SUNDAY', 'MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY',
+ 'FRIDAY', 'SATURDAY',
+ 'ACCOUNT_LOGIN', 'ACCOUNT_TRADE_MODE',
+ 'ACCOUNT_LEVERAGE',
+ 'ACCOUNT_LIMIT_ORDERS', 'ACCOUNT_MARGIN_SO_MODE',
+ 'ACCOUNT_TRADE_ALLOWED', 'ACCOUNT_TRADE_EXPERT',
+ 'ACCOUNT_BALANCE',
+ 'ACCOUNT_CREDIT', 'ACCOUNT_PROFIT', 'ACCOUNT_EQUITY',
+ 'ACCOUNT_MARGIN',
+ 'ACCOUNT_FREEMARGIN', 'ACCOUNT_MARGIN_LEVEL',
+ 'ACCOUNT_MARGIN_SO_CALL',
+ 'ACCOUNT_MARGIN_SO_SO', 'ACCOUNT_NAME',
+ 'ACCOUNT_SERVER', 'ACCOUNT_CURRENCY',
+ 'ACCOUNT_COMPANY', 'ACCOUNT_TRADE_MODE_DEMO',
+ 'ACCOUNT_TRADE_MODE_CONTEST',
+ 'ACCOUNT_TRADE_MODE_REAL', 'ACCOUNT_STOPOUT_MODE_PERCENT',
+ 'ACCOUNT_STOPOUT_MODE_MONEY',
+ 'STAT_INITIAL_DEPOSIT', 'STAT_WITHDRAWAL', 'STAT_PROFIT',
+ 'STAT_GROSS_PROFIT',
+ 'STAT_GROSS_LOSS', 'STAT_MAX_PROFITTRADE',
+ 'STAT_MAX_LOSSTRADE', 'STAT_CONPROFITMAX',
+ 'STAT_CONPROFITMAX_TRADES', 'STAT_MAX_CONWINS',
+ 'STAT_MAX_CONPROFIT_TRADES',
+ 'STAT_CONLOSSMAX', 'STAT_CONLOSSMAX_TRADES',
+ 'STAT_MAX_CONLOSSES',
+ 'STAT_MAX_CONLOSS_TRADES', 'STAT_BALANCEMIN',
+ 'STAT_BALANCE_DD',
+ 'STAT_BALANCEDD_PERCENT', 'STAT_BALANCE_DDREL_PERCENT',
+ 'STAT_BALANCE_DD_RELATIVE', 'STAT_EQUITYMIN',
+ 'STAT_EQUITY_DD',
+ 'STAT_EQUITYDD_PERCENT', 'STAT_EQUITY_DDREL_PERCENT',
+ 'STAT_EQUITY_DD_RELATIVE', 'STAT_EXPECTED_PAYOFF',
+ 'STAT_PROFIT_FACTOR',
+ 'STAT_RECOVERY_FACTOR', 'STAT_SHARPE_RATIO',
+ 'STAT_MIN_MARGINLEVEL',
+ 'STAT_CUSTOM_ONTESTER', 'STAT_DEALS', 'STAT_TRADES',
+ 'STAT_PROFIT_TRADES',
+ 'STAT_LOSS_TRADES', 'STAT_SHORT_TRADES', 'STAT_LONG_TRADES',
+ 'STAT_PROFIT_SHORTTRADES', 'STAT_PROFIT_LONGTRADES',
+ 'STAT_PROFITTRADES_AVGCON', 'STAT_LOSSTRADES_AVGCON',
+ 'SERIES_BARS_COUNT', 'SERIES_FIRSTDATE', 'SERIES_LASTBAR_DATE',
+ 'SERIES_SERVER_FIRSTDATE', 'SERIES_TERMINAL_FIRSTDATE',
+ 'SERIES_SYNCHRONIZED',
+ 'OP_BUY', 'OP_SELL', 'OP_BUYLIMIT', 'OP_SELLLIMIT',
+ 'OP_BUYSTOP', 'OP_SELLSTOP',
+ 'TRADE_ACTION_DEAL', 'TRADE_ACTION_PENDING',
+ 'TRADE_ACTION_SLTP',
+ 'TRADE_ACTION_MODIFY', 'TRADE_ACTION_REMOVE',
+ '__DATE__', '__DATETIME__', '__LINE__', '__FILE__',
+ '__PATH__', '__FUNCTION__',
+ '__FUNCSIG__', '__MQLBUILD__', '__MQL4BUILD__',
+ 'M_E', 'M_LOG2E', 'M_LOG10E', 'M_LN2', 'M_LN10',
+ 'M_PI', 'M_PI_2', 'M_PI_4', 'M_1_PI',
+ 'M_2_PI', 'M_2_SQRTPI', 'M_SQRT2', 'M_SQRT1_2',
+ 'CHAR_MIN', 'CHAR_MAX', 'UCHAR_MAX',
+ 'SHORT_MIN', 'SHORT_MAX', 'USHORT_MAX',
+ 'INT_MIN', 'INT_MAX', 'UINT_MAX',
+ 'LONG_MIN', 'LONG_MAX', 'ULONG_MAX',
+ 'DBL_MIN', 'DBL_MAX', 'DBL_EPSILON', 'DBL_DIG', 'DBL_MANT_DIG',
+ 'DBL_MAX_10_EXP', 'DBL_MAX_EXP', 'DBL_MIN_10_EXP', 'DBL_MIN_EXP',
+ 'FLT_MIN', 'FLT_MAX', 'FLT_EPSILON',
+ 'FLT_DIG', 'FLT_MANT_DIG', 'FLT_MAX_10_EXP',
+ 'FLT_MAX_EXP', 'FLT_MIN_10_EXP', 'FLT_MIN_EXP', 'REASON_PROGRAM',
+ 'REASON_REMOVE', 'REASON_RECOMPILE',
+ 'REASON_CHARTCHANGE', 'REASON_CHARTCLOSE',
+ 'REASON_PARAMETERS', 'REASON_ACCOUNT',
+ 'REASON_TEMPLATE', 'REASON_INITFAILED',
+ 'REASON_CLOSE', 'POINTER_INVALID',
+ 'POINTER_DYNAMIC', 'POINTER_AUTOMATIC',
+ 'NULL', 'EMPTY', 'EMPTY_VALUE', 'CLR_NONE', 'WHOLE_ARRAY',
+ 'CHARTS_MAX', 'clrNONE', 'EMPTY_VALUE', 'INVALID_HANDLE',
+ 'IS_DEBUG_MODE', 'IS_PROFILE_MODE', 'NULL', 'WHOLE_ARRAY', 'WRONG_VALUE',
+ 'ERR_NO_ERROR', 'ERR_NO_RESULT', 'ERR_COMMON_ERROR',
+ 'ERR_INVALID_TRADE_PARAMETERS',
+ 'ERR_SERVER_BUSY', 'ERR_OLD_VERSION', 'ERR_NO_CONNECTION',
+ 'ERR_NOT_ENOUGH_RIGHTS',
+ 'ERR_TOO_FREQUENT_REQUESTS', 'ERR_MALFUNCTIONAL_TRADE',
+ 'ERR_ACCOUNT_DISABLED',
+ 'ERR_INVALID_ACCOUNT', 'ERR_TRADE_TIMEOUT',
+ 'ERR_INVALID_PRICE', 'ERR_INVALID_STOPS',
+ 'ERR_INVALID_TRADE_VOLUME', 'ERR_MARKET_CLOSED',
+ 'ERR_TRADE_DISABLED',
+ 'ERR_NOT_ENOUGH_MONEY', 'ERR_PRICE_CHANGED',
+ 'ERR_OFF_QUOTES', 'ERR_BROKER_BUSY',
+ 'ERR_REQUOTE', 'ERR_ORDER_LOCKED',
+ 'ERR_LONG_POSITIONS_ONLY_ALLOWED', 'ERR_TOO_MANY_REQUESTS',
+ 'ERR_TRADE_MODIFY_DENIED', 'ERR_TRADE_CONTEXT_BUSY',
+ 'ERR_TRADE_EXPIRATION_DENIED',
+ 'ERR_TRADE_TOO_MANY_ORDERS', 'ERR_TRADE_HEDGE_PROHIBITED',
+ 'ERR_TRADE_PROHIBITED_BY_FIFO',
+ 'FILE_READ', 'FILE_WRITE', 'FILE_BIN', 'FILE_CSV', 'FILE_TXT',
+ 'FILE_ANSI', 'FILE_UNICODE',
+ 'FILE_SHARE_READ', 'FILE_SHARE_WRITE', 'FILE_REWRITE',
+ 'FILE_COMMON', 'FILE_EXISTS',
+ 'FILE_CREATE_DATE', 'FILE_MODIFY_DATE',
+ 'FILE_ACCESS_DATE', 'FILE_SIZE', 'FILE_POSITION',
+ 'FILE_END', 'FILE_LINE_END', 'FILE_IS_COMMON',
+ 'FILE_IS_TEXT', 'FILE_IS_BINARY',
+ 'FILE_IS_CSV', 'FILE_IS_ANSI', 'FILE_IS_READABLE', 'FILE_IS_WRITABLE',
+ 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'CP_ACP',
+ 'CP_OEMCP', 'CP_MACCP', 'CP_THREAD_ACP',
+ 'CP_SYMBOL', 'CP_UTF7', 'CP_UTF8', 'IDOK', 'IDCANCEL', 'IDABORT',
+ 'IDRETRY', 'IDIGNORE', 'IDYES', 'IDNO', 'IDTRYAGAIN', 'IDCONTINUE',
+ 'MB_OK', 'MB_OKCANCEL', 'MB_ABORTRETRYIGNORE', 'MB_YESNOCANCEL',
+ 'MB_YESNO', 'MB_RETRYCANCEL',
+ 'MB_CANCELTRYCONTINUE', 'MB_ICONSTOP', 'MB_ICONERROR',
+ 'MB_ICONHAND', 'MB_ICONQUESTION',
+ 'MB_ICONEXCLAMATION', 'MB_ICONWARNING',
+ 'MB_ICONINFORMATION', 'MB_ICONASTERISK',
+ 'MB_DEFBUTTON1', 'MB_DEFBUTTON2', 'MB_DEFBUTTON3',
+ 'MB_DEFBUTTON4'), suffix=r'\b'),
+ Name.Constant),
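+ # predefined web-color identifiers; the '(clr)?' prefix in the rule
+ # below also matches the clr-prefixed spelling, e.g. both Red and clrRed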
+ (words((
+ 'Black', 'DarkGreen', 'DarkSlateGray', 'Olive',
+ 'Green', 'Teal', 'Navy', 'Purple',
+ 'Maroon', 'Indigo', 'MidnightBlue', 'DarkBlue',
+ 'DarkOliveGreen', 'SaddleBrown',
+ 'ForestGreen', 'OliveDrab', 'SeaGreen',
+ 'DarkGoldenrod', 'DarkSlateBlue',
+ 'Sienna', 'MediumBlue', 'Brown', 'DarkTurquoise',
+ 'DimGray', 'LightSeaGreen',
+ 'DarkViolet', 'FireBrick', 'MediumVioletRed',
+ 'MediumSeaGreen', 'Chocolate',
+ 'Crimson', 'SteelBlue', 'Goldenrod', 'MediumSpringGreen',
+ 'LawnGreen', 'CadetBlue',
+ 'DarkOrchid', 'YellowGreen', 'LimeGreen', 'OrangeRed',
+ 'DarkOrange', 'Orange',
+ 'Gold', 'Yellow', 'Chartreuse', 'Lime', 'SpringGreen',
+ 'Aqua', 'DeepSkyBlue', 'Blue',
+ 'Magenta', 'Red', 'Gray', 'SlateGray', 'Peru', 'BlueViolet',
+ 'LightSlateGray', 'DeepPink',
+ 'MediumTurquoise', 'DodgerBlue', 'Turquoise', 'RoyalBlue',
+ 'SlateBlue', 'DarkKhaki',
+ 'IndianRed', 'MediumOrchid', 'GreenYellow',
+ 'MediumAquamarine', 'DarkSeaGreen',
+ 'Tomato', 'RosyBrown', 'Orchid', 'MediumPurple',
+ 'PaleVioletRed', 'Coral', 'CornflowerBlue',
+ 'DarkGray', 'SandyBrown', 'MediumSlateBlue',
+ 'Tan', 'DarkSalmon', 'BurlyWood',
+ 'HotPink', 'Salmon', 'Violet', 'LightCoral', 'SkyBlue',
+ 'LightSalmon', 'Plum',
+ 'Khaki', 'LightGreen', 'Aquamarine', 'Silver',
+ 'LightSkyBlue', 'LightSteelBlue',
+ 'LightBlue', 'PaleGreen', 'Thistle', 'PowderBlue',
+ 'PaleGoldenrod', 'PaleTurquoise',
+ 'LightGray', 'Wheat', 'NavajoWhite', 'Moccasin',
+ 'LightPink', 'Gainsboro', 'PeachPuff',
+ 'Pink', 'Bisque', 'LightGoldenrod', 'BlanchedAlmond',
+ 'LemonChiffon', 'Beige',
+ 'AntiqueWhite', 'PapayaWhip', 'Cornsilk',
+ 'LightYellow', 'LightCyan', 'Linen',
+ 'Lavender', 'MistyRose', 'OldLace', 'WhiteSmoke',
+ 'Seashell', 'Ivory', 'Honeydew',
+ 'AliceBlue', 'LavenderBlush', 'MintCream', 'Snow',
+ 'White'), prefix='(clr)?', suffix=r'\b'),
+ Name.Constant),
+ inherit,
+ ],
+ }
diff --git a/pygments/lexers/chapel.py b/pygments/lexers/chapel.py
new file mode 100644
index 00000000..0a12dd95
--- /dev/null
+++ b/pygments/lexers/chapel.py
@@ -0,0 +1,98 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.chapel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Chapel language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['ChapelLexer']
+
+
+class ChapelLexer(RegexLexer):
+ """
+ For `Chapel <http://chapel.cray.com/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Chapel'
+ filenames = ['*.chpl']
+ aliases = ['chapel', 'chpl']
+ # mimetypes = ['text/x-chapel']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+
+ (r'(config|const|in|inout|out|param|ref|type|var)\b',
+ Keyword.Declaration),
+ (r'(false|nil|true)\b', Keyword.Constant),
+ (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
+ Keyword.Type),
+ (words((
+ 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall',
+ 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum',
+ 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline',
+ 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on',
+ 'otherwise', 'pragma', 'reduce', 'return', 'scan', 'select',
+ 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use',
+ 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'),
+ Keyword),
+ (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
+ (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
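+ # the 'procname' and 'classname' states below simply consume the
+ # following identifier as a function/class name and pop back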
+
+ # imaginary integers
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+
+ # reals cannot end with a period due to lexical ambiguity with
+ # .. operator. See reference for rationale.
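+ # (e.g. '1..10' must lex as '1' '..' '10', not as '1.' '.10')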
+ (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
+
+ # integer literals
+ # -- binary
+ (r'0[bB][0-1]+', Number.Bin),
+ # -- hex
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- octal
+ (r'0[oO][0-7]+', Number.Oct),
+ # -- decimal
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'["\'](\\\\|\\"|[^"\'])*["\']', String),
+
+ # tokens
+ (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
+ r'<=>|<~>|\.\.|by|#|\.\.\.|'
+ r'&&|\|\||!|&|\||\^|~|<<|>>|'
+ r'==|!=|<=|>=|<|>|'
+ r'[+\-*/%]|\*\*)', Operator),
+ (r'[:;,.?()\[\]{}]', Punctuation),
+
+ # identifiers
+ (r'[a-zA-Z_][\w$]*', Name.Other),
+ ],
+ 'classname': [
+ (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
+ ],
+ 'procname': [
+ (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 38bd901f..74326c3c 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -3,5190 +3,31 @@
pygments.lexers.compiled
~~~~~~~~~~~~~~~~~~~~~~~~
- Lexers for compiled languages.
+ Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-from string import Template
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, inherit, do_insertions, default
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation, Error, Literal, Generic
-from pygments.scanner import Scanner
-
-# backwards compatibility
-from pygments.lexers.functional import OcamlLexer
from pygments.lexers.jvm import JavaLexer, ScalaLexer
-
-__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer',
- 'NesCLexer', 'DylanLexer', 'ObjectiveCLexer', 'ObjectiveCppLexer',
- 'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer',
- 'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer',
- 'Modula2Lexer', 'BlitzMaxLexer', 'BlitzBasicLexer', 'NimrodLexer',
- 'FantomLexer', 'RustLexer', 'CudaLexer', 'MonkeyLexer', 'SwigLexer',
- 'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer',
- 'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer', 'PikeLexer',
- 'ChapelLexer', 'EiffelLexer', 'Inform6Lexer', 'Inform7Lexer',
- 'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer']
-
-
-class CFamilyLexer(RegexLexer):
- """
- For C family source code. This is used as a base class to avoid repetitious
- definitions.
- """
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
- #: only one /* */ style comment
- _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
- tokens = {
- 'whitespace': [
- # preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^(' + _ws1 + r')(#if\s+0)',
- bygroups(using(this), Comment.Preproc), 'if0'),
- ('^(' + _ws1 + ')(#)',
- bygroups(using(this), Comment.Preproc), 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'(auto|break|case|const|continue|default|do|else|enum|extern|'
- r'for|goto|if|register|restricted|return|sizeof|static|struct|'
- r'switch|typedef|union|volatile|while)\b', Keyword),
- (r'(bool|int|long|float|short|double|char|unsigned|signed|void|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
- (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved),
- # Vector intrinsics
- (r'(__(m128i|m128d|m128|m64))\b', Keyword.Reserved),
- # Microsoft-isms
- (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|'
- r'declspec|finally|int64|try|leave|wchar_t|w64|unaligned|'
- r'raise|noop|identifier|forceinline|assume)\b', Keyword.Reserved),
- (r'(true|false|NULL)\b', Name.Builtin),
- (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')?({)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')?(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- default('statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- ('{', Punctuation, '#push'),
- ('}', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|'
- r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- stdlib_types = ['size_t', 'ssize_t', 'off_t', 'wchar_t', 'ptrdiff_t',
- 'sig_atomic_t', 'fpos_t', 'clock_t', 'time_t', 'va_list',
- 'jmp_buf', 'FILE', 'DIR', 'div_t', 'ldiv_t', 'mbstate_t',
- 'wctrans_t', 'wint_t', 'wctype_t']
- c99_types = ['_Bool', '_Complex', 'int8_t', 'int16_t', 'int32_t', 'int64_t',
- 'uint8_t', 'uint16_t', 'uint32_t', 'uint64_t', 'int_least8_t',
- 'int_least16_t', 'int_least32_t', 'int_least64_t',
- 'uint_least8_t', 'uint_least16_t', 'uint_least32_t',
- 'uint_least64_t', 'int_fast8_t', 'int_fast16_t', 'int_fast32_t',
- 'int_fast64_t', 'uint_fast8_t', 'uint_fast16_t', 'uint_fast32_t',
- 'uint_fast64_t', 'intptr_t', 'uintptr_t', 'intmax_t',
- 'uintmax_t']
-
- def __init__(self, **options):
- self.stdlibhighlighting = get_bool_opt(options,
- 'stdlibhighlighting', True)
- self.c99highlighting = get_bool_opt(options,
- 'c99highlighting', True)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.stdlibhighlighting and value in self.stdlib_types:
- token = Keyword.Type
- elif self.c99highlighting and value in self.c99_types:
- token = Keyword.Type
- yield index, token, value
-
-
-class CLexer(CFamilyLexer):
- """
- For C source code with preprocessor directives.
- """
- name = 'C'
- aliases = ['c']
- filenames = ['*.c', '*.h', '*.idc']
- mimetypes = ['text/x-chdr', 'text/x-csrc']
- priority = 0.1
-
- def analyse_text(text):
- if re.search('#include [<"]', text):
- return 0.1
-
-
-class CppLexer(CFamilyLexer):
- """
- For C++ source code with preprocessor directives.
- """
- name = 'C++'
- aliases = ['cpp', 'c++']
- filenames = ['*.cpp', '*.hpp', '*.c++', '*.h++',
- '*.cc', '*.hh', '*.cxx', '*.hxx',
- '*.C', '*.H', '*.cp', '*.CPP']
- mimetypes = ['text/x-c++hdr', 'text/x-c++src']
- priority = 0.1
-
- tokens = {
- 'statements': [
- (r'(asm|catch|const_cast|delete|dynamic_cast|explicit|'
- r'export|friend|mutable|namespace|new|operator|'
- r'private|protected|public|reinterpret_cast|'
- r'restrict|static_cast|template|this|throw|throws|'
- r'typeid|typename|using|virtual|'
- r'constexpr|nullptr|decltype|thread_local|'
- r'alignas|alignof|static_assert|noexcept|override|final)\b', Keyword),
- (r'(char16_t|char32_t)\b', Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- inherit,
- ],
- 'root': [
- inherit,
- # C++ Microsoft-isms
- (r'__(virtual_inheritance|uuidof|super|single_inheritance|'
- r'multiple_inheritance|interface|event)\b', Keyword.Reserved),
- # Offload C++ extensions, http://offload.codeplay.com/
- (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo),
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search('#include <[a-z]+>', text):
- return 0.2
- if re.search('using namespace ', text):
- return 0.4
-
-
-class PikeLexer(CppLexer):
- """
- For `Pike <http://pike.lysator.liu.se/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Pike'
- aliases = ['pike']
- filenames = ['*.pike', '*.pmod']
- mimetypes = ['text/x-pike']
-
- tokens = {
- 'statements': [
- (r'(catch|new|private|protected|public|gauge|'
- r'throw|throws|class|interface|implement|abstract|extends|from|'
- r'this|super|new|constant|final|static|import|use|extern|'
- r'inline|proto|break|continue|if|else|for|'
- r'while|do|switch|case|as|in|version|return|true|false|null|'
- r'__VERSION__|__MAJOR__|__MINOR__|__BUILD__|__REAL_VERSION__|'
- r'__REAL_MAJOR__|__REAL_MINOR__|__REAL_BUILD__|__DATE__|__TIME__|'
- r'__FILE__|__DIR__|__LINE__|__AUTO_BIGNUM__|__NT__|__PIKE__|'
- r'__amigaos__|_Pragma|static_assert|defined|sscanf)\b',
- Keyword),
- (r'(bool|int|long|float|short|double|char|string|object|void|mapping|'
- r'array|multiset|program|function|lambda|mixed|'
- r'[a-z_][a-z0-9_]*_t)\b',
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'[~!%^&*+=|?:<>/-@]', Operator),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
-
-class SwigLexer(CppLexer):
- """
- For `SWIG <http://www.swig.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'SWIG'
- aliases = ['swig']
- filenames = ['*.swg', '*.i']
- mimetypes = ['text/swig']
- priority = 0.04 # Lower than C/C++ and Objective C/C++
-
- tokens = {
- 'statements': [
- # SWIG directives
- (r'(%[a-z_][a-z0-9_]*)', Name.Function),
- # Special variables
- ('\$\**\&?\w+', Name),
- # Stringification / additional preprocessor directives
- (r'##*[a-zA-Z_]\w*', Comment.Preproc),
- inherit,
- ],
- }
-
- # This is a far from complete set of SWIG directives
- swig_directives = (
- # Most common directives
- '%apply', '%define', '%director', '%enddef', '%exception', '%extend',
- '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include',
- '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma',
- '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap',
- # Less common directives
- '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear',
- '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum',
- '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor',
- '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor',
- '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments',
- '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv',
- '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception',
- '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar',
- '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend',
- '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall',
- '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof',
- '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', '%warnfilter')
-
- def analyse_text(text):
- rv = 0
- # Search for SWIG directives, which are conventionally at the beginning of
- # a line. The probability of them being within a line is low, so let another
- # lexer win in this case.
- matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M)
- for m in matches:
- if m in SwigLexer.swig_directives:
- rv = 0.98
- break
- else:
- rv = 0.91 # Fraction higher than MatlabLexer
- return rv
-
-
-class ECLexer(CLexer):
- """
- For eC source code with preprocessor directives.
-
- .. versionadded:: 1.5
- """
- name = 'eC'
- aliases = ['ec']
- filenames = ['*.ec', '*.eh']
- mimetypes = ['text/x-echdr', 'text/x-ecsrc']
-
- tokens = {
- 'statements': [
- (r'(virtual|class|private|public|property|import|delete|new|new0|'
- r'renew|renew0|define|get|set|remote|dllexport|dllimport|stdcall|'
- r'subclass|__on_register_module|namespace|using|typed_object|'
- r'any_object|incref|register|watch|stopwatching|firewatchers|'
- r'watchable|class_designer|class_fixed|class_no_expansion|isset|'
- r'class_default_property|property_category|class_data|'
- r'class_property|virtual|thisclass|'
- r'dbtable|dbindex|database_open|dbfield)\b', Keyword),
- (r'(uint|uint16|uint32|uint64|bool|byte|unichar|int64)\b',
- Keyword.Type),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(null|value|this)\b', Name.Builtin),
- inherit,
- ],
- 'classname': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # template specification
- (r'\s*(?=>)', Text, '#pop'),
- ],
- }
-
-
-class NesCLexer(CLexer):
- """
- For `nesC <https://github.com/tinyos/nesc>`_ source code with preprocessor
- directives.
-
- .. versionadded:: 2.0
- """
- name = 'nesC'
- aliases = ['nesc']
- filenames = ['*.nc']
- mimetypes = ['text/x-nescsrc']
-
- tokens = {
- 'statements': [
- (r'(abstract|as|async|atomic|call|command|component|components|'
- r'configuration|event|extends|generic|implementation|includes|'
- r'interface|module|new|norace|post|provides|signal|task|uses)\b',
- Keyword),
- (r'(nx_struct|nx_union|nx_int8_t|nx_int16_t|nx_int32_t|nx_int64_t|'
- r'nx_uint8_t|nx_uint16_t|nx_uint32_t|nx_uint64_t)\b',
- Keyword.Type),
- inherit,
- ],
- }
-
-
-class ClayLexer(RegexLexer):
- """
- For `Clay <http://claylabs.com/clay/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Clay'
- filenames = ['*.clay']
- aliases = ['clay']
- mimetypes = ['text/x-clay']
- tokens = {
- 'root': [
- (r'\s', Text),
- (r'//.*?$', Comment.Singleline),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\b(public|private|import|as|record|variant|instance'
- r'|define|overload|default|external|alias'
- r'|rvalue|ref|forward|inline|noinline|forceinline'
- r'|enum|var|and|or|not|if|else|goto|return|while'
- r'|switch|case|break|continue|for|in|true|false|try|catch|throw'
- r'|finally|onerror|staticassert|eval|when|newtype'
- r'|__FILE__|__LINE__|__COLUMN__|__ARG__'
- r')\b', Keyword),
- (r'[~!%^&*+=|:<>/-]', Operator),
- (r'[#(){}\[\],;.]', Punctuation),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\b(true|false)\b', Name.Builtin),
- (r'(?i)[a-z_?][a-z_?0-9]*', Name),
- (r'"""', String, 'tdqs'),
- (r'"', String, 'dqs'),
- ],
- 'strings': [
- (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape),
- (r'.', String),
- ],
- 'nl': [
- (r'\n', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings'),
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl'),
- ],
- }
-
-
-class DLexer(RegexLexer):
- """
- For D source.
-
- .. versionadded:: 1.2
- """
- name = 'D'
- filenames = ['*.d', '*.di']
- aliases = ['d']
- mimetypes = ['text/x-dsrc']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- #(r'\\\n', Text), # line continuations
- # Comments
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'/\+', Comment.Multiline, 'nested_comment'),
- # Keywords
- (r'(abstract|alias|align|asm|assert|auto|body|break|case|cast'
- r'|catch|class|const|continue|debug|default|delegate|delete'
- r'|deprecated|do|else|enum|export|extern|finally|final'
- r'|foreach_reverse|foreach|for|function|goto|if|immutable|import'
- r'|interface|invariant|inout|in|is|lazy|mixin|module|new|nothrow|out'
- r'|override|package|pragma|private|protected|public|pure|ref|return'
- r'|scope|shared|static|struct|super|switch|synchronized|template|this'
- r'|throw|try|typedef|typeid|typeof|union|unittest|version|volatile'
- r'|while|with|__gshared|__traits|__vector|__parameters)\b', Keyword
- ),
- (r'(bool|byte|cdouble|cent|cfloat|char|creal|dchar|double|float'
- r'|idouble|ifloat|int|ireal|long|real|short|ubyte|ucent|uint|ulong'
- r'|ushort|void|wchar)\b', Keyword.Type
- ),
- (r'(false|true|null)\b', Keyword.Constant),
- (r'(__FILE__|__MODULE__|__LINE__|__FUNCTION__|__PRETTY_FUNCTION__'
- r'|__DATE__|__EOF__|__TIME__|__TIMESTAMP__|__VENDOR__|__VERSION__)\b',
- Keyword.Pseudo),
- (r'macro\b', Keyword.Reserved),
- (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
- # FloatLiteral
- # -- HexFloat
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+', Number.Bin),
- # -- Octal
- (r'0[0-7_]+', Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
- # CharacterLiteral
- (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
- String.Char
- ),
- # StringLiteral
- # -- WysiwygString
- (r'r"[^"]*"[cwd]?', String),
- # -- AlternateWysiwygString
- (r'`[^`]*`[cwd]?', String),
- # -- DoubleQuotedString
- (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
- # -- EscapeSequence
- (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
- r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
- String
- ),
- # -- HexString
- (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
- # -- DelimitedString
- (r'q"\[', String, 'delimited_bracket'),
- (r'q"\(', String, 'delimited_parenthesis'),
- (r'q"<', String, 'delimited_angle'),
- (r'q"{', String, 'delimited_curly'),
- (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
- (r'q"(.).*?\1"', String),
- # -- TokenString
- (r'q{', String, 'token_string'),
- # Attributes
- (r'@([a-zA-Z_]\w*)?', Name.Decorator),
- # Tokens
- (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
- r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
- r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation
- ),
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
- # Line
- (r'#line\s.*\n', Comment.Special),
- ],
- 'nested_comment': [
- (r'[^+/]+', Comment.Multiline),
- (r'/\+', Comment.Multiline, '#push'),
- (r'\+/', Comment.Multiline, '#pop'),
- (r'[+/]', Comment.Multiline),
- ],
- 'token_string': [
- (r'{', Punctuation, 'token_string_nest'),
- (r'}', String, '#pop'),
- include('root'),
- ],
- 'token_string_nest': [
- (r'{', Punctuation, '#push'),
- (r'}', Punctuation, '#pop'),
- include('root'),
- ],
- 'delimited_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, 'delimited_inside_bracket'),
- (r'\]"', String, '#pop'),
- ],
- 'delimited_inside_bracket': [
- (r'[^\[\]]+', String),
- (r'\[', String, '#push'),
- (r'\]', String, '#pop'),
- ],
- 'delimited_parenthesis': [
- (r'[^\(\)]+', String),
- (r'\(', String, 'delimited_inside_parenthesis'),
- (r'\)"', String, '#pop'),
- ],
- 'delimited_inside_parenthesis': [
- (r'[^\(\)]+', String),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
- 'delimited_angle': [
- (r'[^<>]+', String),
- (r'<', String, 'delimited_inside_angle'),
- (r'>"', String, '#pop'),
- ],
- 'delimited_inside_angle': [
- (r'[^<>]+', String),
- (r'<', String, '#push'),
- (r'>', String, '#pop'),
- ],
- 'delimited_curly': [
- (r'[^{}]+', String),
- (r'{', String, 'delimited_inside_curly'),
- (r'}"', String, '#pop'),
- ],
- 'delimited_inside_curly': [
- (r'[^{}]+', String),
- (r'{', String, '#push'),
- (r'}', String, '#pop'),
- ],
- }
-
-
-class DelphiLexer(Lexer):
- """
- For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
- Turbo Pascal and Free Pascal source code.
-
- Additional options accepted:
-
- `turbopascal`
- Highlight Turbo Pascal specific keywords (default: ``True``).
- `delphi`
- Highlight Borland Delphi specific keywords (default: ``True``).
- `freepascal`
- Highlight Free Pascal specific keywords (default: ``True``).
- `units`
- A list of units that should be considered builtin, supported are
- ``System``, ``SysUtils``, ``Classes`` and ``Math``.
- Default is to consider all of them builtin.
- """
- name = 'Delphi'
- aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
- filenames = ['*.pas']
- mimetypes = ['text/x-pascal']
-
- TURBO_PASCAL_KEYWORDS = [
- 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
- 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
- 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
- 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
- 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
- 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
- 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
- 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
- ]
-
- DELPHI_KEYWORDS = [
- 'as', 'class', 'except', 'exports', 'finalization', 'finally',
- 'initialization', 'is', 'library', 'on', 'property', 'raise',
- 'threadvar', 'try'
- ]
-
- FREE_PASCAL_KEYWORDS = [
- 'dispose', 'exit', 'false', 'new', 'true'
- ]
-
- BLOCK_KEYWORDS = set([
- 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
- 'finalization', 'function', 'implementation', 'initialization',
- 'label', 'library', 'operator', 'procedure', 'program', 'property',
- 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
- ])
-
- FUNCTION_MODIFIERS = set([
- 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
- 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
- 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
- 'override', 'assembler'
- ])
-
- # XXX: those aren't global. but currently we know no way for defining
- # them just for the type context.
- DIRECTIVES = set([
- 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
- 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
- 'published', 'public'
- ])
-
- BUILTIN_TYPES = set([
- 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
- 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
- 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
- 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
- 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
- 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
- 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
- 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
- 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
- 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
- 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
- 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
- 'widechar', 'widestring', 'word', 'wordbool'
- ])
-
- BUILTIN_UNITS = {
- 'System': [
- 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
- 'append', 'arctan', 'assert', 'assigned', 'assignfile',
- 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
- 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
- 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
- 'dispose', 'doubletocomp', 'endthread', 'enummodules',
- 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
- 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
- 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
- 'findresourcehinstance', 'flush', 'frac', 'freemem',
- 'get8087cw', 'getdir', 'getlasterror', 'getmem',
- 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
- 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
- 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
- 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
- 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
- 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
- 'randomize', 'read', 'readln', 'reallocmem',
- 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
- 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
- 'set8087cw', 'setlength', 'setlinebreakstyle',
- 'setmemorymanager', 'setstring', 'settextbuf',
- 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
- 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
- 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
- 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
- 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
- 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
- 'widecharlentostring', 'widecharlentostrvar',
- 'widechartostring', 'widechartostrvar',
- 'widestringtoucs4string', 'write', 'writeln'
- ],
- 'SysUtils': [
- 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
- 'allocmem', 'ansicomparefilename', 'ansicomparestr',
- 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
- 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
- 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
- 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
- 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
- 'ansistrscan', 'ansistrupper', 'ansiuppercase',
- 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
- 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
- 'callterminateprocs', 'changefileext', 'charlength',
- 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
- 'comparetext', 'createdir', 'createguid', 'currentyear',
- 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
- 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
- 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
- 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
- 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
- 'exceptionerrormessage', 'excludetrailingbackslash',
- 'excludetrailingpathdelimiter', 'expandfilename',
- 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
- 'extractfiledrive', 'extractfileext', 'extractfilename',
- 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
- 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
- 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
- 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
- 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
- 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
- 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
- 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
- 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
- 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
- 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
- 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
- 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
- 'includetrailingbackslash', 'includetrailingpathdelimiter',
- 'incmonth', 'initializepackage', 'interlockeddecrement',
- 'interlockedexchange', 'interlockedexchangeadd',
- 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
- 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
- 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
- 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
- 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
- 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
- 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
- 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
- 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
- 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
- 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
- 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
- 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
- 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
- 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
- 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
- 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
- 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
- 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
- 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
- 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
- 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
- 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
- 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
- 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
- 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
- 'wraptext'
- ],
- 'Classes': [
- 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
- 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
- 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
- 'groupdescendantswith', 'hextobin', 'identtoint',
- 'initinheritedcomponent', 'inttoident', 'invalidpoint',
- 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
- 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
- 'pointsequal', 'readcomponentres', 'readcomponentresex',
- 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
- 'registerclasses', 'registercomponents', 'registerintegerconsts',
- 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
- 'teststreamformat', 'unregisterclass', 'unregisterclasses',
- 'unregisterintegerconsts', 'unregistermoduleclasses',
- 'writecomponentresfile'
- ],
- 'Math': [
- 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
- 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
- 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
- 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
- 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
- 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
- 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
- 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
- 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
- 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
- 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
- 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
- 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
- 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
- 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
- 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
- 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
- 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
- 'tan', 'tanh', 'totalvariance', 'variance'
- ]
- }
-
- ASM_REGISTERS = set([
- 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
- 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
- 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
- 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
- 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
- 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
- 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
- 'xmm6', 'xmm7'
- ])
-
- ASM_INSTRUCTIONS = set([
- 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
- 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
- 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
- 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
- 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
- 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
- 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
- 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
- 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
- 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
- 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
- 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
- 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
- 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
- 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
- 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
- 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
- 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
- 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
- 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
- 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
- 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
- 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
- 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
- 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
- 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
- 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
- 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
- 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
- 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
- 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
- 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
- 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
- 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
- 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
- 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
- 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
- 'xlatb', 'xor'
- ])
-
- def __init__(self, **options):
- Lexer.__init__(self, **options)
- self.keywords = set()
- if get_bool_opt(options, 'turbopascal', True):
- self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
- if get_bool_opt(options, 'delphi', True):
- self.keywords.update(self.DELPHI_KEYWORDS)
- if get_bool_opt(options, 'freepascal', True):
- self.keywords.update(self.FREE_PASCAL_KEYWORDS)
- self.builtins = set()
- for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
- self.builtins.update(self.BUILTIN_UNITS[unit])
-
- def get_tokens_unprocessed(self, text):
- scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
- stack = ['initial']
- in_function_block = False
- in_property_block = False
- was_dot = False
- next_token_is_function = False
- next_token_is_property = False
- collect_labels = False
- block_labels = set()
- brace_balance = [0, 0]
-
- while not scanner.eos:
- token = Error
-
- if stack[-1] == 'initial':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
- token = Operator
- # stop label highlighting on next ";"
- if collect_labels and scanner.match == ';':
- collect_labels = False
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- # abort function naming ``foo = Function(...)``
- next_token_is_function = False
- # if we are in a function block we count the open
- # braces because ootherwise it's impossible to
- # determine the end of the modifier context
- if in_function_block or in_property_block:
- if scanner.match == '(':
- brace_balance[0] += 1
- elif scanner.match == ')':
- brace_balance[0] -= 1
- elif scanner.match == '[':
- brace_balance[1] += 1
- elif scanner.match == ']':
- brace_balance[1] -= 1
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name == 'result':
- token = Name.Builtin.Pseudo
- elif lowercase_name in self.keywords:
- token = Keyword
- # if we are in a special block and a
- # block ending keyword occours (and the parenthesis
- # is balanced) we end the current block context
- if (in_function_block or in_property_block) and \
- lowercase_name in self.BLOCK_KEYWORDS and \
- brace_balance[0] <= 0 and \
- brace_balance[1] <= 0:
- in_function_block = False
- in_property_block = False
- brace_balance = [0, 0]
- block_labels = set()
- if lowercase_name in ('label', 'goto'):
- collect_labels = True
- elif lowercase_name == 'asm':
- stack.append('asm')
- elif lowercase_name == 'property':
- in_property_block = True
- next_token_is_property = True
- elif lowercase_name in ('procedure', 'operator',
- 'function', 'constructor',
- 'destructor'):
- in_function_block = True
- next_token_is_function = True
- # we are in a function block and the current name
- # is in the set of registered modifiers. highlight
- # it as pseudo keyword
- elif in_function_block and \
- lowercase_name in self.FUNCTION_MODIFIERS:
- token = Keyword.Pseudo
- # if we are in a property highlight some more
- # modifiers
- elif in_property_block and \
- lowercase_name in ('read', 'write'):
- token = Keyword.Pseudo
- next_token_is_function = True
- # if the last iteration set next_token_is_function
- # to true we now want this name highlighted as
- # function. so do that and reset the state
- elif next_token_is_function:
- # Look if the next token is a dot. If yes it's
- # not a function, but a class name and the
- # part after the dot a function name
- if scanner.test(r'\s*\.\s*'):
- token = Name.Class
- # it's not a dot, our job is done
- else:
- token = Name.Function
- next_token_is_function = False
- # same for properties
- elif next_token_is_property:
- token = Name.Property
- next_token_is_property = False
- # Highlight this token as label and add it
- # to the list of known labels
- elif collect_labels:
- token = Name.Label
- block_labels.add(scanner.match.lower())
- # name is in list of known labels
- elif lowercase_name in block_labels:
- token = Name.Label
- elif lowercase_name in self.BUILTIN_TYPES:
- token = Keyword.Type
- elif lowercase_name in self.DIRECTIVES:
- token = Keyword.Pseudo
- # builtins are just builtins if the token
- # before isn't a dot
- elif not was_dot and lowercase_name in self.builtins:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
- token = String.Char
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- # if the stack depth is deeper than once, pop
- if len(stack) > 1:
- stack.pop()
- scanner.get_char()
-
- elif stack[-1] == 'string':
- if scanner.scan(r"''"):
- token = String.Escape
- elif scanner.scan(r"'"):
- token = String
- stack.pop()
- elif scanner.scan(r"[^']*"):
- token = String
- else:
- scanner.get_char()
- stack.pop()
-
- elif stack[-1] == 'asm':
- if scanner.scan(r'\s+'):
- token = Text
- elif scanner.scan(r'end'):
- token = Keyword
- stack.pop()
- elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
- if scanner.match.startswith('$'):
- token = Comment.Preproc
- else:
- token = Comment.Multiline
- elif scanner.scan(r'//.*?$'):
- token = Comment.Single
- elif scanner.scan(r"'"):
- token = String
- stack.append('string')
- elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
- token = Name.Label
- elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
- lowercase_name = scanner.match.lower()
- if lowercase_name in self.ASM_INSTRUCTIONS:
- token = Keyword
- elif lowercase_name in self.ASM_REGISTERS:
- token = Name.Builtin
- else:
- token = Name
- elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
- token = Operator
- elif scanner.scan(r'[\(\)\[\]]+'):
- token = Punctuation
- elif scanner.scan(r'\$[0-9A-Fa-f]+'):
- token = Number.Hex
- elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
- token = Number.Integer
- elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
- token = Number.Float
- else:
- scanner.get_char()
- stack.pop()
-
- # save the dot!!!11
- if scanner.match.strip():
- was_dot = scanner.match == '.'
- yield scanner.start_pos, token, scanner.match or ''
-
-
-class DylanLexer(RegexLexer):
- """
- For the `Dylan <http://www.opendylan.org/>`_ language.
-
- .. versionadded:: 0.7
- """
-
- name = 'Dylan'
- aliases = ['dylan']
- filenames = ['*.dylan', '*.dyl', '*.intr']
- mimetypes = ['text/x-dylan']
-
- flags = re.IGNORECASE
-
- builtins = set([
- 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
- 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
- 'each-subclass', 'exception', 'exclude', 'function', 'generic',
- 'handler', 'inherited', 'inline', 'inline-only', 'instance',
- 'interface', 'import', 'keyword', 'library', 'macro', 'method',
- 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
- 'singleton', 'slot', 'thread', 'variable', 'virtual'])
-
- keywords = set([
- 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
- 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
- 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
- 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
- 'while'])
-
- operators = set([
- '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
- '>', '>=', '&', '|'])
-
- functions = set([
- 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
- 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
- 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
- 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
- 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
- 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
- 'condition-format-arguments', 'condition-format-string', 'conjoin',
- 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
- 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
- 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
- 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
- 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
- 'function-arguments', 'function-return-values',
- 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
- 'generic-function-methods', 'head', 'head-setter', 'identity',
- 'initialize', 'instance?', 'integral?', 'intersection',
- 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
- 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
- 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
- 'min', 'modulo', 'negative', 'negative?', 'next-method',
- 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
- 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
- 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
- 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
- 'remove-method', 'replace-elements!', 'replace-subsequence!',
- 'restart-query', 'return-allowed?', 'return-description',
- 'return-query', 'reverse', 'reverse!', 'round', 'round/',
- 'row-major-index', 'second', 'second-setter', 'shallow-copy',
- 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
- 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
- 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
- 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
- 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
- 'vector', 'zero?'])
-
- valid_name = '\\\\?[a-z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+'
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- lowercase_value = value.lower()
- if lowercase_value in self.builtins:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.keywords:
- yield index, Keyword, value
- continue
- if lowercase_value in self.functions:
- yield index, Name.Builtin, value
- continue
- if lowercase_value in self.operators:
- yield index, Operator, value
- continue
- yield index, token, value
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
-
- ('', Text, 'code') # no header match, switch to code
- ],
- 'code': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # multi-line comment
- (r'/\*', Comment.Multiline, 'comment'),
-
- # strings and characters
- (r'"', String, 'string'),
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
-
- # binary integer
- (r'#[bB][01]+', Number.Bin),
-
- # octal integer
- (r'#[oO][0-7]+', Number.Oct),
-
- # floating point
- (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
-
- # decimal integer
- (r'[-+]?\d+', Number.Integer),
-
- # hex integer
- (r'#[xX][0-9a-f]+', Number.Hex),
-
- # Macro parameters
- (r'(\?' + valid_name + ')(:)'
- r'(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
- bygroups(Name.Tag, Operator, Name.Builtin)),
- (r'\?' + valid_name, Name.Tag),
-
- # Punctuation
- (r'(=>|::|#\(|#\[|##|\?|\?\?|\?=|[(){}\[\],\.;])', Punctuation),
-
- # Most operators are picked up as names and then re-flagged.
- # This one isn't valid in a name though, so we pick it up now.
- (r':=', Operator),
-
- # Pick up #t / #f before we match other stuff with #.
- (r'#[tf]', Literal),
-
- # #"foo" style keywords
- (r'#"', String.Symbol, 'keyword'),
-
- # #rest, #key, #all-keys, etc.
- (r'#[a-z0-9-]+', Keyword),
-
- # required-init-keyword: style keywords.
- (valid_name + ':', Keyword),
-
- # class names
- (r'<' + valid_name + '>', Name.Class),
-
- # define variable forms.
- (r'\*' + valid_name + '\*', Name.Variable.Global),
-
- # define constant forms.
- (r'\$' + valid_name, Name.Constant),
-
- # everything else. We re-flag some of these in the method above.
- (valid_name, Name),
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'keyword': [
- (r'"', String.Symbol, '#pop'),
- (r'[^\\"]+', String.Symbol), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ]
- }
-
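# A minimal usage sketch of the lexer above (assuming a standard Pygments
# install): words such as "define" or "method" first match the generic name
# rule and are then re-flagged by get_tokens_unprocessed against the
# builtins/keywords/functions/operators sets defined in the class.
from pygments.lexers import DylanLexer
for _, tok, val in DylanLexer().get_tokens_unprocessed('define method double (x) 2 * x end;'):
    print(tok, repr(val))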
-
-class DylanLidLexer(RegexLexer):
- """
- For Dylan LID (Library Interchange Definition) files.
-
- .. versionadded:: 1.6
- """
-
- name = 'DylanLID'
- aliases = ['dylan-lid', 'lid']
- filenames = ['*.lid', '*.hdp']
- mimetypes = ['text/x-dylan-lid']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- # Whitespace
- (r'\s+', Text),
-
- # single line comment
- (r'//.*?\n', Comment.Single),
-
- # lid header
- (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Operator, Text, String)),
- ]
- }
-
-
-class DylanConsoleLexer(Lexer):
- """
- For Dylan interactive console output like:
-
- .. sourcecode:: dylan-console
-
- ? let a = 1;
- => 1
- ? a
- => 1
-
- This is based on a copy of the RubyConsoleLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Dylan session'
- aliases = ['dylan-console', 'dylan-repl']
- filenames = ['*.dylan-console']
- mimetypes = ['text/x-dylan-console']
-
- _line_re = re.compile('.*?\n')
- _prompt_re = re.compile('\?| ')
-
- def get_tokens_unprocessed(self, text):
- dylexer = DylanLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in self._line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- dylexer.get_tokens_unprocessed(curcode)):
- yield item
-
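# A short sketch (standard Pygments assumed) of feeding a session transcript
# through the console lexer above: prompt characters become Generic.Prompt,
# the code after them is re-lexed with DylanLexer via do_insertions, and all
# remaining lines are emitted as Generic.Output.
from pygments.lexers import DylanConsoleLexer
session = '? let a = 1;\n=> 1\n'
for _, tok, val in DylanConsoleLexer().get_tokens_unprocessed(session):
    print(tok, repr(val))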
-
-def objective(baselexer):
- """
- Generate a subclass of baselexer that accepts the Objective-C syntax
- extensions.
- """
-
- # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
- # since that's quite common in ordinary C/C++ files. It's OK to match
- # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
- #
- # The upshot of this is that we CANNOT match @class or @interface
- _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
- # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
- # (note the identifier is *optional* when there is a ':'!)
- _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
- r'(?:[a-zA-Z_]\w*\s*\]|'
- r'(?:[a-zA-Z_]\w*)?:)')
-
- class GeneratedObjectiveCVariant(baselexer):
- """
- Implements Objective-C syntax on top of an existing C family lexer.
- """
-
- tokens = {
- 'statements': [
- (r'@"', String, 'string'),
- (r'@(YES|NO)', Number),
- (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'@0[0-7]+[Ll]?', Number.Oct),
- (r'@\d+[Ll]?', Number.Integer),
- (r'@\(', Literal, 'literal_number'),
- (r'@\[', Literal, 'literal_array'),
- (r'@\{', Literal, 'literal_dictionary'),
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|@synthesize|'
- r'__bridge|__bridge_transfer|__autoreleasing|__block|__weak|__strong|'
- r'weak|strong|copy|retain|assign|unsafe_unretained|atomic|nonatomic|'
- r'readonly|readwrite|setter|getter|typeof|in|out|inout|release|class|'
- r'@dynamic|@optional|@required|@autoreleasepool)\b', Keyword),
- (r'(id|instancetype|Class|IMP|SEL|BOOL|IBOutlet|IBAction|unichar)\b',
- Keyword.Type),
- (r'@(true|false|YES|NO)\b', Name.Builtin),
- (r'(YES|NO|nil|self|super)\b', Name.Builtin),
- # Carbon types
- (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
- # Carbon built-ins
- (r'(TRUE|FALSE)\b', Name.Builtin),
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_classname')),
- (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'oc_forward_classname')),
- # @ can also prefix other expressions like @{...} or @(...)
- (r'@', Punctuation),
- inherit,
- ],
- 'oc_classname' : [
- # interface definition that inherits
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)({)',
- bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
- ('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- # interface definition for a category
- ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)({)',
- bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
- ('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
- bygroups(Name.Class, Text, Name.Label), '#pop'),
- # simple interface / implementation
- ('([a-zA-Z$_][\w$]*)(\s*)({)',
- bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
- ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'oc_forward_classname' : [
- ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
- bygroups(Name.Class, Text), 'oc_forward_classname'),
- ('([a-zA-Z$_][\w$]*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop')
- ],
- 'oc_ivars' : [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- ('{', Punctuation, '#push'),
- ('}', Punctuation, '#pop'),
- ],
- 'root': [
- # methods
- (r'^([-+])(\s*)' # method marker
- r'(\(.*?\))?(\s*)' # return type
- r'([a-zA-Z$_][\w$]*:?)', # begin of method name
- bygroups(Punctuation, Text, using(this),
- Text, Name.Function),
- 'method'),
- inherit,
- ],
- 'method': [
- include('whitespace'),
- # TODO unsure if ellipses are allowed elsewhere, see
- # discussion in Issue 789
- (r',', Punctuation),
- (r'\.\.\.', Punctuation),
- (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
- bygroups(using(this), Text, Name.Variable)),
- (r'[a-zA-Z$_][\w$]*:', Name.Function),
- (';', Punctuation, '#pop'),
- ('{', Punctuation, 'function'),
- ('', Text, '#pop'),
- ],
- 'literal_number': [
- (r'\(', Punctuation, 'literal_number_inner'),
- (r'\)', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_number_inner': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_array': [
- (r'\[', Punctuation, 'literal_array_inner'),
- (r'\]', Literal, '#pop'),
- include('statement'),
- ],
- 'literal_array_inner': [
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- include('statement'),
- ],
- 'literal_dictionary': [
- (r'\}', Literal, '#pop'),
- include('statement'),
- ],
- }
-
- def analyse_text(text):
- if _oc_keywords.search(text):
- return 1.0
- elif '@"' in text: # strings
- return 0.8
- elif re.search('@[0-9]+', text):
- return 0.7
- elif _oc_message.search(text):
- return 0.8
- return 0
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._cocoabuiltins import COCOA_INTERFACES, \
- COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
- for index, token, value in \
- baselexer.get_tokens_unprocessed(self, text):
- if token is Name or token is Name.Class:
- if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
- or value in COCOA_PRIMITIVES:
- token = Name.Builtin.Pseudo
-
- yield index, token, value
-
- return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
- """
- For Objective-C source code with preprocessor directives.
- """
-
- name = 'Objective-C'
- aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
- filenames = ['*.m', '*.h']
- mimetypes = ['text/x-objective-c']
- priority = 0.05 # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
- """
- For Objective-C++ source code with preprocessor directives.
- """
-
- name = 'Objective-C++'
- aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
- filenames = ['*.mm', '*.hh']
- mimetypes = ['text/x-objective-c++']
- priority = 0.05 # Lower than C++
-
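# A hedged sketch of the objective() factory defined earlier: applying it to
# CLexer and CppLexer yields the two classes above, which share the
# Objective-C token rules while keeping their base lexer's C/C++ handling.
# The exact output depends on the installed Pygments version.
from pygments.lexers import ObjectiveCLexer
src = '@interface Greeter : NSObject\n@end\n'
print([(tok, val) for _, tok, val in ObjectiveCLexer().get_tokens_unprocessed(src) if val.strip()])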
-
-class FortranLexer(RegexLexer):
- """
- Lexer for FORTRAN 90 code.
-
- .. versionadded:: 0.10
- """
- name = 'Fortran'
- aliases = ['fortran']
- filenames = ['*.f', '*.f90', '*.F', '*.F90']
- mimetypes = ['text/x-fortran']
- flags = re.IGNORECASE
-
- # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
- # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
- # Logical (?): NOT, AND, OR, EQV, NEQV
-
- # Builtins:
- # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
-
- tokens = {
- 'root': [
- (r'!.*\n', Comment),
- include('strings'),
- include('core'),
- (r'[a-z]\w*', Name.Variable),
- include('nums'),
- (r'[\s]+', Text),
- ],
- 'core': [
- # Statements
- (r'\b(ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASYNCHRONOUS|'
- r'BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|COMMON|CONTAINS|'
- r'CONTINUE|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|'
- r'ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDIF|ENTRY|ENUMERATOR|EQUIVALENCE|'
- r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|'
- r'GOTO|IF|IMPLICIT|IMPORT|INCLUDE|INQUIRE|INTENT|INTERFACE|'
- r'INTRINSIC|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|'
- r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|'
- r'PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|'
- r'RECURSIVE|RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|'
- r'TARGET|THEN|TYPE|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b',
- Keyword),
-
- # Data Types
- (r'\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|'
- r'LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|'
- r'C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|'
- r'C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|'
- r'C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|'
- r'C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|'
- r'C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|'
- r'C_FUNPTR)\s*\b',
- Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
-
- (r'(::)', Keyword.Declaration),
-
- (r'[()\[\],:&%;]', Punctuation),
-
- # Intrinsics
- (r'\b(Abort|Abs|Access|AChar|ACos|AdjustL|AdjustR|AImag|AInt|Alarm|'
- r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|Associated|'
- r'ATan|BesJ|BesJN|BesY|BesYN|Bit_Size|BTest|CAbs|CCos|Ceiling|'
- r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|'
- r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|'
- r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|'
- r'C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_New_Line|'
- r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|'
- r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|'
- r'DbesJN|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|'
- r'DExp|Digits|DiM|DInt|DLog|DMax|DMin|DMod|DNInt|Dot_Product|'
- r'DProd|DSign|DSinH|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|'
- r'ErF|ErFC|ETime|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|'
- r'FGetC|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|'
- r'FTell|GError|GetArg|Get_Command|Get_Command_Argument|'
- r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|'
- r'GetUId|GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|'
- r'IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|'
- r'ImagPart|Index|Int|IOr|IRand|IsaTty|IShft|IShftC|ISign|'
- r'Iso_C_Binding|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|'
- r'LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|'
- r'Logical|Long|LShift|LStat|LTime|MatMul|Max|MaxExponent|MaxLoc|'
- r'MaxVal|MClock|Merge|Move_Alloc|Min|MinExponent|MinLoc|MinVal|'
- r'Mod|Modulo|MvBits|Nearest|New_Line|NInt|Not|Or|Pack|PError|'
- r'Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|'
- r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|'
- r'Same_Type_As|Scale|Scan|Second|Selected_Int_Kind|'
- r'Selected_Real_Kind|Set_Exponent|Shape|Short|Sign|Signal|SinH|'
- r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Sum|SymLnk|'
- r'System|System_Clock|Tan|TanH|Time|Tiny|Transfer|Transpose|Trim|'
- r'TtyNam|UBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|'
- r'ZLog|ZSin|ZSqRt)\s*\b',
- Name.Builtin),
-
- # Booleans
- (r'\.(true|false)\.', Name.Builtin),
- # Comparing Operators
- (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
- ],
-
- 'strings': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- ],
-
- 'nums': [
- (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
- (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
- ],
- }
-
-
-class GLShaderLexer(RegexLexer):
- """
- GLSL (OpenGL Shader) lexer.
-
- .. versionadded:: 1.1
- """
- name = 'GLSL'
- aliases = ['glsl']
- filenames = ['*.vert', '*.frag', '*.geo']
- mimetypes = ['text/x-glslsrc']
-
- tokens = {
- 'root': [
- (r'^#.*', Comment.Preproc),
- (r'//.*', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
- Operator),
- (r'[?:]', Operator), # quick hack for ternary
- (r'\bdefined\b', Operator),
- (r'[;{}(),\[\]]', Punctuation),
- # FIXME: when an exponent is present, the decimal point is not required
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]*', Number.Hex),
- (r'0[0-7]*', Number.Oct),
- (r'[1-9][0-9]*', Number.Integer),
- (r'\b(attribute|const|uniform|varying|centroid|break|continue|'
- r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|'
- r'false|invariant|discard|return|mat[234]|mat[234]x[234]|'
- r'vec[234]|[ib]vec[234]|sampler[123]D|samplerCube|'
- r'sampler[12]DShadow|struct)\b', Keyword),
- (r'\b(asm|class|union|enum|typedef|template|this|packed|goto|'
- r'switch|default|inline|noinline|volatile|public|static|extern|'
- r'external|interface|long|short|double|half|fixed|unsigned|'
- r'lowp|mediump|highp|precision|input|output|hvec[234]|'
- r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|'
- r'cast|namespace|using)\b', Keyword), #future use
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'\.', Punctuation),
- (r'\s+', Text),
- ],
- }
-
-
-class PrologLexer(RegexLexer):
- """
- Lexer for Prolog files.
- """
- name = 'Prolog'
- aliases = ['prolog']
- filenames = ['*.prolog', '*.pro', '*.pl']
- mimetypes = ['text/x-prolog']
-
- flags = re.UNICODE
-
- tokens = {
- 'root': [
- (r'^#.*', Comment.Single),
- (r'/\*', Comment.Multiline, 'nested-comment'),
- (r'%.*', Comment.Single),
- # character literal
- (r'0\'.', String.Char),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # literal with prepended base
- (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer),
- (r'[\[\](){}|.,;!]', Punctuation),
- (r':-|-->', Punctuation),
- (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
- r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double),
- (r"'(?:''|[^'])*'", String.Atom), # quoted atom
- # Needs to not be followed by an atom.
- #(r'=(?=\s|[a-zA-Z\[])', Operator),
- (r'is\b', Operator),
- (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
- Operator),
- (r'(mod|div|not)\b', Operator),
- (r'_', Keyword), # The don't-care variable
- (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(:-|-->)',
- bygroups(Name.Function, Text, Operator)), # function defn
- (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
- u'(\\s*)(\\()',
- bygroups(Name.Function, Text, Punctuation)),
- (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
- u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
- String.Atom), # atom, characters
- # This one includes !
- (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
- String.Atom), # atom, graphics
- (r'[A-Z_]\w*', Name.Variable),
- (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
- ],
- 'nested-comment': [
- (r'\*/', Comment.Multiline, '#pop'),
- (r'/\*', Comment.Multiline, '#push'),
- (r'[^*/]+', Comment.Multiline),
- (r'[*/]', Comment.Multiline),
- ],
- }
-
- def analyse_text(text):
- return ':-' in text
-
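# A small sketch of how analyse_text above participates in lexer guessing
# (standard Pygments assumed): guess_lexer scores the text with every lexer's
# analyse_text and picks the best match, so a clause containing ':-' will
# usually, though not necessarily, be reported as Prolog.
from pygments.lexers import guess_lexer
print(guess_lexer('father(X) :- parent(X, Y), male(X).').name)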
-
-class CythonLexer(RegexLexer):
- """
- For Pyrex and `Cython <http://cython.org>`_ source code.
-
- .. versionadded:: 1.1
- """
-
- name = 'Cython'
- aliases = ['cython', 'pyx', 'pyrex']
- filenames = ['*.pyx', '*.pxd', '*.pxi']
- mimetypes = ['text/x-cython', 'application/x-cython']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
- (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'(<)([a-zA-Z0-9.?]+)(>)',
- bygroups(Punctuation, Keyword.Type, Punctuation)),
- (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
- (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
- Name, Punctuation)),
- include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
- include('builtins'),
- include('backtick'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
- ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
- ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
- ('[uU]?"""', String, combined('stringescape', 'tdqs')),
- ("[uU]?'''", String, combined('stringescape', 'tsqs')),
- ('[uU]?"', String, combined('stringescape', 'dqs')),
- ("[uU]?'", String, combined('stringescape', 'sqs')),
- include('name'),
- include('numbers'),
- ],
- 'keywords': [
- (r'(assert|break|by|continue|ctypedef|del|elif|else|except\??|exec|'
- r'finally|for|gil|global|if|include|lambda|nogil|pass|print|raise|'
- r'return|try|while|yield|as|with)\b', Keyword),
- (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
- ],
- 'builtins': [
- (r'(?<!\.)(__import__|abs|all|any|apply|basestring|bin|bool|buffer|'
- r'bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|'
- r'complex|delattr|dict|dir|divmod|enumerate|eval|execfile|exit|'
- r'file|filter|float|frozenset|getattr|globals|hasattr|hash|hex|id|'
- r'input|int|intern|isinstance|issubclass|iter|len|list|locals|'
- r'long|map|max|min|next|object|oct|open|ord|pow|property|range|'
- r'raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|'
- r'sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|'
- r'vars|xrange|zip)\b', Name.Builtin),
- (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
- r')\b', Name.Builtin.Pseudo),
- (r'(?<!\.)(ArithmeticError|AssertionError|AttributeError|'
- r'BaseException|DeprecationWarning|EOFError|EnvironmentError|'
- r'Exception|FloatingPointError|FutureWarning|GeneratorExit|IOError|'
- r'ImportError|ImportWarning|IndentationError|IndexError|KeyError|'
- r'KeyboardInterrupt|LookupError|MemoryError|NameError|'
- r'NotImplemented|NotImplementedError|OSError|OverflowError|'
- r'OverflowWarning|PendingDeprecationWarning|ReferenceError|'
- r'RuntimeError|RuntimeWarning|StandardError|StopIteration|'
- r'SyntaxError|SyntaxWarning|SystemError|SystemExit|TabError|'
- r'TypeError|UnboundLocalError|UnicodeDecodeError|'
- r'UnicodeEncodeError|UnicodeError|UnicodeTranslateError|'
- r'UnicodeWarning|UserWarning|ValueError|Warning|ZeroDivisionError'
- r')\b', Name.Exception),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'backtick': [
- ('`.*?`', String.Backtick),
- ],
- 'name': [
- (r'@\w+', Name.Decorator),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop')
- ],
- 'cdef': [
- (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
- (r'(struct|enum|union|class)\b', Keyword),
- (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
- (r'([a-zA-Z_]\w*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
- (r'from\b', Keyword, '#pop'),
- (r'as\b', Keyword),
- (r':', Punctuation, '#pop'),
- (r'(?=["\'])', Text, '#pop'),
- (r'[a-zA-Z_]\w*', Keyword.Type),
- (r'.', Text),
- ],
- 'classname': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
- (r'[a-zA-Z_][\w.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
- default('#pop') # all else: go back
- ],
- 'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
- (r'[a-zA-Z_.][\w.]*', Name.Namespace),
- # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
- default('#pop'),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
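# A brief sketch of the 'cdef' state above (standard Pygments assumed): after
# the "cdef" keyword, bare identifiers are treated as C types and the declared
# name before "(" is tagged as a function.
from pygments.lexers import CythonLexer
code = 'cdef int add(int a, int b):\n    return a + b\n'
for _, tok, val in CythonLexer().get_tokens_unprocessed(code):
    if val.strip():
        print(tok, repr(val))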
-
-class ValaLexer(RegexLexer):
- """
- For Vala source code with preprocessor directives.
-
- .. versionadded:: 1.1
- """
- name = 'Vala'
- aliases = ['vala', 'vapi']
- filenames = ['*.vala', '*.vapi']
- mimetypes = ['text/x-vala']
-
- tokens = {
- 'whitespace': [
- (r'^\s*#if\s+0', Comment.Preproc, 'if0'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])',
- bygroups(Punctuation, Name.Decorator, Punctuation)),
- # TODO: "correctly" parse complex code attributes
- (r'(\[)(CCode|(?:Integer|Floating)Type)',
- bygroups(Punctuation, Name.Decorator)),
- (r'[()\[\],.]', Punctuation),
- (r'(as|base|break|case|catch|construct|continue|default|delete|do|'
- r'else|enum|finally|for|foreach|get|if|in|is|lock|new|out|params|'
- r'return|set|sizeof|switch|this|throw|try|typeof|while|yield)\b',
- Keyword),
- (r'(abstract|const|delegate|dynamic|ensures|extern|inline|internal|'
- r'override|owned|private|protected|public|ref|requires|signal|'
- r'static|throws|unowned|var|virtual|volatile|weak|yields)\b',
- Keyword.Declaration),
- (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text),
- 'namespace'),
- (r'(class|errordomain|interface|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- # void is an actual keyword, others are in glib-2.0.vapi
- (r'(void|bool|char|double|float|int|int8|int16|int32|int64|long|'
- r'short|size_t|ssize_t|string|time_t|uchar|uint|uint8|uint16|'
- r'uint32|uint64|ulong|unichar|ushort)\b', Keyword.Type),
- (r'(true|false|null)\b', Name.Builtin),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- ('', Text, 'statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'namespace': [
- (r'[a-zA-Z_][\w.]*', Name.Namespace, '#pop')
- ],
- }
-
-
-class OocLexer(RegexLexer):
- """
- For `Ooc <http://ooc-lang.org/>`_ source code.
-
- .. versionadded:: 1.2
- """
- name = 'Ooc'
- aliases = ['ooc']
- filenames = ['*.ooc']
- mimetypes = ['text/x-ooc']
-
- tokens = {
- 'root': [
- (r'\b(class|interface|implement|abstract|extends|from|'
- r'this|super|new|const|final|static|import|use|extern|'
- r'inline|proto|break|continue|fallthrough|operator|if|else|for|'
- r'while|do|switch|case|as|in|version|return|true|false|null)\b',
- Keyword),
- (r'include\b', Keyword, 'include'),
- (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class)),
- (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
- bygroups(Keyword, Text, Name.Function)),
- (r'\bfunc\b', Keyword),
- # Note: %= and ^= not listed on http://ooc-lang.org/syntax
- (r'//.*', Comment),
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
- r'&&?|\|\|?|\^=?)', Operator),
- (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
- Name.Function)),
- (r'[A-Z][A-Z0-9_]+', Name.Constant),
- (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
-
- (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
- bygroups(Name.Function, Text)),
- (r'[a-z]\w*', Name.Variable),
-
- # : introduces types
- (r'[:(){}\[\];,]', Punctuation),
-
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'0c[0-9]+', Number.Oct),
- (r'0b[01]+', Number.Bin),
- (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
- (r'[0-9_]+', Number.Decimal),
-
- (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"',
- String.Double),
- (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'@', Punctuation), # pointer dereference
- (r'\.', Punctuation), # imports or chain operator
-
- (r'\\[ \t\n]', Text),
- (r'[ \t]+', Text),
- ],
- 'include': [
- (r'[\w/]+', Name),
- (r',', Punctuation),
- (r'[ \t]', Text),
- (r'[;\n]', Text, '#pop'),
- ],
- }
-
-
-class GoLexer(RegexLexer):
- """
- For `Go <http://golang.org>`_ source.
- """
- name = 'Go'
- filenames = ['*.go']
- aliases = ['go']
- mimetypes = ['text/x-gosrc']
-
- flags = re.MULTILINE | re.UNICODE
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuations
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'(import|package)\b', Keyword.Namespace),
- (r'(var|func|struct|map|chan|type|interface|const)\b', Keyword.Declaration),
- (r'(break|default|select|case|defer|go'
- r'|else|goto|switch|fallthrough|if|range'
- r'|continue|for|return)\b', Keyword),
- (r'(true|false|iota|nil)\b', Keyword.Constant),
- # It seems the builtin types aren't actually keywords, but
- # can be used as functions. So we need two declarations.
- (r'(uint|uint8|uint16|uint32|uint64'
- r'|int|int8|int16|int32|int64'
- r'|float|float32|float64'
- r'|complex64|complex128|byte|rune'
- r'|string|bool|error|uintptr'
- r'|print|println|panic|recover|close|complex|real|imag'
- r'|len|cap|append|copy|delete|new|make)\b(\()',
- bygroups(Name.Builtin, Punctuation)),
- (r'(uint|uint8|uint16|uint32|uint64'
- r'|int|int8|int16|int32|int64'
- r'|float|float32|float64'
- r'|complex64|complex128|byte|rune'
- r'|string|bool|error|uintptr)\b', Keyword.Type),
- # imaginary_lit
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
- # float_lit
- (r'\d+(\.\d+[eE][+\-]?\d+|'
- r'\.\d*|[eE][+\-]?\d+)', Number.Float),
- (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
- # int_lit
- # -- octal_lit
- (r'0[0-7]+', Number.Oct),
- # -- hex_lit
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal_lit
- (r'(0|[1-9][0-9]*)', Number.Integer),
- # char_lit
- (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
- String.Char
- ),
- # StringLiteral
- # -- raw_string_lit
- (r'`[^`]*`', String),
- # -- interpreted_string_lit
- (r'"(\\\\|\\"|[^"])*"', String),
- # Tokens
- (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
- r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
- (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
- # identifier
- (r'[^\W\d]\w*', Name.Other),
- ]
- }
-
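# A sketch of the two-declaration approach noted in the comment above: "int"
# followed directly by "(" is tagged Name.Builtin, while "int" used as a type
# is tagged Keyword.Type. Assumes a standard Pygments install.
from pygments.lexers import GoLexer
from pygments.token import Keyword, Name
toks = list(GoLexer().get_tokens('x := int(y)\nvar z int\n'))
print((Name.Builtin, 'int') in toks, (Keyword.Type, 'int') in toks)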
-
-class FelixLexer(RegexLexer):
- """
- For `Felix <http://www.felix-lang.org>`_ source code.
-
- .. versionadded:: 1.2
- """
-
- name = 'Felix'
- aliases = ['felix', 'flx']
- filenames = ['*.flx', '*.flxh']
- mimetypes = ['text/x-felix']
-
- preproc = [
- 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
- ]
-
- keywords = [
- '_', '_deref', 'all', 'as',
- 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
- 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
- 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
- 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
- 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
- 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
- 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
- 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
- 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
- 'when', 'whilst', 'with', 'yield',
- ]
-
- keyword_directives = [
- '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
- 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
- 'package', 'private', 'pod', 'property', 'public', 'publish',
- 'requires', 'todo', 'virtual', 'use',
- ]
-
- keyword_declarations = [
- 'def', 'let', 'ref', 'val', 'var',
- ]
-
- keyword_types = [
- 'unit', 'void', 'any', 'bool',
- 'byte', 'offset',
- 'address', 'caddress', 'cvaddress', 'vaddress',
- 'tiny', 'short', 'int', 'long', 'vlong',
- 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
- 'int8', 'int16', 'int32', 'int64',
- 'uint8', 'uint16', 'uint32', 'uint64',
- 'float', 'double', 'ldouble',
- 'complex', 'dcomplex', 'lcomplex',
- 'imaginary', 'dimaginary', 'limaginary',
- 'char', 'wchar', 'uchar',
- 'charp', 'charcp', 'ucharp', 'ucharcp',
- 'string', 'wstring', 'ustring',
- 'cont',
- 'array', 'varray', 'list',
- 'lvalue', 'opt', 'slice',
- ]
-
- keyword_constants = [
- 'false', 'true',
- ]
-
- operator_words = [
- 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
- ]
-
- name_builtins = [
- '_svc', 'while',
- ]
-
- name_pseudo = [
- 'root', 'self', 'this',
- ]
-
- decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # Keywords
- (r'(axiom|ctor|fun|gen|proc|reduce|union)\b', Keyword,
- 'funcname'),
- (r'(class|cclass|cstruct|obj|struct)\b', Keyword, 'classname'),
- (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
-
- (r'(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)\b' % '|'.join(keyword_directives), Name.Decorator),
- (r'(%s)\b' % '|'.join(keyword_declarations), Keyword.Declaration),
- (r'(%s)\b' % '|'.join(keyword_types), Keyword.Type),
- (r'(%s)\b' % '|'.join(keyword_constants), Keyword.Constant),
-
- # Operators
- include('operators'),
-
- # Float Literal
- # -- Hex Float
- (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
- r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
- # -- DecimalFloat
- (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
- (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
- Number.Float),
-
- # IntegerLiteral
- # -- Binary
- (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
- # -- Octal
- (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
- # -- Hexadecimal
- (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
- # -- Decimal
- (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
-
- # Strings
- ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
- ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
- ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
- ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
- ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
- ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
- ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
- ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
-
- # Punctuation
- (r'[\[\]{}:(),;?]', Punctuation),
-
- # Labels
- (r'[a-zA-Z_]\w*:>', Name.Label),
-
- # Identifiers
- (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
- (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
-
- include('comment'),
-
- # Preprocessor
- (r'#\s*if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
- ],
- 'operators': [
- (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
- (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
- ],
- 'comment': [
- (r'//(.*?)\n', Comment.Single),
- (r'/[*]', Comment.Multiline, 'comment2'),
- ],
- 'comment2': [
- (r'[^\/*]', Comment.Multiline),
- (r'/[*]', Comment.Multiline, '#push'),
- (r'[*]/', Comment.Multiline, '#pop'),
- (r'[\/*]', Comment.Multiline),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
- (r'.*?\n', Comment),
- ],
- 'macro': [
- include('comment'),
- (r'(import|include)(\s+)(<[^>]*?>)',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'(import|include)(\s+)("[^"]*?")',
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r"(import|include)(\s+)('[^']*?')",
- bygroups(Comment.Preproc, Text, String), '#pop'),
- (r'[^/\n]+', Comment.Preproc),
- ##(r'/[*](.|\n)*?[*]/', Comment),
- ##(r'//.*?\n', Comment, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'funcname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
- # anonymous functions
- (r'(?=\()', Text, '#pop'),
- ],
- 'classname': [
- include('whitespace'),
- (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
- # anonymous classes
- (r'(?=\{)', Text, '#pop'),
- ],
- 'modulename': [
- include('whitespace'),
- (r'\[', Punctuation, ('modulename2', 'tvarlist')),
- default('modulename2'),
- ],
- 'modulename2': [
- include('whitespace'),
- (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
- ],
- 'tvarlist': [
- include('whitespace'),
- include('operators'),
- (r'\[', Punctuation, '#push'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(with|where)\b', Keyword),
- (r'[a-zA-Z_]\w*', Name),
- ],
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
- 'strings': [
- (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
- '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
- (r'[^\\\'"%\n]+', String),
- # quotes, percents and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'%', String)
- # newlines are an error (use "nl" state)
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- # included here again for raw strings
- (r'\\\\|\\"|\\\n', String.Escape),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- # included here again for raw strings
- (r"\\\\|\\'|\\\n", String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- include('strings'),
- include('nl')
- ],
- }
-
-
-class AdaLexer(RegexLexer):
- """
- For Ada source code.
-
- .. versionadded:: 1.3
- """
-
- name = 'Ada'
- aliases = ['ada', 'ada95', 'ada2005']
- filenames = ['*.adb', '*.ads', '*.ada']
- mimetypes = ['text/x-ada']
-
- flags = re.MULTILINE | re.I # Ignore case
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- (r'[^\S\n]+', Text),
- (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
- (r'(subtype|type)(\s+)([a-z0-9_]+)',
- bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
- (r'task|protected', Keyword.Declaration),
- (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
- (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
- (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
- Comment.Preproc)),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'(Address|Byte|Boolean|Character|Controlled|Count|Cursor|'
- r'Duration|File_Mode|File_Type|Float|Generator|Integer|Long_Float|'
- r'Long_Integer|Long_Long_Float|Long_Long_Integer|Natural|Positive|'
- r'Reference_Type|Short_Float|Short_Integer|Short_Short_Float|'
- r'Short_Short_Integer|String|Wide_Character|Wide_String)\b',
- Keyword.Type),
- (r'(and(\s+then)?|in|mod|not|or(\s+else)?|rem)\b', Operator.Word),
- (r'generic|private', Keyword.Declaration),
- (r'package', Keyword.Declaration, 'package'),
- (r'array\b', Keyword.Reserved, 'array_def'),
- (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
- (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)',
- bygroups(Name.Constant, Text, Punctuation, Text,
- Keyword.Reserved)),
- (r'<<[a-z0-9_]+>>', Name.Label),
- (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
- bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
- (r'\b(abort|abs|abstract|accept|access|aliased|all|array|at|begin|'
- r'body|case|constant|declare|delay|delta|digits|do|else|elsif|end|'
- r'entry|exception|exit|interface|for|goto|if|is|limited|loop|new|'
- r'null|of|or|others|out|overriding|pragma|protected|raise|range|'
- r'record|renames|requeue|return|reverse|select|separate|subtype|'
- r'synchronized|task|tagged|terminate|then|type|until|when|while|'
- r'xor)\b',
- Keyword.Reserved),
- (r'"[^"]*"', String),
- include('attribute'),
- include('numbers'),
- (r"'[^']'", String.Character),
- (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
- (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
- (r'[*<>+=/&-]', Operator),
- (r'\n+', Text),
- ],
- 'numbers' : [
- (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
- (r'[0-9_]+\.[0-9_]*', Number.Float),
- (r'[0-9_]+', Number.Integer),
- ],
- 'attribute' : [
- (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
- ],
- 'subprogram' : [
- (r'\(', Punctuation, ('#pop', 'formal_part')),
- (r';', Punctuation, '#pop'),
- (r'is\b', Keyword.Reserved, '#pop'),
- (r'"[^"]+"|[a-z0-9_]+', Name.Function),
- include('root'),
- ],
- 'end' : [
- ('(if|case|record|loop|select)', Keyword.Reserved),
- ('"[^"]+"|[\w.]+', Name.Function),
- ('\s+', Text),
- (';', Punctuation, '#pop'),
- ],
- 'type_def': [
- (r';', Punctuation, '#pop'),
- (r'\(', Punctuation, 'formal_part'),
- (r'with|and|use', Keyword.Reserved),
- (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
- (r'record\b', Keyword.Reserved, ('record_def')),
- (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
- include('root'),
- ],
- 'array_def' : [
- (r';', Punctuation, '#pop'),
- (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text,
- Keyword.Reserved)),
- include('root'),
- ],
- 'record_def' : [
- (r'end record', Keyword.Reserved, '#pop'),
- include('root'),
- ],
- 'import': [
- (r'[a-z0-9_.]+', Name.Namespace, '#pop'),
- default('#pop'),
- ],
- 'formal_part' : [
- (r'\)', Punctuation, '#pop'),
- (r'[a-z0-9_]+', Name.Variable),
- (r',|:[^=]', Punctuation),
- (r'(in|not|null|out|access)\b', Keyword.Reserved),
- include('root'),
- ],
- 'package': [
- ('body', Keyword.Declaration),
- ('is\s+new|renames', Keyword.Reserved),
- ('is', Keyword.Reserved, '#pop'),
- (';', Punctuation, '#pop'),
- ('\(', Punctuation, 'package_instantiation'),
- ('([\w.]+)', Name.Class),
- include('root'),
- ],
- 'package_instantiation': [
- (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable,
- Text, Punctuation)),
- (r'[a-z0-9._\'"]', Text),
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- }
-
-
-class Modula2Lexer(RegexLexer):
- """
- For `Modula-2 <http://www.modula2.org/>`_ source code.
-
- Additional options that determine which keywords are highlighted:
-
- `pim`
- Select PIM Modula-2 dialect (default: True).
- `iso`
- Select ISO Modula-2 dialect (default: False).
- `objm2`
- Select Objective Modula-2 dialect (default: False).
- `gm2ext`
- Also highlight GNU extensions (default: False).
-
- .. versionadded:: 1.3
- """
- name = 'Modula-2'
- aliases = ['modula2', 'm2']
- filenames = ['*.def', '*.mod']
- mimetypes = ['text/x-modula2']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n+', Text), # blank lines
- (r'\s+', Text), # whitespace
- ],
- 'identifiers': [
- (r'([a-zA-Z_\$][\w\$]*)', Name),
- ],
- 'numliterals': [
- (r'[01]+B', Number.Bin), # binary number (ObjM2)
- (r'[0-7]+B', Number.Oct), # octal number (PIM + ISO)
- (r'[0-7]+C', Number.Oct), # char code (PIM + ISO)
- (r'[0-9A-F]+C', Number.Hex), # char code (ObjM2)
- (r'[0-9A-F]+H', Number.Hex), # hexadecimal number
- (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
- (r'[0-9]+\.[0-9]+', Number.Float), # real number
- (r'[0-9]+', Number.Integer), # decimal whole number
- ],
- 'strings': [
- (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
- (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
- ],
- 'operators': [
- (r'[*/+=#~&<>\^-]', Operator),
- (r':=', Operator), # assignment
- (r'@', Operator), # pointer deref (ISO)
- (r'\.\.', Operator), # ellipsis or range
- (r'`', Operator), # Smalltalk message (ObjM2)
- (r'::', Operator), # type conversion (ObjM2)
- ],
- 'punctuation': [
- (r'[\(\)\[\]{},.:;|]', Punctuation),
- ],
- 'comments': [
- (r'//.*?\n', Comment.Single), # ObjM2
- (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2
- (r'\(\*([^\$].*?)\*\)', Comment.Multiline),
- # TODO: handle nesting of (* ... *) comments
- ],
- 'pragmas': [
- (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM
- (r'<\*(.*?)\*>', Comment.Preproc), # ISO + ObjM2
- ],
- 'root': [
- include('whitespace'),
- include('comments'),
- include('pragmas'),
- include('identifiers'),
- include('numliterals'),
- include('strings'),
- include('operators'),
- include('punctuation'),
- ]
- }
-
- pim_reserved_words = [
- # 40 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION',
- 'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR',
- 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD',
- 'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED',
- 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
- 'UNTIL', 'VAR', 'WHILE', 'WITH',
- ]
-
- pim_pervasives = [
- # 31 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC',
- 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL',
- 'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD',
- 'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL',
- ]
-
- iso_reserved_words = [
- # 46 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY',
- 'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN',
- 'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER',
- 'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY',
- 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
- 'WITH',
- ]
-
- iso_pervasives = [
- # 42 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX',
- 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH',
- 'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH',
- 'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW',
- 'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE',
- 'TRUE', 'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
- ]
-
- objm2_reserved_words = [
- # base language, 42 reserved words
- 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
- 'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF',
- 'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD',
- 'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE',
- 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
- 'UNTIL', 'VAR', 'VARIADIC', 'WHILE',
- # OO extensions, 16 reserved words
- 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
- 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
- 'SUPER', 'TRY',
- ]
-
- objm2_pervasives = [
- # base language, 38 pervasives
- 'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE',
- 'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD',
- 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL',
- 'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX',
- 'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF',
- # OO extensions, 3 pervasives
- 'OBJECT', 'NO', 'YES',
- ]
-
- gnu_reserved_words = [
- # 10 additional reserved words
- 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
- '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
- ]
-
- gnu_pervasives = [
- # 21 identifiers, actually from pseudo-module SYSTEM
- # but we will highlight them as if they were pervasives
- 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
- 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
- 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
- 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
- ]
-
- def __init__(self, **options):
- self.reserved_words = set()
- self.pervasives = set()
- # ISO Modula-2
- if get_bool_opt(options, 'iso', False):
- self.reserved_words.update(self.iso_reserved_words)
- self.pervasives.update(self.iso_pervasives)
- # Objective Modula-2
- elif get_bool_opt(options, 'objm2', False):
- self.reserved_words.update(self.objm2_reserved_words)
- self.pervasives.update(self.objm2_pervasives)
- # PIM Modula-2 (DEFAULT)
- else:
- self.reserved_words.update(self.pim_reserved_words)
- self.pervasives.update(self.pim_pervasives)
- # GNU extensions
- if get_bool_opt(options, 'gm2ext', False):
- self.reserved_words.update(self.gnu_reserved_words)
- self.pervasives.update(self.gnu_pervasives)
- # initialise
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- # check for reserved words and pervasives
- if token is Name:
- if value in self.reserved_words:
- token = Keyword.Reserved
- elif value in self.pervasives:
- token = Keyword.Pervasive
- # return result
- yield index, token, value
-
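# A minimal usage sketch for the dialect options documented above (standard
# Pygments assumed; the Modula-2 snippet is made up): the keyword and
# pervasive sets are selected when the lexer is constructed.
from pygments import highlight
from pygments.formatters import NullFormatter
from pygments.lexers import Modula2Lexer
code = 'MODULE Hello;\nBEGIN\n  HALT;\nEND Hello.\n'
print(highlight(code, Modula2Lexer(iso=True, gm2ext=True), NullFormatter()))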
-
-class BlitzMaxLexer(RegexLexer):
- """
- For `BlitzMax <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'BlitzMax'
- aliases = ['blitzmax', 'bmax']
- filenames = ['*.bmx']
- mimetypes = ['text/x-bmx']
-
- bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b'
- bmax_sktypes = r'@{1,2}|[!#$%]'
- bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b'
- bmax_name = r'[a-z_]\w*'
- bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)'
- r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \
- (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name)
- bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])'
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- (r'\.\.\n', Text), # Line continuation
- # Comments
- (r"'.*?\n", Comment.Single),
- (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]*(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' %
- (bmax_vopwords), Operator),
- (r'[(),.:\[\]]', Punctuation),
- (r'(?:#[\w \t]*)', Name.Label),
- (r'(?:\?[\w \t]*)', Comment.Preproc),
- # Identifiers
- (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)),
- (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' %
- (bmax_name, bmax_name),
- bygroups(Keyword.Reserved, Text, Keyword.Namespace)),
- (bmax_func, bygroups(Name.Function, Text, Keyword.Type,
- Operator, Text, Punctuation, Text,
- Keyword.Type, Name.Class, Text,
- Keyword.Type, Text, Punctuation)),
- (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator,
- Text, Punctuation, Text, Keyword.Type,
- Name.Class, Text, Keyword.Type)),
- (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Ptr)\b', Keyword.Type),
- (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration),
- (r'\b(TNullMethodException|TNullFunctionException|'
- r'TNullObjectException|TArrayBoundsException|'
- r'TRuntimeException)\b', Name.Exception),
- (r'\b(Strict|SuperStrict|Module|ModuleInfo|'
- r'End|Return|Continue|Exit|Public|Private|'
- r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|'
- r'New|Release|Delete|'
- r'Incbin|IncbinPtr|IncbinLen|'
- r'Framework|Include|Import|Extern|EndExtern|'
- r'Function|EndFunction|'
- r'Type|EndType|Extends|'
- r'Method|EndMethod|'
- r'Abstract|Final|'
- r'If|Then|Else|ElseIf|EndIf|'
- r'For|To|Next|Step|EachIn|'
- r'While|Wend|EndWhile|'
- r'Repeat|Until|Forever|'
- r'Select|Case|Default|EndSelect|'
- r'Try|Catch|EndTry|Throw|Assert|'
- r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved),
- # Final resolve (for variable names and such)
- (r'(%s)' % (bmax_name), Name.Variable),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class BlitzBasicLexer(RegexLexer):
- """
- For `BlitzBasic <http://blitzbasic.com>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'BlitzBasic'
- aliases = ['blitzbasic', 'b3d', 'bplus']
- filenames = ['*.bb', '*.decls']
- mimetypes = ['text/x-bb']
-
- bb_vopwords = (r'\b(Shl|Shr|Sar|Mod|Or|And|Not|'
- r'Abs|Sgn|Handle|Int|Float|Str|'
- r'First|Last|Before|After)\b')
- bb_sktypes = r'@{1,2}|[#$%]'
- bb_name = r'[a-z]\w*'
- bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \
- (bb_name, bb_sktypes, bb_name)
-
- flags = re.MULTILINE | re.IGNORECASE
- tokens = {
- 'root': [
- # Text
- (r'[ \t]+', Text),
- # Comments
- (r";.*?\n", Comment.Single),
- # Data types
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-f]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Other
- (r'(?:%s|([+\-*/~=<>^]))' % (bb_vopwords), Operator),
- (r'[(),:\[\]\\]', Punctuation),
- (r'\.([ \t]*)(%s)' % bb_name, Name.Label),
- # Identifiers
- (r'\b(New)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Label)),
- (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name),
- bygroups(Operator, Text, Punctuation, Text, Name.Class)),
- (r'\b%s\b([ \t]*)(\()' % bb_var,
- bygroups(Name.Function, Text, Keyword.Type,Text, Punctuation,
- Text, Name.Class, Text, Punctuation)),
- (r'\b(Function)\b([ \t]+)%s' % bb_var,
- bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- (r'\b(Type)([ \t]+)(%s)' % (bb_name),
- bygroups(Keyword.Reserved, Text, Name.Class)),
- # Keywords
- (r'\b(Pi|True|False|Null)\b', Keyword.Constant),
- (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration),
- (r'\b(End|Return|Exit|'
- r'Chr|Len|Asc|'
- r'New|Delete|Insert|'
- r'Include|'
- r'Function|'
- r'Type|'
- r'If|Then|Else|ElseIf|EndIf|'
- r'For|To|Next|Step|Each|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'Select|Case|Default|'
- r'Goto|Gosub|Data|Read|Restore)\b', Keyword.Reserved),
- # Final resolve (for variable names and such)
-# (r'(%s)' % (bb_name), Name.Variable),
- (bb_var, bygroups(Name.Variable, Text, Keyword.Type,
- Text, Punctuation, Text, Name.Class)),
- ],
- 'string': [
- (r'""', String.Double),
- (r'"C?', String.Double, '#pop'),
- (r'[^"]+', String.Double),
- ],
- }
-
-
-class NimrodLexer(RegexLexer):
- """
- For `Nimrod <http://nimrod-code.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Nimrod'
- aliases = ['nimrod', 'nim']
- filenames = ['*.nim', '*.nimrod']
- mimetypes = ['text/x-nimrod']
-
- flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
-
- def underscorize(words):
- newWords = []
- new = ""
- for word in words:
- for ch in word:
- new += (ch + "_?")
- newWords.append(new)
- new = ""
- return "|".join(newWords)
-
- keywords = [
- 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
- 'case', 'cast', 'const', 'continue', 'converter', 'discard',
- 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
- 'for', 'generic', 'if', 'implies', 'in', 'yield',
- 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
- 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
- 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
- 'tuple', 'type' , 'when', 'while', 'with', 'without', 'xor'
- ]
-
- keywordsPseudo = [
- 'nil', 'true', 'false'
- ]
-
- opWords = [
- 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
- 'notin', 'is', 'isnot'
- ]
-
- types = [
- 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
- 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
- ]
-
- tokens = {
- 'root': [
- (r'##.*$', String.Doc),
- (r'#.*$', Comment),
- (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator),
- (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;',
- Punctuation),
-
- # Strings
- (r'(?:[\w]+)"', String, 'rdqs'),
- (r'"""', String, 'tdqs'),
- ('"', String, 'dqs'),
-
- # Char
- ("'", String.Char, 'chars'),
-
- # Keywords
- (r'(%s)\b' % underscorize(opWords), Operator.Word),
- (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'),
- (r'(%s)\b' % underscorize(keywords), Keyword),
- (r'(%s)\b' % underscorize(['from', 'import', 'include']),
- Keyword.Namespace),
- (r'(v_?a_?r)\b', Keyword.Declaration),
- (r'(%s)\b' % underscorize(types), Keyword.Type),
- (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
- # Identifiers
- (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
- # Numbers
- (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))',
- Number.Float, ('float-suffix', 'float-number')),
- (r'0[xX][a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
- (r'0[bB][01][01_]*', Number.Bin, 'int-suffix'),
- (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
- (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
- # Whitespace
- (r'\s+', Text),
- (r'.+$', Error),
- ],
- 'chars': [
- (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
- (r"'", String.Char, '#pop'),
- (r".", String.Char)
- ],
- 'strings': [
- (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
- (r'[^\\\'"\$\n]+', String),
- # quotes, dollars and backslashes must be parsed one at a time
- (r'[\'"\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- # newlines are an error (use "nl" state)
- ],
- 'dqs': [
- (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
- String.Escape),
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'rdqs': [
- (r'"(?!")', String, '#pop'),
- (r'""', String.Escape),
- include('strings')
- ],
- 'tdqs': [
- (r'"""(?!")', String, '#pop'),
- include('strings'),
- include('nl')
- ],
- 'funcname': [
- (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
- (r'`.+`', Name.Function, '#pop')
- ],
- 'nl': [
- (r'\n', String)
- ],
- 'float-number': [
- (r'\.(?!\.)[0-9_]*', Number.Float),
- (r'[eE][+-]?[0-9][0-9_]*', Number.Float),
- default('#pop')
- ],
- 'float-suffix': [
- (r'\'[fF](32|64)', Number.Float),
- default('#pop')
- ],
- 'int-suffix': [
- (r'\'[iI](32|64)', Number.Integer.Long),
- (r'\'[iI](8|16)', Number.Integer),
- default('#pop')
- ],
- }
-
-
-class FantomLexer(RegexLexer):
- """
- For Fantom source code.
-
- .. versionadded:: 1.5
- """
- name = 'Fantom'
- aliases = ['fan']
- filenames = ['*.fan']
- mimetypes = ['application/x-fantom']
-
- # often used regexes
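- # s() fills the $pod/$eos/$id/$type placeholders used in the token regexes
- # below via string.Template substitution.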
- def s(str):
- return Template(str).substitute(
- dict (
- pod = r'[\"\w\.]+',
- eos = r'\n|;',
- id = r'[a-zA-Z_]\w*',
- # all chars which can be part of type definition. Starts with
- # either letter, or [ (maps), or | (funcs)
- type = r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]\|\->\?]*?',
- )
- )
-
-
- tokens = {
- 'comments': [
- (r'(?s)/\*.*?\*/', Comment.Multiline), #Multiline
- (r'//.*?\n', Comment.Single), #Single line
- #todo: highlight references in fandocs
- (r'\*\*.*?\n', Comment.Special), #Fandoc
- (r'#.*\n', Comment.Single) #Shell-style
- ],
- 'literals': [
- (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), #Duration
- (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number),
- #Duration with dot
- (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), #Float/Decimal
- (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), #Hex
- (r'\b-?[\d_]+', Number.Integer), #Int
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char
- (r'"', Punctuation, 'insideStr'), #Opening quote
- (r'`', Punctuation, 'insideUri'), #Opening accent
- (r'\b(true|false|null)\b', Keyword.Constant), #Bool & null
- (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', #DSL
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, String, Punctuation)),
- (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', #Type/slot literal
- bygroups(Name.Namespace, Punctuation, Name.Class,
- Punctuation, Name.Function)),
- (r'\[,\]', Literal), # Empty list
- (s(r'($type)(\[,\])'), # Typed empty list
- bygroups(using(this, state = 'inType'), Literal)),
- (r'\[:\]', Literal), # Empty Map
- (s(r'($type)(\[:\])'),
- bygroups(using(this, state = 'inType'), Literal)),
- ],
- 'insideStr': [
- (r'\\\\', String.Escape), #Escaped backslash
- (r'\\"', String.Escape), #Escaped "
- (r'\\`', String.Escape), #Escaped `
- (r'\$\w+', String.Interpol), #Subst var
- (r'\${.*?}', String.Interpol), #Subst expr
- (r'"', Punctuation, '#pop'), #Closing quot
- (r'.', String) #String content
- ],
- 'insideUri': [ #TODO: remove copy/paste str/uri
- (r'\\\\', String.Escape), #Escaped backslash
- (r'\\"', String.Escape), #Escaped "
- (r'\\`', String.Escape), #Escaped `
- (r'\$\w+', String.Interpol), #Subst var
- (r'\${.*?}', String.Interpol), #Subst expr
- (r'`', Punctuation, '#pop'), #Closing tick
- (r'.', String.Backtick) #URI content
- ],
- 'protectionKeywords': [
- (r'\b(public|protected|private|internal)\b', Keyword),
- ],
- 'typeKeywords': [
- (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
- ],
- 'methodKeywords': [
- (r'\b(abstract|native|once|override|static|virtual|final)\b',
- Keyword),
- ],
- 'fieldKeywords': [
- (r'\b(abstract|const|final|native|override|static|virtual|'
- r'readonly)\b', Keyword)
- ],
- 'otherKeywords': [
- (r'\b(try|catch|throw|finally|for|if|else|while|as|is|isnot|'
- r'switch|case|default|continue|break|do|return|get|set)\b',
- Keyword),
- (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
- ],
- 'operators': [
- (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
- ],
- 'inType': [
- (r'[\[\]\|\->:\?]', Punctuation),
- (s(r'$id'), Name.Class),
- default('#pop'),
-
- ],
- 'root': [
- include('comments'),
- include('protectionKeywords'),
- include('typeKeywords'),
- include('methodKeywords'),
- include('fieldKeywords'),
- include('literals'),
- include('otherKeywords'),
- include('operators'),
- (r'using\b', Keyword.Namespace, 'using'), # Using stmt
- (r'@\w+', Name.Decorator, 'facet'), # Symbol
- (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
- 'inheritance'), # Inheritance list
-
-
- ### Type var := val
- (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
- bygroups(using(this, state = 'inType'), Text,
- Name.Variable, Text, Operator)),
-
- ### var := val
- (s(r'($id)(\s*)(:=)'),
- bygroups(Name.Variable, Text, Operator)),
-
- ### .someId( or ->someId( ###
- (s(r'(\.|(?:\->))($id)(\s*)(\()'),
- bygroups(Operator, Name.Function, Text, Punctuation),
- 'insideParen'),
-
- ### .someId or ->someId
- (s(r'(\.|(?:\->))($id)'),
- bygroups(Operator, Name.Function)),
-
- ### new makeXXX ( ####
- (r'(new)(\s+)(make\w*)(\s*)(\()',
- bygroups(Keyword, Text, Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- ### Type name ( ####
- (s(r'($type)([ \t]+)' #Return type and whitespace
- r'($id)(\s*)(\()'), #method name + open brace
- bygroups(using(this, state = 'inType'), Text,
- Name.Function, Text, Punctuation),
- 'insideMethodDeclArgs'),
-
- ### ArgType argName, #####
- (s(r'($type)(\s+)($id)(\s*)(,)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- #### ArgType argName) ####
- ## Covered in 'insideParen' state
-
- ### ArgType argName -> ArgType| ###
- (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation, Text, using(this, state = 'inType'),
- Punctuation)),
-
- ### ArgType argName| ###
- (s(r'($type)(\s+)($id)(\s*)(\|)'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation)),
-
- ### Type var
- (s(r'($type)([ \t]+)($id)'),
- bygroups(using(this, state='inType'), Text,
- Name.Variable)),
-
- (r'\(', Punctuation, 'insideParen'),
- (r'\{', Punctuation, 'insideBrace'),
- (r'.', Text)
- ],
- 'insideParen': [
- (r'\)', Punctuation, '#pop'),
- include('root'),
- ],
- 'insideMethodDeclArgs': [
- (r'\)', Punctuation, '#pop'),
- (s(r'($type)(\s+)($id)(\s*)(\))'),
- bygroups(using(this, state='inType'), Text, Name.Variable,
- Text, Punctuation), '#pop'),
- include('root'),
- ],
- 'insideBrace': [
- (r'\}', Punctuation, '#pop'),
- include('root'),
- ],
- 'inheritance': [
- (r'\s+', Text), #Whitespace
- (r':|,', Punctuation),
- (r'(?:(\w+)(::))?(\w+)',
- bygroups(Name.Namespace, Punctuation, Name.Class)),
- (r'{', Punctuation, '#pop')
- ],
- 'using': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(\[)(\w+)(\])',
- bygroups(Punctuation, Comment.Special, Punctuation)), #ffi
- (r'(\")?([\w\.]+)(\")?',
- bygroups(Punctuation, Name.Namespace, Punctuation)), #podname
- (r'::', Punctuation, 'usingClass'),
- default('#pop')
- ],
- 'usingClass': [
- (r'[ \t]+', Text), # consume whitespaces
- (r'(as)(\s+)(\w+)',
- bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
- (r'[\w\$]+', Name.Class),
- default('#pop:2') # jump out to root state
- ],
- 'facet': [
- (r'\s+', Text),
- (r'{', Punctuation, 'facetFields'),
- default('#pop')
- ],
- 'facetFields': [
- include('comments'),
- include('literals'),
- include('operators'),
- (r'\s+', Text),
- (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
- (r'}', Punctuation, '#pop'),
- (r'.', Text)
- ],
- }
-
-
-class RustLexer(RegexLexer):
- """
- Lexer for the Rust programming language (version 0.9).
-
- .. versionadded:: 1.6
- """
- name = 'Rust'
- filenames = ['*.rs']
- aliases = ['rust']
- mimetypes = ['text/x-rustsrc']
-
- tokens = {
- 'root': [
- # Whitespace and Comments
- (r'\n', Text),
- (r'\s+', Text),
- (r'//[/!](.*?)\n', Comment.Doc),
- (r'//(.*?)\n', Comment.Single),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-
- # Keywords
- (r'(as|box|break|continue'
- r'|do|else|enum|extern'
- r'|fn|for|if|impl|in'
- r'|loop|match|mut|priv|proc|pub'
- r'|ref|return|static|\'static|struct|trait|true|type'
- r'|unsafe|while)\b',
- Keyword),
- (r'(alignof|be|const|offsetof|pure|sizeof|typeof|once|unsized'
- r'|yield)\b', Keyword.Reserved),
- (r'(mod|use)\b', Keyword.Namespace),
- (r'(true|false)\b', Keyword.Constant),
- (r'let\b', Keyword.Declaration),
- (r'(u8|u16|u32|u64|i8|i16|i32|i64|uint|int|f32|f64'
- r'|str|bool)\b', Keyword.Type),
- (r'self\b', Name.Builtin.Pseudo),
- # Prelude
- (r'(Freeze|Pod|Send|Sized|Add|Sub|Mul|Div|Rem|Neg|Not|BitAnd'
- r'|BitOr|BitXor|Drop|Shl|Shr|Index|Option|Some|None|Result'
- r'|Ok|Err|from_str|range|print|println|Any|AnyOwnExt|AnyRefExt'
- r'|AnyMutRefExt|Ascii|AsciiCast|OwnedAsciiCast|AsciiStr'
- r'|IntoBytes|Bool|ToCStr|Char|Clone|DeepClone|Eq|ApproxEq'
- r'|Ord|TotalEq|Ordering|Less|Equal|Greater|Equiv|Container'
- r'|Mutable|Map|MutableMap|Set|MutableSet|Default|FromStr'
- r'|Hash|FromIterator|Extendable|Iterator|DoubleEndedIterator'
- r'|RandomAccessIterator|CloneableIterator|OrdIterator'
- r'|MutableDoubleEndedIterator|ExactSize|Times|Algebraic'
- r'|Trigonometric|Exponential|Hyperbolic|Bitwise|BitCount'
- r'|Bounded|Integer|Fractional|Real|RealExt|Num|NumCast'
- r'|CheckedAdd|CheckedSub|CheckedMul|Orderable|Signed'
- r'|Unsigned|Round|Primitive|Int|Float|ToStrRadix'
- r'|ToPrimitive|FromPrimitive|GenericPath|Path|PosixPath'
- r'|WindowsPath|RawPtr|Buffer|Writer|Reader|Seek'
- r'|SendStr|SendStrOwned|SendStrStatic|IntoSendStr|Str'
- r'|StrVector|StrSlice|OwnedStr|IterBytes|ToStr|IntoStr'
- r'|CopyableTuple|ImmutableTuple|ImmutableTuple\d+'
- r'|Tuple\d+|ImmutableEqVector|ImmutableTotalOrdVector'
- r'|ImmutableCopyableVector|OwnedVector|OwnedCopyableVector'
- r'|OwnedEqVector|MutableVector|MutableTotalOrdVector'
- r'|Vector|VectorVector|CopyableVector|ImmutableVector'
- r'|Port|Chan|SharedChan|spawn|drop)\b', Name.Builtin),
- # Borrowed pointer
- (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)),
- # Labels
- (r'\'[A-Za-z_]\w*:', Name.Label),
- # Character Literal
- (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
- String.Char),
- # Lifetime
- (r"""'[a-zA-Z_]\w*""", Name.Label),
- # Binary Literal
- (r'0b[01_]+', Number.Bin, 'number_lit'),
- # Octal Literal
- (r'0o[0-7_]+', Number.Oct, 'number_lit'),
- # Hexadecimal Literal
- (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
- # Decimal Literal
- (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
- r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
- (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
- # String Literal
- (r'"', String, 'string'),
- (r'r(#*)".*?"\1', String.Raw),
-
- # Operators and Punctuation
- (r'[{}()\[\],.;]', Punctuation),
- (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
- # Identifier
- (r'[a-zA-Z_]\w*', Name),
-
- # Attributes
- (r'#\[', Comment.Preproc, 'attribute['),
- # Macros
- (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\s*\{',
- bygroups(Comment.Preproc, Name), 'macro{'),
- (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\(',
- bygroups(Comment.Preproc, Name), 'macro('),
- ],
- 'number_lit': [
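- # consume an optional numeric type suffix (u8/i32/f64 etc., or bare u/i/f)
- # after a literal, then pop back immediately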
- (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
- r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
- (r'[^\\"]+', String),
- (r'\\', String),
- ],
- 'macro{': [
- (r'\{', Operator, '#push'),
- (r'\}', Operator, '#pop'),
- ],
- 'macro(': [
- (r'\(', Operator, '#push'),
- (r'\)', Operator, '#pop'),
- ],
- 'attribute_common': [
- (r'"', String, 'string'),
- (r'\[', Comment.Preproc, 'attribute['),
- (r'\(', Comment.Preproc, 'attribute('),
- ],
- 'attribute[': [
- include('attribute_common'),
- (r'\];?', Comment.Preproc, '#pop'),
- (r'[^"\]]+', Comment.Preproc),
- ],
- 'attribute(': [
- include('attribute_common'),
- (r'\);?', Comment.Preproc, '#pop'),
- (r'[^"\)]+', Comment.Preproc),
- ],
- }
-
-
-class CudaLexer(CLexer):
- """
- For NVIDIA `CUDA™ <http://developer.nvidia.com/category/zone/cuda-zone>`_
- source.
-
- .. versionadded:: 1.6
- """
- name = 'CUDA'
- filenames = ['*.cu', '*.cuh']
- aliases = ['cuda', 'cu']
- mimetypes = ['text/x-cuda']
-
- function_qualifiers = ['__device__', '__global__', '__host__',
- '__noinline__', '__forceinline__']
- variable_qualifiers = ['__device__', '__constant__', '__shared__',
- '__restrict__']
- vector_types = ['char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3',
- 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2',
- 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1',
- 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1',
- 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4',
- 'ulong4', 'longlong1', 'ulonglong1', 'longlong2',
- 'ulonglong2', 'float1', 'float2', 'float3', 'float4',
- 'double1', 'double2', 'dim3']
- variables = ['gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize']
- functions = ['__threadfence_block', '__threadfence', '__threadfence_system',
- '__syncthreads', '__syncthreads_count', '__syncthreads_and',
- '__syncthreads_or']
- execution_confs = ['<<<', '>>>']
-
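- # Re-tag identifiers produced by CLexer so that CUDA qualifiers, vector
- # types, built-in variables and functions get more specific token types.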
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- CLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.variable_qualifiers:
- token = Keyword.Type
- elif value in self.vector_types:
- token = Keyword.Type
- elif value in self.variables:
- token = Name.Builtin
- elif value in self.execution_confs:
- token = Keyword.Pseudo
- elif value in self.function_qualifiers:
- token = Keyword.Reserved
- elif value in self.functions:
- token = Name.Function
- yield index, token, value
-
-
-class MonkeyLexer(RegexLexer):
- """
- For
- `Monkey <https://en.wikipedia.org/wiki/Monkey_(programming_language)>`_
- source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'Monkey'
- aliases = ['monkey']
- filenames = ['*.monkey']
- mimetypes = ['text/x-monkey']
-
- name_variable = r'[a-z_]\w*'
- name_function = r'[A-Z]\w*'
- name_constant = r'[A-Z_][A-Z0-9_]*'
- name_class = r'[A-Z]\w*'
- name_module = r'[a-z0-9_]*'
-
- keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)'
- # ? == Bool // % == Int // # == Float // $ == String
- keyword_type_special = r'[?%#$]'
-
- flags = re.MULTILINE
-
- tokens = {
- 'root': [
- #Text
- (r'\s+', Text),
- # Comments
- (r"'.*", Comment),
- (r'(?i)^#rem\b', Comment.Multiline, 'comment'),
- # preprocessor directives
- (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc),
- # preprocessor variable (any line starting with '#' that is not a directive)
- (r'^#', Comment.Preproc, 'variables'),
- # String
- ('"', String.Double, 'string'),
- # Numbers
- (r'[0-9]+\.[0-9]*(?!\.)', Number.Float),
- (r'\.[0-9]+(?!\.)', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\$[0-9a-fA-F]+', Number.Hex),
- (r'\%[10]+', Number.Bin),
- # Native data types
- (r'\b%s\b' % keyword_type, Keyword.Type),
- # Exception handling
- (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved),
- (r'Throwable', Name.Exception),
- # Builtins
- (r'(?i)\b(?:Null|True|False)\b', Name.Builtin),
- (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo),
- (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant),
- # Keywords
- (r'(?i)^(Import)(\s+)(.*)(\n)',
- bygroups(Keyword.Namespace, Text, Name.Namespace, Text)),
- (r'(?i)^Strict\b.*\n', Keyword.Reserved),
- (r'(?i)(Const|Local|Global|Field)(\s+)',
- bygroups(Keyword.Declaration, Text), 'variables'),
- (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)',
- bygroups(Keyword.Reserved, Text), 'classname'),
- (r'(?i)(Function|Method)(\s+)',
- bygroups(Keyword.Reserved, Text), 'funcname'),
- (r'(?i)(?:End|Return|Public|Private|Extern|Property|'
- r'Final|Abstract)\b', Keyword.Reserved),
- # Flow Control stuff
- (r'(?i)(?:If|Then|Else|ElseIf|EndIf|'
- r'Select|Case|Default|'
- r'While|Wend|'
- r'Repeat|Until|Forever|'
- r'For|To|Until|Step|EachIn|Next|'
- r'Exit|Continue)\s+', Keyword.Reserved),
- # not used yet
- (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved),
- # Array
- (r'[\[\]]', Punctuation),
- # Other
- (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator),
- (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word),
- (r'[\(\){}!#,.:]', Punctuation),
- # catch the rest
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_function, Name.Function),
- (r'%s\b' % name_variable, Name.Variable),
- ],
- 'funcname': [
- (r'(?i)%s\b' % name_function, Name.Function),
- (r':', Punctuation, 'classname'),
- (r'\s+', Text),
- (r'\(', Punctuation, 'variables'),
- (r'\)', Punctuation, '#pop')
- ],
- 'classname': [
- (r'%s\.' % name_module, Name.Namespace),
- (r'%s\b' % keyword_type, Keyword.Type),
- (r'%s\b' % name_class, Name.Class),
- # array (of given size)
- (r'(\[)(\s*)(\d*)(\s*)(\])',
- bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)),
- # generics
- (r'\s+(?!<)', Text, '#pop'),
- (r'<', Punctuation, '#push'),
- (r'>', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- default('#pop')
- ],
- 'variables': [
- (r'%s\b' % name_constant, Name.Constant),
- (r'%s\b' % name_variable, Name.Variable),
- (r'%s' % keyword_type_special, Keyword.Type),
- (r'\s+', Text),
- (r':', Punctuation, 'classname'),
- (r',', Punctuation, '#push'),
- default('#pop')
- ],
- 'string': [
- (r'[^"~]+', String.Double),
- (r'~q|~n|~r|~t|~z|~~', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'comment' : [
- (r'(?i)^#rem.*?', Comment.Multiline, "#push"),
- (r'(?i)^#end.*?', Comment.Multiline, "#pop"),
- (r'\n', Comment.Multiline),
- (r'.+', Comment.Multiline),
- ],
- }
-
-
-class CobolLexer(RegexLexer):
- """
- Lexer for OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOL'
- aliases = ['cobol']
- filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY']
- mimetypes = ['text/x-cobol']
- flags = re.IGNORECASE | re.MULTILINE
-
- # Data Types: by PICTURE and USAGE
- # Operators: **, *, +, -, /, <, >, <=, >=, =, <>
- # Logical (?): NOT, AND, OR
-
- # Reserved words:
- # http://opencobol.add1tocobol.com/#reserved-words
- # Intrinsics:
- # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions
-
- tokens = {
- 'root': [
- include('comment'),
- include('strings'),
- include('core'),
- include('nums'),
- (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable),
- # (r'[\s]+', Text),
- (r'[ \t]+', Text),
- ],
- 'comment': [
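- # Fixed-format source: the first six columns form the sequence area; a
- # '*' or '/' in column 7 marks a comment line, and '*>' starts an inline comment.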
- (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment),
- ],
- 'core': [
- # Figurative constants
- (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?'
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)'
- r'\s*($|(?=[^0-9a-z_\-]))',
- Name.Constant),
-
- # Reserved words STATEMENTS and other bolds
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|'
- r'CONFIGURATION|CONTINUE|'
- r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|'
- r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|'
- r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|'
- r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|'
- r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|'
- r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|'
- r'FREE|GENERATE|GO|GOBACK|'
- r'IDENTIFICATION|IF|INITIALIZE|'
- r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|'
- r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|'
- r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|'
- r'RETURN|REWRITE|SCREEN|'
- r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|'
- r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|'
- r'WORKING-STORAGE|WRITE)'
- r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved),
-
- # Reserved words
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|'
- r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|'
- r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE|'
- r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|'
- r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|'
- r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|'
- r'BLANK|'
- r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|'
- r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|'
- r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|'
- r'CONSTANT|CONTAINS|CONTENT|CONTROL|'
- r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|'
- r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|'
- r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|'
- r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|'
- r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|'
- r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|'
- r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|'
- r'EXCLUSIVE|EXTEND|EXTERNAL|'
- r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|'
- r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|'
- r'FUNCTION-ID|GIVING|GLOBAL|GROUP|'
- r'HEADING|HIGHLIGHT|I-O|ID|'
- r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|'
- r'INITIAL|INITIALIZED|INPUT|'
- r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|'
- r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|'
- r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|'
- r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|'
- r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|'
- r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|'
- r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|'
- r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|'
- r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|'
- r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|'
- r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|'
- r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|'
- r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|'
- r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|'
- r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|'
- r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|'
- r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|'
- r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|'
- r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|'
- r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|'
- r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|'
- r'STANDARD-1|STANDARD-2|STATUS|SUM|'
- r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|'
- r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|'
- r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|'
- r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|'
- r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|'
- r'WITH|WORDS|YYYYDDD|YYYYMMDD)'
- r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo),
-
- # inactive reserved words
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|'
- r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|'
- r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|'
- r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|'
- r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|'
- r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|'
- r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|'
- r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|'
- r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|'
- r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|'
- r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|'
- r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|'
- r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|'
- r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|'
- r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|'
- r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|'
- r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|'
- r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|'
- r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error),
-
- # Data Types
- (r'(^|(?<=[^0-9a-z_\-]))'
- r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|'
- r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|'
- r'BINARY-C-LONG|'
- r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|'
- r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type),
-
- # Operators
- (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator),
-
- # (r'(::)', Keyword.Declaration),
-
- (r'([(),;:&%.])', Punctuation),
-
- # Intrinsics
- (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|'
- r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|'
- r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|'
- r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|'
- r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|'
- r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG10|LOG|'
- r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|'
- r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|'
- r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|'
- r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|'
- r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|'
- r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*'
- r'($|(?=[^0-9a-z_\-]))', Name.Function),
-
- # Booleans
- (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin),
- # Comparing Operators
- (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|'
- r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word),
- ],
-
- # \"[^\"\n]*\"|\'[^\'\n]*\'
- 'strings': [
- # apparently strings can be delimited by EOL if they are continued
- # in the next line
- (r'"[^"\n]*("|\n)', String.Double),
- (r"'[^'\n]*('|\n)", String.Single),
- ],
-
- 'nums': [
- (r'\d+(\s*|\.$|$)', Number.Integer),
- (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
- (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
- ],
- }
-
-
-class CobolFreeformatLexer(CobolLexer):
- """
- Lexer for Free format OpenCOBOL code.
-
- .. versionadded:: 1.6
- """
- name = 'COBOLFree'
- aliases = ['cobolfree']
- filenames = ['*.cbl', '*.CBL']
- mimetypes = []
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'comment': [
- (r'(\*>.*\n|^\w*\*.*$)', Comment),
- ],
- }
-
-
-class LogosLexer(ObjectiveCppLexer):
- """
- For Logos + Objective-C source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
-
- name = 'Logos'
- aliases = ['logos']
- filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
- mimetypes = ['text/x-logos']
- priority = 0.25
-
- tokens = {
- 'statements': [
- (r'(%orig|%log)\b', Keyword),
- (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
- bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
- (r'(%init)\b(\()',
- bygroups(Keyword, Punctuation), 'logos_init_directive'),
- (r'(%init)(?=\s*;)', bygroups(Keyword)),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class), '#pop'),
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- ('#pop', 'logos_classname')),
- inherit,
- ],
- 'logos_init_directive' : [
- ('\s+', Text),
- (',', Punctuation, ('logos_init_directive', '#pop')),
- ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
- bygroups(Name.Class, Text, Punctuation, Text, Text)),
- ('([a-zA-Z$_][\w$]*)', Name.Class),
- ('\)', Punctuation, '#pop'),
- ],
- 'logos_classname' : [
- ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
- bygroups(Name.Class, Text, Name.Class), '#pop'),
- ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
- ],
- 'root': [
- (r'(%subclass)(\s+)', bygroups(Keyword, Text),
- 'logos_classname'),
- (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
- bygroups(Keyword, Text, Name.Class)),
- (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
- bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
- (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation),
- 'function'),
- (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
- bygroups(Keyword, Text, Keyword, String, Keyword)),
- (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
- inherit,
- ],
- }
-
- _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
- def analyse_text(text):
- if LogosLexer._logos_keywords.search(text):
- return 1.0
- return 0
-
-
-class ChapelLexer(RegexLexer):
- """
- For `Chapel <http://chapel.cray.com/>`_ source.
-
- .. versionadded:: 2.0
- """
- name = 'Chapel'
- filenames = ['*.chpl']
- aliases = ['chapel', 'chpl']
- # mimetypes = ['text/x-chapel']
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
-
- (r'//(.*?)\n', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-
- (r'(config|const|in|inout|out|param|ref|type|var)\b',
- Keyword.Declaration),
- (r'(false|nil|true)\b', Keyword.Constant),
- (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b',
- Keyword.Type),
- (r'(atomic|begin|break|by|cobegin|coforall|continue|iter|'
- r'delete|dmapped|do|domain|else|enum|export|extern|for|forall|'
- r'if|index|inline|label|lambda|let|local|new|on|otherwise|'
- r'reduce|return|scan|select|serial|single|sparse|'
- r'subdomain|sync|then|use|when|where|while|yield|zip)\b',
- Keyword),
- (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'),
- (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text),
- 'classname'),
-
- # imaginary integers
- (r'\d+i', Number),
- (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
- (r'\.\d+([Ee][-+]\d+)?i', Number),
- (r'\d+[Ee][-+]\d+i', Number),
-
- # reals cannot end with a period due to lexical ambiguity with
- # .. operator. See reference for rationale.
- (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+i?', Number.Float),
-
- # integer literals
- # -- binary
- (r'0[bB][0-1]+', Number.Bin),
- # -- hex
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # -- decimal
- (r'(0|[1-9][0-9]*)', Number.Integer),
-
- # strings
- (r'["\'](\\\\|\\"|[^"\'])*["\']', String),
-
- # tokens
- (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|'
- r'<=>|\.\.|by|#|\.\.\.|'
- r'&&|\|\||!|&|\||\^|~|<<|>>|'
- r'==|!=|<=|>=|<|>|'
- r'[+\-*/%]|\*\*)', Operator),
- (r'[:;,.?()\[\]{}]', Punctuation),
-
- # identifiers
- (r'[a-zA-Z_][\w$]*', Name.Other),
- ],
- 'classname': [
- (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'),
- ],
- 'procname': [
- (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'),
- ],
- }
-
-
-class EiffelLexer(RegexLexer):
- """
- For `Eiffel <http://www.eiffel.com>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Eiffel'
- aliases = ['eiffel']
- filenames = ['*.e']
- mimetypes = ['text/x-eiffel']
-
- tokens = {
- 'root': [
- (r'[^\S\n]+', Text),
- (r'--.*?\n', Comment.Single),
- # Please note that keywords and operators are case-insensitive.
- (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
- (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word),
- (r'(?i)\b(across|agent|alias|all|as|assign|attached|attribute|check|'
- r'class|convert|create|debug|deferred|detachable|do|else|elseif|'
- r'end|ensure|expanded|export|external|feature|from|frozen|if|'
- r'inherit|inspect|invariant|like|local|loop|none|note|obsolete|'
- r'old|once|only|redefine|rename|require|rescue|retry|select|'
- r'separate|then|undefine|until|variant|when)\b', Keyword.Reserved),
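- # verbatim strings ("[ ... ]" blocks, with % escapes) first, then ordinary
- # double-quoted strings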
- (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String),
- (r'"([^"%\n]|%.)*?"', String),
- include('numbers'),
- (r"'([^'%]|%'|%%)'", String.Char),
- (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\\?!#%&@|+/\-=\>\*$<|^\[\]])", Operator),
- (r"([{}():;,.])", Punctuation),
- (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
- (r'([A-Z][A-Z0-9_]*)', Name.Class),
- (r'\n+', Text),
- ],
- 'numbers': [
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'0[bB][0-1]+', Number.Bin),
- (r'0[cC][0-7]+', Number.Oct),
- (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
- (r'[0-9]+', Number.Integer),
- ],
- }
-
-
-class Inform6Lexer(RegexLexer):
- """
- For `Inform 6 <http://inform-fiction.org/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6'
- aliases = ['inform6', 'i6']
- filenames = ['*.inf']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _name = r'[a-zA-Z_][a-zA-Z_0-9]*'
-
- # Inform 7 maps these four character classes to their ASCII
- # equivalents. To support Inform 6 inclusions within Inform 7,
- # Inform6Lexer maps them too.
- _dash = u'\\-\u2010-\u2014'
- _dquote = u'"\u201c\u201d'
- _squote = u"'\u2018\u2019"
- _newline = u'\\n\u0085\u2028\u2029'
-
- tokens = {
- 'root': [
- (r'(\A(!%%[^%s]*[%s])+)?' % (_newline, _newline), Comment.Preproc,
- 'directive')
- ],
- '_whitespace': [
- (r'\s+', Text),
- (r'![^%s]*' % _newline, Comment.Single)
- ],
- 'default': [
- include('_whitespace'),
- (r'\[', Punctuation, 'many-values'), # Array initialization
- (r':|(?=;)', Punctuation, '#pop'),
- (r'<', Punctuation), # Second angle bracket in an action statement
- default(('expression', '_expression'))
- ],
-
- # Expressions
- '_expression': [
- include('_whitespace'),
- (r'(?=sp\b)', Text, '#pop'),
- (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
- ('#pop', 'value')),
- (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
- (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
- ],
- 'expression': [
- include('_whitespace'),
- (r'\(', Punctuation, ('expression', '_expression')),
- (r'\)', Punctuation, '#pop'),
- (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
- (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
- (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
- (r',', Punctuation, '_expression'),
- (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
- Operator, '_expression'),
- (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
- '_expression'),
- (r'sp\b', Name),
- (r'\?~?', Name.Label, 'label?'),
- (r'[@{]', Error),
- default('#pop')
- ],
- '_assembly-expression': [
- (r'\(', Punctuation, ('#push', '_expression')),
- (r'[\[\]]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, '_expression'),
- (r'sp\b', Keyword.Pseudo),
- (r';', Punctuation, '#pop:3'),
- include('expression')
- ],
- '_for-expression': [
- (r'\)', Punctuation, '#pop:2'),
- (r':', Punctuation, '#pop'),
- include('expression')
- ],
- '_keyword-expression': [
- (r'(from|near|to)\b', Keyword, '_expression'),
- include('expression')
- ],
- '_list-expression': [
- (r',', Punctuation, '#pop'),
- include('expression')
- ],
- '_object-expression': [
- (r'has\b', Keyword.Declaration, '#pop'),
- include('_list-expression')
- ],
-
- # Values
- 'value': [
- include('_whitespace'),
- # Strings
- (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
- (r'([%s])(@{[0-9a-fA-F]{1,4}})([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'([%s])(@..)([%s])' % (_squote, _squote),
- bygroups(String.Char, String.Escape, String.Char), '#pop'),
- (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
- # Numbers
- (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
- Number.Float, '#pop'),
- (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'\$\$[01]+', Number.Bin, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
- # Values prefixed by hashes
- (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
- (r'(#g\$)(%s)' % _name,
- bygroups(Operator, Name.Variable.Global), '#pop'),
- (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
- (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
- (r'#', Name.Builtin, ('#pop', 'system-constant')),
- # System functions
- (r'(child|children|elder|eldest|glk|indirect|metaclass|parent|'
- r'random|sibling|younger|youngest)\b', Name.Builtin, '#pop'),
- # Metaclasses
- (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
- # Veneer routines
- (r'(?i)(Box__Routine|CA__Pr|CDefArt|CInDefArt|Cl__Ms|'
- r'Copy__Primitive|CP__Tab|DA__Pr|DB__Pr|DefArt|Dynam__String|'
- r'EnglishNumber|Glk__Wrap|IA__Pr|IB__Pr|InDefArt|Main__|'
- r'Meta__class|OB__Move|OB__Remove|OC__Cl|OP__Pr|Print__Addr|'
- r'Print__PName|PrintShortName|RA__Pr|RA__Sc|RL__Pr|R_Process|'
- r'RT__ChG|RT__ChGt|RT__ChLDB|RT__ChLDW|RT__ChPR|RT__ChPrintA|'
- r'RT__ChPrintC|RT__ChPrintO|RT__ChPrintS|RT__ChPS|RT__ChR|'
- r'RT__ChSTB|RT__ChSTW|RT__ChT|RT__Err|RT__TrPS|RV__Pr|'
- r'Symb__Tab|Unsigned__Compare|WV__Pr|Z__Region)\b', Name.Builtin,
- '#pop'),
- # Other built-in symbols
- (r'(?i)(call|copy|create|DEBUG|destroy|DICT_CHAR_SIZE|'
- r'DICT_ENTRY_BYTES|DICT_IS_UNICODE|DICT_WORD_SIZE|false|'
- r'FLOAT_INFINITY|FLOAT_NAN|FLOAT_NINFINITY|GOBJFIELD_CHAIN|'
- r'GOBJFIELD_CHILD|GOBJFIELD_NAME|GOBJFIELD_PARENT|'
- r'GOBJFIELD_PROPTAB|GOBJFIELD_SIBLING|GOBJ_EXT_START|'
- r'GOBJ_TOTAL_LENGTH|Grammar__Version|INDIV_PROP_START|INFIX|'
- r'infix__watching|MODULE_MODE|name|nothing|NUM_ATTR_BYTES|print|'
- r'print_to_array|recreate|remaining|self|sender|STRICT_MODE|'
- r'sw__var|sys__glob0|sys__glob1|sys__glob2|sys_statusline_flag|'
- r'TARGET_GLULX|TARGET_ZCODE|temp__global2|temp__global3|'
- r'temp__global4|temp_global|true|USE_MODULES|WORDSIZE)\b',
- Name.Builtin, '#pop'),
- # Other values
- (_name, Name, '#pop')
- ],
- # Strings
- 'dictionary-word': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _squote, String.Single),
- (r'[({]', String.Single),
- (r'@{[0-9a-fA-F]{,4}}', String.Escape),
- (r'@..', String.Escape),
- (r'[%s]' % _squote, String.Single, '#pop')
- ],
- 'string': [
- (r'[~^]+', String.Escape),
- (r'[^~^\\@({%s]+' % _dquote, String.Double),
- (r'[({]', String.Double),
- (r'\\', String.Escape),
- (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
- (_newline, _newline), String.Escape),
- (r'@(\\\s*[%s]\s*)*{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}'
- r'(\\\s*[%s]\s*)*}' % (_newline, _newline, _newline),
- String.Escape),
- (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
- String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- 'plain-string': [
- (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
- (r'[~^({\[\]]', String.Double),
- (r'\\', String.Escape),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- # Names
- '_constant': [
- include('_whitespace'),
- (_name, Name.Constant, '#pop'),
- include('value')
- ],
- '_global': [
- include('_whitespace'),
- (_name, Name.Variable.Global, '#pop'),
- include('value')
- ],
- 'label?': [
- include('_whitespace'),
- (r'(%s)?' % _name, Name.Label, '#pop')
- ],
- 'variable?': [
- include('_whitespace'),
- (r'(%s)?' % _name, Name.Variable, '#pop')
- ],
- # Values after hashes
- 'obsolete-dictionary-word': [
- (r'\S[a-zA-Z_0-9]*', String.Other, '#pop')
- ],
- 'system-constant': [
- include('_whitespace'),
- (_name, Name.Builtin, '#pop')
- ],
-
- # Directives
- 'directive': [
- include('_whitespace'),
- (r'#', Punctuation),
- (r';', Punctuation, '#pop'),
- (r'\[', Punctuation,
- ('default', 'statements', 'locals', 'routine-name?')),
- (r'(?i)(abbreviate|endif|dictionary|ifdef|iffalse|ifndef|ifnot|'
- r'iftrue|ifv3|ifv5|release|serial|switches|system_file|version)'
- r'\b', Keyword, 'default'),
- (r'(?i)(array|global)\b', Keyword,
- ('default', 'directive-keyword?', '_global')),
- (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
- (r'(?i)class\b', Keyword,
- ('object-body', 'duplicates', 'class-name')),
- (r'(?i)(constant|default)\b', Keyword,
- ('default', 'expression', '_constant')),
- (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
- (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
- (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
- (r'(?i)import\b', Keyword, 'manifest'),
- (r'(?i)(include|link)\b', Keyword,
- ('default', 'before-plain-string')),
- (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
- (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
- (r'(?i)(nearby|object)\b', Keyword,
- ('object-body', '_object-head')),
- (r'(?i)property\b', Keyword,
- ('default', 'alias?', '_constant', 'property-keyword*')),
- (r'(?i)replace\b', Keyword,
- ('default', 'routine-name?', 'routine-name?')),
- (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
- (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
- (r'(?i)trace\b', Keyword,
- ('default', 'trace-keyword?', 'trace-keyword?')),
- (r'(?i)zcharacter\b', Keyword,
- ('default', 'directive-keyword?', 'directive-keyword?')),
- (_name, Name.Class, ('object-body', '_object-head'))
- ],
- # [, Replace, Stub
- 'routine-name?': [
- include('_whitespace'),
- (r'(%s)?' % _name, Name.Function, '#pop')
- ],
- 'locals': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'\*', Punctuation),
- (_name, Name.Variable)
- ],
- # Array
- 'many-values': [
- include('_whitespace'),
- (r';', Punctuation),
- (r'\]', Punctuation, '#pop'),
- (r':', Error),
- default(('expression', '_expression'))
- ],
- # Attribute, Property
- 'alias?': [
- include('_whitespace'),
- (r'alias\b', Keyword, ('#pop', '_constant')),
- default('#pop')
- ],
- # Class, Object, Nearby
- 'class-name': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class, '#pop')
- ],
- 'duplicates': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
- default('#pop')
- ],
- '_object-head': [
- (r'[%s]>' % _dash, Punctuation),
- (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
- include('_global')
- ],
- 'object-body': [
- include('_whitespace'),
- (r';', Punctuation, '#pop:2'),
- (r',', Punctuation),
- (r'class\b', Keyword.Declaration, 'class-segment'),
- (r'(has|private|with)\b', Keyword.Declaration),
- (r':', Error),
- default(('_object-expression', '_expression'))
- ],
- 'class-segment': [
- include('_whitespace'),
- (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
- (_name, Name.Class),
- default('value')
- ],
- # Extend, Verb
- 'grammar': [
- include('_whitespace'),
- (r'=', Punctuation, ('#pop', 'default')),
- (r'\*', Punctuation, ('#pop', 'grammar-line')),
- default('_directive-keyword')
- ],
- 'grammar-line': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r'[/*]', Punctuation),
- (r'[%s]>' % _dash, Punctuation, 'value'),
- (r'(noun|scope)\b', Keyword, '=routine'),
- default('_directive-keyword')
- ],
- '=routine': [
- include('_whitespace'),
- (r'=', Punctuation, 'routine-name?'),
- default('#pop')
- ],
- # Import
- 'manifest': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'(?i)(global\b)?', Keyword, '_global')
- ],
- # Include, Link, Message
- 'diagnostic': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
- default(('#pop', 'before-plain-string', 'directive-keyword?'))
- ],
- 'before-plain-string': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string'))
- ],
- 'message-string': [
- (r'[~^]+', String.Escape),
- include('plain-string')
- ],
-
- # Keywords used in directives
- '_directive-keyword!': [
- include('_whitespace'),
- (r'(additive|alias|buffer|class|creature|data|error|fatalerror|'
- r'first|has|held|initial|initstr|last|long|meta|multi|'
- r'multiexcept|multiheld|multiinside|noun|number|only|private|'
- r'replace|reverse|scope|score|special|string|table|terminating|'
- r'time|topic|warning|with)\b', Keyword, '#pop'),
- (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
- ],
- '_directive-keyword': [
- include('_directive-keyword!'),
- include('value')
- ],
- 'directive-keyword?': [
- include('_directive-keyword!'),
- default('#pop')
- ],
- 'property-keyword*': [
- include('_whitespace'),
- (r'(additive|long)\b', Keyword),
- default('#pop')
- ],
- 'trace-keyword?': [
- include('_whitespace'),
- (r'(assembly|dictionary|expressions|lines|linker|objects|off|on|'
- r'symbols|tokens|verbs)\b', Keyword, '#pop'),
- default('#pop')
- ],
-
- # Statements
- 'statements': [
- include('_whitespace'),
- (r'\]', Punctuation, '#pop'),
- (r'[;{}]', Punctuation),
- (r'(box|break|continue|default|give|inversion|new_line|quit|read|'
- r'remove|return|rfalse|rtrue|spaces|string|until)\b', Keyword,
- 'default'),
- (r'(do|else)\b', Keyword),
- (r'(font|style)\b', Keyword,
- ('default', 'miscellaneous-keyword?')),
- (r'for\b', Keyword, ('for', '(?')),
- (r'(if|switch|while)', Keyword,
- ('expression', '_expression', '(?')),
- (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
- (r'objectloop\b', Keyword,
- ('_keyword-expression', 'variable?', '(?')),
- (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
- (r'\.', Name.Label, 'label?'),
- (r'@', Keyword, 'opcode'),
- (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
- (r'<', Punctuation, 'default'),
- (r'(move\b)?', Keyword,
- ('default', '_keyword-expression', '_expression'))
- ],
- 'miscellaneous-keyword?': [
- include('_whitespace'),
- (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
- Keyword, '#pop'),
- (r'(a|A|an|address|char|name|number|object|property|string|the|'
- r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
- '#pop'),
- (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
- '#pop'),
- default('#pop')
- ],
- '(?': [
- include('_whitespace'),
- (r'\(?', Punctuation, '#pop')
- ],
- 'for': [
- include('_whitespace'),
- (r';?', Punctuation, ('_for-expression', '_expression'))
- ],
- 'print-list': [
- include('_whitespace'),
- (r';', Punctuation, '#pop'),
- (r':', Error),
- default(('_list-expression', '_expression', '_list-expression', 'form'))
- ],
- 'form': [
- include('_whitespace'),
- (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
- default('#pop')
- ],
-
- # Assembly
- 'opcode': [
- include('_whitespace'),
- (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
- (_name, Keyword, 'operands')
- ],
- 'operands': [
- (r':', Error),
- default(('_assembly-expression', '_expression'))
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- # 'in' is either a keyword or an operator.
- # If the token two tokens after 'in' is ')', 'in' is a keyword:
- # objectloop(a in b)
- # Otherwise, it is an operator:
- # objectloop(a in b && true)
- objectloop_queue = []
- objectloop_token_count = -1
- previous_token = None
- for index, token, value in RegexLexer.get_tokens_unprocessed(self,
- text):
- if previous_token is Name.Variable and value == 'in':
- objectloop_queue = [[index, token, value]]
- objectloop_token_count = 2
- elif objectloop_token_count > 0:
- if token not in Comment and token not in Text:
- objectloop_token_count -= 1
- objectloop_queue.append((index, token, value))
- else:
- if objectloop_token_count == 0:
- if objectloop_queue[-1][2] == ')':
- objectloop_queue[0][1] = Keyword
- while objectloop_queue:
- yield objectloop_queue.pop(0)
- objectloop_token_count = -1
- yield index, token, value
- if token not in Comment and token not in Text:
- previous_token = token
- while objectloop_queue:
- yield objectloop_queue.pop(0)
-
-
-class Inform7Lexer(RegexLexer):
- """
- For `Inform 7 <http://inform7.com/>`_ source code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 7'
- aliases = ['inform7', 'i7']
- filenames = ['*.ni', '*.i7x']
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- _dash = Inform6Lexer._dash
- _dquote = Inform6Lexer._dquote
- _newline = Inform6Lexer._newline
- _start = r'\A|(?<=[%s])' % _newline
-
- # There are three lexer variants, differing in how they interpret at
- # signs and braces in I6T. In top-level inclusions, at
- # signs in the first column are inweb syntax. In phrase definitions
- # and use options, tokens in braces are treated as I7. Use options
- # also interpret "{N}".
- tokens = {}
- token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
-
- for level in token_variants:
- tokens[level] = {
- '+i6-root': list(Inform6Lexer.tokens['root']),
- '+i6t-root': [ # For Inform6TemplateLexer
- (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
- ('directive', '+p'))
- ],
- 'root': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]' % _dquote, Generic.Heading,
- ('+main', '+titling', '+titling-string')),
- default(('+main', '+heading?'))
- ],
- '+titling-string': [
- (r'[^%s]+' % _dquote, Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '#pop')
- ],
- '+titling': [
- (r'\[', Comment.Multiline, '+comment'),
- (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
- (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
- (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
- Text, ('#pop', '+heading?')),
- (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
- (r'[|%s]' % _newline, Generic.Heading)
- ],
- '+main': [
- (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
- (r'[%s]' % _dquote, String.Double, '+text'),
- (r':', Text, '+phrase-definition'),
- (r'(?i)\bas\b', Text, '+use-option'),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-not-inline'), Punctuation)),
- (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
- (_start, _dquote, _newline), Text, '+heading?'),
- (r'(?i)[a(|%s]' % _newline, Text)
- ],
- '+phrase-definition': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive',
- 'default', 'statements'),
- i6t='+i6t-inline'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+use-option': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
- bygroups(Punctuation,
- using(this, state=('+i6-root', 'directive'),
- i6t='+i6t-use-option'), Punctuation), '#pop'),
- default('#pop')
- ],
- '+comment': [
- (r'[^\[\]]+', Comment.Multiline),
- (r'\[', Comment.Multiline, '#push'),
- (r'\]', Comment.Multiline, '#pop')
- ],
- '+text': [
- (r'[^\[%s]+' % _dquote, String.Double),
- (r'\[.*?\]', String.Interpol),
- (r'[%s]' % _dquote, String.Double, '#pop')
- ],
- '+heading?': [
- (r'(\|?\s)+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
- (r'[%s]{1,3}' % _dash, Text),
- (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
- Generic.Heading, '#pop'),
- default('#pop')
- ],
- '+documentation-heading': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'(?i)documentation\s+', Text, '+documentation-heading2'),
- default('#pop')
- ],
- '+documentation-heading2': [
- (r'\s+', Text),
- (r'\[', Comment.Multiline, '+comment'),
- (r'[%s]{4}\s' % _dash, Text, '+documentation'),
- default('#pop:2')
- ],
- '+documentation': [
- (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
- (_start, _newline), Generic.Heading),
- (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
- Generic.Subheading),
- (r'((%s)\t.*?[%s])+' % (_start, _newline),
- using(this, state='+main')),
- (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
- (r'\[', Comment.Multiline, '+comment'),
- ],
- '+i6t-not-inline': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p')
- ],
- '+i6t-use-option': [
- include('+i6t-not-inline'),
- (r'({)(N)(})', bygroups(Punctuation, Text, Punctuation))
- ],
- '+i6t-inline': [
- (r'({)(\S[^}]*)?(})',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+i6t': [
- (r'({[%s])(![^}]*)(}?)' % _dash,
- bygroups(Punctuation, Comment.Single, Punctuation)),
- (r'({[%s])(lines)(:)([^}]*)(}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation), '+lines'),
- (r'({[%s])([^:}]*)(:?)([^}]*)(}?)' % _dash,
- bygroups(Punctuation, Keyword, Punctuation, Text,
- Punctuation)),
- (r'(\(\+)(.*?)(\+\)|\Z)',
- bygroups(Punctuation, using(this, state='+main'),
- Punctuation))
- ],
- '+p': [
- (r'[^@]+', Comment.Preproc),
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc, '#pop'),
- (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading),
- (r'@', Comment.Preproc)
- ],
- '+lines': [
- (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
- Comment.Preproc),
- (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
- Comment.Preproc),
- (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
- Generic.Heading, '+p'),
- (r'(%s)@[a-zA-Z_0-9]*[ %s]' % (_start, _newline), Keyword),
- (r'![^%s]*' % _newline, Comment.Single),
- (r'({)([%s]endlines)(})' % _dash,
- bygroups(Punctuation, Keyword, Punctuation), '#pop'),
- (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
- ]
- }
- # Inform 7 can include snippets of Inform 6 template language,
- # so all of Inform6Lexer's states are copied here, with
- # modifications to account for template syntax. Inform7Lexer's
- # own states begin with '+' to avoid name conflicts. Some of
- # Inform6Lexer's states begin with '_': these are not modified.
- # They deal with template syntax either by including modified
- # states, or by matching r'' then pushing to modified states.
- for token in Inform6Lexer.tokens:
- if token == 'root':
- continue
- tokens[level][token] = list(Inform6Lexer.tokens[token])
- if not token.startswith('_'):
- tokens[level][token][:0] = [include('+i6t'), include(level)]
-
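- # The 'i6t' option picks one of the three token variants defined above;
- # processed token definitions are cached in _all_tokens per variant.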
- def __init__(self, **options):
- level = options.get('i6t', '+i6t-not-inline')
- if level not in self._all_tokens:
- self._tokens = self.__class__.process_tokendef(level)
- else:
- self._tokens = self._all_tokens[level]
- RegexLexer.__init__(self, **options)
-
-
-class Inform6TemplateLexer(Inform7Lexer):
- """
- For `Inform 6 template
- <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
-
- .. versionadded:: 2.0
- """
-
- name = 'Inform 6 template'
- aliases = ['i6t']
- filenames = ['*.i6t']
-
- def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
- return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
-
-
-class MqlLexer(CppLexer):
- """
- For `MQL4 <http://docs.mql4.com/>`_ and
- `MQL5 <http://www.mql5.com/en/docs>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'MQL'
- aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5']
- filenames = ['*.mq4', '*.mq5', '*.mqh']
- mimetypes = ['text/x-mql']
-
- tokens = {
- 'statements': [
- (r'(input|_Digits|_Point|_LastError|_Period|_RandomSeed|'
- r'_StopFlag|_Symbol|_UninitReason|'
- r'Ask|Bars|Bid|Close|Digits|High|Low|Open|Point|Time|Volume)\b',
- Keyword),
- (r'(void|char|uchar|bool|short|ushort|int|uint|color|long|ulong|datetime|'
- r'float|double|string)\b',
- Keyword.Type),
- (r'(Alert|CheckPointer|Comment|DebugBreak|ExpertRemove|'
- r'GetPointer|GetTickCount|MessageBox|PeriodSeconds|PlaySound|'
- r'Print|PrintFormat|ResetLastError|ResourceCreate|ResourceFree|'
- r'ResourceReadImage|ResourceSave|SendFTP|SendMail|SendNotification|'
- r'Sleep|TerminalClose|TesterStatistics|ZeroMemory|'
- r'ArrayBsearch|ArrayCopy|ArrayCompare|ArrayFree|ArrayGetAsSeries|'
- r'ArrayInitialize|ArrayFill|ArrayIsSeries|ArrayIsDynamic|'
- r'ArrayMaximum|ArrayMinimum|ArrayRange|ArrayResize|'
- r'ArraySetAsSeries|ArraySize|ArraySort|ArrayCopyRates|'
- r'ArrayCopySeries|ArrayDimension|'
- r'CharToString|DoubleToString|EnumToString|NormalizeDouble|'
- r'StringToDouble|StringToInteger|StringToTime|TimeToString|'
- r'IntegerToString|ShortToString|ShortArrayToString|'
- r'StringToShortArray|CharArrayToString|StringToCharArray|'
- r'ColorToARGB|ColorToString|StringToColor|StringFormat|'
- r'CharToStr|DoubleToStr|StrToDouble|StrToInteger|StrToTime|TimeToStr|'
- r'MathAbs|MathArccos|MathArcsin|MathArctan|MathCeil|MathCos|MathExp|'
- r'MathFloor|MathLog|MathMax|MathMin|MathMod|MathPow|MathRand|'
- r'MathRound|MathSin|MathSqrt|MathSrand|MathTan|MathIsValidNumber|'
- r'StringAdd|StringBufferLen|StringCompare|StringConcatenate|StringFill|'
- r'StringFind|StringGetCharacter|StringInit|StringLen|StringReplace|'
- r'StringSetCharacter|StringSplit|StringSubstr|StringToLower|StringToUpper|'
- r'StringTrimLeft|StringTrimRight|StringGetChar|StringSetChar|'
- r'TimeCurrent|TimeTradeServer|TimeLocal|TimeGMT|TimeDaylightSavings|'
- r'TimeGMTOffset|TimeToStruct|StructToTime|Day|DayOfWeek|DayOfYear|'
- r'Hour|Minute|Month|Seconds|TimeDay|TimeDayOfWeek|TimeDayOfYear|TimeHour|'
- r'TimeMinute|TimeMonth|TimeSeconds|TimeYear|Year|'
- r'AccountInfoDouble|AccountInfoInteger|AccountInfoString|AccountBalance|'
- r'AccountCredit|AccountCompany|AccountCurrency|AccountEquity|'
- r'AccountFreeMargin|AccountFreeMarginCheck|AccountFreeMarginMode|'
- r'AccountLeverage|AccountMargin|AccountName|AccountNumber|AccountProfit|'
- r'AccountServer|AccountStopoutLevel|AccountStopoutMode|'
- r'GetLastError|IsStopped|UninitializeReason|MQLInfoInteger|MQLInfoString|'
- r'Symbol|Period|Digits|Point|IsConnected|IsDemo|IsDllsAllowed|'
- r'IsExpertEnabled|IsLibrariesAllowed|IsOptimization|IsTesting|'
- r'IsTradeAllowed|'
- r'IsTradeContextBusy|IsVisualMode|TerminalCompany|TerminalName|'
- r'TerminalPath|'
- r'SymbolsTotal|SymbolName|SymbolSelect|SymbolIsSynchronized|'
- r'SymbolInfoDouble|'
- r'SymbolInfoInteger|SymbolInfoString|SymbolInfoTick|'
- r'SymbolInfoSessionQuote|'
- r'SymbolInfoSessionTrade|MarketInfo|'
- r'SeriesInfoInteger|CopyRates|CopyTime|CopyOpen|'
- r'CopyHigh|CopyLow|CopyClose|'
- r'CopyTickVolume|CopyRealVolume|CopySpread|iBars|iBarShift|iClose|'
- r'iHigh|iHighest|iLow|iLowest|iOpen|iTime|iVolume|'
- r'HideTestIndicators|Period|RefreshRates|Symbol|WindowBarsPerChart|'
- r'WindowExpertName|WindowFind|WindowFirstVisibleBar|WindowHandle|'
- r'WindowIsVisible|WindowOnDropped|WindowPriceMax|WindowPriceMin|'
- r'WindowPriceOnDropped|WindowRedraw|WindowScreenShot|'
- r'WindowTimeOnDropped|WindowsTotal|WindowXOnDropped|WindowYOnDropped|'
- r'OrderClose|OrderCloseBy|OrderClosePrice|OrderCloseTime|OrderComment|'
- r'OrderCommission|OrderDelete|OrderExpiration|OrderLots|OrderMagicNumber|'
- r'OrderModify|OrderOpenPrice|OrderOpenTime|OrderPrint|OrderProfit|'
- r'OrderSelect|OrderSend|OrdersHistoryTotal|OrderStopLoss|OrdersTotal|'
- r'OrderSwap|OrderSymbol|OrderTakeProfit|OrderTicket|OrderType|'
- r'GlobalVariableCheck|GlobalVariableTime|'
- r'GlobalVariableDel|GlobalVariableGet|GlobalVariableName|'
- r'GlobalVariableSet|GlobalVariablesFlush|GlobalVariableTemp|'
- r'GlobalVariableSetOnCondition|GlobalVariablesDeleteAll|'
- r'GlobalVariablesTotal|GlobalVariableCheck|GlobalVariableTime|'
- r'GlobalVariableDel|GlobalVariableGet|'
- r'GlobalVariableName|GlobalVariableSet|GlobalVariablesFlush|'
- r'GlobalVariableTemp|GlobalVariableSetOnCondition|'
- r'GlobalVariablesDeleteAll|GlobalVariablesTotal|'
- r'GlobalVariableCheck|GlobalVariableTime|GlobalVariableDel|'
- r'GlobalVariableGet|GlobalVariableName|GlobalVariableSet|'
- r'GlobalVariablesFlush|GlobalVariableTemp|'
- r'GlobalVariableSetOnCondition|GlobalVariablesDeleteAll|'
- r'GlobalVariablesTotal|'
- r'FileFindFirst|FileFindNext|FileFindClose|FileOpen|FileDelete|'
- r'FileFlush|FileGetInteger|FileIsEnding|FileIsLineEnding|'
- r'FileClose|FileIsExist|FileCopy|FileMove|FileReadArray|'
- r'FileReadBool|FileReadDatetime|FileReadDouble|FileReadFloat|'
- r'FileReadInteger|FileReadLong|FileReadNumber|FileReadString|'
- r'FileReadStruct|FileSeek|FileSize|FileTell|FileWrite|'
- r'FileWriteArray|FileWriteDouble|FileWriteFloat|FileWriteInteger|'
- r'FileWriteLong|FileWriteString|FileWriteStruct|FolderCreate|'
- r'FolderDelete|FolderClean|FileOpenHistory|'
- r'IndicatorSetDouble|IndicatorSetInteger|IndicatorSetString|'
- r'SetIndexBuffer|IndicatorBuffers|IndicatorCounted|IndicatorDigits|'
- r'IndicatorShortName|SetIndexArrow|SetIndexDrawBegin|'
- r'SetIndexEmptyValue|SetIndexLabel|SetIndexShift|'
- r'SetIndexStyle|SetLevelStyle|SetLevelValue|'
- r'ObjectCreate|ObjectName|ObjectDelete|ObjectsDeleteAll|'
- r'ObjectFind|ObjectGetTimeByValue|ObjectGetValueByTime|'
- r'ObjectMove|ObjectsTotal|ObjectGetDouble|ObjectGetInteger|'
- r'ObjectGetString|ObjectSetDouble|ObjectSetInteger|'
- r'ObjectSetString|TextSetFont|TextOut|TextGetSize|'
- r'ObjectDescription|ObjectGet|ObjectGetFiboDescription|'
- r'ObjectGetShiftByValue|ObjectGetValueByShift|ObjectSet|'
- r'ObjectSetFiboDescription|ObjectSetText|ObjectType|'
- r'iAC|iAD|iADX|iAlligator|iAO|iATR|iBearsPower|'
- r'iBands|iBandsOnArray|iBullsPower|iCCI|iCCIOnArray|'
- r'iCustom|iDeMarker|iEnvelopes|iEnvelopesOnArray|'
- r'iForce|iFractals|iGator|iIchimoku|iBWMFI|iMomentum|'
- r'iMomentumOnArray|iMFI|iMA|iMAOnArray|iOsMA|iMACD|'
- r'iOBV|iSAR|iRSI|iRSIOnArray|iRVI|iStdDev|iStdDevOnArray|'
- r'iStochastic|iWPR|'
- r'EventSetMillisecondTimer|EventSetTimer|'
- r'EventKillTimer|EventChartCustom)\b', Name.Function),
- (r'(CHARTEVENT_KEYDOWN|CHARTEVENT_MOUSE_MOVE|'
- r'CHARTEVENT_OBJECT_CREATE|'
- r'CHARTEVENT_OBJECT_CHANGE|CHARTEVENT_OBJECT_DELETE|'
- r'CHARTEVENT_CLICK|'
- r'CHARTEVENT_OBJECT_CLICK|CHARTEVENT_OBJECT_DRAG|'
- r'CHARTEVENT_OBJECT_ENDEDIT|'
- r'CHARTEVENT_CHART_CHANGE|CHARTEVENT_CUSTOM|'
- r'CHARTEVENT_CUSTOM_LAST|'
- r'PERIOD_CURRENT|PERIOD_M1|PERIOD_M2|PERIOD_M3|'
- r'PERIOD_M4|PERIOD_M5|'
- r'PERIOD_M6|PERIOD_M10|PERIOD_M12|PERIOD_M15|'
- r'PERIOD_M20|PERIOD_M30|'
- r'PERIOD_H1|PERIOD_H2|PERIOD_H3|PERIOD_H4|'
- r'PERIOD_H6|PERIOD_H8|'
- r'PERIOD_H12|PERIOD_D1|PERIOD_W1|PERIOD_MN1|'
- r'CHART_IS_OBJECT|CHART_BRING_TO_TOP|'
- r'CHART_MOUSE_SCROLL|CHART_EVENT_MOUSE_MOVE|'
- r'CHART_EVENT_OBJECT_CREATE|'
- r'CHART_EVENT_OBJECT_DELETE|CHART_MODE|CHART_FOREGROUND|'
- r'CHART_SHIFT|'
- r'CHART_AUTOSCROLL|CHART_SCALE|CHART_SCALEFIX|'
- r'CHART_SCALEFIX_11|'
- r'CHART_SCALE_PT_PER_BAR|CHART_SHOW_OHLC|'
- r'CHART_SHOW_BID_LINE|'
- r'CHART_SHOW_ASK_LINE|CHART_SHOW_LAST_LINE|'
- r'CHART_SHOW_PERIOD_SEP|'
- r'CHART_SHOW_GRID|CHART_SHOW_VOLUMES|'
- r'CHART_SHOW_OBJECT_DESCR|'
- r'CHART_VISIBLE_BARS|CHART_WINDOWS_TOTAL|'
- r'CHART_WINDOW_IS_VISIBLE|'
- r'CHART_WINDOW_HANDLE|CHART_WINDOW_YDISTANCE|'
- r'CHART_FIRST_VISIBLE_BAR|'
- r'CHART_WIDTH_IN_BARS|CHART_WIDTH_IN_PIXELS|'
- r'CHART_HEIGHT_IN_PIXELS|'
- r'CHART_COLOR_BACKGROUND|CHART_COLOR_FOREGROUND|'
- r'CHART_COLOR_GRID|'
- r'CHART_COLOR_VOLUME|CHART_COLOR_CHART_UP|'
- r'CHART_COLOR_CHART_DOWN|'
- r'CHART_COLOR_CHART_LINE|CHART_COLOR_CANDLE_BULL|'
- r'CHART_COLOR_CANDLE_BEAR|'
- r'CHART_COLOR_BID|CHART_COLOR_ASK|CHART_COLOR_LAST|'
- r'CHART_COLOR_STOP_LEVEL|'
- r'CHART_SHOW_TRADE_LEVELS|CHART_DRAG_TRADE_LEVELS|'
- r'CHART_SHOW_DATE_SCALE|'
- r'CHART_SHOW_PRICE_SCALE|CHART_SHIFT_SIZE|'
- r'CHART_FIXED_POSITION|'
- r'CHART_FIXED_MAX|CHART_FIXED_MIN|CHART_POINTS_PER_BAR|'
- r'CHART_PRICE_MIN|'
- r'CHART_PRICE_MAX|CHART_COMMENT|CHART_BEGIN|'
- r'CHART_CURRENT_POS|CHART_END|'
- r'CHART_BARS|CHART_CANDLES|CHART_LINE|CHART_VOLUME_HIDE|'
- r'CHART_VOLUME_TICK|CHART_VOLUME_REAL|'
- r'OBJ_VLINE|OBJ_HLINE|OBJ_TREND|OBJ_TRENDBYANGLE|OBJ_CYCLES|'
- r'OBJ_CHANNEL|OBJ_STDDEVCHANNEL|OBJ_REGRESSION|OBJ_PITCHFORK|'
- r'OBJ_GANNLINE|OBJ_GANNFAN|OBJ_GANNGRID|OBJ_FIBO|'
- r'OBJ_FIBOTIMES|OBJ_FIBOFAN|OBJ_FIBOARC|OBJ_FIBOCHANNEL|'
- r'OBJ_EXPANSION|OBJ_RECTANGLE|OBJ_TRIANGLE|OBJ_ELLIPSE|'
- r'OBJ_ARROW_THUMB_UP|OBJ_ARROW_THUMB_DOWN|'
- r'OBJ_ARROW_UP|OBJ_ARROW_DOWN|'
- r'OBJ_ARROW_STOP|OBJ_ARROW_CHECK|OBJ_ARROW_LEFT_PRICE|'
- r'OBJ_ARROW_RIGHT_PRICE|OBJ_ARROW_BUY|OBJ_ARROW_SELL|'
- r'OBJ_ARROW|'
- r'OBJ_TEXT|OBJ_LABEL|OBJ_BUTTON|OBJ_BITMAP|'
- r'OBJ_BITMAP_LABEL|'
- r'OBJ_EDIT|OBJ_EVENT|OBJ_RECTANGLE_LABEL|'
- r'OBJPROP_TIME1|OBJPROP_PRICE1|OBJPROP_TIME2|'
- r'OBJPROP_PRICE2|OBJPROP_TIME3|'
- r'OBJPROP_PRICE3|OBJPROP_COLOR|OBJPROP_STYLE|'
- r'OBJPROP_WIDTH|'
- r'OBJPROP_BACK|OBJPROP_RAY|OBJPROP_ELLIPSE|'
- r'OBJPROP_SCALE|'
- r'OBJPROP_ANGLE|OBJPROP_ARROWCODE|OBJPROP_TIMEFRAMES|'
- r'OBJPROP_DEVIATION|OBJPROP_FONTSIZE|OBJPROP_CORNER|'
- r'OBJPROP_XDISTANCE|OBJPROP_YDISTANCE|OBJPROP_FIBOLEVELS|'
- r'OBJPROP_LEVELCOLOR|OBJPROP_LEVELSTYLE|OBJPROP_LEVELWIDTH|'
- r'OBJPROP_FIRSTLEVEL|OBJPROP_COLOR|OBJPROP_STYLE|OBJPROP_WIDTH|'
- r'OBJPROP_BACK|OBJPROP_ZORDER|OBJPROP_FILL|OBJPROP_HIDDEN|'
- r'OBJPROP_SELECTED|OBJPROP_READONLY|OBJPROP_TYPE|OBJPROP_TIME|'
- r'OBJPROP_SELECTABLE|OBJPROP_CREATETIME|OBJPROP_LEVELS|'
- r'OBJPROP_LEVELCOLOR|OBJPROP_LEVELSTYLE|OBJPROP_LEVELWIDTH|'
- r'OBJPROP_ALIGN|OBJPROP_FONTSIZE|OBJPROP_RAY_RIGHT|OBJPROP_RAY|'
- r'OBJPROP_ELLIPSE|OBJPROP_ARROWCODE|OBJPROP_TIMEFRAMES|OBJPROP_ANCHOR|'
- r'OBJPROP_XDISTANCE|OBJPROP_YDISTANCE|OBJPROP_DRAWLINES|OBJPROP_STATE|'
- r'OBJPROP_CHART_ID|OBJPROP_XSIZE|OBJPROP_YSIZE|OBJPROP_XOFFSET|'
- r'OBJPROP_YOFFSET|OBJPROP_PERIOD|OBJPROP_DATE_SCALE|OBJPROP_PRICE_SCALE|'
- r'OBJPROP_CHART_SCALE|OBJPROP_BGCOLOR|OBJPROP_CORNER|OBJPROP_BORDER_TYPE|'
- r'OBJPROP_BORDER_COLOR|OBJPROP_PRICE|OBJPROP_LEVELVALUE|OBJPROP_SCALE|'
- r'OBJPROP_ANGLE|OBJPROP_DEVIATION|'
- r'OBJPROP_NAME|OBJPROP_TEXT|OBJPROP_TOOLTIP|OBJPROP_LEVELTEXT|'
- r'OBJPROP_FONT|OBJPROP_BMPFILE|OBJPROP_SYMBOL|'
- r'BORDER_FLAT|BORDER_RAISED|BORDER_SUNKEN|ALIGN_LEFT|ALIGN_CENTER|'
- r'ALIGN_RIGHT|ANCHOR_LEFT_UPPER|ANCHOR_LEFT|ANCHOR_LEFT_LOWER|'
- r'ANCHOR_LOWER|ANCHOR_RIGHT_LOWER|ANCHOR_RIGHT|ANCHOR_RIGHT_UPPER|'
- r'ANCHOR_UPPER|ANCHOR_CENTER|ANCHOR_TOP|ANCHOR_BOTTOM|'
- r'CORNER_LEFT_UPPER|CORNER_LEFT_LOWER|CORNER_RIGHT_LOWER|'
- r'CORNER_RIGHT_UPPER|'
- r'OBJ_NO_PERIODS|EMPTY|OBJ_PERIOD_M1|OBJ_PERIOD_M5|OBJ_PERIOD_M15|'
- r'OBJ_PERIOD_M30|OBJ_PERIOD_H1|OBJ_PERIOD_H4|OBJ_PERIOD_D1|'
- r'OBJ_PERIOD_W1|OBJ_PERIOD_MN1|OBJ_ALL_PERIODS|'
- r'GANN_UP_TREND|GANN_DOWN_TREND|'
- r'((clr)?(Black|DarkGreen|DarkSlateGray|Olive|'
- r'Green|Teal|Navy|Purple|'
- r'Maroon|Indigo|MidnightBlue|DarkBlue|'
- r'DarkOliveGreen|SaddleBrown|'
- r'ForestGreen|OliveDrab|SeaGreen|'
- r'DarkGoldenrod|DarkSlateBlue|'
- r'Sienna|MediumBlue|Brown|DarkTurquoise|'
- r'DimGray|LightSeaGreen|'
- r'DarkViolet|FireBrick|MediumVioletRed|'
- r'MediumSeaGreen|Chocolate|'
- r'Crimson|SteelBlue|Goldenrod|MediumSpringGreen|'
- r'LawnGreen|CadetBlue|'
- r'DarkOrchid|YellowGreen|LimeGreen|OrangeRed|'
- r'DarkOrange|Orange|'
- r'Gold|Yellow|Chartreuse|Lime|SpringGreen|'
- r'Aqua|DeepSkyBlue|Blue|'
- r'Magenta|Red|Gray|SlateGray|Peru|BlueViolet|'
- r'LightSlateGray|DeepPink|'
- r'MediumTurquoise|DodgerBlue|Turquoise|RoyalBlue|'
- r'SlateBlue|DarkKhaki|'
- r'IndianRed|MediumOrchid|GreenYellow|'
- r'MediumAquamarine|DarkSeaGreen|'
- r'Tomato|RosyBrown|Orchid|MediumPurple|'
- r'PaleVioletRed|Coral|CornflowerBlue|'
- r'DarkGray|SandyBrown|MediumSlateBlue|'
- r'Tan|DarkSalmon|BurlyWood|'
- r'HotPink|Salmon|Violet|LightCoral|SkyBlue|'
- r'LightSalmon|Plum|'
- r'Khaki|LightGreen|Aquamarine|Silver|'
- r'LightSkyBlue|LightSteelBlue|'
- r'LightBlue|PaleGreen|Thistle|PowderBlue|'
- r'PaleGoldenrod|PaleTurquoise|'
- r'LightGray|Wheat|NavajoWhite|Moccasin|'
- r'LightPink|Gainsboro|PeachPuff|'
- r'Pink|Bisque|LightGoldenrod|BlanchedAlmond|'
- r'LemonChiffon|Beige|'
- r'AntiqueWhite|PapayaWhip|Cornsilk|'
- r'LightYellow|LightCyan|Linen|'
- r'Lavender|MistyRose|OldLace|WhiteSmoke|'
- r'Seashell|Ivory|Honeydew|'
- r'AliceBlue|LavenderBlush|MintCream|Snow|White))|'
- r'SYMBOL_THUMBSUP|SYMBOL_THUMBSDOWN|'
- r'SYMBOL_ARROWUP|SYMBOL_ARROWDOWN|'
- r'SYMBOL_STOPSIGN|SYMBOL_CHECKSIGN|'
- r'SYMBOL_LEFTPRICE|SYMBOL_RIGHTPRICE|'
- r'PRICE_CLOSE|PRICE_OPEN|PRICE_HIGH|PRICE_LOW|'
- r'PRICE_MEDIAN|PRICE_TYPICAL|PRICE_WEIGHTED|'
- r'VOLUME_TICK|VOLUME_REAL|'
- r'STO_LOWHIGH|STO_CLOSECLOSE|'
- r'MODE_OPEN|MODE_LOW|MODE_HIGH|MODE_CLOSE|MODE_VOLUME|MODE_TIME|'
- r'MODE_SMA|MODE_EMA|MODE_SMMA|MODE_LWMA|'
- r'MODE_MAIN|MODE_SIGNAL|MODE_MAIN|'
- r'MODE_PLUSDI|MODE_MINUSDI|MODE_UPPER|'
- r'MODE_LOWER|MODE_GATORJAW|MODE_GATORTEETH|'
- r'MODE_GATORLIPS|MODE_TENKANSEN|'
- r'MODE_KIJUNSEN|MODE_SENKOUSPANA|'
- r'MODE_SENKOUSPANB|MODE_CHINKOUSPAN|'
- r'DRAW_LINE|DRAW_SECTION|DRAW_HISTOGRAM|'
- r'DRAW_ARROW|DRAW_ZIGZAG|DRAW_NONE|'
- r'STYLE_SOLID|STYLE_DASH|STYLE_DOT|'
- r'STYLE_DASHDOT|STYLE_DASHDOTDOT|'
- r'DRAW_NONE|DRAW_LINE|DRAW_SECTION|DRAW_HISTOGRAM|'
- r'DRAW_ARROW|DRAW_ZIGZAG|DRAW_FILLING|'
- r'INDICATOR_DATA|INDICATOR_COLOR_INDEX|'
- r'INDICATOR_CALCULATIONS|INDICATOR_DIGITS|'
- r'INDICATOR_HEIGHT|INDICATOR_LEVELS|'
- r'INDICATOR_LEVELCOLOR|INDICATOR_LEVELSTYLE|'
- r'INDICATOR_LEVELWIDTH|INDICATOR_MINIMUM|'
- r'INDICATOR_MAXIMUM|INDICATOR_LEVELVALUE|'
- r'INDICATOR_SHORTNAME|INDICATOR_LEVELTEXT|'
- r'TERMINAL_BUILD|TERMINAL_CONNECTED|'
- r'TERMINAL_DLLS_ALLOWED|TERMINAL_TRADE_ALLOWED|'
- r'TERMINAL_EMAIL_ENABLED|'
- r'TERMINAL_FTP_ENABLED|TERMINAL_MAXBARS|'
- r'TERMINAL_CODEPAGE|TERMINAL_CPU_CORES|'
- r'TERMINAL_DISK_SPACE|TERMINAL_MEMORY_PHYSICAL|'
- r'TERMINAL_MEMORY_TOTAL|'
- r'TERMINAL_MEMORY_AVAILABLE|TERMINAL_MEMORY_USED|'
- r'TERMINAL_X64|'
- r'TERMINAL_OPENCL_SUPPORT|TERMINAL_LANGUAGE|'
- r'TERMINAL_COMPANY|TERMINAL_NAME|'
- r'TERMINAL_PATH|TERMINAL_DATA_PATH|'
- r'TERMINAL_COMMONDATA_PATH|'
- r'MQL_PROGRAM_TYPE|MQL_DLLS_ALLOWED|'
- r'MQL_TRADE_ALLOWED|MQL_DEBUG|'
- r'MQL_PROFILER|MQL_TESTER|MQL_OPTIMIZATION|'
- r'MQL_VISUAL_MODE|'
- r'MQL_FRAME_MODE|MQL_LICENSE_TYPE|MQL_PROGRAM_NAME|'
- r'MQL_PROGRAM_PATH|'
- r'PROGRAM_SCRIPT|PROGRAM_EXPERT|'
- r'PROGRAM_INDICATOR|LICENSE_FREE|'
- r'LICENSE_DEMO|LICENSE_FULL|LICENSE_TIME|'
- r'MODE_LOW|MODE_HIGH|MODE_TIME|MODE_BID|'
- r'MODE_ASK|MODE_POINT|'
- r'MODE_DIGITS|MODE_SPREAD|MODE_STOPLEVEL|'
- r'MODE_LOTSIZE|MODE_TICKVALUE|'
- r'MODE_TICKSIZE|MODE_SWAPLONG|'
- r'MODE_SWAPSHORT|MODE_STARTING|'
- r'MODE_EXPIRATION|MODE_TRADEALLOWED|'
- r'MODE_MINLOT|MODE_LOTSTEP|MODE_MAXLOT|'
- r'MODE_SWAPTYPE|MODE_PROFITCALCMODE|'
- r'MODE_MARGINCALCMODE|MODE_MARGININIT|'
- r'MODE_MARGINMAINTENANCE|MODE_MARGINHEDGED|'
- r'MODE_MARGINREQUIRED|MODE_FREEZELEVEL|'
- r'SUNDAY|MONDAY|TUESDAY|WEDNESDAY|THURSDAY|'
- r'FRIDAY|SATURDAY|'
- r'ACCOUNT_LOGIN|ACCOUNT_TRADE_MODE|'
- r'ACCOUNT_LEVERAGE|'
- r'ACCOUNT_LIMIT_ORDERS|ACCOUNT_MARGIN_SO_MODE|'
- r'ACCOUNT_TRADE_ALLOWED|ACCOUNT_TRADE_EXPERT|'
- r'ACCOUNT_BALANCE|'
- r'ACCOUNT_CREDIT|ACCOUNT_PROFIT|ACCOUNT_EQUITY|'
- r'ACCOUNT_MARGIN|'
- r'ACCOUNT_FREEMARGIN|ACCOUNT_MARGIN_LEVEL|'
- r'ACCOUNT_MARGIN_SO_CALL|'
- r'ACCOUNT_MARGIN_SO_SO|ACCOUNT_NAME|'
- r'ACCOUNT_SERVER|ACCOUNT_CURRENCY|'
- r'ACCOUNT_COMPANY|ACCOUNT_TRADE_MODE_DEMO|'
- r'ACCOUNT_TRADE_MODE_CONTEST|'
- r'ACCOUNT_TRADE_MODE_REAL|ACCOUNT_STOPOUT_MODE_PERCENT|'
- r'ACCOUNT_STOPOUT_MODE_MONEY|'
- r'STAT_INITIAL_DEPOSIT|STAT_WITHDRAWAL|STAT_PROFIT|'
- r'STAT_GROSS_PROFIT|'
- r'STAT_GROSS_LOSS|STAT_MAX_PROFITTRADE|'
- r'STAT_MAX_LOSSTRADE|STAT_CONPROFITMAX|'
- r'STAT_CONPROFITMAX_TRADES|STAT_MAX_CONWINS|'
- r'STAT_MAX_CONPROFIT_TRADES|'
- r'STAT_CONLOSSMAX|STAT_CONLOSSMAX_TRADES|'
- r'STAT_MAX_CONLOSSES|'
- r'STAT_MAX_CONLOSS_TRADES|STAT_BALANCEMIN|'
- r'STAT_BALANCE_DD|'
- r'STAT_BALANCEDD_PERCENT|STAT_BALANCE_DDREL_PERCENT|'
- r'STAT_BALANCE_DD_RELATIVE|STAT_EQUITYMIN|'
- r'STAT_EQUITY_DD|'
- r'STAT_EQUITYDD_PERCENT|STAT_EQUITY_DDREL_PERCENT|'
- r'STAT_EQUITY_DD_RELATIVE|STAT_EXPECTED_PAYOFF|'
- r'STAT_PROFIT_FACTOR|'
- r'STAT_RECOVERY_FACTOR|STAT_SHARPE_RATIO|'
- r'STAT_MIN_MARGINLEVEL|'
- r'STAT_CUSTOM_ONTESTER|STAT_DEALS|STAT_TRADES|'
- r'STAT_PROFIT_TRADES|'
- r'STAT_LOSS_TRADES|STAT_SHORT_TRADES|STAT_LONG_TRADES|'
- r'STAT_PROFIT_SHORTTRADES|STAT_PROFIT_LONGTRADES|'
- r'STAT_PROFITTRADES_AVGCON|STAT_LOSSTRADES_AVGCON|'
- r'SERIES_BARS_COUNT|SERIES_FIRSTDATE|SERIES_LASTBAR_DATE|'
- r'SERIES_SERVER_FIRSTDATE|SERIES_TERMINAL_FIRSTDATE|'
- r'SERIES_SYNCHRONIZED|'
- r'OP_BUY|OP_SELL|OP_BUYLIMIT|OP_SELLLIMIT|'
- r'OP_BUYSTOP|OP_SELLSTOP|'
- r'TRADE_ACTION_DEAL|TRADE_ACTION_PENDING|'
- r'TRADE_ACTION_SLTP|'
- r'TRADE_ACTION_MODIFY|TRADE_ACTION_REMOVE|'
- r'__DATE__|__DATETIME__|__LINE__|__FILE__|'
- r'__PATH__|__FUNCTION__|'
- r'__FUNCSIG__|__MQLBUILD__|__MQL4BUILD__|'
- r'M_E|M_LOG2E|M_LOG10E|M_LN2|M_LN10|'
- r'M_PI|M_PI_2|M_PI_4|M_1_PI|'
- r'M_2_PI|M_2_SQRTPI|M_SQRT2|M_SQRT1_2|'
- r'CHAR_MIN|CHAR_MAX|UCHAR_MAX|'
- r'SHORT_MIN|SHORT_MAX|USHORT_MAX|'
- r'INT_MIN|INT_MAX|UINT_MAX|'
- r'LONG_MIN|LONG_MAX|ULONG_MAX|'
- r'DBL_MIN|DBL_MAX|DBL_EPSILON|DBL_DIG|DBL_MANT_DIG|'
- r'DBL_MAX_10_EXP|DBL_MAX_EXP|DBL_MIN_10_EXP|DBL_MIN_EXP|'
- r'FLT_MIN|FLT_MAX|FLT_EPSILON|'
- r'FLT_DIG|FLT_MANT_DIG|FLT_MAX_10_EXP|'
- r'FLT_MAX_EXP|FLT_MIN_10_EXP|FLT_MIN_EXP|REASON_PROGRAM'
- r'REASON_REMOVE|REASON_RECOMPILE|'
- r'REASON_CHARTCHANGE|REASON_CHARTCLOSE|'
- r'REASON_PARAMETERS|REASON_ACCOUNT|'
- r'REASON_TEMPLATE|REASON_INITFAILED|'
- r'REASON_CLOSE|POINTER_INVALID'
- r'POINTER_DYNAMIC|POINTER_AUTOMATIC|'
- r'NULL|EMPTY|EMPTY_VALUE|CLR_NONE|WHOLE_ARRAY|'
- r'CHARTS_MAX|clrNONE|EMPTY_VALUE|INVALID_HANDLE|'
- r'IS_DEBUG_MODE|IS_PROFILE_MODE|NULL|WHOLE_ARRAY|WRONG_VALUE|'
- r'ERR_NO_ERROR|ERR_NO_RESULT|ERR_COMMON_ERROR|'
- r'ERR_INVALID_TRADE_PARAMETERS|'
- r'ERR_SERVER_BUSY|ERR_OLD_VERSION|ERR_NO_CONNECTION|'
- r'ERR_NOT_ENOUGH_RIGHTS|'
- r'ERR_TOO_FREQUENT_REQUESTS|ERR_MALFUNCTIONAL_TRADE|'
- r'ERR_ACCOUNT_DISABLED|'
- r'ERR_INVALID_ACCOUNT|ERR_TRADE_TIMEOUT|'
- r'ERR_INVALID_PRICE|ERR_INVALID_STOPS|'
- r'ERR_INVALID_TRADE_VOLUME|ERR_MARKET_CLOSED|'
- r'ERR_TRADE_DISABLED|'
- r'ERR_NOT_ENOUGH_MONEY|ERR_PRICE_CHANGED|'
- r'ERR_OFF_QUOTES|ERR_BROKER_BUSY|'
- r'ERR_REQUOTE|ERR_ORDER_LOCKED|'
- r'ERR_LONG_POSITIONS_ONLY_ALLOWED|ERR_TOO_MANY_REQUESTS|'
- r'ERR_TRADE_MODIFY_DENIED|ERR_TRADE_CONTEXT_BUSY|'
- r'ERR_TRADE_EXPIRATION_DENIED|'
- r'ERR_TRADE_TOO_MANY_ORDERS|ERR_TRADE_HEDGE_PROHIBITED|'
- r'ERR_TRADE_PROHIBITED_BY_FIFO|'
- r'FILE_READ|FILE_WRITE|FILE_BIN|FILE_CSV|FILE_TXT|'
- r'FILE_ANSI|FILE_UNICODE|'
- r'FILE_SHARE_READ|FILE_SHARE_WRITE|FILE_REWRITE|'
- r'FILE_COMMON|FILE_EXISTS|'
- r'FILE_CREATE_DATE|FILE_MODIFY_DATE|'
- r'FILE_ACCESS_DATE|FILE_SIZE|FILE_POSITION|'
- r'FILE_END|FILE_LINE_END|FILE_IS_COMMON|'
- r'FILE_IS_TEXT|FILE_IS_BINARY|'
- r'FILE_IS_CSV|FILE_IS_ANSI|FILE_IS_READABLE|FILE_IS_WRITABLE|'
- r'SEEK_SET|SEEK_CUR|SEEK_END|CP_ACP|'
- r'CP_OEMCP|CP_MACCP|CP_THREAD_ACP|'
- r'CP_SYMBOL|CP_UTF7|CP_UTF8|IDOK|IDCANCEL|IDABORT|'
- r'IDRETRY|IDIGNORE|IDYES|IDNO|IDTRYAGAIN|IDCONTINUE|'
- r'MB_OK|MB_OKCANCEL|MB_ABORTRETRYIGNORE|MB_YESNOCANCEL|'
- r'MB_YESNO|MB_RETRYCANCEL|'
- r'MB_CANCELTRYCONTINUE|MB_ICONSTOP|MB_ICONERROR|'
- r'MB_ICONHAND|MB_ICONQUESTION|'
- r'MB_ICONEXCLAMATION|MB_ICONWARNING|'
- r'MB_ICONINFORMATION|MB_ICONASTERISK|'
- r'MB_DEFBUTTON1|MB_DEFBUTTON2|MB_DEFBUTTON3|MB_DEFBUTTON4)\b',
- Name.Constant),
- inherit,
- ],
- }
-
-class SwiftLexer(ObjectiveCLexer):
- """
- For `Swift <https://developer.apple.com/swift/>`_ source.
- """
- name = 'Swift'
- filenames = ['*.swift']
- aliases = ['swift']
- mimetypes = ['text/x-swift']
-
- keywords_decl = ['class', 'deinit', 'enum', 'extension', 'func', 'import',
- 'init', 'let', 'protocol', 'static', 'struct', 'subscript',
- 'typealias', 'var']
- keywords_stmt = ['break', 'case', 'continue', 'default', 'do', 'else',
- 'fallthrough', 'if', 'in', 'for', 'return', 'switch',
- 'where', 'while']
- keywords_type = ['as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self',
- 'Type', '__COLUMN__', '__FILE__', '__FUNCTION__',
- '__LINE__']
- keywords_resrv = ['associativity', 'didSet', 'get', 'infix', 'inout', 'left',
- 'mutating', 'none', 'nonmutating', 'operator', 'override',
- 'postfix', 'precedence', 'prefix', 'right', 'set',
- 'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak',
- 'willSet']
- operators = ['->']
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- ObjectiveCLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.keywords_decl:
- token = Keyword
- elif value in self.keywords_stmt:
- token = Keyword
- elif value in self.keywords_type:
- token = Keyword.Type
- elif value in self.keywords_resrv:
- token = Keyword.Reserved
- elif value in self.operators:
- token = Operator
- yield index, token, value
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.d import DLexer
+from pygments.lexers.objective import ObjectiveCLexer, \
+ ObjectiveCppLexer, LogosLexer
+from pygments.lexers.go import GoLexer
+from pygments.lexers.rust import RustLexer
+from pygments.lexers.c_like import ECLexer, ValaLexer, CudaLexer
+from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
+from pygments.lexers.business import CobolLexer, CobolFreeformatLexer
+from pygments.lexers.fortran import FortranLexer
+from pygments.lexers.prolog import PrologLexer
+from pygments.lexers.python import CythonLexer
+from pygments.lexers.graphics import GLShaderLexer
+from pygments.lexers.ml import OcamlLexer
+from pygments.lexers.basic import BlitzBasicLexer, BlitzMaxLexer, MonkeyLexer
+from pygments.lexers.dylan import DylanLexer, DylanLidLexer, DylanConsoleLexer
+from pygments.lexers.ooc import OocLexer
+from pygments.lexers.felix import FelixLexer
+from pygments.lexers.nimrod import NimrodLexer
+
+__all__ = []
diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py
new file mode 100644
index 00000000..44475357
--- /dev/null
+++ b/pygments/lexers/configs.py
@@ -0,0 +1,536 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.configs
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for configuration file formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words, bygroups, include, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer',
+ 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer',
+ 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer']
+
+
+class IniLexer(RegexLexer):
+ """
+ Lexer for configuration files in INI style.
+ """
+
+ name = 'INI'
+ aliases = ['ini', 'cfg', 'dosini']
+ filenames = ['*.ini', '*.cfg']
+ mimetypes = ['text/x-ini']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'\[.*?\]$', Keyword),
+ (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Text, Operator, Text, String))
+ ]
+ }
+
+ def analyse_text(text):
+ npos = text.find('\n')
+ if npos < 3:
+ return False
+ return text[0] == '[' and text[npos-1] == ']'
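The analyse_text heuristic above only reports a match when the very first line of the input is a bracketed section header. A standalone sketch of the same check (the helper name is illustrative only, not part of this patch):

    def looks_like_ini(text):
        # Mirror the heuristic: the first line must be at least "[x]" long,
        # start with '[' and end with ']'.
        npos = text.find('\n')
        if npos < 3:
            return False
        return text[0] == '[' and text[npos - 1] == ']'

    print(looks_like_ini("[section]\nkey = value\n"))  # True
    print(looks_like_ini("key = value\n"))             # False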
+
+
+class RegeditLexer(RegexLexer):
+ """
+ Lexer for `Windows Registry
+ <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
+ by regedit.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'reg'
+ aliases = ['registry']
+ filenames = ['*.reg']
+ mimetypes = ['text/x-windows-registry']
+
+ tokens = {
+ 'root': [
+ (r'Windows Registry Editor.*', Text),
+ (r'\s+', Text),
+ (r'[;#].*', Comment.Single),
+ (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
+ bygroups(Keyword, Operator, Name.Builtin, Keyword)),
+ # String keys, which obey somewhat normal escaping
+ (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ # Bare keys (includes @)
+ (r'(.*?)([ \t]*)(=)([ \t]*)',
+ bygroups(Name.Attribute, Text, Operator, Text),
+ 'value'),
+ ],
+ 'value': [
+ (r'-', Operator, '#pop'), # delete value
+ (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
+ bygroups(Name.Variable, Punctuation, Number), '#pop'),
+ # As far as I know, .reg files do not support line continuation.
+ (r'.*', String, '#pop'),
+ ]
+ }
+
+ def analyse_text(text):
+ return text.startswith('Windows Registry Editor')
+
+
+class PropertiesLexer(RegexLexer):
+ """
+ Lexer for configuration files in Java's properties format.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Properties'
+ aliases = ['properties', 'jproperties']
+ filenames = ['*.properties']
+ mimetypes = ['text/x-java-properties']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(?:[;#]|//).*$', Comment),
+ (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
+ bygroups(Name.Attribute, Text, Operator, Text, String)),
+ ],
+ }
+
+
+def _rx_indent(level):
+ # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
+ # Edit this if you are in an environment where KconfigLexer gets expanded
+ # input (tabs expanded to spaces) and the expansion tab width is != 8,
+ # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
+ # Value range here is 2 <= {tab_width} <= 8.
+ tab_width = 8
+ # Regex matching a given indentation {level}, assuming that indentation is
+ # a multiple of {tab_width}. In other cases there might be problems.
+ return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level)
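To make the helper above concrete, this is roughly what _rx_indent(2) expands to with the default tab_width of 8, and how it behaves (a sketch for illustration only):

    import re

    # What _rx_indent(2) produces with tab_width = 8: two "indent units",
    # each a tab, 1-7 spaces followed by a tab, or exactly 8 spaces.
    pattern = r'(?:\t| {1,7}\t| {8}){2}.*\n'

    print(bool(re.match(pattern, '\t\tIndented help text\n')))   # True
    print(bool(re.match(pattern, ' ' * 16 + 'help text\n')))     # True
    print(bool(re.match(pattern, ' ' * 8 + 'help text\n')))      # False (only level 1)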
+
+
+class KconfigLexer(RegexLexer):
+ """
+ For Linux-style Kconfig files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Kconfig'
+ aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
+ # Adjust this if new kconfig file names appear in your environment
+ filenames = ['Kconfig', '*Config.in*', 'external.in*',
+ 'standard-modules.in']
+ mimetypes = ['text/x-kconfig']
+ # No re.MULTILINE, indentation-aware help text needs line-by-line handling
+ flags = 0
+
+ def call_indent(level):
+ # If indentation >= {level} is detected, enter state 'indent{level}'
+ return (_rx_indent(level), String.Doc, 'indent%s' % level)
+
+ def do_indent(level):
+ # Print paragraphs of indentation level >= {level} as String.Doc,
+ # ignoring blank lines. Then return to 'root' state.
+ return [
+ (_rx_indent(level), String.Doc),
+ (r'\s*\n', Text),
+ default('#pop:2')
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (words((
+ 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice',
+ 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif',
+ 'source', 'prompt', 'select', 'depends on', 'default',
+ 'range', 'option'), suffix=r'\b'),
+ Keyword),
+ (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
+ (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
+ Name.Builtin),
+ (r'[!=&|]', Operator),
+ (r'[()]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r"'(''|[^'])*'", String.Single),
+ (r'"(""|[^"])*"', String.Double),
+ (r'\S+', Text),
+ ],
+ # Help text is indented, multi-line and ends when a lower indentation
+ # level is detected.
+ 'help': [
+ # Skip blank lines after help token, if any
+ (r'\s*\n', Text),
+ # Determine the first help line's indentation level heuristically(!).
+ # Attention: this is not perfect, but works for 99% of "normal"
+ # indentation schemes up to a max. indentation level of 7.
+ call_indent(7),
+ call_indent(6),
+ call_indent(5),
+ call_indent(4),
+ call_indent(3),
+ call_indent(2),
+ call_indent(1),
+ ('', Text, '#pop'), # for incomplete help sections without text
+ ],
+ # Handle text for indentation levels 7 to 1
+ 'indent7': do_indent(7),
+ 'indent6': do_indent(6),
+ 'indent5': do_indent(5),
+ 'indent4': do_indent(4),
+ 'indent3': do_indent(3),
+ 'indent2': do_indent(2),
+ 'indent1': do_indent(1),
+ }
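A quick usage sketch for the lexer above (assuming the new pygments.lexers.configs module is importable; the sample Kconfig text is illustrative): the help paragraph should come out as String.Doc via the indent states.

    from pygments.lexers.configs import KconfigLexer  # module added in this patch

    sample = (
        'config FOO\n'
        '\tbool "Enable foo"\n'
        '\thelp\n'
        '\t  This option enables the hypothetical FOO feature.\n'
    )
    for token, value in KconfigLexer().get_tokens(sample):
        print(token, repr(value))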
+
+
+class Cfengine3Lexer(RegexLexer):
+ """
+ Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'CFEngine3'
+ aliases = ['cfengine3', 'cf3']
+ filenames = ['*.cf']
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'#.*?\n', Comment),
+ (r'(body)(\s+)(\S+)(\s+)(control)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword)),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
+ 'arglist'),
+ (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Function)),
+ (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
+ bygroups(Punctuation, Name.Variable, Punctuation,
+ Text, Keyword.Type, Text, Operator, Text)),
+ (r'(\S+)(\s*)(=>)(\s*)',
+ bygroups(Keyword.Reserved, Text, Operator, Text)),
+ (r'"', String, 'string'),
+ (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
+ (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)),
+ (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
+ (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable),
+ (r'[(){},;]', Punctuation),
+ (r'=>', Operator),
+ (r'->', Operator),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\w+', Name.Function),
+ (r'\s+', Text),
+ ],
+ 'string': [
+ (r'\$[\{\(]', String.Interpol, 'interpol'),
+ (r'\\.', String.Escape),
+ (r'"', String, '#pop'),
+ (r'\n', String),
+ (r'.', String),
+ ],
+ 'interpol': [
+ (r'\$[\{\(]', String.Interpol, '#push'),
+ (r'[\}\)]', String.Interpol, '#pop'),
+ (r'[^\$\{\(\)\}]+', String.Interpol),
+ ],
+ 'arglist': [
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'\w+', Name.Variable),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class ApacheConfLexer(RegexLexer):
+ """
+ Lexer for configuration files following the Apache config file
+ format.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'ApacheConf'
+ aliases = ['apacheconf', 'aconf', 'apache']
+ filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
+ mimetypes = ['text/x-apacheconf']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'(#.*?)$', Comment),
+ (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
+ bygroups(Name.Tag, Text, String, Name.Tag)),
+ (r'([a-z]\w*)(\s+)',
+ bygroups(Name.Builtin, Text), 'value'),
+ (r'\.+', Text),
+ ],
+ 'value': [
+ (r'\\\n', Text),
+ (r'$', Text, '#pop'),
+ (r'\\', Text),
+ (r'[^\S\n]+', Text),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'\d+', Number),
+ (r'/([a-z0-9][\w./-]+)', String.Other),
+ (r'(on|off|none|any|all|double|email|dns|min|minimal|'
+ r'os|productonly|full|emerg|alert|crit|error|warn|'
+ r'notice|info|debug|registry|script|inetd|standalone|'
+ r'user|group)\b', Keyword),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'[^\s"\\]+', Text)
+ ],
+ }
+
+
+class SquidConfLexer(RegexLexer):
+ """
+ Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'SquidConf'
+ aliases = ['squidconf', 'squid.conf', 'squid']
+ filenames = ['squid.conf']
+ mimetypes = ['text/x-squidconf']
+ flags = re.IGNORECASE
+
+ keywords = (
+ "access_log", "acl", "always_direct", "announce_host",
+ "announce_period", "announce_port", "announce_to", "anonymize_headers",
+ "append_domain", "as_whois_server", "auth_param_basic",
+ "authenticate_children", "authenticate_program", "authenticate_ttl",
+ "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
+ "cache_dir", "cache_dns_program", "cache_effective_group",
+ "cache_effective_user", "cache_host", "cache_host_acl",
+ "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
+ "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
+        "cache_peer_access", "cache_replacement_policy", "cache_stoplist",
+ "cache_stoplist_pattern", "cache_store_log", "cache_swap",
+ "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
+ "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
+ "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
+ "delay_initial_bucket_level", "delay_parameters", "delay_pools",
+ "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
+ "dns_testnames", "emulate_httpd_log", "err_html_text",
+ "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
+ "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
+ "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
+ "header_replace", "hierarchy_stoplist", "high_response_time_warning",
+ "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
+ "http_anonymizer", "httpd_accel", "httpd_accel_host",
+ "httpd_accel_port", "httpd_accel_uses_host_header",
+ "httpd_accel_with_proxy", "http_port", "http_reply_access",
+ "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
+ "ident_lookup", "ident_lookup_access", "ident_timeout",
+ "incoming_http_average", "incoming_icp_average", "inside_firewall",
+ "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
+ "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
+ "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
+ "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
+ "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
+ "memory_pools_limit", "memory_replacement_policy", "mime_table",
+ "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
+ "minimum_object_size", "minimum_retry_timeout", "miss_access",
+ "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
+ "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
+ "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
+ "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
+ "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
+ "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
+ "quick_abort_pct", "range_offset_limit", "read_timeout",
+ "redirect_children", "redirect_program",
+ "redirect_rewrites_host_header", "reference_age", "reference_age",
+ "refresh_pattern", "reload_into_ims", "request_body_max_size",
+ "request_size", "request_timeout", "shutdown_lifetime",
+ "single_parent_bypass", "siteselect_timeout", "snmp_access",
+ "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
+ "store_avg_object_size", "store_objects_per_bucket",
+ "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
+ "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
+ "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
+ "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
+ "unlinkd_program", "uri_whitespace", "useragent_log",
+ "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
+ )
+
+ opts = (
+ "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
+ "multicast-responder", "on", "off", "all", "deny", "allow", "via",
+ "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
+ "credentialsttl", "none", "disable", "offline_toggle", "diskd",
+ )
+
+ actions = (
+ "shutdown", "info", "parameter", "server_list", "client_list",
+ r'squid.conf',
+ )
+
+ actions_stats = (
+ "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
+ "redirector", "io", "reply_headers", "filedescriptors", "netdb",
+ )
+
+ actions_log = ("status", "enable", "disable", "clear")
+
+ acls = (
+ "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
+ "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
+ "dst", "time", "dstdomain", "ident", "snmp_community",
+ )
+
+ ip_re = (
+ r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
+ r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
+ r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
+ r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
+ r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
+ r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
+ r'[1-9]?\d)){3}))'
+ )
+
+ tokens = {
+ 'root': [
+ (r'\s+', Whitespace),
+ (r'#', Comment, 'comment'),
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(opts, prefix=r'\b', suffix=r'\b'), Name.Constant),
+ # Actions
+ (words(actions, prefix=r'\b', suffix=r'\b'), String),
+ (words(actions_stats, prefix=r'stats/', suffix=r'\b'), String),
+ (words(actions_log, prefix=r'log/', suffix=r'='), String),
+ (words(acls, prefix=r'\b', suffix=r'\b'), Keyword),
+ (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
+ (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
+ (r'\S+', Text),
+ ],
+ 'comment': [
+ (r'\s*TAG:.*', String.Escape, '#pop'),
+ (r'.*', Comment, '#pop'),
+ ],
+ }
+
+
+class NginxConfLexer(RegexLexer):
+ """
+ Lexer for `Nginx <http://nginx.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Nginx configuration file'
+ aliases = ['nginx']
+ filenames = []
+ mimetypes = ['text/x-nginx-conf']
+
+ tokens = {
+ 'root': [
+ (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
+ (r'[^\s;#]+', Keyword, 'stmt'),
+ include('base'),
+ ],
+ 'block': [
+ (r'}', Punctuation, '#pop:2'),
+ (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
+ include('base'),
+ ],
+ 'stmt': [
+ (r'{', Punctuation, 'block'),
+ (r';', Punctuation, '#pop'),
+ include('base'),
+ ],
+ 'base': [
+ (r'#.*\n', Comment.Single),
+ (r'on|off', Name.Constant),
+ (r'\$[^\s;#()]+', Name.Variable),
+ (r'([a-z0-9.-]+)(:)([0-9]+)',
+ bygroups(Name, Punctuation, Number.Integer)),
+ (r'[a-z-]+/[a-z-+]+', String), # mimetype
+ # (r'[a-zA-Z._-]+', Keyword),
+ (r'[0-9]+[km]?\b', Number.Integer),
+ (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
+ (r'[:=~]', Punctuation),
+ (r'[^\s;#{}$]+', String), # catch all
+ (r'/[^\s;#]*', Name), # pathname
+ (r'\s+', Text),
+ (r'[$;]', Text), # leftover characters
+ ],
+ }
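The root/stmt/block states above form a small push-down parse: a directive name pushes 'stmt', an opening '{' pushes 'block', and the closing '}' pops both levels at once ('#pop:2') back to the enclosing context. A hedged usage sketch (assuming the new configs module is importable):

    from pygments.lexers.configs import NginxConfLexer  # module added in this patch

    conf = 'server {\n    listen 80;\n    server_name example.org;\n}\n'
    for token, value in NginxConfLexer().get_tokens(conf):
        print(token, repr(value))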
+
+
+class LighttpdConfLexer(RegexLexer):
+ """
+ Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'Lighttpd configuration file'
+ aliases = ['lighty', 'lighttpd']
+ filenames = []
+ mimetypes = ['text/x-lighttpd-conf']
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment.Single),
+ (r'/\S*', Name), # pathname
+ (r'[a-zA-Z._-]+', Keyword),
+ (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
+ (r'[0-9]+', Number),
+ (r'=>|=~|\+=|==|=|\+', Operator),
+ (r'\$[A-Z]+', Name.Builtin),
+ (r'[(){}\[\],]', Punctuation),
+ (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
+ (r'\s+', Text),
+ ],
+
+ }
+
+
+class DockerLexer(RegexLexer):
+ """
+ Lexer for `Docker <http://docker.io>`_ configuration files.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Docker'
+ aliases = ['docker', 'dockerfile']
+ filenames = ['Dockerfile', '*.docker']
+ mimetypes = ['text/x-dockerfile-config']
+
+ _keywords = (r'(?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
+ r'VOLUME|WORKDIR)')
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
+ bygroups(Name.Keyword, Whitespace, Keyword)),
+ (r'^(%s)\b(.*)' % (_keywords,), bygroups(Keyword, String)),
+ (r'#.*', Comment),
+ (r'RUN', Keyword), # Rest of line falls through
+ (r'(.*\\\n)*.+', using(BashLexer)),
+ ],
+ }
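A short end-to-end sketch using the DockerLexer defined above together with the existing highlight API (the Dockerfile content is illustrative, not part of this patch):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.configs import DockerLexer  # module added in this patch

    dockerfile = (
        'FROM debian:wheezy\n'
        'MAINTAINER someone@example.org\n'
        'RUN apt-get update && apt-get install -y python\n'
    )
    print(highlight(dockerfile, DockerLexer(), TerminalFormatter()))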
diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py
new file mode 100644
index 00000000..334e7195
--- /dev/null
+++ b/pygments/lexers/console.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.console
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc console output.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Generic, Comment, String, Text, Keyword, Name, \
+ Punctuation, Number
+
+__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer']
+
+
+class VCTreeStatusLexer(RegexLexer):
+ """
+    For colorizing output of version control status commands, like "hg
+ status" or "svn status".
+
+ .. versionadded:: 2.0
+ """
+ name = 'VCTreeStatus'
+ aliases = ['vctreestatus']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+ (r'^A \+ C\s+', Generic.Error),
+ (r'^A\s+\+?\s+', String),
+ (r'^M\s+', Generic.Inserted),
+ (r'^C\s+', Generic.Error),
+ (r'^D\s+', Generic.Deleted),
+ (r'^[\?!]\s+', Comment.Preproc),
+ (r' >\s+.*\n', Comment.Preproc),
+ (r'.*\n', Text)
+ ]
+ }
+
+
+class PyPyLogLexer(RegexLexer):
+ """
+ Lexer for PyPy log files.
+
+ .. versionadded:: 1.5
+ """
+ name = "PyPy Log"
+ aliases = ["pypylog", "pypy"]
+ filenames = ["*.pypylog"]
+ mimetypes = ['application/x-pypylog']
+
+ tokens = {
+ "root": [
+ (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
+ (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
+ include("extra-stuff"),
+ ],
+ "jit-log": [
+ (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
+ (r"^\+\d+: ", Comment),
+ (r"--end of the loop--", Comment),
+ (r"[ifp]\d+", Name),
+ (r"ptr\d+", Name),
+ (r"(\()(\w+(?:\.\w+)?)(\))",
+ bygroups(Punctuation, Name.Builtin, Punctuation)),
+ (r"[\[\]=,()]", Punctuation),
+ (r"(\d+\.\d+|inf|-inf)", Number.Float),
+ (r"-?\d+", Number.Integer),
+ (r"'.*'", String),
+ (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
+ (r"<.*?>+", Name.Builtin),
+ (r"(label|debug_merge_point|jump|finish)", Name.Class),
+ (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
+ r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
+ r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
+ r"int_is_true|"
+ r"uint_floordiv|uint_ge|uint_lt|"
+ r"float_add|float_sub|float_mul|float_truediv|float_neg|"
+ r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
+ r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
+ r"cast_int_to_float|cast_float_to_int|"
+ r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
+ r"virtual_ref|mark_opaque_ptr|"
+ r"call_may_force|call_assembler|call_loopinvariant|"
+ r"call_release_gil|call_pure|call|"
+ r"new_with_vtable|new_array|newstr|newunicode|new|"
+ r"arraylen_gc|"
+ r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
+ r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
+ r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
+ r"getfield_raw|setfield_gc|setfield_raw|"
+ r"strgetitem|strsetitem|strlen|copystrcontent|"
+ r"unicodegetitem|unicodesetitem|unicodelen|"
+ r"guard_true|guard_false|guard_value|guard_isnull|"
+ r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
+ r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
+ Name.Builtin),
+ include("extra-stuff"),
+ ],
+ "jit-backend-counts": [
+ (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
+ (r":", Punctuation),
+ (r"\d+", Number),
+ include("extra-stuff"),
+ ],
+ "extra-stuff": [
+ (r"\s+", Text),
+ (r"#.*?$", Comment),
+ ],
+ }
diff --git a/pygments/lexers/css.py b/pygments/lexers/css.py
new file mode 100644
index 00000000..091d4696
--- /dev/null
+++ b/pygments/lexers/css.py
@@ -0,0 +1,496 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.css
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for CSS and related stylesheet formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import copy
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import iteritems
+
+__all__ = ['CssLexer', 'SassLexer', 'ScssLexer']
+
+
+class CssLexer(RegexLexer):
+ """
+ For CSS (Cascading Style Sheets).
+ """
+
+ name = 'CSS'
+ aliases = ['css']
+ filenames = ['*.css']
+ mimetypes = ['text/css']
+
+ tokens = {
+ 'root': [
+ include('basics'),
+ ],
+ 'basics': [
+ (r'\s+', Text),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'{', Punctuation, 'content'),
+ (r'\:[\w-]+', Name.Decorator),
+ (r'\.[\w-]+', Name.Class),
+ (r'\#[\w-]+', Name.Function),
+ (r'@[\w-]+', Keyword, 'atrule'),
+ (r'[\w-]+', Name.Tag),
+ (r'[~\^\*!%&$\[\]\(\)<>\|+=@:;,./?-]', Operator),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single)
+ ],
+ 'atrule': [
+ (r'{', Punctuation, 'atcontent'),
+ (r';', Punctuation, '#pop'),
+ include('basics'),
+ ],
+ 'atcontent': [
+ include('basics'),
+ (r'}', Punctuation, '#pop:2'),
+ ],
+ 'content': [
+ (r'\s+', Text),
+ (r'}', Punctuation, '#pop'),
+ (r'url\(.*?\)', String.Other),
+ (r'^@.*?$', Comment.Preproc),
+ (words((
+ 'azimuth', 'background-attachment', 'background-color',
+ 'background-image', 'background-position', 'background-repeat',
+ 'background', 'border-bottom-color', 'border-bottom-style',
+ 'border-bottom-width', 'border-left-color', 'border-left-style',
+ 'border-left-width', 'border-right', 'border-right-color',
+ 'border-right-style', 'border-right-width', 'border-top-color',
+ 'border-top-style', 'border-top-width', 'border-bottom',
+ 'border-collapse', 'border-left', 'border-width', 'border-color',
+ 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
+ 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
+ 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
+ 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
+ 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
+ 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
+ 'list-style-type', 'list-style-image', 'list-style-position',
+ 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
+ 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
+ 'min-height', 'min-width', 'opacity', 'orphans', 'outline-color',
+ 'outline-style', 'outline-width', 'outline', 'overflow', 'overflow-x',
+ 'overflow-y', 'padding-bottom', 'padding-left', 'padding-right', 'padding-top',
+ 'padding', 'page', 'page-break-after', 'page-break-before', 'page-break-inside',
+ 'pause-after', 'pause-before', 'pause', 'pitch-range', 'pitch',
+ 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
+ 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
+ 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
+ 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
+ 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
+ 'widows', 'width', 'word-spacing', 'z-index', 'bottom',
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bolder', 'bold', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'left', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slower', 'slow', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text-bottom', 'text-top', 'text', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'indigo', 'gold', 'firebrick', 'indianred', 'yellow', 'darkolivegreen',
+ 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
+ 'mediumslateblue', 'black', 'springgreen', 'crimson', 'lightsalmon', 'brown',
+ 'turquoise', 'olivedrab', 'cyan', 'silver', 'skyblue', 'gray', 'darkturquoise',
+ 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink', 'teal',
+ 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
+ 'violet', 'navy', 'orchid', 'blue', 'ghostwhite', 'honeydew', 'cornflowerblue',
+ 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'red', 'bisque', 'slategray',
+ 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
+ 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'purple', 'lightgrey',
+ 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
+ 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
+ 'lightcoral', 'orangered', 'navajowhite', 'lime', 'palegreen', 'burlywood',
+ 'seashell', 'mediumspringgreen', 'fuchsia', 'papayawhip', 'blanchedalmond',
+ 'peru', 'aquamarine', 'white', 'darkslategray', 'ivory', 'dodgerblue',
+ 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue', 'olive',
+ 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
+ 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
+ 'plum', 'aqua', 'darkgoldenrod', 'maroon', 'sandybrown', 'magenta', 'tan',
+ 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
+ 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
+ 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
+ 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
+ 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine', 'green',
+ 'blueviolet', 'peachpuff'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!important', Comment.Preproc),
+ (r'/\*(?:.|\n)*?\*/', Comment),
+ (r'\#[a-zA-Z0-9]{1,6}', Number),
+ (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|pt|pc|in|mm|cm|ex|s)\b', Number),
+ # Separate regex for percentages, as can't do word boundaries with %
+ (r'[\.-]?[0-9]*[\.]?[0-9]+%', Number),
+ (r'-?[0-9]+', Number),
+ (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
+ (r'[\[\]();]+', Punctuation),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'[a-zA-Z_]\w*', Name)
+ ]
+ }
+
+
+common_sass_tokens = {
+ 'value': [
+ (r'[ \t]+', Text),
+ (r'[!$][\w-]+', Name.Variable),
+ (r'url\(', String.Other, 'string-url'),
+ (r'[a-z_-][\w-]*(?=\()', Name.Function),
+ (words((
+ 'azimuth', 'background-attachment', 'background-color',
+ 'background-image', 'background-position', 'background-repeat',
+ 'background', 'border-bottom-color', 'border-bottom-style',
+ 'border-bottom-width', 'border-left-color', 'border-left-style',
+ 'border-left-width', 'border-right', 'border-right-color',
+ 'border-right-style', 'border-right-width', 'border-top-color',
+ 'border-top-style', 'border-top-width', 'border-bottom',
+ 'border-collapse', 'border-left', 'border-width', 'border-color',
+ 'border-spacing', 'border-style', 'border-top', 'border', 'caption-side',
+ 'clear', 'clip', 'color', 'content', 'counter-increment', 'counter-reset',
+ 'cue-after', 'cue-before', 'cue', 'cursor', 'direction', 'display',
+ 'elevation', 'empty-cells', 'float', 'font-family', 'font-size',
+ 'font-size-adjust', 'font-stretch', 'font-style', 'font-variant',
+ 'font-weight', 'font', 'height', 'letter-spacing', 'line-height',
+ 'list-style-type', 'list-style-image', 'list-style-position',
+ 'list-style', 'margin-bottom', 'margin-left', 'margin-right',
+ 'margin-top', 'margin', 'marker-offset', 'marks', 'max-height', 'max-width',
+ 'min-height', 'min-width', 'opacity', 'orphans', 'outline', 'outline-color',
+ 'outline-style', 'outline-width', 'overflow', 'padding-bottom',
+ 'padding-left', 'padding-right', 'padding-top', 'padding', 'page',
+ 'page-break-after', 'page-break-before', 'page-break-inside',
+ 'pause-after', 'pause-before', 'pause', 'pitch', 'pitch-range',
+ 'play-during', 'position', 'quotes', 'richness', 'right', 'size',
+ 'speak-header', 'speak-numeral', 'speak-punctuation', 'speak',
+ 'speech-rate', 'stress', 'table-layout', 'text-align', 'text-decoration',
+ 'text-indent', 'text-shadow', 'text-transform', 'top', 'unicode-bidi',
+ 'vertical-align', 'visibility', 'voice-family', 'volume', 'white-space',
+ 'widows', 'width', 'word-spacing', 'z-index', 'bottom', 'left',
+ 'above', 'absolute', 'always', 'armenian', 'aural', 'auto', 'avoid', 'baseline',
+ 'behind', 'below', 'bidi-override', 'blink', 'block', 'bold', 'bolder', 'both',
+ 'capitalize', 'center-left', 'center-right', 'center', 'circle',
+ 'cjk-ideographic', 'close-quote', 'collapse', 'condensed', 'continuous',
+ 'crop', 'crosshair', 'cross', 'cursive', 'dashed', 'decimal-leading-zero',
+ 'decimal', 'default', 'digits', 'disc', 'dotted', 'double', 'e-resize', 'embed',
+ 'extra-condensed', 'extra-expanded', 'expanded', 'fantasy', 'far-left',
+ 'far-right', 'faster', 'fast', 'fixed', 'georgian', 'groove', 'hebrew', 'help',
+ 'hidden', 'hide', 'higher', 'high', 'hiragana-iroha', 'hiragana', 'icon',
+ 'inherit', 'inline-table', 'inline', 'inset', 'inside', 'invert', 'italic',
+ 'justify', 'katakana-iroha', 'katakana', 'landscape', 'larger', 'large',
+ 'left-side', 'leftwards', 'level', 'lighter', 'line-through', 'list-item',
+ 'loud', 'lower-alpha', 'lower-greek', 'lower-roman', 'lowercase', 'ltr',
+ 'lower', 'low', 'medium', 'message-box', 'middle', 'mix', 'monospace',
+ 'n-resize', 'narrower', 'ne-resize', 'no-close-quote', 'no-open-quote',
+ 'no-repeat', 'none', 'normal', 'nowrap', 'nw-resize', 'oblique', 'once',
+ 'open-quote', 'outset', 'outside', 'overline', 'pointer', 'portrait', 'px',
+ 'relative', 'repeat-x', 'repeat-y', 'repeat', 'rgb', 'ridge', 'right-side',
+ 'rightwards', 's-resize', 'sans-serif', 'scroll', 'se-resize',
+ 'semi-condensed', 'semi-expanded', 'separate', 'serif', 'show', 'silent',
+ 'slow', 'slower', 'small-caps', 'small-caption', 'smaller', 'soft', 'solid',
+ 'spell-out', 'square', 'static', 'status-bar', 'super', 'sw-resize',
+ 'table-caption', 'table-cell', 'table-column', 'table-column-group',
+ 'table-footer-group', 'table-header-group', 'table-row',
+ 'table-row-group', 'text', 'text-bottom', 'text-top', 'thick', 'thin',
+ 'transparent', 'ultra-condensed', 'ultra-expanded', 'underline',
+ 'upper-alpha', 'upper-latin', 'upper-roman', 'uppercase', 'url',
+ 'visible', 'w-resize', 'wait', 'wider', 'x-fast', 'x-high', 'x-large', 'x-loud',
+ 'x-low', 'x-small', 'x-soft', 'xx-large', 'xx-small', 'yes'), suffix=r'\b'),
+ Name.Constant),
+ (words((
+ 'indigo', 'gold', 'firebrick', 'indianred', 'darkolivegreen',
+ 'darkseagreen', 'mediumvioletred', 'mediumorchid', 'chartreuse',
+ 'mediumslateblue', 'springgreen', 'crimson', 'lightsalmon', 'brown',
+ 'turquoise', 'olivedrab', 'cyan', 'skyblue', 'darkturquoise',
+ 'goldenrod', 'darkgreen', 'darkviolet', 'darkgray', 'lightpink',
+ 'darkmagenta', 'lightgoldenrodyellow', 'lavender', 'yellowgreen', 'thistle',
+ 'violet', 'orchid', 'ghostwhite', 'honeydew', 'cornflowerblue',
+ 'darkblue', 'darkkhaki', 'mediumpurple', 'cornsilk', 'bisque', 'slategray',
+ 'darkcyan', 'khaki', 'wheat', 'deepskyblue', 'darkred', 'steelblue', 'aliceblue',
+ 'gainsboro', 'mediumturquoise', 'floralwhite', 'coral', 'lightgrey',
+ 'lightcyan', 'darksalmon', 'beige', 'azure', 'lightsteelblue', 'oldlace',
+ 'greenyellow', 'royalblue', 'lightseagreen', 'mistyrose', 'sienna',
+ 'lightcoral', 'orangered', 'navajowhite', 'palegreen', 'burlywood',
+ 'seashell', 'mediumspringgreen', 'papayawhip', 'blanchedalmond',
+ 'peru', 'aquamarine', 'darkslategray', 'ivory', 'dodgerblue',
+ 'lemonchiffon', 'chocolate', 'orange', 'forestgreen', 'slateblue',
+ 'mintcream', 'antiquewhite', 'darkorange', 'cadetblue', 'moccasin',
+ 'limegreen', 'saddlebrown', 'darkslateblue', 'lightskyblue', 'deeppink',
+ 'plum', 'darkgoldenrod', 'sandybrown', 'magenta', 'tan',
+ 'rosybrown', 'pink', 'lightblue', 'palevioletred', 'mediumseagreen',
+ 'dimgray', 'powderblue', 'seagreen', 'snow', 'mediumblue', 'midnightblue',
+ 'paleturquoise', 'palegoldenrod', 'whitesmoke', 'darkorchid', 'salmon',
+ 'lightslategray', 'lawngreen', 'lightgreen', 'tomato', 'hotpink',
+ 'lightyellow', 'lavenderblush', 'linen', 'mediumaquamarine',
+ 'blueviolet', 'peachpuff'), suffix=r'\b'),
+ Name.Entity),
+ (words((
+ 'black', 'silver', 'gray', 'white', 'maroon', 'red', 'purple', 'fuchsia', 'green',
+ 'lime', 'olive', 'yellow', 'navy', 'blue', 'teal', 'aqua'), suffix=r'\b'),
+ Name.Builtin),
+ (r'\!(important|default)', Name.Exception),
+ (r'(true|false)', Name.Pseudo),
+ (r'(and|or|not)', Operator.Word),
+ (r'/\*', Comment.Multiline, 'inline-comment'),
+ (r'//[^\n]*', Comment.Single),
+ (r'\#[a-z0-9]{1,6}', Number.Hex),
+ (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
+ (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
+ (r'#{', String.Interpol, 'interpolation'),
+ (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
+ (r'[\[\]()]+', Punctuation),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ (r'[a-z_-][\w-]*', Name),
+ ],
+
+ 'interpolation': [
+ (r'\}', String.Interpol, '#pop'),
+ include('value'),
+ ],
+
+ 'selector': [
+ (r'[ \t]+', Text),
+ (r'\:', Name.Decorator, 'pseudo-class'),
+ (r'\.', Name.Class, 'class'),
+ (r'\#', Name.Namespace, 'id'),
+ (r'[\w-]+', Name.Tag),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'&', Keyword),
+ (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
+ (r'"', String.Double, 'string-double'),
+ (r"'", String.Single, 'string-single'),
+ ],
+
+ 'string-double': [
+ (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'"', String.Double, '#pop'),
+ ],
+
+ 'string-single': [
+ (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"'", String.Double, '#pop'),
+ ],
+
+ 'string-url': [
+ (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r'\)', String.Other, '#pop'),
+ ],
+
+ 'pseudo-class': [
+ (r'[\w-]+', Name.Decorator),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'class': [
+ (r'[\w-]+', Name.Class),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'id': [
+ (r'[\w-]+', Name.Namespace),
+ (r'#\{', String.Interpol, 'interpolation'),
+ default('#pop'),
+ ],
+
+ 'for': [
+ (r'(from|to|through)', Operator.Word),
+ include('value'),
+ ],
+}
+
+
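+# Helper callbacks for the indentation-based Sass lexer below: _indentation
+# remembers the whitespace seen at the start of a line and re-enters a pending
+# block state for more deeply indented lines, while _starts_block records the
+# current indentation and the state such a block should switch into.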
+def _indentation(lexer, match, ctx):
+ indentation = match.group(0)
+ yield match.start(), Text, indentation
+ ctx.last_indentation = indentation
+ ctx.pos = match.end()
+
+ if hasattr(ctx, 'block_state') and ctx.block_state and \
+ indentation.startswith(ctx.block_indentation) and \
+ indentation != ctx.block_indentation:
+ ctx.stack.append(ctx.block_state)
+ else:
+ ctx.block_state = None
+ ctx.block_indentation = None
+ ctx.stack.append('content')
+
+
+def _starts_block(token, state):
+ def callback(lexer, match, ctx):
+ yield match.start(), token, match.group(0)
+
+ if hasattr(ctx, 'last_indentation'):
+ ctx.block_indentation = ctx.last_indentation
+ else:
+ ctx.block_indentation = ''
+
+ ctx.block_state = state
+ ctx.pos = match.end()
+
+ return callback
+
+
+class SassLexer(ExtendedRegexLexer):
+ """
+ For Sass stylesheets.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Sass'
+ aliases = ['sass']
+ filenames = ['*.sass']
+ mimetypes = ['text/x-sass']
+
+ flags = re.IGNORECASE
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'content': [
+ (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
+ 'root'),
+ (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
+ 'root'),
+ (r'@import', Keyword, 'import'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'=[\w-]+', Name.Function, 'value'),
+ (r'\+[\w-]+', Name.Decorator, 'value'),
+ (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
+ bygroups(Name.Variable, Operator), 'value'),
+ (r':', Name.Attribute, 'old-style-attr'),
+ (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
+ default('selector'),
+ ],
+
+ 'single-comment': [
+ (r'.+', Comment.Single),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'multi-comment': [
+ (r'.+', Comment.Multiline),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'import': [
+ (r'[ \t]+', Text),
+ (r'\S+', String),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'old-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#{', String.Interpol, 'interpolation'),
+ (r'[ \t]*=', Operator, 'value'),
+ default('value'),
+ ],
+
+ 'new-style-attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#{', String.Interpol, 'interpolation'),
+ (r'[ \t]*[=:]', Operator, 'value'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].append((r'\n', Text, 'root'))
+ tokens['selector'].append((r'\n', Text, 'root'))
+
+
+class ScssLexer(RegexLexer):
+ """
+ For SCSS stylesheets.
+ """
+
+ name = 'SCSS'
+ aliases = ['scss']
+ filenames = ['*.scss']
+ mimetypes = ['text/x-scss']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@import', Keyword, 'value'),
+ (r'@for', Keyword, 'for'),
+ (r'@(debug|warn|if|while)', Keyword, 'value'),
+ (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
+ (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
+ (r'@extend', Keyword, 'selector'),
+ (r'@[\w-]+', Keyword, 'selector'),
+ (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
+ (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
+ (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
+ default('selector'),
+ ],
+
+ 'attr': [
+ (r'[^\s:="\[]+', Name.Attribute),
+ (r'#{', String.Interpol, 'interpolation'),
+ (r'[ \t]*:', Operator, 'value'),
+ ],
+
+ 'inline-comment': [
+ (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
+ (r'#\{', String.Interpol, 'interpolation'),
+ (r"\*/", Comment, '#pop'),
+ ],
+ }
+ for group, common in iteritems(common_sass_tokens):
+ tokens[group] = copy.copy(common)
+ tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
+ tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
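
A minimal usage sketch for the ScssLexer defined above, assuming this branch is installed; the stylesheet sample and the formatter choice are illustrative only:

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.css import ScssLexer

scss = '$accent: #336699;\n.button { color: $accent; &:hover { color: lighten($accent, 10%); } }\n'
print(highlight(scss, ScssLexer(), TerminalFormatter()))
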
diff --git a/pygments/lexers/d.py b/pygments/lexers/d.py
new file mode 100644
index 00000000..c3c3dc1d
--- /dev/null
+++ b/pygments/lexers/d.py
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.d
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for D languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer']
+
+
+class DLexer(RegexLexer):
+ """
+ For D source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'D'
+ filenames = ['*.d', '*.di']
+ aliases = ['d']
+ mimetypes = ['text/x-dsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # (r'\\\n', Text), # line continuations
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nested_comment'),
+ # Keywords
+ (words((
+ 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body',
+ 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue',
+ 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else',
+ 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse',
+ 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import',
+ 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin',
+ 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma',
+ 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope',
+ 'shared', 'static', 'struct', 'super', 'switch', 'synchronized',
+ 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof',
+ 'union', 'unittest', 'version', 'volatile', 'while', 'with',
+ '__gshared', '__traits', '__vector', '__parameters'),
+ suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal',
+ 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal',
+ 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong',
+ 'ushort', 'void', 'wchar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(false|true|null)\b', Keyword.Constant),
+ (words((
+                '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__',
+                '__PRETTY_FUNCTION__', '__DATE__', '__EOF__', '__TIME__',
+                '__TIMESTAMP__', '__VENDOR__', '__VERSION__'), suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'macro\b', Keyword.Reserved),
+ (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin),
+ # FloatLiteral
+ # -- HexFloat
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+', Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+', Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'r"[^"]*"[cwd]?', String),
+ # -- AlternateWysiwygString
+ (r'`[^`]*`[cwd]?', String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"[cwd]?', String),
+ # -- EscapeSequence
+ (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}"
+ r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)",
+ String),
+ # -- HexString
+ (r'x"[0-9a-fA-F_\s]*"[cwd]?', String),
+ # -- DelimitedString
+ (r'q"\[', String, 'delimited_bracket'),
+ (r'q"\(', String, 'delimited_parenthesis'),
+ (r'q"<', String, 'delimited_angle'),
+ (r'q"{', String, 'delimited_curly'),
+ (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String),
+ (r'q"(.).*?\1"', String),
+ # -- TokenString
+ (r'q{', String, 'token_string'),
+ # Attributes
+ (r'@([a-zA-Z_]\w*)?', Name.Decorator),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>='
+ r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)'
+ r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ # Line
+ (r'#line\s.*\n', Comment.Special),
+ ],
+ 'nested_comment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ],
+ 'token_string': [
+ (r'{', Punctuation, 'token_string_nest'),
+ (r'}', String, '#pop'),
+ include('root'),
+ ],
+ 'token_string_nest': [
+ (r'{', Punctuation, '#push'),
+ (r'}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'delimited_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, 'delimited_inside_bracket'),
+ (r'\]"', String, '#pop'),
+ ],
+ 'delimited_inside_bracket': [
+ (r'[^\[\]]+', String),
+ (r'\[', String, '#push'),
+ (r'\]', String, '#pop'),
+ ],
+ 'delimited_parenthesis': [
+ (r'[^\(\)]+', String),
+ (r'\(', String, 'delimited_inside_parenthesis'),
+ (r'\)"', String, '#pop'),
+ ],
+ 'delimited_inside_parenthesis': [
+ (r'[^\(\)]+', String),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+ 'delimited_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, 'delimited_inside_angle'),
+ (r'>"', String, '#pop'),
+ ],
+ 'delimited_inside_angle': [
+ (r'[^<>]+', String),
+ (r'<', String, '#push'),
+ (r'>', String, '#pop'),
+ ],
+ 'delimited_curly': [
+ (r'[^{}]+', String),
+ (r'{', String, 'delimited_inside_curly'),
+ (r'}"', String, '#pop'),
+ ],
+ 'delimited_inside_curly': [
+ (r'[^{}]+', String),
+ (r'{', String, '#push'),
+ (r'}', String, '#pop'),
+ ],
+ }
+
+
+class CrocLexer(RegexLexer):
+ """
+ For `Croc <http://jfbillingsley.com/croc>`_ source.
+ """
+ name = 'Croc'
+ filenames = ['*.croc']
+ aliases = ['croc']
+ mimetypes = ['text/x-crocsrc']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'nestedcomment'),
+ # Keywords
+ (words((
+ 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue',
+ 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function',
+ 'global', 'namespace', 'if', 'import', 'in', 'is', 'local',
+ 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw',
+ 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'),
+ Keyword),
+ (r'(false|true|null)\b', Keyword.Constant),
+ # FloatLiteral
+ (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?',
+ Number.Float),
+ # IntegerLiteral
+ # -- Binary
+ (r'0[bB][01][01_]*', Number.Bin),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),
+ # -- Decimal
+ (r'([0-9][0-9_]*)(?![.eE])', Number.Integer),
+ # CharacterLiteral
+ (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ String.Char),
+ # StringLiteral
+ # -- WysiwygString
+ (r'@"(""|[^"])*"', String),
+ (r'@`(``|[^`])*`', String),
+ (r"@'(''|[^'])*'", String),
+ # -- DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>'
+ r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)'
+ r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation),
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'nestedcomment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+
+class MiniDLexer(CrocLexer):
+ """
+ For MiniD source. MiniD is now known as Croc.
+ """
+ name = 'MiniD'
+ filenames = ['*.md']
+ aliases = ['minid']
+ mimetypes = ['text/x-minidsrc']
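
A quick, illustrative check that the D family lexers resolve through the registry once the updated pygments/lexers/_mapping.py from this branch is in place; the file name used here is made up:

from pygments.lexers import get_lexer_by_name, get_lexer_for_filename

print(get_lexer_by_name('d').name)                   # 'D'
print(get_lexer_for_filename('example.croc').name)   # 'Croc'
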
diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py
new file mode 100644
index 00000000..6b7cc107
--- /dev/null
+++ b/pygments/lexers/data.py
@@ -0,0 +1,509 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.data
+ ~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for data file formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, LexerContext, \
+ include, bygroups
+from pygments.token import Text, Comment, Keyword, Name, String, Number, \
+ Punctuation, Literal
+
+__all__ = ['YamlLexer', 'JsonLexer']
+
+
+class YamlLexerContext(LexerContext):
+ """Indentation context for the YAML lexer."""
+
+ def __init__(self, *args, **kwds):
+ super(YamlLexerContext, self).__init__(*args, **kwds)
+ self.indent_stack = []
+ self.indent = -1
+ self.next_indent = 0
+ self.block_scalar_indent = None
+
+
+class YamlLexer(ExtendedRegexLexer):
+ """
+ Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
+ language.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'YAML'
+ aliases = ['yaml']
+ filenames = ['*.yaml', '*.yml']
+ mimetypes = ['text/x-yaml']
+
+ def something(token_class):
+ """Do not produce empty tokens."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if not text:
+ return
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def reset_indent(token_class):
+ """Reset the indentation levels."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.indent_stack = []
+ context.indent = -1
+ context.next_indent = 0
+ context.block_scalar_indent = None
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def save_indent(token_class, start=False):
+ """Save a possible indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ extra = ''
+ if start:
+ context.next_indent = len(text)
+ if context.next_indent < context.indent:
+ while context.next_indent < context.indent:
+ context.indent = context.indent_stack.pop()
+ if context.next_indent > context.indent:
+ extra = text[context.indent:]
+ text = text[:context.indent]
+ else:
+ context.next_indent += len(text)
+ if text:
+ yield match.start(), token_class, text
+ if extra:
+ yield match.start()+len(text), token_class.Error, extra
+ context.pos = match.end()
+ return callback
+
+ def set_indent(token_class, implicit=False):
+ """Set the previously saved indentation level."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.indent < context.next_indent:
+ context.indent_stack.append(context.indent)
+ context.indent = context.next_indent
+ if not implicit:
+ context.next_indent += len(text)
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def set_block_scalar_indent(token_class):
+ """Set an explicit indentation level for a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ context.block_scalar_indent = None
+ if not text:
+ return
+ increment = match.group(1)
+ if increment:
+ current_indent = max(context.indent, 0)
+ increment = int(increment)
+ context.block_scalar_indent = current_indent + increment
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_empty_line(indent_token_class, content_token_class):
+ """Process an empty line in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if (context.block_scalar_indent is None or
+ len(text) <= context.block_scalar_indent):
+ if text:
+ yield match.start(), indent_token_class, text
+ else:
+ indentation = text[:context.block_scalar_indent]
+ content = text[context.block_scalar_indent:]
+ yield match.start(), indent_token_class, indentation
+ yield (match.start()+context.block_scalar_indent,
+ content_token_class, content)
+ context.pos = match.end()
+ return callback
+
+ def parse_block_scalar_indent(token_class):
+ """Process indentation spaces in a block scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if context.block_scalar_indent is None:
+ if len(text) <= max(context.indent, 0):
+ context.stack.pop()
+ context.stack.pop()
+ return
+ context.block_scalar_indent = len(text)
+ else:
+ if len(text) < context.block_scalar_indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ def parse_plain_scalar_indent(token_class):
+ """Process indentation spaces in a plain scalar."""
+ def callback(lexer, match, context):
+ text = match.group()
+ if len(text) <= context.indent:
+ context.stack.pop()
+ context.stack.pop()
+ return
+ if text:
+ yield match.start(), token_class, text
+ context.pos = match.end()
+ return callback
+
+ tokens = {
+ # the root rules
+ 'root': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # the '%YAML' directive
+ (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
+ # the %TAG directive
+ (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
+ # document start and document end indicators
+ (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
+ 'block-line'),
+ # indentation spaces
+ (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True),
+ ('block-line', 'indentation')),
+ ],
+
+ # trailing whitespaces after directives or a block scalar indicator
+ 'ignored-line': [
+ # ignored whitespaces
+ (r'[ ]+(?=#|$)', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # line break
+ (r'\n', Text, '#pop:2'),
+ ],
+
+ # the %YAML directive
+ 'yaml-directive': [
+ # the version number
+ (r'([ ]+)([0-9]+\.[0-9]+)',
+ bygroups(Text, Number), 'ignored-line'),
+ ],
+
+        # the %TAG directive
+ 'tag-directive': [
+ # a tag handle and the corresponding prefix
+ (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
+ r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
+ bygroups(Text, Keyword.Type, Text, Keyword.Type),
+ 'ignored-line'),
+ ],
+
+ # block scalar indicators and indentation spaces
+ 'indentation': [
+ # trailing whitespaces are ignored
+ (r'[ ]*$', something(Text), '#pop:2'),
+            # whitespaces preceding block collection indicators
+ (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
+ # block collection indicators
+ (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
+            # the beginning of a block line
+ (r'[ ]*', save_indent(Text), '#pop'),
+ ],
+
+ # an indented line in the block context
+ 'block-line': [
+ # the line end
+ (r'[ ]*(?=#|$)', something(Text), '#pop'),
+ # whitespaces separating tokens
+ (r'[ ]+', Text),
+ # tags, anchors and aliases,
+ include('descriptors'),
+ # block collections and scalars
+ include('block-nodes'),
+ # flow collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
+ something(Name.Variable),
+ 'plain-scalar-in-block-context'),
+ ],
+
+ # tags, anchors, aliases
+ 'descriptors': [
+ # a full-form tag
+ (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type),
+ # a tag in the form '!', '!suffix' or '!handle!suffix'
+ (r'!(?:[0-9A-Za-z_-]+)?'
+ r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type),
+ # an anchor
+ (r'&[0-9A-Za-z_-]+', Name.Label),
+ # an alias
+ (r'\*[0-9A-Za-z_-]+', Name.Variable),
+ ],
+
+ # block collections and scalars
+ 'block-nodes': [
+ # implicit key
+ (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
+ # literal and folded scalars
+ (r'[|>]', Punctuation.Indicator,
+ ('block-scalar-content', 'block-scalar-header')),
+ ],
+
+ # flow collections and quoted scalars
+ 'flow-nodes': [
+ # a flow sequence
+ (r'\[', Punctuation.Indicator, 'flow-sequence'),
+ # a flow mapping
+ (r'\{', Punctuation.Indicator, 'flow-mapping'),
+ # a single-quoted scalar
+ (r'\'', String, 'single-quoted-scalar'),
+ # a double-quoted scalar
+ (r'\"', String, 'double-quoted-scalar'),
+ ],
+
+ # the content of a flow collection
+ 'flow-collection': [
+ # whitespaces
+ (r'[ ]+', Text),
+ # line breaks
+ (r'\n+', Text),
+ # a comment
+ (r'#[^\n]*', Comment.Single),
+ # simple indicators
+ (r'[?:,]', Punctuation.Indicator),
+ # tags, anchors and aliases
+ include('descriptors'),
+ # nested collections and quoted scalars
+ include('flow-nodes'),
+ # a plain scalar
+ (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
+ something(Name.Variable),
+ 'plain-scalar-in-flow-context'),
+ ],
+
+ # a flow sequence indicated by '[' and ']'
+ 'flow-sequence': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\]', Punctuation.Indicator, '#pop'),
+ ],
+
+ # a flow mapping indicated by '{' and '}'
+ 'flow-mapping': [
+ # include flow collection rules
+ include('flow-collection'),
+ # the closing indicator
+ (r'\}', Punctuation.Indicator, '#pop'),
+ ],
+
+ # block scalar lines
+ 'block-scalar-content': [
+ # line break
+ (r'\n', Text),
+ # empty line
+ (r'^[ ]+$',
+ parse_block_scalar_empty_line(Text, Name.Constant)),
+ # indentation spaces (we may leave the state here)
+ (r'^[ ]*', parse_block_scalar_indent(Text)),
+ # line content
+ (r'[^\n\r\f\v]+', Name.Constant),
+ ],
+
+        # the header of a literal or folded scalar
+ 'block-scalar-header': [
+ # indentation indicator followed by chomping flag
+ (r'([1-9])?[+-]?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ # chomping flag followed by indentation indicator
+ (r'[+-]?([1-9])?(?=[ ]|$)',
+ set_block_scalar_indent(Punctuation.Indicator),
+ 'ignored-line'),
+ ],
+
+ # ignored and regular whitespaces in quoted scalars
+ 'quoted-scalar-whitespaces': [
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ ],
+
+ # single-quoted scalars
+ 'single-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of the quote character
+ (r'\'\'', String.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\']+', String),
+ # the closing quote
+ (r'\'', String, '#pop'),
+ ],
+
+ # double-quoted scalars
+ 'double-quoted-scalar': [
+ # include whitespace and line break rules
+ include('quoted-scalar-whitespaces'),
+ # escaping of special characters
+ (r'\\[0abt\tn\nvfre "\\N_LP]', String),
+ # escape codes
+ (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
+ String.Escape),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v\"\\]+', String),
+ # the closing quote
+ (r'"', String, '#pop'),
+ ],
+
+ # the beginning of a new line while scanning a plain scalar
+ 'plain-scalar-in-block-context-new-line': [
+ # empty lines
+ (r'^[ ]+$', Text),
+ # line breaks
+ (r'\n+', Text),
+ # document start and document end indicators
+ (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
+ # indentation spaces (we may leave the block line state here)
+ (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
+ ],
+
+ # a plain scalar in the block context
+ 'plain-scalar-in-block-context': [
+ # the scalar ends with the ':' indicator
+ (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
+ # the scalar ends with whitespaces followed by a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # trailing whitespaces are ignored
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Literal.Scalar.Plain),
+ # regular non-whitespace characters
+ (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain),
+ ],
+
+        # a plain scalar in the flow context
+ 'plain-scalar-in-flow-context': [
+ # the scalar ends with an indicator character
+ (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
+ # the scalar ends with a comment
+ (r'[ ]+(?=#)', Text, '#pop'),
+ # leading and trailing whitespaces are ignored
+ (r'^[ ]+', Text),
+ (r'[ ]+$', Text),
+ # line breaks are ignored
+ (r'\n+', Text),
+ # other whitespaces are a part of the value
+ (r'[ ]+', Name.Variable),
+ # regular non-whitespace characters
+ (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable),
+ ],
+
+ }
+
+ def get_tokens_unprocessed(self, text=None, context=None):
+ if context is None:
+ context = YamlLexerContext(text, 0)
+ return super(YamlLexer, self).get_tokens_unprocessed(text, context)
+
+
+class JsonLexer(RegexLexer):
+ """
+ For JSON data structures.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'JSON'
+ aliases = ['json']
+ filenames = ['*.json']
+ mimetypes = ['application/json']
+
+ flags = re.DOTALL
+
+ # integer part of a number
+ int_part = r'-?(0|[1-9]\d*)'
+
+ # fractional part of a number
+ frac_part = r'\.\d+'
+
+ # exponential part of a number
+ exp_part = r'[eE](\+|-)?\d+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+
+ # represents a simple terminal value
+ 'simplevalue': [
+ (r'(true|false|null)\b', Keyword.Constant),
+ (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+ '%(exp_part)s|%(frac_part)s)') % vars(),
+ Number.Float),
+ (int_part, Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ ],
+
+
+ # the right hand side of an object, after the attribute name
+ 'objectattribute': [
+ include('value'),
+ (r':', Punctuation),
+ # comma terminates the attribute but expects more
+ (r',', Punctuation, '#pop'),
+ # a closing bracket terminates the entire object, so pop twice
+ (r'}', Punctuation, ('#pop', '#pop')),
+ ],
+
+ # a json object - { attr, attr, ... }
+ 'objectvalue': [
+ include('whitespace'),
+ (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
+ (r'}', Punctuation, '#pop'),
+ ],
+
+        # json array - [ value, value, ... ]
+ 'arrayvalue': [
+ include('whitespace'),
+ include('value'),
+ (r',', Punctuation),
+ (r']', Punctuation, '#pop'),
+ ],
+
+ # a json value - either a simple value or a complex value (object or array)
+ 'value': [
+ include('whitespace'),
+ include('simplevalue'),
+ (r'{', Punctuation, 'objectvalue'),
+ (r'\[', Punctuation, 'arrayvalue'),
+ ],
+
+        # the root of a json document would be a value
+ 'root': [
+ include('value'),
+ ],
+ }
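
An illustrative sketch of driving the new JsonLexer directly and inspecting its raw token stream; the sample document is invented for the example:

from pygments.lexers.data import JsonLexer

sample = '{"name": "pygments", "version": 2.0, "tags": ["lexer", "yaml"]}'
for token_type, value in JsonLexer().get_tokens(sample):
    print(token_type, repr(value))
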
diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py
new file mode 100644
index 00000000..fe6435c5
--- /dev/null
+++ b/pygments/lexers/diff.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.diff
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for diff/patch formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \
+ Literal
+
+__all__ = ['DiffLexer', 'DarcsPatchLexer']
+
+
+class DiffLexer(RegexLexer):
+ """
+ Lexer for unified or context-style diffs or patches.
+ """
+
+ name = 'Diff'
+ aliases = ['diff', 'udiff']
+ filenames = ['*.diff', '*.patch']
+ mimetypes = ['text/x-diff', 'text/x-patch']
+
+ tokens = {
+ 'root': [
+ (r' .*\n', Text),
+ (r'\+.*\n', Generic.Inserted),
+ (r'-.*\n', Generic.Deleted),
+ (r'!.*\n', Generic.Strong),
+ (r'@.*\n', Generic.Subheading),
+ (r'([Ii]ndex|diff).*\n', Generic.Heading),
+ (r'=.*\n', Generic.Heading),
+ (r'.*\n', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if text[:7] == 'Index: ':
+ return True
+ if text[:5] == 'diff ':
+ return True
+ if text[:4] == '--- ':
+ return 0.9
+
+
+class DarcsPatchLexer(RegexLexer):
+ """
+ DarcsPatchLexer is a lexer for the various versions of the darcs patch
+    format. Examples of this format are produced by commands such as
+ ``darcs annotate --patch`` and ``darcs send``.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Darcs Patch'
+ aliases = ['dpatch']
+ filenames = ['*.dpatch', '*.darcspatch']
+
+ DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
+ 'replace')
+
+ tokens = {
+ 'root': [
+ (r'<', Operator),
+ (r'>', Operator),
+ (r'{', Operator),
+ (r'}', Operator),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text, Operator)),
+ (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
+ bygroups(Operator, Keyword, Name, Text, Name, Operator,
+ Literal.Date, Text), 'comment'),
+ (r'New patches:', Generic.Heading),
+ (r'Context:', Generic.Heading),
+ (r'Patch bundle hash:', Generic.Heading),
+ (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
+ bygroups(Text, Keyword, Text)),
+ (r'\+', Generic.Inserted, "insert"),
+ (r'-', Generic.Deleted, "delete"),
+ (r'.*\n', Text),
+ ],
+ 'comment': [
+ (r'[^\]].*\n', Comment),
+ (r'\]', Operator, "#pop"),
+ ],
+        'specialText': [ # darcs adds [_CODE_] special operators for clarity
+ (r'\n', Text, "#pop"), # line-based
+ (r'\[_[^_]*_]', Operator),
+ ],
+ 'insert': [
+ include('specialText'),
+ (r'\[', Generic.Inserted),
+ (r'[^\n\[]+', Generic.Inserted),
+ ],
+ 'delete': [
+ include('specialText'),
+ (r'\[', Generic.Deleted),
+ (r'[^\n\[]+', Generic.Deleted),
+ ],
+ }
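
A small sketch showing how DiffLexer's analyse_text() heuristic above feeds lexer guessing; the patch snippet is fabricated and the result is only the likely outcome:

from pygments.lexers import guess_lexer

patch = 'diff --git a/foo b/foo\n--- a/foo\n+++ b/foo\n@@ -1 +1 @@\n-old\n+new\n'
print(guess_lexer(patch).name)   # expected to pick 'Diff'
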
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 2ff57a73..b26f6843 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -11,13 +11,13 @@
import re
from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
- using, this, default
+ using, this, default
from pygments.token import Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
+ Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
from pygments.util import get_choice_opt, iteritems
from pygments import unistring as uni
-from pygments.lexers.web import XmlLexer
+from pygments.lexers.html import XmlLexer
__all__ = ['CSharpLexer', 'NemerleLexer', 'BooLexer', 'VbNetLexer',
'CSharpAspxLexer', 'VbNetAspxLexer', 'FSharpLexer']
@@ -50,7 +50,7 @@ class CSharpLexer(RegexLexer):
name = 'C#'
aliases = ['csharp', 'c#']
filenames = ['*.cs']
- mimetypes = ['text/x-csharp'] # inferred
+ mimetypes = ['text/x-csharp'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
@@ -75,13 +75,13 @@ class CSharpLexer(RegexLexer):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
@@ -120,7 +120,7 @@ class CSharpLexer(RegexLexer):
(cs_ident, Name.Class, '#pop')
],
'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
+ (r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
]
}
@@ -162,7 +162,7 @@ class NemerleLexer(RegexLexer):
name = 'Nemerle'
aliases = ['nemerle']
filenames = ['*.n']
- mimetypes = ['text/x-nemerle'] # inferred
+ mimetypes = ['text/x-nemerle'] # inferred
flags = re.MULTILINE | re.DOTALL | re.UNICODE
@@ -170,14 +170,14 @@ class NemerleLexer(RegexLexer):
# http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf
levels = dict(
- none = '@?[_a-zA-Z]\w*',
- basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
- '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
- uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
- full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
- 'Nl') + '])'
- + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
- 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
+ none='@?[_a-zA-Z]\w*',
+ basic=('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' +
+ '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl +
+ uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'),
+ full=('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo',
+ 'Nl') + '])'
+ + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl',
+ 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'),
)
tokens = {}
@@ -187,13 +187,13 @@ class NemerleLexer(RegexLexer):
tokens[levelname] = {
'root': [
# method names
- (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
- r'(' + cs_ident + ')' # method name
+ (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type
+ r'(' + cs_ident + ')' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
@@ -249,7 +249,7 @@ class NemerleLexer(RegexLexer):
(cs_ident, Name.Class, '#pop')
],
'namespace': [
- (r'(?=\()', Text, '#pop'), # using (resource)
+ (r'(?=\()', Text, '#pop'), # using (resource)
('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop')
],
'splice-string': [
@@ -372,7 +372,7 @@ class VbNetLexer(RegexLexer):
name = 'VB.net'
aliases = ['vb.net', 'vbnet']
filenames = ['*.vb', '*.bas']
- mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
+ mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?)
flags = re.MULTILINE | re.IGNORECASE
tokens = {
@@ -422,7 +422,7 @@ class VbNetLexer(RegexLexer):
(r'(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|'
r'Or|OrElse|TypeOf|Xor)\b', Operator.Word),
(r'&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|'
- r'<=|>=|<>|[-&*/\\^+=<>]',
+ r'<=|>=|<>|[-&*/\\^+=<>\[\]]',
Operator),
('"', String, 'string'),
('[a-z_]\w*[%&@!#$]?', Name),
@@ -431,7 +431,7 @@ class VbNetLexer(RegexLexer):
(r'\d+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer),
(r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer),
- (r'_\n', Text), # Line continuation
+ (r'_\n', Text), # Line continuation
],
'string': [
(r'""', String),
@@ -488,7 +488,7 @@ class GenericAspxLexer(RegexLexer):
}
-#TODO support multiple languages within the same source file
+# TODO support multiple languages within the same source file
class CSharpAspxLexer(DelegatingLexer):
"""
Lexer for highligting C# within ASP.NET pages.
@@ -500,7 +500,7 @@ class CSharpAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer,
+ super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer,
**options)
def analyse_text(text):
@@ -521,8 +521,8 @@ class VbNetAspxLexer(DelegatingLexer):
mimetypes = []
def __init__(self, **options):
- super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer,
- **options)
+ super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer,
+ **options)
def analyse_text(text):
if re.search(r'Page\s*Language="Vb"', text, re.I) is not None:
@@ -548,15 +548,15 @@ class FSharpLexer(RegexLexer):
mimetypes = ['text/x-fsharp']
keywords = [
- 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
- 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
- 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
- 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
- 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
- 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
- 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
- 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
- 'while', 'with', 'yield!', 'yield',
+ 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default',
+ 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else',
+ 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function',
+ 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal',
+ 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable',
+ 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public',
+ 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to',
+ 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when',
+ 'while', 'with', 'yield!', 'yield',
]
# Reserved words; cannot hurt to color them as keywords too.
keywords += [
@@ -567,10 +567,10 @@ class FSharpLexer(RegexLexer):
'virtual', 'volatile',
]
keyopts = [
- '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
- '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
- '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
- '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>',
+ '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.',
+ '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-',
+ '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]',
+ '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>',
]
operators = r'[!$%&*+\./:<=>?@^|~-]'
@@ -636,7 +636,7 @@ class FSharpLexer(RegexLexer):
(r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?",
String.Char),
(r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
+ (r"'", Keyword), # a stray quote is another syntax element
(r'@?"', String.Double, 'string'),
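
The XmlLexer dependency now resolves from the reorganised html module; a minimal check of the new import path (nothing beyond this branch is assumed):

from pygments.lexers.html import XmlLexer

print(XmlLexer().name)   # 'XML'
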
diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py
new file mode 100644
index 00000000..e059ecfd
--- /dev/null
+++ b/pygments/lexers/dsls.py
@@ -0,0 +1,510 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dsls
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various domain-specific languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer',
+ 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer']
+
+
+class ProtoBufLexer(RegexLexer):
+ """
+ Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
+ definition files.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Protocol Buffer'
+ aliases = ['protobuf', 'proto']
+ filenames = ['*.proto']
+
+ tokens = {
+ 'root': [
+ (r'[ \t]+', Text),
+ (r'[,;{}\[\]\(\)]', Punctuation),
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (words((
+ 'import', 'option', 'optional', 'required', 'repeated', 'default',
+ 'packed', 'ctype', 'extensions', 'to', 'max', 'rpc', 'returns',
+ 'oneof'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64',
+ 'fixed32', 'fixed64', 'sfixed32', 'sfixed64',
+ 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
+ (r'(message|extend)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'message'),
+ (r'(enum|group|service)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'type'),
+ (r'\".*\"', String),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'(\-?(inf|nan))\b', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'[+-=]', Operator),
+ (r'([a-zA-Z_][\w\.]*)([ \t]*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ ('[a-zA-Z_][\w\.]*', Name),
+ ],
+ 'package': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
+ ],
+ 'message': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'type': [
+ (r'[a-zA-Z_]\w*', Name, '#pop')
+ ],
+ }
+
+
+class BroLexer(RegexLexer):
+ """
+ For `Bro <http://bro-ids.org/>`_ scripts.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Bro'
+ aliases = ['bro']
+ filenames = ['*.bro']
+
+ _hex = r'[0-9a-fA-F_]+'
+ _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
+ _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'^@.*?\n', Comment.Preproc),
+ (r'#.*?\n', Comment.Single),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text),
+ # Keywords
+ (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
+ r'|export|for|function|if|global|hook|local|module|next'
+ r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
+ (r'(addr|any|bool|count|counter|double|file|int|interval|net'
+ r'|pattern|port|record|set|string|subnet|table|time|timer'
+ r'|vector)\b', Keyword.Type),
+ (r'(T|F)\b', Keyword.Constant),
+ (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
+ r'|default|disable_print_hook|raw_output|encrypt|group|log'
+ r'|mergeable|optional|persistent|priority|redef'
+ r'|rotate_(?:interval|size)|synchronized)\b',
+ bygroups(Punctuation, Keyword)),
+ (r'\s+module\b', Keyword.Namespace),
+ # Addresses, ports and networks
+ (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
+ (r'(\d+\.){3}\d+', Number),
+ (r'(' + _hex + r'){7}' + _hex, Number),
+ (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
+ (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
+ (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
+ # Hostnames
+ (_h + r'(\.' + _h + r')+', String),
+ # Numeric
+ (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
+ (r'0[xX]' + _hex, Number.Hex),
+ (_float, Number.Float),
+ (r'\d+', Number.Integer),
+ (r'/', String.Regex, 'regex'),
+ (r'"', String, 'string'),
+ # Operators
+ (r'[!%*/+:<=>?~|-]', Operator),
+ (r'([-+=&|]{2}|[+=!><-]=)', Operator),
+ (r'(in|match)\b', Operator.Word),
+ (r'[{}()\[\]$.,;]', Punctuation),
+            # Identifier
+ (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String),
+ (r'\\\n', String),
+ (r'\\', String)
+ ],
+ 'regex': [
+ (r'/', String.Regex, '#pop'),
+ (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
+ (r'[^\\/\n]+', String.Regex),
+ (r'\\\n', String.Regex),
+ (r'\\', String.Regex)
+ ]
+ }
+
+
+class PuppetLexer(RegexLexer):
+ """
+ For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Puppet'
+ aliases = ['puppet']
+ filenames = ['*.pp']
+
+ tokens = {
+ 'root': [
+ include('comments'),
+ include('keywords'),
+ include('names'),
+ include('numbers'),
+ include('operators'),
+ include('strings'),
+
+ (r'[]{}:(),;[]', Punctuation),
+ (r'[^\S\n]+', Text),
+ ],
+
+ 'comments': [
+ (r'\s*#.*$', Comment),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ ],
+
+ 'operators': [
+ (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
+ (r'(in|and|or|not)\b', Operator.Word),
+ ],
+
+ 'names': [
+ ('[a-zA-Z_]\w*', Name.Attribute),
+ (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
+ String, Punctuation)),
+ (r'\$\S+', Name.Variable),
+ ],
+
+ 'numbers': [
+ # Copypasta from the Python lexer
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+
+ 'keywords': [
+            # Left out 'group' and 'require' since they're often
+            # used as attributes.
+ (words((
+ 'absent', 'alert', 'alias', 'audit', 'augeas', 'before', 'case',
+ 'check', 'class', 'computer', 'configured', 'contained',
+ 'create_resources', 'crit', 'cron', 'debug', 'default',
+ 'define', 'defined', 'directory', 'else', 'elsif', 'emerg',
+ 'err', 'exec', 'extlookup', 'fail', 'false', 'file',
+ 'filebucket', 'fqdn_rand', 'generate', 'host', 'if', 'import',
+ 'include', 'info', 'inherits', 'inline_template', 'installed',
+ 'interface', 'k5login', 'latest', 'link', 'loglevel',
+ 'macauthorization', 'mailalias', 'maillist', 'mcx', 'md5',
+ 'mount', 'mounted', 'nagios_command', 'nagios_contact',
+ 'nagios_contactgroup', 'nagios_host', 'nagios_hostdependency',
+ 'nagios_hostescalation', 'nagios_hostextinfo', 'nagios_hostgroup',
+ 'nagios_service', 'nagios_servicedependency', 'nagios_serviceescalation',
+ 'nagios_serviceextinfo', 'nagios_servicegroup', 'nagios_timeperiod',
+ 'node', 'noop', 'notice', 'notify', 'package', 'present', 'purged',
+ 'realize', 'regsubst', 'resources', 'role', 'router', 'running',
+ 'schedule', 'scheduled_task', 'search', 'selboolean', 'selmodule',
+ 'service', 'sha1', 'shellquote', 'split', 'sprintf',
+ 'ssh_authorized_key', 'sshkey', 'stage', 'stopped', 'subscribe',
+ 'tag', 'tagged', 'template', 'tidy', 'true', 'undef', 'unmounted',
+ 'user', 'versioncmp', 'vlan', 'warning', 'yumrepo', 'zfs', 'zone',
+ 'zpool'), prefix='(?i)', suffix=r'\b'),
+ Keyword),
+ ],
+
+ 'strings': [
+ (r'"([^"])*"', String),
+ (r"'(\\'|[^'])*'", String),
+ ],
+
+ }
+
+
+class RslLexer(RegexLexer):
+ """
+ `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
+    language used in the RAISE (Rigorous Approach to Industrial Software
+    Engineering) method.
+
+ .. versionadded:: 2.0
+ """
+ name = 'RSL'
+ aliases = ['rsl']
+ filenames = ['*.rsl']
+ mimetypes = ['text/rsl']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ (words((
+ 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs',
+ 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel',
+ 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif',
+ 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if',
+ 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len',
+ 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post',
+ 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap',
+ 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true',
+ 'type', 'union', 'until', 'use', 'value', 'variable', 'while',
+ 'with', 'write', '~isin', '-inflist', '-infset', '-list',
+ '-set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'(variable|value)\b', Keyword.Declaration),
+ (r'--.*?\n', Comment),
+ (r'<:.*?:>', Comment),
+ (r'\{!.*?!\}', Comment),
+ (r'/\*.*?\*/', Comment),
+ (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
+ (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
+ bygroups(Text, Name.Function, Text, Keyword)),
+ (r'\b[A-Z]\w*\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'".*"', String),
+ (r'\'.\'', String.Char),
+ (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
+ r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
+ Operator),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'.', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check for the most common text in the beginning of a RSL file.
+ """
+ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
+ return 1.0
+
+
+class MscgenLexer(RegexLexer):
+ """
+ For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Mscgen'
+ aliases = ['mscgen', 'msc']
+ filenames = ['*.msc']
+
+ _var = r'(\w+|"(?:\\"|[^"])*")'
+
+ tokens = {
+ 'root': [
+ (r'msc\b', Keyword.Type),
+ # Options
+ (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
+ r'|arcgradient|ARCGRADIENT)\b', Name.Property),
+ # Operators
+ (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
+ (r'(\.|-|\|){3}', Keyword),
+ (r'(?:-|=|\.|:){2}'
+ r'|<<=>>|<->|<=>|<<>>|<:>'
+ r'|->|=>>|>>|=>|:>|-x|-X'
+ r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
+ # Names
+ (r'\*', Name.Builtin),
+ (_var, Name.Variable),
+ # Other
+ (r'\[', Punctuation, 'attrs'),
+ (r'\{|\}|,|;', Punctuation),
+ include('comments')
+ ],
+ 'attrs': [
+ (r'\]', Punctuation, '#pop'),
+ (_var + r'(\s*)(=)(\s*)' + _var,
+ bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
+ String)),
+ (r',', Punctuation),
+ include('comments')
+ ],
+ 'comments': [
+ (r'(?://|#).*?\n', Comment.Single),
+ (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
+ (r'[ \t\r\n]+', Text.Whitespace)
+ ]
+ }
+
+
+class VGLLexer(RegexLexer):
+ """
+ For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
+ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'VGL'
+ aliases = ['vgl']
+ filenames = ['*.rpf']
+
+ flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\{[^\}]*\}', Comment.Multiline),
+ (r'declare', Keyword.Constant),
+ (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
+ r'|create|on|line|with|global|routine|value|endroutine|constant'
+ r'|global|set|join|library|compile_option|file|exists|create|copy'
+ r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
+ Keyword),
+ (r'(true|false|null|empty|error|locked)', Keyword.Constant),
+ (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator),
+ (r'"[^"]*"', String),
+ (r'(\.)([a-z_\$][\w\$]*)', bygroups(Operator, Name.Attribute)),
+ (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
+ (r'[a-z_\$][\w\$]*', Name),
+ (r'[\r\n]+', Text),
+ (r'\s+', Text)
+ ]
+ }
+
+
+class AlloyLexer(RegexLexer):
+ """
+ For `Alloy <http://alloy.mit.edu>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Alloy'
+ aliases = ['alloy']
+ filenames = ['*.als']
+ mimetypes = ['text/x-alloy']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ iden_rex = r'[a-zA-Z_][a-zA-Z0-9_\']*'
+ text_tuple = (r'[^\S\n]+', Text)
+
+ tokens = {
+ 'sig': [
+ (r'(extends)\b', Keyword, '#pop'),
+ (iden_rex, Name),
+ text_tuple,
+ (r',', Punctuation),
+ (r'\{', Operator, '#pop'),
+ ],
+ 'module': [
+ text_tuple,
+ (iden_rex, Name, '#pop'),
+ ],
+ 'fun': [
+ text_tuple,
+ (r'\{', Operator, '#pop'),
+ (iden_rex, Name, '#pop'),
+ ],
+ 'root': [
+ (r'--.*?$', Comment.Single),
+ (r'//.*?$', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ text_tuple,
+ (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text),
+ 'module'),
+ (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'),
+ (r'(iden|univ|none)\b', Keyword.Constant),
+ (r'(int|Int)\b', Keyword.Type),
+ (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
+ (r'(all|some|no|sum|disj|when|else)\b', Keyword),
+ (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
+ (r'(and|or|implies|iff|in)\b', Operator.Word),
+ (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'),
+ (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
+ (r'[-+/*%=<>&!^|~\{\}\[\]\(\)\.]', Operator),
+ (iden_rex, Name),
+ (r'[:,]', Punctuation),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\n', Text),
+ ]
+ }
+
+
+class PanLexer(RegexLexer):
+ """
+ Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
+
+    Based on the tcsh lexer.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pan'
+ aliases = ['pan']
+ filenames = ['*.pan']
+
+ tokens = {
+ 'root': [
+ include('basic'),
+ (r'\(', Keyword, 'paren'),
+ (r'{', Keyword, 'curly'),
+ include('data'),
+ ],
+ 'basic': [
+ (words((
+ 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final', 'prefix',
+ 'unique', 'object', 'foreach', 'include', 'template', 'function', 'variable',
+ 'structure', 'extensible', 'declaration'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+ (words((
+ 'file_contents', 'format', 'index', 'length', 'match', 'matches', 'replace',
+ 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase', 'debug', 'error',
+ 'traceback', 'deprecated', 'base64_decode', 'base64_encode', 'digest', 'escape',
+ 'unescape', 'append', 'create', 'first', 'nlist', 'key', 'length', 'list', 'merge', 'next',
+ 'prepend', 'splice', 'is_boolean', 'is_defined', 'is_double', 'is_list', 'is_long',
+ 'is_nlist', 'is_null', 'is_number', 'is_property', 'is_resource', 'is_string',
+ 'to_boolean', 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists',
+ 'path_exists', 'if_exists', 'return', 'value'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+ (r'#.*', Comment),
+ (r'\\[\w\W]', String.Escape),
+ (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
+ (r'[\[\]{}()=]+', Operator),
+ (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
+ (r';', Punctuation),
+ ],
+ 'data': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'\s+', Text),
+ (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
+ (r'\d+(?= |\Z)', Number),
+ ],
+ 'curly': [
+ (r'}', Keyword, '#pop'),
+ (r':-', Keyword),
+ (r'\w+', Name.Variable),
+ (r'[^}:"\'`$]+', Punctuation),
+ (r':', Punctuation),
+ include('root'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('root'),
+ ],
+ }
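
A short, illustrative run of the ProtoBufLexer over an invented message definition, assuming this branch is installed:

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.dsls import ProtoBufLexer

proto = 'message Point {\n  required int32 x = 1;\n  required int32 y = 2;\n}\n'
print(highlight(proto, ProtoBufLexer(), HtmlFormatter()))
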
diff --git a/pygments/lexers/dylan.py b/pygments/lexers/dylan.py
new file mode 100644
index 00000000..b4f2a976
--- /dev/null
+++ b/pygments/lexers/dylan.py
@@ -0,0 +1,289 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.dylan
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Dylan language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Literal
+
+__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
+
+
+class DylanLexer(RegexLexer):
+ """
+ For the `Dylan <http://www.opendylan.org/>`_ language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Dylan'
+ aliases = ['dylan']
+ filenames = ['*.dylan', '*.dyl', '*.intr']
+ mimetypes = ['text/x-dylan']
+
+ flags = re.IGNORECASE
+
+ builtins = set((
+ 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
+ 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
+ 'each-subclass', 'exception', 'exclude', 'function', 'generic',
+ 'handler', 'inherited', 'inline', 'inline-only', 'instance',
+ 'interface', 'import', 'keyword', 'library', 'macro', 'method',
+ 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
+ 'singleton', 'slot', 'thread', 'variable', 'virtual'))
+
+ keywords = set((
+ 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
+ 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
+ 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
+ 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
+ 'while'))
+
+ operators = set((
+ '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
+ '>', '>=', '&', '|'))
+
+ functions = set((
+ 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
+ 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
+ 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
+ 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
+ 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
+ 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
+ 'condition-format-arguments', 'condition-format-string', 'conjoin',
+ 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
+ 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
+ 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
+ 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
+ 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
+ 'function-arguments', 'function-return-values',
+ 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
+ 'generic-function-methods', 'head', 'head-setter', 'identity',
+ 'initialize', 'instance?', 'integral?', 'intersection',
+ 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
+ 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
+ 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
+ 'min', 'modulo', 'negative', 'negative?', 'next-method',
+ 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
+ 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
+ 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
+ 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
+ 'remove-method', 'replace-elements!', 'replace-subsequence!',
+ 'restart-query', 'return-allowed?', 'return-description',
+ 'return-query', 'reverse', 'reverse!', 'round', 'round/',
+ 'row-major-index', 'second', 'second-setter', 'shallow-copy',
+ 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
+ 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
+ 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
+ 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
+ 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
+ 'vector', 'zero?'))
+
+ valid_name = '\\\\?[a-z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+'
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ lowercase_value = value.lower()
+ if lowercase_value in self.builtins:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.keywords:
+ yield index, Keyword, value
+ continue
+ if lowercase_value in self.functions:
+ yield index, Name.Builtin, value
+ continue
+ if lowercase_value in self.operators:
+ yield index, Operator, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+
+ ('', Text, 'code') # no header match, switch to code
+ ],
+ 'code': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # multi-line comment
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # strings and characters
+ (r'"', String, 'string'),
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
+
+ # binary integer
+ (r'#[bB][01]+', Number.Bin),
+
+ # octal integer
+ (r'#[oO][0-7]+', Number.Oct),
+
+ # floating point
+ (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
+
+ # decimal integer
+ (r'[-+]?\d+', Number.Integer),
+
+ # hex integer
+ (r'#[xX][0-9a-f]+', Number.Hex),
+
+ # Macro parameters
+ (r'(\?' + valid_name + ')(:)'
+ r'(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
+ bygroups(Name.Tag, Operator, Name.Builtin)),
+ (r'\?' + valid_name, Name.Tag),
+
+ # Punctuation
+ (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],\.;])', Punctuation),
+
+ # Most operators are picked up as names and then re-flagged.
+ # This one isn't valid in a name though, so we pick it up now.
+ (r':=', Operator),
+
+ # Pick up #t / #f before we match other stuff with #.
+ (r'#[tf]', Literal),
+
+ # #"foo" style keywords
+ (r'#"', String.Symbol, 'keyword'),
+
+ # #rest, #key, #all-keys, etc.
+ (r'#[a-z0-9-]+', Keyword),
+
+ # required-init-keyword: style keywords.
+ (valid_name + ':', Keyword),
+
+ # class names
+ (r'<' + valid_name + '>', Name.Class),
+
+ # define variable forms.
+ (r'\*' + valid_name + r'\*', Name.Variable.Global),
+
+ # define constant forms.
+ (r'\$' + valid_name, Name.Constant),
+
+ # everything else. We re-flag some of these in the method above.
+ (valid_name, Name),
+ ],
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'keyword': [
+ (r'"', String.Symbol, '#pop'),
+ (r'[^\\"]+', String.Symbol), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ]
+ }
+
+
+class DylanLidLexer(RegexLexer):
+ """
+ For Dylan LID (Library Interchange Definition) files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'DylanLID'
+ aliases = ['dylan-lid', 'lid']
+ filenames = ['*.lid', '*.hdp']
+ mimetypes = ['text/x-dylan-lid']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ # Whitespace
+ (r'\s+', Text),
+
+ # single line comment
+ (r'//.*?\n', Comment.Single),
+
+ # lid header
+ (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
+ bygroups(Name.Attribute, Operator, Text, String)),
+ ]
+ }
+
+
+class DylanConsoleLexer(Lexer):
+ """
+ For Dylan interactive console output like:
+
+ .. sourcecode:: dylan-console
+
+ ? let a = 1;
+ => 1
+ ? a
+ => 1
+
+ This is based on a copy of the RubyConsoleLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Dylan session'
+ aliases = ['dylan-console', 'dylan-repl']
+ filenames = ['*.dylan-console']
+ mimetypes = ['text/x-dylan-console']
+
+ _line_re = re.compile('.*?\n')
+ _prompt_re = re.compile(r'\?| ')
+
+ def get_tokens_unprocessed(self, text):
+ dylexer = DylanLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in self._line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ dylexer.get_tokens_unprocessed(curcode)):
+ yield item
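
Not from this patch, but for orientation: once the 'dylan' alias is registered in the lexer mapping, the class above is used through the standard Pygments API. A minimal sketch (the sample source is made up):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.dylan import DylanLexer

    code = 'define method double (x :: <integer>) => (y :: <integer>) x * 2 end;'
    # get_tokens_unprocessed() above re-flags plain Name tokens such as
    # "define" and "method" as Keyword / Name.Builtin after the regex pass.
    print(highlight(code, DylanLexer(), TerminalFormatter()))
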
diff --git a/pygments/lexers/ecl.py b/pygments/lexers/ecl.py
new file mode 100644
index 00000000..a0138e32
--- /dev/null
+++ b/pygments/lexers/ecl.py
@@ -0,0 +1,125 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ecl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the ECL language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['ECLLexer']
+
+
+class ECLLexer(RegexLexer):
+ """
+ Lexer for the declarative big-data `ECL
+ <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
+ language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'ECL'
+ aliases = ['ecl']
+ filenames = ['*.ecl']
+ mimetypes = ['application/x-ecl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('statements'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\/\/.*', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ ],
+ 'statements': [
+ include('types'),
+ include('keywords'),
+ include('functions'),
+ include('hash'),
+ (r'"', String, 'string'),
+ (r'\'', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-f]+[lu]*', Number.Hex),
+ (r'0[0-7]+[lu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]+', Operator),
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[a-z_]\w*', Name),
+ ],
+ 'hash': [
+ (r'^#.*$', Comment.Preproc),
+ ],
+ 'types': [
+ (r'(RECORD|END)\D', Keyword.Declaration),
+ (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
+ r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
+ r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
+ bygroups(Keyword.Type, Text)),
+ ],
+ 'keywords': [
+ (words((
+ 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
+ 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
+ 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
+ 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
+ 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
+ 'WAIT', 'WHEN'), suffix=r'\b'),
+ Keyword.Reserved),
+ # These are classed differently, check later
+ (words((
+ 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
+ 'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
+ 'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
+ 'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
+ 'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
+ 'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
+ 'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
+ 'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
+ 'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
+ 'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
+ 'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
+ 'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
+ Keyword.Reserved),
+ ],
+ 'functions': [
+ (words((
+ 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
+ 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
+ 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
+ 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
+ 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
+ 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
+ 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
+ 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
+ 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
+ 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
+ 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
+ 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
+ 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
+ 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
+ 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
+ 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
+ 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
+ Name.Function),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\'', String, '#pop'),
+ (r'[^"\']+', String),
+ ],
+ }
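
ECLLexer is normally reached through the lexer mapping rather than a direct import. A small sketch, assuming the 'ecl' alias and '*.ecl' filename pattern are registered (the file name is hypothetical):

    from pygments import lex
    from pygments.lexers import get_lexer_for_filename

    lexer = get_lexer_for_filename('query.ecl')  # resolves to ECLLexer
    for tokentype, value in lex("OUTPUT('Hello');", lexer):
        print(tokentype, repr(value))
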
diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py
new file mode 100644
index 00000000..28e3fcac
--- /dev/null
+++ b/pygments/lexers/eiffel.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.eiffel
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Eiffel language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['EiffelLexer']
+
+
+class EiffelLexer(RegexLexer):
+ """
+ For `Eiffel <http://www.eiffel.com>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Eiffel'
+ aliases = ['eiffel']
+ filenames = ['*.e']
+ mimetypes = ['text/x-eiffel']
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ # Please note that keyword and operator are case insensitive.
+ (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant),
+ (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word),
+ (words((
+ 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached',
+ 'attribute', 'check', 'class', 'convert', 'create', 'debug',
+ 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure',
+ 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if',
+ 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none',
+ 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename',
+ 'require', 'rescue', 'retry', 'select', 'separate', 'then',
+ 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String),
+ (r'"([^"%\n]|%.)*?"', String),
+ include('numbers'),
+ (r"'([^'%]|%'|%%)'", String.Char),
+ (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\\?!#%&@|+/\-=\>\*$<|^\[\]])", Operator),
+ (r"([{}():;,.])", Punctuation),
+ (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name),
+ (r'([A-Z][A-Z0-9_]*)', Name.Class),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'0[bB][0-1]+', Number.Bin),
+ (r'0[cC][0-7]+', Number.Oct),
+ (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ ],
+ }
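
For a quick look at what the new lexer emits, iterating get_tokens() is enough; a sketch (the Eiffel snippet is illustrative):

    from pygments.lexers.eiffel import EiffelLexer

    source = 'feature -- Access\n\tcount: INTEGER\n'
    for tokentype, value in EiffelLexer().get_tokens(source):
        if value.strip():
            print(tokentype, repr(value))
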
diff --git a/pygments/lexers/erlang.py b/pygments/lexers/erlang.py
new file mode 100644
index 00000000..b8ed7ff2
--- /dev/null
+++ b/pygments/lexers/erlang.py
@@ -0,0 +1,508 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.erlang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Erlang.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \
+ include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer',
+ 'ElixirLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class ErlangLexer(RegexLexer):
+ """
+ For the Erlang functional programming language.
+
+ Blame Jeremy Thurgood (http://jerith.za.net/).
+
+ .. versionadded:: 0.9
+ """
+
+ name = 'Erlang'
+ aliases = ['erlang']
+ filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
+ mimetypes = ['text/x-erlang']
+
+ keywords = (
+ 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
+ 'let', 'of', 'query', 'receive', 'try', 'when',
+ )
+
+ builtins = ( # See erlang(3) man page
+ 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
+ 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
+ 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
+ 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
+ 'float', 'float_to_list', 'fun_info', 'fun_to_list',
+ 'function_exported', 'garbage_collect', 'get', 'get_keys',
+ 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
+ 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
+ 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
+ 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
+ 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
+ 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
+ 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
+ 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
+ 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
+ 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
+ 'pid_to_list', 'port_close', 'port_command', 'port_connect',
+ 'port_control', 'port_call', 'port_info', 'port_to_list',
+ 'process_display', 'process_flag', 'process_info', 'purge_module',
+ 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
+ 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
+ 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
+ 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
+ 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
+ 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
+ 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
+ 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
+ )
+
+ operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
+ word_operators = (
+ 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
+ 'div', 'not', 'or', 'orelse', 'rem', 'xor'
+ )
+
+ atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
+
+ variable_re = r'(?:[A-Z_]\w*)'
+
+ escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
+
+ macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
+
+ base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'%.*\n', Comment),
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(builtins, suffix=r'\b'), Name.Builtin),
+ (words(word_operators, suffix=r'\b'), Operator.Word),
+ (r'^-', Punctuation, 'directive'),
+ (operators, Operator),
+ (r'"', String, 'string'),
+ (r'<<', Name.Label),
+ (r'>>', Name.Label),
+ ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
+ ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[+-]?' + base_re + r'#[0-9a-zA-Z]+', Number.Integer),
+ (r'[+-]?\d+\.\d+', Number.Float),
+ (r'[+-]?\d+', Number.Integer),
+ (r'[]\[:_@\".{}()|;,]', Punctuation),
+ (variable_re, Name.Variable),
+ (atom_re, Name),
+ (r'\?'+macro_re, Name.Constant),
+ (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
+ (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
+ ],
+ 'string': [
+ (escape_re, String.Escape),
+ (r'"', String, '#pop'),
+ (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
+ (r'[^"\\~]+', String),
+ (r'~', String),
+ ],
+ 'directive': [
+ (r'(define)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
+ (r'(record)(\s*)(\()('+macro_re+r')',
+ bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
+ (atom_re, Name.Entity, '#pop'),
+ ],
+ }
+
+
+class ErlangShellLexer(Lexer):
+ """
+ Shell sessions in erl (for Erlang code).
+
+ .. versionadded:: 1.1
+ """
+ name = 'Erlang erl session'
+ aliases = ['erl']
+ filenames = ['*.erl-sh']
+ mimetypes = ['text/x-erl-shellsession']
+
+ _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
+
+ def get_tokens_unprocessed(self, text):
+ erlexer = ErlangLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if line.startswith('*'):
+ yield match.start(), Generic.Traceback, line
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ erlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+def gen_elixir_string_rules(name, symbol, token):
+ states = {}
+ states['string_' + name] = [
+ (r'[^#%s\\]+' % (symbol,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'(%s)' % (symbol,), bygroups(token), "#pop"),
+ include('interpol')
+ ]
+ return states
+
+
+def gen_elixir_sigstr_rules(term, token, interpol=True):
+ if interpol:
+ return [
+ (r'[^#%s\\]+' % (term,), token),
+ include('escapes'),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ include('interpol')
+ ]
+ else:
+ return [
+ (r'[^%s\\]+' % (term,), token),
+ (r'\\.', token),
+ (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
+ ]
+
+
+class ElixirLexer(RegexLexer):
+ """
+ For the `Elixir language <http://elixir-lang.org>`_.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir'
+ aliases = ['elixir', 'ex', 'exs']
+ filenames = ['*.ex', '*.exs']
+ mimetypes = ['text/x-elixir']
+
+ KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch')
+ KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in')
+ BUILTIN = (
+ 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
+ 'quote', 'unquote', 'unquote_splicing', 'throw', 'super'
+ )
+ BUILTIN_DECLARATION = (
+ 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
+ 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback'
+ )
+
+ BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias')
+ CONSTANT = ('nil', 'true', 'false')
+
+ PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__')
+
+ OPERATORS3 = (
+ '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==',
+ '~>>', '<~>', '|~>', '<|>',
+ )
+ OPERATORS2 = (
+ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~',
+ '->', '<-', '|', '.', '=', '~>', '<~',
+ )
+ OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&')
+
+ PUNCTUATION = (
+ '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']'
+ )
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.KEYWORD:
+ yield index, Keyword, value
+ elif value in self.KEYWORD_OPERATOR:
+ yield index, Operator.Word, value
+ elif value in self.BUILTIN:
+ yield index, Keyword, value
+ elif value in self.BUILTIN_DECLARATION:
+ yield index, Keyword.Declaration, value
+ elif value in self.BUILTIN_NAMESPACE:
+ yield index, Keyword.Namespace, value
+ elif value in self.CONSTANT:
+ yield index, Name.Constant, value
+ elif value in self.PSEUDO_VAR:
+ yield index, Name.Builtin.Pseudo, value
+ else:
+ yield index, token, value
+ else:
+ yield index, token, value
+
+ def gen_elixir_sigil_rules():
+ # all valid sigil terminators (excluding heredocs)
+ terminators = [
+ (r'\{', r'\}', 'cb'),
+ (r'\[', r'\]', 'sb'),
+ (r'\(', r'\)', 'pa'),
+ (r'\<', r'\>', 'ab'),
+ (r'/', r'/', 'slas'),
+ (r'\|', r'\|', 'pipe'),
+ ('"', '"', 'quot'),
+ ("'", "'", 'apos'),
+ ]
+
+ # heredocs have slightly different rules
+ triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
+
+ token = String.Other
+ states = {'sigils': []}
+
+ for term, name in triquotes:
+ states['sigils'] += [
+ (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-intp')),
+ (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
+ (name + '-end', name + '-no-intp')),
+ ]
+
+ states[name + '-end'] = [(r'[a-zA-Z]*', token, '#pop')]
+ states[name + '-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ]
+ states[name + '-no-intp'] = [
+ (r'^\s*' + term, String.Heredoc, '#pop'),
+ include('heredoc_no_interpol'),
+ ]
+
+ for lterm, rterm, name in terminators:
+ states['sigils'] += [
+ (r'~[a-z]' + lterm, token, name + '-intp'),
+ (r'~[A-Z]' + lterm, token, name + '-no-intp'),
+ ]
+ states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token)
+ states[name + '-no-intp'] = \
+ gen_elixir_sigstr_rules(rterm, token, interpol=False)
+
+ return states
+
+ op3_re = "|".join(re.escape(s) for s in OPERATORS3)
+ op2_re = "|".join(re.escape(s) for s in OPERATORS2)
+ op1_re = "|".join(re.escape(s) for s in OPERATORS1)
+ ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
+ punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
+ alnum = '[A-Za-z_0-9]'
+ name_re = r'(?:\.\.\.|[a-z_]%s*[!\?]?)' % alnum
+ modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum}
+ complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
+ special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})'
+
+ long_hex_char_re = r'(\\x{)([\da-fA-F]+)(})'
+ hex_char_re = r'(\\x[\da-fA-F]{1,2})'
+ escape_char_re = r'(\\[abdefnrstv])'
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single),
+
+ # Various kinds of characters
+ (r'(\?)' + long_hex_char_re,
+ bygroups(String.Char,
+ String.Escape, Number.Hex, String.Escape)),
+ (r'(\?)' + hex_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'(\?)' + escape_char_re,
+ bygroups(String.Char, String.Escape)),
+ (r'\?\\?.', String.Char),
+
+ # '::' has to go before atoms
+ (r':::', String.Symbol),
+ (r'::', Operator),
+
+ # atoms
+ (r':' + special_atom_re, String.Symbol),
+ (r':' + complex_name_re, String.Symbol),
+ (r':"', String.Symbol, 'string_double_atom'),
+ (r":'", String.Symbol, 'string_single_atom'),
+
+ # [keywords: ...]
+ (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
+ bygroups(String.Symbol, Punctuation)),
+
+ # @attributes
+ (r'@' + name_re, Name.Attribute),
+
+ # identifiers
+ (name_re, Name),
+ (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)),
+
+ # operators and punctuation
+ (op3_re, Operator),
+ (op2_re, Operator),
+ (punctuation_re, Punctuation),
+ (r'&\d', Name.Entity), # anon func arguments
+ (op1_re, Operator),
+
+ # numbers
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[\da-fA-F]+', Number.Hex),
+ (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
+ (r'\d(_?\d)*', Number.Integer),
+
+ # strings and heredocs
+ (r'"""\s*', String.Heredoc, 'heredoc_double'),
+ (r"'''\s*$", String.Heredoc, 'heredoc_single'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single'),
+
+ include('sigils'),
+
+ (r'%{', Punctuation, 'map_key'),
+ (r'{', Punctuation, 'tuple'),
+ ],
+ 'heredoc_double': [
+ (r'^\s*"""', String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_single': [
+ (r"^\s*'''", String.Heredoc, '#pop'),
+ include('heredoc_interpol'),
+ ],
+ 'heredoc_interpol': [
+ (r'[^#\\\n]+', String.Heredoc),
+ include('escapes'),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ include('interpol'),
+ ],
+ 'heredoc_no_interpol': [
+ (r'[^\\\n]+', String.Heredoc),
+ (r'\\.', String.Heredoc),
+ (r'\n+', String.Heredoc),
+ ],
+ 'escapes': [
+ (long_hex_char_re,
+ bygroups(String.Escape, Number.Hex, String.Escape)),
+ (hex_char_re, String.Escape),
+ (escape_char_re, String.Escape),
+ ],
+ 'interpol': [
+ (r'#{', String.Interpol, 'interpol_string'),
+ ],
+ 'interpol_string': [
+ (r'}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'map_key': [
+ include('root'),
+ (r':', Punctuation, 'map_val'),
+ (r'=>', Punctuation, 'map_val'),
+ (r'}', Punctuation, '#pop'),
+ ],
+ 'map_val': [
+ include('root'),
+ (r',', Punctuation, '#pop'),
+ (r'(?=})', Punctuation, '#pop'),
+ ],
+ 'tuple': [
+ include('root'),
+ (r'}', Punctuation, '#pop'),
+ ],
+ }
+ tokens.update(gen_elixir_string_rules('double', '"', String.Double))
+ tokens.update(gen_elixir_string_rules('single', "'", String.Single))
+ tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
+ tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
+ tokens.update(gen_elixir_sigil_rules())
+
+
+class ElixirConsoleLexer(Lexer):
+ """
+ For Elixir interactive console (iex) output like:
+
+ .. sourcecode:: iex
+
+ iex> [head | tail] = [1,2,3]
+ [1,2,3]
+ iex> head
+ 1
+ iex> tail
+ [2,3]
+ iex> [head | tail]
+ [1,2,3]
+ iex> length [head | tail]
+ 3
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Elixir iex session'
+ aliases = ['iex']
+ mimetypes = ['text/x-elixir-shellsession']
+
+ _prompt_re = re.compile(r'(iex|\.{3})(\(\d+\))?> ')
+
+ def get_tokens_unprocessed(self, text):
+ exlexer = ElixirLexer(**self.options)
+
+ curcode = ''
+ in_error = False
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'** '):
+ in_error = True
+ insertions.append((len(curcode),
+ [(0, Generic.Error, line[:-1])]))
+ curcode += line[-1:]
+ else:
+ m = self._prompt_re.match(line)
+ if m is not None:
+ in_error = False
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ token = Generic.Error if in_error else Generic.Output
+ yield match.start(), token, line
+ if curcode:
+ for item in do_insertions(
+ insertions, exlexer.get_tokens_unprocessed(curcode)):
+ yield item
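
Both shell lexers follow the same pattern: prompt matches are collected as Generic.Prompt insertions, the rest of each line is buffered as code, and do_insertions() later splices the prompts back into the token stream produced by the real lexer. A sketch of feeding a transcript through ElixirConsoleLexer (the session text is made up):

    from pygments.lexers.erlang import ElixirConsoleLexer

    session = (
        'iex> 1 + 1\n'
        '2\n'
        'iex> String.upcase("abc")\n'
        '"ABC"\n'
    )
    for index, tokentype, value in ElixirConsoleLexer().get_tokens_unprocessed(session):
        print(index, tokentype, repr(value))
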
diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py
new file mode 100644
index 00000000..2efc057b
--- /dev/null
+++ b/pygments/lexers/esoteric.py
@@ -0,0 +1,114 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.esoteric
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for esoteric languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer']
+
+
+class BrainfuckLexer(RegexLexer):
+ """
+ Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
+ language.
+ """
+
+ name = 'Brainfuck'
+ aliases = ['brainfuck', 'bf']
+ filenames = ['*.bf', '*.b']
+ mimetypes = ['application/x-brainfuck']
+
+ tokens = {
+ 'common': [
+ # use different colors for different instruction types
+ (r'[.,]+', Name.Tag),
+ (r'[+-]+', Name.Builtin),
+ (r'[<>]+', Name.Variable),
+ (r'[^.,+\-<>\[\]]+', Comment),
+ ],
+ 'root': [
+ (r'\[', Keyword, 'loop'),
+ (r'\]', Error),
+ include('common'),
+ ],
+ 'loop': [
+ (r'\[', Keyword, '#push'),
+ (r'\]', Keyword, '#pop'),
+ include('common'),
+ ]
+ }
+
+
+class BefungeLexer(RegexLexer):
+ """
+ Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
+ language.
+
+ .. versionadded:: 0.7
+ """
+ name = 'Befunge'
+ aliases = ['befunge']
+ filenames = ['*.befunge']
+ mimetypes = ['application/x-befunge']
+
+ tokens = {
+ 'root': [
+ (r'[0-9a-f]', Number),
+ (r'[\+\*/%!`-]', Operator), # Traditional math
+ (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
+ (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
+ (r'[|_mw]', Keyword),
+ (r'[{}]', Name.Tag), # Befunge-98 stack ops
+ (r'".*?"', String.Double), # Strings don't appear to allow escapes
+ (r'\'.', String.Single), # Single character
+ (r'[#;]', Comment), # Trampoline... depends on direction hit
+ (r'[pg&~=@iotsy]', Keyword), # Misc
+ (r'[()A-Z]', Comment), # Fingerprints
+ (r'\s+', Text), # Whitespace doesn't matter
+ ],
+ }
+
+
+class RedcodeLexer(RegexLexer):
+ """
+ A simple Redcode lexer based on ICWS'94.
+ Contributed by Adam Blinkinsop <blinks@acm.org>.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Redcode'
+ aliases = ['redcode']
+ filenames = ['*.cw']
+
+ opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
+ 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
+ 'ORG', 'EQU', 'END')
+ modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ (r';.*$', Comment.Single),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
+ (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
+ (r'[A-Za-z_][A-Za-z_0-9]+', Name),
+ # Operators
+ (r'[-+*/%]', Operator),
+ (r'[#$@<>]', Operator), # mode
+ (r'[.,]', Punctuation), # mode
+ # Numbers
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ }
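
The Brainfuck lexer keeps bracket nesting in a separate 'loop' state, so a ']' with no matching '[' surfaces as an Error token. A short sketch:

    from pygments.lexers.esoteric import BrainfuckLexer
    from pygments.token import Error

    toks = list(BrainfuckLexer().get_tokens('+[>+<-]]'))
    # The trailing ']' has no opener and is reported as Error.
    print(any(ttype is Error for ttype, value in toks))
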
diff --git a/pygments/lexers/factor.py b/pygments/lexers/factor.py
new file mode 100644
index 00000000..ab1ca16b
--- /dev/null
+++ b/pygments/lexers/factor.py
@@ -0,0 +1,344 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.factor
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Factor language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, default, words
+from pygments.token import Text, Comment, Keyword, Name, String, Number
+
+__all__ = ['FactorLexer']
+
+
+class FactorLexer(RegexLexer):
+ """
+ Lexer for the `Factor <http://factorcode.org>`_ language.
+
+ .. versionadded:: 1.4
+ """
+ name = 'Factor'
+ aliases = ['factor']
+ filenames = ['*.factor']
+ mimetypes = ['text/x-factor']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ builtin_kernel = words((
+ '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip',
+ '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep',
+ '3tri', '4dip', '4drop', '4dup', '4keep', '<wrapper>', '=', '>boolean', 'clone',
+ '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry',
+ 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?',
+ 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)',
+ 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop',
+ 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*',
+ 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*',
+ 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over',
+ 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw',
+ 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple',
+ 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with',
+ 'wrapper', 'wrapper?', 'xor'), suffix=r'\s')
+
+ builtin_assocs = words((
+ '2cache', '<enum>', '>alist', '?at', '?of', 'assoc', 'assoc-all?',
+ 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff',
+ 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?',
+ 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find',
+ 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map',
+ 'assoc-map-as', 'assoc-partition', 'assoc-refine', 'assoc-size',
+ 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!',
+ 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at',
+ 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys',
+ 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of',
+ 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute',
+ 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'\s')
+
+ builtin_combinators = words((
+ '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave',
+ '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find',
+ 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot',
+ 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond',
+ 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread',
+ 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'\s')
+
+ builtin_math = words((
+ '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '<fp-nan>', '>',
+ '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)',
+ '(each-integer)', '(find-integer)', '*', '+', '?1+',
+ 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand',
+ 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex',
+ 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?',
+ 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float',
+ 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload',
+ 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?',
+ 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum',
+ 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive',
+ 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float',
+ 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?',
+ 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?',
+ 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real',
+ 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times',
+ 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero',
+ 'zero?'), suffix=r'\s')
+
+ builtin_sequences = words((
+ '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce',
+ '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as',
+ '3sequence', '4sequence', '<repetition>', '<reversed>', '<slice>', '?first',
+ '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!',
+ 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as',
+ 'assert-sequence', 'assert-sequence=', 'assert-sequence?',
+ 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error',
+ 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each',
+ 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice',
+ 'check-slice-error', 'clone-like', 'collapse-slice', 'collector',
+ 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice',
+ 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from',
+ 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find',
+ 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from',
+ 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves',
+ 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?',
+ 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?',
+ 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by',
+ 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join',
+ 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen',
+ 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find',
+ 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum',
+ 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length',
+ 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence',
+ 'non-negative-integer-expected', 'non-negative-integer-expected?',
+ 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*',
+ 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push',
+ 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove',
+ 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition',
+ 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest',
+ 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second',
+ 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=',
+ 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth',
+ 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?',
+ 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?',
+ 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix',
+ 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail',
+ 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim',
+ 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice',
+ 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty',
+ 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@',
+ 'when-empty'), suffix=r'\s')
+
+ builtin_namespaces = words((
+ '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global',
+ 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc',
+ 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack',
+ 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'),
+ suffix=r'\s')
+
+ builtin_arrays = words((
+ '1array', '2array', '3array', '4array', '<array>', '>array', 'array',
+ 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'\s')
+
+ builtin_io = words((
+ '(each-stream-block-slice)', '(each-stream-block)',
+ '(stream-contents-by-block)', '(stream-contents-by-element)',
+ '(stream-contents-by-length-or-block)',
+ '(stream-contents-by-length)', '+byte+', '+character+',
+ 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block',
+ 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel',
+ 'each-stream-block', 'each-stream-block-slice', 'each-stream-line',
+ 'error-stream', 'flush', 'input-stream', 'input-stream?',
+ 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl',
+ 'output-stream', 'output-stream?', 'print', 'read', 'read-into',
+ 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln',
+ 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?',
+ 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?',
+ 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy',
+ 'stream-copy*', 'stream-element-type', 'stream-flush',
+ 'stream-length', 'stream-lines', 'stream-nl', 'stream-print',
+ 'stream-read', 'stream-read-into', 'stream-read-partial',
+ 'stream-read-partial-into', 'stream-read-partial-unsafe',
+ 'stream-read-unsafe', 'stream-read-until', 'stream-read1',
+ 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell',
+ 'stream-write', 'stream-write1', 'tell-input', 'tell-output',
+ 'with-error-stream', 'with-error-stream*', 'with-error>output',
+ 'with-input-output+error-streams',
+ 'with-input-output+error-streams*', 'with-input-stream',
+ 'with-input-stream*', 'with-output-stream', 'with-output-stream*',
+ 'with-output>error', 'with-output+error-stream',
+ 'with-output+error-stream*', 'with-streams', 'with-streams*',
+ 'write', 'write1'), suffix=r'\s')
+
+ builtin_strings = words((
+ '1string', '<string>', '>string', 'resize-string', 'string',
+ 'string?'), suffix=r'\s')
+
+ builtin_vectors = words((
+ '1vector', '<vector>', '>vector', '?push', 'vector', 'vector?'),
+ suffix=r'\s')
+
+ builtin_continuations = words((
+ '<condition>', '<continuation>', '<restart>', 'attempt-all',
+ 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook',
+ 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition',
+ 'condition?', 'continuation', 'continuation?', 'continue',
+ 'continue-restart', 'continue-with', 'current-continuation',
+ 'error', 'error-continuation', 'error-in-thread', 'error-thread',
+ 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover',
+ 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts',
+ 'return', 'return-continuation', 'thread-error-hook', 'throw-continue',
+ 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'\s')
+
+ tokens = {
+ 'root': [
+ # factor allows a file to start with a shebang
+ (r'#!.*$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ (r'\s+', Text),
+
+ # defining words
+ (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Function)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(GENERIC:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Function)),
+ (r'\(\s', Name.Function, 'stackeffect'),
+ (r';\s', Keyword),
+
+ # imports and namespaces
+ (r'(USING:)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'vocabs'),
+ (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace)),
+ (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)),
+ (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)',
+ bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'),
+ (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)),
+ (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)),
+ (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)',
+ bygroups(Keyword.Namespace, Text, Name.Function)),
+
+ # tuples and classes
+ (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'),
+ (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class), 'slots'),
+ (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(C:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function, Text, Name.Class)),
+ (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Class, Text, Name.Class)),
+ (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)),
+ (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'SINGLETONS:', Keyword, 'classes'),
+
+ # other syntax
+ (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'SYMBOLS:\s', Keyword, 'words'),
+ (r'SYNTAX:\s', Keyword),
+ (r'ALIEN:\s', Keyword),
+ (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)),
+ (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text)),
+ (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)',
+ bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)),
+
+ # vocab.private
+ (r'(?:<PRIVATE|PRIVATE>)\s', Keyword.Namespace),
+
+ # strings
+ (r'"""\s+(?:.|\n)*?\s+"""', String),
+ (r'"(?:\\\\|\\"|[^"])*"', String),
+ (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String),
+ (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char),
+
+ # comments
+ (r'!\s+.*$', Comment),
+ (r'#!\s+.*$', Comment),
+ (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment),
+
+ # boolean constants
+ (r'[tf]\s', Name.Constant),
+
+ # symbols and literals
+ (r'[\\$]\s+\S+', Name.Constant),
+ (r'M\\\s+\S+\s+\S+', Name.Constant),
+
+ # numbers
+ (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number),
+ (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number),
+ (r'0b[01]+\s', Number.Bin),
+ (r'0o[0-7]+\s', Number.Oct),
+ (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+ (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number),
+
+ # keywords
+ (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s',
+ Keyword),
+
+ # builtins
+ (builtin_kernel, Name.Builtin),
+ (builtin_assocs, Name.Builtin),
+ (builtin_combinators, Name.Builtin),
+ (builtin_math, Name.Builtin),
+ (builtin_sequences, Name.Builtin),
+ (builtin_namespaces, Name.Builtin),
+ (builtin_arrays, Name.Builtin),
+ (builtin_io, Name.Builtin),
+ (builtin_strings, Name.Builtin),
+ (builtin_vectors, Name.Builtin),
+ (builtin_continuations, Name.Builtin),
+
+ # everything else is text
+ (r'\S+', Text),
+ ],
+ 'stackeffect': [
+ (r'\s+', Text),
+ (r'\(\s+', Name.Function, 'stackeffect'),
+ (r'\)\s', Name.Function, '#pop'),
+ (r'--\s', Name.Function),
+ (r'\S+', Name.Variable),
+ ],
+ 'slots': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'({\s+)(\S+)(\s+[^}]+\s+}\s)',
+ bygroups(Text, Name.Variable, Text)),
+ (r'\S+', Name.Variable),
+ ],
+ 'vocabs': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Namespace),
+ ],
+ 'classes': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Class),
+ ],
+ 'words': [
+ (r'\s+', Text),
+ (r';\s', Keyword, '#pop'),
+ (r'\S+', Name.Function),
+ ],
+ }
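
The builtin_* groups above rely on the words() helper, which folds each tuple into one optimized alternation at class-creation time; suffix=r'\s' is used because Factor words are whitespace-delimited. The helper in isolation (word list chosen only for the example; .get() is the internal hook the lexer metaclass calls to expand the pattern):

    import re
    from pygments.lexer import words

    pattern = words(('if', 'when', 'unless'), suffix=r'\s').get()
    print(pattern)                           # one regex covering all three words
    print(bool(re.match(pattern, 'when ')))  # True
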
diff --git a/pygments/lexers/fantom.py b/pygments/lexers/fantom.py
new file mode 100644
index 00000000..fd850336
--- /dev/null
+++ b/pygments/lexers/fantom.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fantom
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Fantom language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from string import Template
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['FantomLexer']
+
+
+class FantomLexer(RegexLexer):
+ """
+ For Fantom source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fantom'
+ aliases = ['fan']
+ filenames = ['*.fan']
+ mimetypes = ['application/x-fantom']
+
+ # often used regexes
+ def s(str):
+ return Template(str).substitute(
+ dict(
+ pod=r'[\"\w\.]+',
+ eos=r'\n|;',
+ id=r'[a-zA-Z_]\w*',
+ # all chars which can be part of type definition. Starts with
+ # either letter, or [ (maps), or | (funcs)
+ type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]\|\->\?]*?',
+ )
+ )
+
+ tokens = {
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline
+ (r'//.*?\n', Comment.Single), # Single line
+ # TODO: highlight references in fandocs
+ (r'\*\*.*?\n', Comment.Special), # Fandoc
+ (r'#.*\n', Comment.Single) # Shell-style
+ ],
+ 'literals': [
+ (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration
+ (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot
+ (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal
+ (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex
+ (r'\b-?[\d_]+', Number.Integer), # Int
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char
+ (r'"', Punctuation, 'insideStr'), # Opening quote
+ (r'`', Punctuation, 'insideUri'), # Opening accent
+ (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null
+ (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, String, Punctuation)),
+ (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal
+ bygroups(Name.Namespace, Punctuation, Name.Class,
+ Punctuation, Name.Function)),
+ (r'\[,\]', Literal), # Empty list
+ (s(r'($type)(\[,\])'), # Typed empty list
+ bygroups(using(this, state='inType'), Literal)),
+ (r'\[:\]', Literal), # Empty Map
+ (s(r'($type)(\[:\])'),
+ bygroups(using(this, state='inType'), Literal)),
+ ],
+ 'insideStr': [
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\${.*?}', String.Interpol), # Subst expr
+ (r'"', Punctuation, '#pop'), # Closing quot
+ (r'.', String) # String content
+ ],
+ 'insideUri': [ # TODO: remove copy/paste str/uri
+ (r'\\\\', String.Escape), # Escaped backslash
+ (r'\\"', String.Escape), # Escaped "
+ (r'\\`', String.Escape), # Escaped `
+ (r'\$\w+', String.Interpol), # Subst var
+ (r'\${.*?}', String.Interpol), # Subst expr
+ (r'`', Punctuation, '#pop'), # Closing tick
+ (r'.', String.Backtick) # URI content
+ ],
+ 'protectionKeywords': [
+ (r'\b(public|protected|private|internal)\b', Keyword),
+ ],
+ 'typeKeywords': [
+ (r'\b(abstract|final|const|native|facet|enum)\b', Keyword),
+ ],
+ 'methodKeywords': [
+ (r'\b(abstract|native|once|override|static|virtual|final)\b',
+ Keyword),
+ ],
+ 'fieldKeywords': [
+ (r'\b(abstract|const|final|native|override|static|virtual|'
+ r'readonly)\b', Keyword)
+ ],
+ 'otherKeywords': [
+ (words((
+ 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while',
+ 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue',
+ 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'\b(it|this|super)\b', Name.Builtin.Pseudo),
+ ],
+ 'operators': [
+ (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator)
+ ],
+ 'inType': [
+ (r'[\[\]\|\->:\?]', Punctuation),
+ (s(r'$id'), Name.Class),
+ default('#pop'),
+
+ ],
+ 'root': [
+ include('comments'),
+ include('protectionKeywords'),
+ include('typeKeywords'),
+ include('methodKeywords'),
+ include('fieldKeywords'),
+ include('literals'),
+ include('otherKeywords'),
+ include('operators'),
+ (r'using\b', Keyword.Namespace, 'using'), # Using stmt
+ (r'@\w+', Name.Decorator, 'facet'), # Symbol
+ (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class),
+ 'inheritance'), # Inheritance list
+
+ # Type var := val
+ (s(r'($type)([ \t]+)($id)(\s*)(:=)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable, Text, Operator)),
+
+ # var := val
+ (s(r'($id)(\s*)(:=)'),
+ bygroups(Name.Variable, Text, Operator)),
+
+ # .someId( or ->someId( ###
+ (s(r'(\.|(?:\->))($id)(\s*)(\()'),
+ bygroups(Operator, Name.Function, Text, Punctuation),
+ 'insideParen'),
+
+ # .someId or ->someId
+ (s(r'(\.|(?:\->))($id)'),
+ bygroups(Operator, Name.Function)),
+
+ # new makeXXX (
+ (r'(new)(\s+)(make\w*)(\s*)(\()',
+ bygroups(Keyword, Text, Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # Type name (
+ (s(r'($type)([ \t]+)' # Return type and whitespace
+ r'($id)(\s*)(\()'), # method name + open brace
+ bygroups(using(this, state='inType'), Text,
+ Name.Function, Text, Punctuation),
+ 'insideMethodDeclArgs'),
+
+ # ArgType argName,
+ (s(r'($type)(\s+)($id)(\s*)(,)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # ArgType argName)
+ # Covered in 'insideParen' state
+
+ # ArgType argName -> ArgType|
+ (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation, Text, using(this, state='inType'),
+ Punctuation)),
+
+ # ArgType argName|
+ (s(r'($type)(\s+)($id)(\s*)(\|)'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation)),
+
+ # Type var
+ (s(r'($type)([ \t]+)($id)'),
+ bygroups(using(this, state='inType'), Text,
+ Name.Variable)),
+
+ (r'\(', Punctuation, 'insideParen'),
+ (r'\{', Punctuation, 'insideBrace'),
+ (r'.', Text)
+ ],
+ 'insideParen': [
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'insideMethodDeclArgs': [
+ (r'\)', Punctuation, '#pop'),
+ (s(r'($type)(\s+)($id)(\s*)(\))'),
+ bygroups(using(this, state='inType'), Text, Name.Variable,
+ Text, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'insideBrace': [
+ (r'\}', Punctuation, '#pop'),
+ include('root'),
+ ],
+ 'inheritance': [
+ (r'\s+', Text), # Whitespace
+ (r':|,', Punctuation),
+ (r'(?:(\w+)(::))?(\w+)',
+ bygroups(Name.Namespace, Punctuation, Name.Class)),
+ (r'{', Punctuation, '#pop')
+ ],
+ 'using': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(\[)(\w+)(\])',
+ bygroups(Punctuation, Comment.Special, Punctuation)), # ffi
+ (r'(\")?([\w\.]+)(\")?',
+ bygroups(Punctuation, Name.Namespace, Punctuation)), # podname
+ (r'::', Punctuation, 'usingClass'),
+ default('#pop')
+ ],
+ 'usingClass': [
+ (r'[ \t]+', Text), # consume whitespaces
+ (r'(as)(\s+)(\w+)',
+ bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'),
+ (r'[\w\$]+', Name.Class),
+ default('#pop:2') # jump out to root state
+ ],
+ 'facet': [
+ (r'\s+', Text),
+ (r'{', Punctuation, 'facetFields'),
+ default('#pop')
+ ],
+ 'facetFields': [
+ include('comments'),
+ include('literals'),
+ include('operators'),
+ (r'\s+', Text),
+ (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)),
+ (r'}', Punctuation, '#pop'),
+ (r'.', Text)
+ ],
+ }
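
The s() helper above is plain string.Template substitution: $id, $type, $pod and $eos placeholders are expanded into the shared sub-regexes before a rule ever reaches the lexer machinery. The same mechanism stripped down (fragment shortened for the example):

    from string import Template

    # Mirrors FantomLexer.s(): named placeholders become regex fragments.
    print(Template(r'($id)(\s*)(:=)').substitute(id=r'[a-zA-Z_]\w*'))
    # -> ([a-zA-Z_]\w*)(\s*)(:=)
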
diff --git a/pygments/lexers/felix.py b/pygments/lexers/felix.py
new file mode 100644
index 00000000..25232378
--- /dev/null
+++ b/pygments/lexers/felix.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.felix
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Felix language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words, \
+ combined
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['FelixLexer']
+
+
+class FelixLexer(RegexLexer):
+ """
+ For `Felix <http://www.felix-lang.org>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+
+ name = 'Felix'
+ aliases = ['felix', 'flx']
+ filenames = ['*.flx', '*.flxh']
+ mimetypes = ['text/x-felix']
+
+ preproc = (
+ 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
+ )
+
+ keywords = (
+ '_', '_deref', 'all', 'as',
+ 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass',
+ 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else',
+ 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except',
+ 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork',
+ 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance',
+ 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace',
+ 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise',
+ 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then',
+ 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto',
+ 'when', 'whilst', 'with', 'yield',
+ )
+
+ keyword_directives = (
+ '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
+ 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn',
+ 'package', 'private', 'pod', 'property', 'public', 'publish',
+ 'requires', 'todo', 'virtual', 'use',
+ )
+
+ keyword_declarations = (
+ 'def', 'let', 'ref', 'val', 'var',
+ )
+
+ keyword_types = (
+ 'unit', 'void', 'any', 'bool',
+ 'byte', 'offset',
+ 'address', 'caddress', 'cvaddress', 'vaddress',
+ 'tiny', 'short', 'int', 'long', 'vlong',
+ 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
+ 'int8', 'int16', 'int32', 'int64',
+ 'uint8', 'uint16', 'uint32', 'uint64',
+ 'float', 'double', 'ldouble',
+ 'complex', 'dcomplex', 'lcomplex',
+ 'imaginary', 'dimaginary', 'limaginary',
+ 'char', 'wchar', 'uchar',
+ 'charp', 'charcp', 'ucharp', 'ucharcp',
+ 'string', 'wstring', 'ustring',
+ 'cont',
+ 'array', 'varray', 'list',
+ 'lvalue', 'opt', 'slice',
+ )
+
+ keyword_constants = (
+ 'false', 'true',
+ )
+
+ operator_words = (
+ 'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
+ )
+
+ name_builtins = (
+ '_svc', 'while',
+ )
+
+ name_pseudo = (
+ 'root', 'self', 'this',
+ )
+
+ decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # Keywords
+ (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
+ 'union'), suffix=r'\b'),
+ Keyword, 'funcname'),
+ (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
+ Keyword, 'classname'),
+ (r'(instance|module|typeclass)\b', Keyword, 'modulename'),
+
+ (words(keywords, suffix=r'\b'), Keyword),
+ (words(keyword_directives, suffix=r'\b'), Name.Decorator),
+ (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
+ (words(keyword_types, suffix=r'\b'), Keyword.Type),
+ (words(keyword_constants, suffix=r'\b'), Keyword.Constant),
+
+ # Operators
+ include('operators'),
+
+ # Float Literal
+ # -- Hex Float
+ (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
+ r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
+ # -- DecimalFloat
+ (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
+ (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?',
+ Number.Float),
+
+ # IntegerLiteral
+ # -- Binary
+ (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
+ # -- Octal
+ (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
+ # -- Hexadecimal
+ (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
+ # -- Decimal
+ (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),
+
+ # Strings
+ ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
+ ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
+ ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
+ ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
+ ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
+ ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),
+
+ # Punctuation
+ (r'[\[\]{}:(),;?]', Punctuation),
+
+ # Labels
+ (r'[a-zA-Z_]\w*:>', Name.Label),
+
+ # Identifiers
+ (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
+ (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+
+ include('comment'),
+
+ # Preprocessor
+ (r'#\s*if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+ ],
+ 'operators': [
+ (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
+ (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
+ ],
+ 'comment': [
+ (r'//(.*?)\n', Comment.Single),
+ (r'/[*]', Comment.Multiline, 'comment2'),
+ ],
+ 'comment2': [
+ (r'[^\/*]', Comment.Multiline),
+ (r'/[*]', Comment.Multiline, '#push'),
+ (r'[*]/', Comment.Multiline, '#pop'),
+ (r'[\/*]', Comment.Multiline),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
+ (r'.*?\n', Comment),
+ ],
+ 'macro': [
+ include('comment'),
+ (r'(import|include)(\s+)(<[^>]*?>)',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'(import|include)(\s+)("[^"]*?")',
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r"(import|include)(\s+)('[^']*?')",
+ bygroups(Comment.Preproc, Text, String), '#pop'),
+ (r'[^/\n]+', Comment.Preproc),
+ # (r'/[*](.|\n)*?[*]/', Comment),
+ # (r'//.*?\n', Comment, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'funcname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
+ # anonymous functions
+ (r'(?=\()', Text, '#pop'),
+ ],
+ 'classname': [
+ include('whitespace'),
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
+ # anonymous classes
+ (r'(?=\{)', Text, '#pop'),
+ ],
+ 'modulename': [
+ include('whitespace'),
+ (r'\[', Punctuation, ('modulename2', 'tvarlist')),
+ default('modulename2'),
+ ],
+ 'modulename2': [
+ include('whitespace'),
+ (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
+ ],
+ 'tvarlist': [
+ include('whitespace'),
+ include('operators'),
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(with|where)\b', Keyword),
+ (r'[a-zA-Z_]\w*', Name),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ # included here again for raw strings
+ (r'\\\\|\\"|\\\n', String.Escape),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ # included here again for raw strings
+ (r"\\\\|\\'|\\\n", String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
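
A minimal usage sketch, not part of the changeset: once this module is installed, the new lexer is reachable directly or through the 'felix'/'flx' aliases registered above. The Felix snippet passed in is an invented example for demonstration only, not taken from the Felix documentation.

    from pygments import highlight
    from pygments.lexers.felix import FelixLexer
    from pygments.formatters import TerminalFormatter

    # Invented Felix-style snippet, used only to drive the tokenizer.
    code = '// add two ints\nfun add (x:int, y:int) => x + y;\nprintln$ add(1, 2);\n'

    # Render with ANSI colours; any other pygments formatter works the same way.
    print(highlight(code, FelixLexer(), TerminalFormatter()))
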
diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py
new file mode 100644
index 00000000..2e08a0c2
--- /dev/null
+++ b/pygments/lexers/fortran.py
@@ -0,0 +1,160 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.fortran
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Fortran languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['FortranLexer']
+
+
+class FortranLexer(RegexLexer):
+ """
+ Lexer for FORTRAN 90 code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Fortran'
+ aliases = ['fortran']
+ filenames = ['*.f', '*.f90', '*.F', '*.F90']
+ mimetypes = ['text/x-fortran']
+ flags = re.IGNORECASE
+
+ # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION
+ # Operators: **, *, +, -, /, <, >, <=, >=, ==, /=
+ # Logical (?): NOT, AND, OR, EQV, NEQV
+
+ # Builtins:
+ # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html
+
+ tokens = {
+ 'root': [
+ (r'!.*\n', Comment),
+ include('strings'),
+ include('core'),
+ (r'[a-z]\w*', Name.Variable),
+ include('nums'),
+ (r'[\s]+', Text),
+ ],
+ 'core': [
+ # Statements
+ (words((
+ 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE',
+ 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND',
+ 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE',
+ 'CODIMENSION', 'COMMON', 'CONCURRENT', 'CONTIGUOUS', 'CONTAINS',
+ 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE',
+ 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END',
+ 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS',
+ 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT',
+ 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT',
+ 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE',
+ 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY',
+ 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL',
+ 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE',
+ 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ',
+ 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE',
+ 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES',
+ 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE',
+ 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword),
+
+ # Data Types
+ (words((
+ 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER',
+ 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR',
+ 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T',
+ 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T',
+ 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T',
+ 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX',
+ 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR',
+ 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'),
+ Keyword.Type),
+
+ # Operators
+ (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator),
+
+ (r'(::)', Keyword.Declaration),
+
+ (r'[()\[\],:&%;]', Punctuation),
+ # Intrinsics
+ (words((
+ 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL',
+ 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog',
+ 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH',
+ 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref',
+ 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0',
+ 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE',
+ 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp',
+ 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count',
+ 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift',
+ 'CSin', 'CSqRt', 'CTime', 'C_Funloc', 'C_Loc', 'C_Associated',
+ 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer',
+ 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc',
+ 'C_Loc', 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return',
+ 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin',
+ 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJ', 'DbesJN', 'DbesY',
+ 'DbesY', 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF',
+ 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DLog', 'DMax',
+ 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH',
+ 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime',
+ 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime',
+ 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of',
+ 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush',
+ 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell',
+ 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument',
+ 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog',
+ 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs',
+ 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits',
+ 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr',
+ 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index',
+ 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC',
+ 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End',
+ 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound',
+ 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk',
+ 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat',
+ 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent',
+ 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc',
+ 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits',
+ 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images',
+ 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product',
+ 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real',
+ 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift',
+ 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind',
+ 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape',
+ 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH',
+ 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand',
+ 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock',
+ 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer',
+ 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask',
+ 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp',
+ 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'),
+ Name.Builtin),
+
+ # Booleans
+ (r'\.(true|false)\.', Name.Builtin),
+ # Comparison and logical operators
+ (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word),
+ ],
+
+ 'strings': [
+ (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ ],
+
+ 'nums': [
+ (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
+ (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
+ ],
+ }
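
Likewise, a small sketch (not part of the commit) exercising the new FortranLexer through the standard pygments entry points; because the filename patterns declared above include '*.f90', get_lexer_for_filename() should resolve to the same class.

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_for_filename

    code = (
        'program hello\n'
        '  implicit none\n'
        '  integer :: i\n'
        '  do i = 1, 3\n'
        '    print *, "iteration", i\n'
        '  end do\n'
        'end program hello\n'
    )

    lexer = get_lexer_for_filename('example.f90')  # expected: FortranLexer
    # noclasses=True inlines the styles so the output needs no stylesheet.
    print(highlight(code, lexer, HtmlFormatter(noclasses=True, linenos=True)))
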
diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py
index 19c54f94..791e8b64 100644
--- a/pygments/lexers/functional.py
+++ b/pygments/lexers/functional.py
@@ -3,3756 +3,19 @@
pygments.lexers.functional
~~~~~~~~~~~~~~~~~~~~~~~~~~
- Lexers for functional languages.
+ Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
+from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \
+ NewLispLexer
+from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \
+ KokaLexer
+from pygments.lexers.theorem import CoqLexer
+from pygments.lexers.erlang import ErlangLexer, ErlangShellLexer, \
+ ElixirConsoleLexer, ElixirLexer
+from pygments.lexers.ml import SMLLexer, OcamlLexer, OpaLexer
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
- String, Number, Punctuation, Literal, Generic, Error
-from pygments import unistring as uni
-
-__all__ = ['RacketLexer', 'SchemeLexer', 'CommonLispLexer', 'CryptolLexer',
- 'HaskellLexer', 'AgdaLexer', 'LiterateCryptolLexer',
- 'LiterateHaskellLexer', 'LiterateAgdaLexer', 'SMLLexer',
- 'OcamlLexer', 'ErlangLexer', 'ErlangShellLexer', 'OpaLexer',
- 'CoqLexer', 'NewLispLexer', 'NixLexer', 'ElixirLexer',
- 'ElixirConsoleLexer', 'KokaLexer', 'IdrisLexer', 'LeanLexer',
- 'LiterateIdrisLexer']
-
-
-line_re = re.compile('.*?\n')
-
-
-class RacketLexer(RegexLexer):
- """
- Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
- known as PLT Scheme).
-
- .. versionadded:: 1.6
- """
-
- name = 'Racket'
- aliases = ['racket', 'rkt']
- filenames = ['*.rkt', '*.rktd', '*.rktl']
- mimetypes = ['text/x-racket', 'application/x-racket']
-
- # Generated by example.rkt
- _keywords = [
- '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
- '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
- '#%printing-module-begin', '#%provide', '#%require',
- '#%stratified-body', '#%top', '#%top-interaction',
- '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
- '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
- 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
- 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
- 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
- 'case-lambda', 'class', 'class*', 'class-field-accessor',
- 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
- 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
- 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted',
- 'define', 'define-compound-unit', 'define-compound-unit/infer',
- 'define-contract-struct', 'define-custom-hash-types',
- 'define-custom-set-types', 'define-for-syntax',
- 'define-local-member-name', 'define-logger', 'define-match-expander',
- 'define-member-name', 'define-module-boundary-contract',
- 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax',
- 'define-serializable-class', 'define-serializable-class*',
- 'define-signature', 'define-signature-form', 'define-struct',
- 'define-struct/contract', 'define-struct/derived', 'define-syntax',
- 'define-syntax-rule', 'define-syntaxes', 'define-unit',
- 'define-unit-binding', 'define-unit-from-context',
- 'define-unit/contract', 'define-unit/new-import-export',
- 'define-unit/s', 'define-values', 'define-values-for-export',
- 'define-values-for-syntax', 'define-values/invoke-unit',
- 'define-values/invoke-unit/infer', 'define/augment',
- 'define/augment-final', 'define/augride', 'define/contract',
- 'define/final-prop', 'define/match', 'define/overment',
- 'define/override', 'define/override-final', 'define/private',
- 'define/public', 'define/public-final', 'define/pubment',
- 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name',
- 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except',
- 'except-in', 'except-out', 'export', 'extends', 'failure-cont',
- 'false', 'false/c', 'field', 'field-bound?', 'file',
- 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
- 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash',
- 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists',
- 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv',
- 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv',
- 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq',
- 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax',
- 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived',
- 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list',
- 'for/lists', 'for/mutable-set', 'for/mutable-seteq',
- 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq',
- 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set',
- 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict',
- 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field',
- 'if', 'implies', 'import', 'include', 'include-at/relative-to',
- 'include-at/relative-to/reader', 'include/reader', 'inherit',
- 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
- 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
- 'instantiate', 'interface', 'interface*', 'invoke-unit',
- 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values',
- 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec',
- 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values',
- 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug',
- 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*',
- 'match*/derived', 'match-define', 'match-define-values',
- 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let',
- 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec',
- 'match/derived', 'match/values', 'member-name-key', 'method-contract?',
- 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor',
- 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in',
- 'open', 'opt/c', 'or', 'overment', 'overment*', 'override',
- 'override*', 'override-final', 'override-final*', 'parameterize',
- 'parameterize*', 'parameterize-break', 'parametric->/c', 'place',
- 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private',
- 'private*', 'prompt-tag/c', 'protect-out', 'provide',
- 'provide-signature-elements', 'provide/contract', 'public', 'public*',
- 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
- 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
- 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
- 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
- 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
- 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
- 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*',
- 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
- 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
- 'super-make-object', 'super-new', 'syntax', 'syntax-case',
- 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
- 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
- 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
- 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
- 'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
- 'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
- 'with-syntax', u'λ'
- ]
-
- # Generated by example.rkt
- _builtins = [
- '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
- '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
- 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
- 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
- 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
- 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
- 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
- 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
- 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
- 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
- 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
- 'blame-add-context', 'blame-add-missing-party',
- 'blame-add-nth-arg-context', 'blame-add-or-context',
- 'blame-add-range-context', 'blame-add-unknown-context',
- 'blame-context', 'blame-contract', 'blame-fmt->-string',
- 'blame-negative', 'blame-original?', 'blame-positive',
- 'blame-replace-negative', 'blame-source', 'blame-swap',
- 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
- 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
- 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
- 'build-chaperone-contract-property', 'build-compound-type-name',
- 'build-contract-property', 'build-flat-contract-property',
- 'build-list', 'build-path', 'build-path/convention-type',
- 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
- 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
- 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
- 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
- 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
- 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
- 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
- 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
- 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
- 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
- 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
- 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
- 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
- 'call-in-nested-thread', 'call-with-atomic-output-file',
- 'call-with-break-parameterization',
- 'call-with-composable-continuation', 'call-with-continuation-barrier',
- 'call-with-continuation-prompt', 'call-with-current-continuation',
- 'call-with-default-reading-parameterization',
- 'call-with-escape-continuation', 'call-with-exception-handler',
- 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
- 'call-with-input-bytes', 'call-with-input-file',
- 'call-with-input-file*', 'call-with-input-string',
- 'call-with-output-bytes', 'call-with-output-file',
- 'call-with-output-file*', 'call-with-output-string',
- 'call-with-parameterization', 'call-with-semaphore',
- 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
- 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
- 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
- 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
- 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
- 'channel?', 'chaperone-box', 'chaperone-channel',
- 'chaperone-continuation-mark-key', 'chaperone-contract-property?',
- 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
- 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
- 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
- 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
- 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
- 'char-downcase', 'char-foldcase', 'char-general-category',
- 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
- 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
- 'char-title-case?', 'char-titlecase', 'char-upcase',
- 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
- 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
- 'check-duplicate-identifier', 'checked-procedure-check-and-extract',
- 'choice-evt', 'class->interface', 'class-info', 'class?',
- 'cleanse-path', 'close-input-port', 'close-output-port',
- 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
- 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
- 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
- 'collection-file-path', 'collection-path', 'compile',
- 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
- 'compile-enforce-module-constants', 'compile-syntax',
- 'compiled-expression?', 'compiled-module-expression?',
- 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
- 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
- 'continuation-mark-key?', 'continuation-mark-set->context',
- 'continuation-mark-set->list', 'continuation-mark-set->list*',
- 'continuation-mark-set-first', 'continuation-mark-set?',
- 'continuation-marks', 'continuation-prompt-available?',
- 'continuation-prompt-tag?', 'continuation?',
- 'contract-continuation-mark-key', 'contract-first-order',
- 'contract-first-order-passes?', 'contract-name', 'contract-proc',
- 'contract-projection', 'contract-property?',
- 'contract-random-generate', 'contract-stronger?',
- 'contract-struct-exercise', 'contract-struct-generate',
- 'contract-val-first-projection', 'contract?', 'convert-stream',
- 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
- 'count', 'current-blame-format', 'current-break-parameterization',
- 'current-code-inspector', 'current-command-line-arguments',
- 'current-compile', 'current-compiled-file-roots',
- 'current-continuation-marks', 'current-contract-region',
- 'current-custodian', 'current-directory', 'current-directory-for-user',
- 'current-drive', 'current-environment-variables', 'current-error-port',
- 'current-eval', 'current-evt-pseudo-random-generator',
- 'current-future', 'current-gc-milliseconds',
- 'current-get-interaction-input-port', 'current-inexact-milliseconds',
- 'current-input-port', 'current-inspector',
- 'current-library-collection-links', 'current-library-collection-paths',
- 'current-load', 'current-load-extension',
- 'current-load-relative-directory', 'current-load/use-compiled',
- 'current-locale', 'current-logger', 'current-memory-use',
- 'current-milliseconds', 'current-module-declare-name',
- 'current-module-declare-source', 'current-module-name-resolver',
- 'current-module-path-for-load', 'current-namespace',
- 'current-output-port', 'current-parameterization',
- 'current-preserved-thread-cell-values', 'current-print',
- 'current-process-milliseconds', 'current-prompt-read',
- 'current-pseudo-random-generator', 'current-read-interaction',
- 'current-reader-guard', 'current-readtable', 'current-seconds',
- 'current-security-guard', 'current-subprocess-custodian-mode',
- 'current-thread', 'current-thread-group',
- 'current-thread-initial-stack-size',
- 'current-write-relative-directory', 'curry', 'curryr',
- 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
- 'custodian-managed-list', 'custodian-memory-accounting-available?',
- 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
- 'custom-print-quotable-accessor', 'custom-print-quotable?',
- 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
- 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
- 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
- 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
- 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
- 'default-continuation-prompt-tag', 'degrees->radians',
- 'delete-directory', 'delete-directory/files', 'delete-file',
- 'denominator', 'dict->list', 'dict-can-functional-set?',
- 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
- 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
- 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
- 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
- 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
- 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
- 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
- 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
- 'dict?', 'directory-exists?', 'directory-list', 'display',
- 'display-lines', 'display-lines-to-file', 'display-to-file',
- 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf',
- 'dropf-right', 'dump-memory-stats', 'dup-input-port',
- 'dup-output-port', 'dynamic-get-field', 'dynamic-place',
- 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax',
- 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth',
- 'empty', 'empty-sequence', 'empty-stream', 'empty?',
- 'environment-variables-copy', 'environment-variables-names',
- 'environment-variables-ref', 'environment-variables-set!',
- 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
- 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
- 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
- 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
- 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
- 'error-display-handler', 'error-escape-handler',
- 'error-print-context-length', 'error-print-source-location',
- 'error-print-width', 'error-value->string-handler', 'eval',
- 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
- 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
- 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
- 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
- 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
- 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
- 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
- 'exn:break?', 'exn:fail', 'exn:fail:contract',
- 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
- 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
- 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
- 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
- 'exn:fail:contract:divide-by-zero?',
- 'exn:fail:contract:non-fixnum-result',
- 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
- 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
- 'exn:fail:contract?', 'exn:fail:filesystem',
- 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
- 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
- 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
- 'exn:fail:filesystem:missing-module-path',
- 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
- 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
- 'exn:fail:network', 'exn:fail:network:errno',
- 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
- 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
- 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
- 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
- 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
- 'exn:fail:syntax', 'exn:fail:syntax-exprs',
- 'exn:fail:syntax:missing-module',
- 'exn:fail:syntax:missing-module-path',
- 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
- 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
- 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
- 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
- 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
- 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
- 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
- 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names',
- 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines',
- 'file->list', 'file->string', 'file->value', 'file-exists?',
- 'file-name-from-path', 'file-or-directory-identity',
- 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
- 'file-position', 'file-position*', 'file-size',
- 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
- 'filename-extension', 'filesystem-change-evt',
- 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
- 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
- 'filter-read-input-port', 'find-executable-path', 'find-files',
- 'find-library-collection-links', 'find-library-collection-paths',
- 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?',
- 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?',
- 'flat-contract?', 'flat-named-contract', 'flatten',
- 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
- 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format',
- 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?',
- 'free-template-identifier=?', 'free-transformer-identifier=?',
- 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?',
- 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?',
- 'futures-enabled?', 'gcd', 'generate-member-key',
- 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
- 'get-output-bytes', 'get-output-string', 'get-preference',
- 'get/build-val-first-projection', 'getenv',
- 'global-port-print-handler', 'group-execute-bit', 'group-read-bit',
- 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?',
- 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!',
- 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?',
- 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each',
- 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key',
- 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map',
- 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
- 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
- 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
- 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
- 'identifier-binding-symbol', 'identifier-label-binding',
- 'identifier-prune-lexical-context',
- 'identifier-prune-to-source-module',
- 'identifier-remove-from-definition-context',
- 'identifier-template-binding', 'identifier-transformer-binding',
- 'identifier?', 'identity', 'imag-part', 'immutable?',
- 'impersonate-box', 'impersonate-channel',
- 'impersonate-continuation-mark-key', 'impersonate-hash',
- 'impersonate-procedure', 'impersonate-prompt-tag',
- 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
- 'impersonator-ephemeron', 'impersonator-of?',
- 'impersonator-prop:application-mark', 'impersonator-prop:contracted',
- 'impersonator-property-accessor-procedure?', 'impersonator-property?',
- 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
- 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys',
- 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash',
- 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed',
- 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list',
- 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port',
- 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream',
- 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence',
- 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?',
- 'infinite?', 'input-port-append', 'input-port?', 'inspector?',
- 'instanceof/c', 'integer->char', 'integer->integer-bytes',
- 'integer-bytes->integer', 'integer-in', 'integer-length',
- 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
- 'interface->method-names', 'interface-extension?', 'interface?',
- 'internal-definition-context-seal', 'internal-definition-context?',
- 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
- 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
- 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
- 'list*', 'list->bytes', 'list->mutable-set', 'list->mutable-seteq',
- 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv',
- 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq',
- 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?',
- 'listof', 'load', 'load-extension', 'load-on-demand-enabled',
- 'load-relative', 'load-relative-extension', 'load/cd',
- 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts',
- 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
- 'locale-string-encoding', 'log', 'log-level?', 'log-max-level',
- 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude',
- 'make-arity-at-least', 'make-base-empty-namespace',
- 'make-base-namespace', 'make-bytes', 'make-channel',
- 'make-chaperone-contract', 'make-continuation-mark-key',
- 'make-continuation-prompt-tag', 'make-contract', 'make-custodian',
- 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types',
- 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*',
- 'make-derived-parameter', 'make-directory', 'make-directory*',
- 'make-do-sequence', 'make-empty-namespace',
- 'make-environment-variables', 'make-ephemeron', 'make-exn',
- 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
- 'make-exn:fail', 'make-exn:fail:contract',
- 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
- 'make-exn:fail:contract:continuation',
- 'make-exn:fail:contract:divide-by-zero',
- 'make-exn:fail:contract:non-fixnum-result',
- 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
- 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
- 'make-exn:fail:filesystem:missing-module',
- 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
- 'make-exn:fail:network:errno', 'make-exn:fail:object',
- 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
- 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
- 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
- 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
- 'make-exn:fail:user', 'make-file-or-directory-link',
- 'make-flat-contract', 'make-fsemaphore', 'make-generic',
- 'make-handle-get-preference-locked', 'make-hash',
- 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
- 'make-hasheqv', 'make-hasheqv-placeholder',
- 'make-immutable-custom-hash', 'make-immutable-hash',
- 'make-immutable-hasheq', 'make-immutable-hasheqv',
- 'make-impersonator-property', 'make-input-port',
- 'make-input-port/read-to-peek', 'make-inspector',
- 'make-keyword-procedure', 'make-known-char-range-list',
- 'make-limited-input-port', 'make-list', 'make-lock-file-name',
- 'make-log-receiver', 'make-logger', 'make-mixin-contract',
- 'make-mutable-custom-set', 'make-none/c', 'make-object',
- 'make-output-port', 'make-parameter', 'make-phantom-bytes',
- 'make-pipe', 'make-pipe-with-specials', 'make-placeholder',
- 'make-polar', 'make-prefab-struct', 'make-primitive-class',
- 'make-proj-contract', 'make-pseudo-random-generator',
- 'make-reader-graph', 'make-readtable', 'make-rectangular',
- 'make-rename-transformer', 'make-resolved-module-path',
- 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
- 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment',
- 'make-srcloc', 'make-string', 'make-struct-field-accessor',
- 'make-struct-field-mutator', 'make-struct-type',
- 'make-struct-type-property', 'make-syntax-delta-introducer',
- 'make-syntax-introducer', 'make-temporary-file',
- 'make-tentative-pretty-print-output-port', 'make-thread-cell',
- 'make-thread-group', 'make-vector', 'make-weak-box',
- 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash',
- 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map',
- 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr',
- 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?',
- 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input',
- 'method-in-interface?', 'min', 'mixin-contract', 'module->exports',
- 'module->imports', 'module->language-info', 'module->namespace',
- 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
- 'module-compiled-imports', 'module-compiled-language-info',
- 'module-compiled-name', 'module-compiled-submodules',
- 'module-declared?', 'module-path-index-join',
- 'module-path-index-resolve', 'module-path-index-split',
- 'module-path-index-submodule', 'module-path-index?', 'module-path?',
- 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
- 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
- 'nack-guard-evt', 'namespace-anchor->empty-namespace',
- 'namespace-anchor->namespace', 'namespace-anchor?',
- 'namespace-attach-module', 'namespace-attach-module-declaration',
- 'namespace-base-phase', 'namespace-mapped-symbols',
- 'namespace-module-identifier', 'namespace-module-registry',
- 'namespace-require', 'namespace-require/constant',
- 'namespace-require/copy', 'namespace-require/expansion-time',
- 'namespace-set-variable-value!', 'namespace-symbol->identifier',
- 'namespace-syntax-introduce', 'namespace-undefine-variable!',
- 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
- 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
- u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
- 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path',
- 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string',
- 'number?', 'numerator', 'object%', 'object->vector', 'object-info',
- 'object-interface', 'object-method-arity-includes?', 'object-name',
- 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes',
- 'open-input-file', 'open-input-output-file', 'open-input-string',
- 'open-output-bytes', 'open-output-file', 'open-output-nowhere',
- 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap',
- 'other-execute-bit', 'other-read-bit', 'other-write-bit',
- 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c',
- 'parameter?', 'parameterization?', 'parse-command-line', 'partition',
- 'path->bytes', 'path->complete-path', 'path->directory-path',
- 'path->string', 'path-add-suffix', 'path-convention-type',
- 'path-element->bytes', 'path-element->string', 'path-element?',
- 'path-for-some-system?', 'path-list-string->path-list', 'path-only',
- 'path-replace-suffix', 'path-string?', 'path<?', 'path?',
- 'pathlist-closure', 'peek-byte', 'peek-byte-or-special', 'peek-bytes',
- 'peek-bytes!', 'peek-bytes!-evt', 'peek-bytes-avail!',
- 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
- 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
- 'peek-char-or-special', 'peek-string', 'peek-string!',
- 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
- 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
- 'place-break', 'place-channel', 'place-channel-get',
- 'place-channel-put', 'place-channel-put/get', 'place-channel?',
- 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
- 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
- 'placeholder-get', 'placeholder-set!', 'placeholder?',
- 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
- 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
- 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
- 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
- 'port-file-unlock', 'port-next-location', 'port-print-handler',
- 'port-progress-evt', 'port-provides-progress-evts?',
- 'port-read-handler', 'port-try-file-lock?', 'port-write-handler',
- 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?',
- 'predicate/c', 'prefab-key->struct-type', 'prefab-key?',
- 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp',
- 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print',
- 'pretty-print-.-symbol-without-bars',
- 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
- 'pretty-print-current-style-table', 'pretty-print-depth',
- 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
- 'pretty-print-handler', 'pretty-print-newline',
- 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
- 'pretty-print-print-hook', 'pretty-print-print-line',
- 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
- 'pretty-print-size-hook', 'pretty-print-style-table?',
- 'pretty-printing', 'pretty-write', 'primitive-closure?',
- 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
- 'print-boolean-long-form', 'print-box', 'print-graph',
- 'print-hash-table', 'print-mpair-curly-braces',
- 'print-pair-curly-braces', 'print-reader-abbreviations',
- 'print-struct', 'print-syntax-width', 'print-unreadable',
- 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
- 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c',
- 'procedure-arity-includes?', 'procedure-arity?',
- 'procedure-closure-contents-eq?', 'procedure-extract-target',
- 'procedure-keywords', 'procedure-reduce-arity',
- 'procedure-reduce-keyword-arity', 'procedure-rename',
- 'procedure-struct-type?', 'procedure?', 'process', 'process*',
- 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
- 'promise-forced?', 'promise-running?', 'promise/c', 'promise?',
- 'prop:arity-string', 'prop:chaperone-contract',
- 'prop:checked-procedure', 'prop:contract', 'prop:contracted',
- 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict',
- 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
- 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract',
- 'prop:impersonator-of', 'prop:input-port',
- 'prop:liberal-define-context', 'prop:opt-chaperone-contract',
- 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?',
- 'prop:output-port', 'prop:place-location', 'prop:procedure',
- 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer',
- 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector',
- 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
- 'put-preferences', 'putenv', 'quotient', 'quotient/remainder',
- 'radians->degrees', 'raise', 'raise-argument-error',
- 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error',
- 'raise-contract-error', 'raise-mismatch-error',
- 'raise-not-cons-blame-error', 'raise-range-error',
- 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
- 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
- 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
- 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
- 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
- 'read-accept-reader', 'read-byte', 'read-byte-or-special',
- 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
- 'read-bytes-avail!*', 'read-bytes-avail!-evt',
- 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
- 'read-bytes-line-evt', 'read-case-sensitive', 'read-char',
- 'read-char-or-special', 'read-curly-brace-as-paren',
- 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language',
- 'read-line', 'read-line-evt', 'read-on-demand-source',
- 'read-square-bracket-as-paren', 'read-string', 'read-string!',
- 'read-string!-evt', 'read-string-evt', 'read-syntax',
- 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
- 'readtable?', 'real->decimal-string', 'real->double-flonum',
- 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
- 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
- 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
- 'regexp-match-exact?', 'regexp-match-peek',
- 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
- 'regexp-match-peek-positions*',
- 'regexp-match-peek-positions-immediate',
- 'regexp-match-peek-positions-immediate/end',
- 'regexp-match-peek-positions/end', 'regexp-match-positions',
- 'regexp-match-positions*', 'regexp-match-positions/end',
- 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
- 'regexp-quote', 'regexp-replace', 'regexp-replace*',
- 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
- 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
- 'relocate-output-port', 'remainder', 'remove', 'remove*',
- 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
- 'rename-file-or-directory', 'rename-transformer-target',
- 'rename-transformer?', 'reroot-path', 'resolve-path',
- 'resolved-module-path-name', 'resolved-module-path?', 'rest',
- 'reverse', 'round', 'second', 'seconds->date', 'security-guard?',
- 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post',
- 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break',
- 'semaphore?', 'sequence->list', 'sequence->stream',
- 'sequence-add-between', 'sequence-andmap', 'sequence-append',
- 'sequence-count', 'sequence-filter', 'sequence-fold',
- 'sequence-for-each', 'sequence-generate', 'sequence-generate*',
- 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref',
- 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure',
- 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
- 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
- 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
- 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
- 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
- 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
- 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
- 'set-subtract', 'set-subtract!', 'set-symmetric-difference',
- 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?',
- 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn',
- 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle',
- 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh',
- 'sixth', 'skip-projection-wrapper?', 'sleep',
- 'some-system-path->string', 'sort', 'special-comment-value',
- 'special-comment?', 'special-filter-input-port', 'split-at',
- 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr',
- 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line',
- 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?',
- 'stop-after', 'stop-before', 'stream->list', 'stream-add-between',
- 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?',
- 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each',
- 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref',
- 'stream-rest', 'stream-tail', 'stream?', 'string',
- 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8',
- 'string->immutable-string', 'string->keyword', 'string->list',
- 'string->number', 'string->path', 'string->path-element',
- 'string->some-system-path', 'string->symbol',
- 'string->uninterned-symbol', 'string->unreadable-symbol',
- 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
- 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-copy',
- 'string-copy!', 'string-downcase', 'string-environment-variable-name?',
- 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c',
- 'string-length', 'string-locale-ci<?', 'string-locale-ci=?',
- 'string-locale-ci>?', 'string-locale-downcase', 'string-locale-upcase',
- 'string-locale<?', 'string-locale=?', 'string-locale>?',
- 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd',
- 'string-normalize-nfkc', 'string-normalize-nfkd',
- 'string-normalize-spaces', 'string-ref', 'string-replace',
- 'string-set!', 'string-split', 'string-titlecase', 'string-trim',
- 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
- 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
- 'struct-accessor-procedure?', 'struct-constructor-procedure?',
- 'struct-info', 'struct-mutator-procedure?',
- 'struct-predicate-procedure?', 'struct-type-info',
- 'struct-type-make-constructor', 'struct-type-make-predicate',
- 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
- 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
- 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
- 'struct:exn:break:hang-up', 'struct:exn:break:terminate',
- 'struct:exn:fail', 'struct:exn:fail:contract',
- 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame',
- 'struct:exn:fail:contract:continuation',
- 'struct:exn:fail:contract:divide-by-zero',
- 'struct:exn:fail:contract:non-fixnum-result',
- 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
- 'struct:exn:fail:filesystem:errno',
- 'struct:exn:fail:filesystem:exists',
- 'struct:exn:fail:filesystem:missing-module',
- 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
- 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
- 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
- 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
- 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
- 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
- 'struct:exn:fail:user', 'struct:srcloc',
- 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
- 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
- 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
- 'subprocess-wait', 'subprocess?', 'subset?', 'substring',
- 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
- 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
- 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
- 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm',
- 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes',
- 'syntax-local-certifier', 'syntax-local-context',
- 'syntax-local-expand-expression', 'syntax-local-get-shadower',
- 'syntax-local-introduce', 'syntax-local-lift-context',
- 'syntax-local-lift-expression',
- 'syntax-local-lift-module-end-declaration',
- 'syntax-local-lift-provide', 'syntax-local-lift-require',
- 'syntax-local-lift-values-expression',
- 'syntax-local-make-definition-context',
- 'syntax-local-make-delta-introducer',
- 'syntax-local-module-defined-identifiers',
- 'syntax-local-module-exports',
- 'syntax-local-module-required-identifiers', 'syntax-local-name',
- 'syntax-local-phase-level', 'syntax-local-submodules',
- 'syntax-local-transforming-module-provides?', 'syntax-local-value',
- 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
- 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect',
- 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level',
- 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint',
- 'syntax-tainted?', 'syntax-track-origin',
- 'syntax-transforming-module-expression?', 'syntax-transforming?',
- 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code',
- 'system-big-endian?', 'system-idle-evt', 'system-language+country',
- 'system-library-subpath', 'system-path-convention-type', 'system-type',
- 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef',
- 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept',
- 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break',
- 'tcp-addresses', 'tcp-close', 'tcp-connect',
- 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?',
- 'tentative-pretty-print-port-cancel',
- 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
- 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
- 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
- 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
- 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
- 'thread-rewind-receive', 'thread-running?', 'thread-send',
- 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
- 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
- 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
- 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
- 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
- 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
- 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
- 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
- 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
- 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
- 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
- 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
- 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
- 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
- 'unsupplied-arg?', 'use-collection-link-paths',
- 'use-compiled-file-paths', 'use-user-specific-search-paths',
- 'user-execute-bit', 'user-read-bit', 'user-write-bit',
- 'value-contract', 'values', 'variable-reference->empty-namespace',
- 'variable-reference->module-base-phase',
- 'variable-reference->module-declaration-inspector',
- 'variable-reference->module-path-index',
- 'variable-reference->module-source', 'variable-reference->namespace',
- 'variable-reference->phase',
- 'variable-reference->resolved-module-path',
- 'variable-reference-constant?', 'variable-reference?', 'vector',
- 'vector->immutable-vector', 'vector->list',
- 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
- 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
- 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
- 'vector-drop-right', 'vector-fill!', 'vector-filter',
- 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
- 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
- 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
- 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
- 'vector-split-at', 'vector-split-at-right', 'vector-take',
- 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
- 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
- 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
- 'will-register', 'will-try-execute', 'with-input-from-bytes',
- 'with-input-from-file', 'with-input-from-string',
- 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
- 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
- 'wrapped-extra-arg-arrow-extra-neg-party-argument',
- 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
- 'writable<%>', 'write', 'write-byte', 'write-bytes',
- 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
- 'write-bytes-avail/enable-break', 'write-char', 'write-special',
- 'write-special-avail*', 'write-special-evt', 'write-string',
- 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r',
- '~s', '~v'
- ]
-
- _opening_parenthesis = r'[([{]'
- _closing_parenthesis = r'[)\]}]'
- _delimiters = r'()[\]{}",\'`;\s'
- _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
- _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
- _exponent = r'(?:[defls][-+]?\d+)'
- _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
- _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
- r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
- _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
- _exponent)
- _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
- _inexact_special = r'(?:(?:inf|nan)\.[0f])'
- _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
- _inexact_special)
- _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
-
- tokens = {
- 'root': [
- (_closing_parenthesis, Error),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'datum': [
- (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
- (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
- (r'#\|', Comment.Multiline, 'block-comment'),
-
- # Whitespaces
- (r'(?u)\s+', Text),
-
- # Numbers: Keep in mind Racket reader hash prefixes, which
- # can denote the base or the type. These don't map neatly
- # onto Pygments token types; some judgment calls here.
-
- # #d or no prefix
- (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
- Number.Integer, '#pop'),
- (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
- (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
- (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
- (_exact_decimal_prefix, _inexact_normal_no_hashes,
- _inexact_normal_no_hashes, _inexact_normal_no_hashes,
- _delimiters), Number, '#pop'),
-
- # Inexact without explicit #i
- (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
- (_inexact_real, _inexact_unsigned, _inexact_unsigned,
- _inexact_real, _inexact_real, _delimiters), Number.Float,
- '#pop'),
-
- # The remaining extflonums
- (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
- (_inexact_simple, _delimiters), Number.Float, '#pop'),
-
- # #b
- (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
-
- # #o
- (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
-
- # #x
- (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
-
- # #i is always inexact, i.e. float
- (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
-
- # Strings and characters
- (r'#?"', String.Double, ('#pop', 'string')),
- (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
- (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
- (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
- (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
-
- # Constants
- (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
-
- # Keyword argument names (e.g. #:keyword)
- (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
-
- # Reader extensions
- (r'(#lang |#!)(\S+)',
- bygroups(Keyword.Namespace, Name.Namespace)),
- (r'#reader', Keyword.Namespace, 'quoted-datum'),
-
- # Other syntax
- (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
- (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
- Operator, ('#pop', 'quoted-datum'))
- ],
- 'datum*': [
- (r'`|,@?', Operator),
- (_symbol, String.Symbol, '#pop'),
- (r'[|\\]', Error),
- default('#pop')
- ],
- 'list': [
- (_closing_parenthesis, Punctuation, '#pop')
- ],
- 'unquoted-datum': [
- include('datum'),
- (r'quote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quoted-datum')),
- (r'`', Operator, ('#pop', 'quasiquoted-datum')),
- (r'quasiquote(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'quasiquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
- (r'(?u)(%s)(?=[%s])' % ('|'.join(
- [re.escape(entry) for entry in _keywords]), _delimiters),
- Keyword, '#pop'),
- (r'(?u)(%s)(?=[%s])' % ('|'.join(
- [re.escape(entry) for entry in _builtins]), _delimiters),
- Name.Builtin, '#pop'),
- (_symbol, Name, '#pop'),
- include('datum*')
- ],
- 'unquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'unquoted-datum')
- ],
- 'quasiquoted-datum': [
- include('datum'),
- (r',@?', Operator, ('#pop', 'unquoted-datum')),
- (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
- ('#pop', 'unquoted-datum')),
- (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
- include('datum*')
- ],
- 'quasiquoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quasiquoted-datum')
- ],
- 'quoted-datum': [
- include('datum'),
- (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
- include('datum*')
- ],
- 'quoted-list': [
- include('list'),
- (r'(?!\Z)', Text, 'quoted-datum')
- ],
- 'block-comment': [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^#|]+|.', Comment.Multiline)
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
- r'U[\da-fA-F]{1,8}|.)', String.Escape),
- (r'[^\\"]+', String.Double)
- ]
- }
-
-
-class SchemeLexer(RegexLexer):
- """
- A Scheme lexer, parsing a stream and outputting the tokens
- needed to highlight Scheme code.
- This lexer can most probably be subclassed fairly easily to parse
- other Lisp dialects such as Common Lisp, Emacs Lisp or AutoLisp.
-
- This parser is checked with pastes from the LISP pastebin
- at http://paste.lisp.org/ to cover as much syntax as possible.
-
- It supports the full Scheme syntax as defined in R5RS.
-
- .. versionadded:: 0.6
- """
- name = 'Scheme'
- aliases = ['scheme', 'scm']
- filenames = ['*.scm', '*.ss']
- mimetypes = ['text/x-scheme', 'application/x-scheme']
-
- # list of known keywords and builtins taken from the vim 6.4 scheme.vim
- # syntax file.
- keywords = [
- 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
- 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
- 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
- 'let-syntax', 'letrec-syntax', 'syntax-rules'
- ]
- builtins = [
- '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
- 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
- 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
- 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
- 'cadr', 'call-with-current-continuation', 'call-with-input-file',
- 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
- 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
- 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
- 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
- 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
- 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
- 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
- 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
- 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
- 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
- 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
- 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
- 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
- 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
- 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
- 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
- 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
- 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
- 'null?', 'number->string', 'number?', 'numerator', 'odd?',
- 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
- 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
- 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
- 'remainder', 'reverse', 'round', 'scheme-report-environment',
- 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
- 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
- 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
- 'string-copy', 'string-fill!', 'string-length', 'string-ref',
- 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
- 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
- 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
- 'vector', 'vector->list', 'vector-fill!', 'vector-length',
- 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
- 'with-output-to-file', 'write', 'write-char', 'zero?'
- ]
-
- # valid names for identifiers
- # strictly speaking, a name just must not consist entirely of digits,
- # but this approximation should be good enough for now
- valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
-
- tokens = {
- 'root' : [
- # the comments
- # and going to the end of the line
- (r';.*$', Comment.Single),
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
- # commented form (the entire following sexpr is commented out)
- (r'#;\s*\(', Comment, 'commented-form'),
- # signifies that the program text that follows is written with the
- # lexical and datum syntax described in r6rs
- (r'#!r6rs', Comment),
-
- # whitespaces - usually not relevant
- (r'\s+', Text),
-
- # numbers
- (r'-?\d+\.\d+', Number.Float),
- (r'-?\d+', Number.Integer),
- # support for reader-prefixed numbers is still missing; #e/#i select
- # exactness and #b/#o/#d/#x select the radix
- #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'" + valid_name, String.Symbol),
- (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
-
- # constants
- (r'(#t|#f)', Name.Constant),
-
- # special operators
- (r"('|#|`|,@|,|\.)", Operator),
-
- # highlight the keywords
- ('(%s)' % '|'.join([
- re.escape(entry) + ' ' for entry in keywords]),
- Keyword
- ),
-
- # first variable in a quoted list like
- # '(this is syntactic sugar)
- (r"(?<='\()" + valid_name, Name.Variable),
- (r"(?<=#\()" + valid_name, Name.Variable),
-
- # highlight the builtins
- ("(?<=\()(%s)" % '|'.join([
- re.escape(entry) + ' ' for entry in builtins]),
- Name.Builtin
- ),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Function),
- # find the remaining variables
- (valid_name, Name.Variable),
-
- # the famous parentheses!
- (r'(\(|\))', Punctuation),
- (r'(\[|\])', Punctuation),
- ],
- 'multiline-comment' : [
- (r'#\|', Comment.Multiline, '#push'),
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form' : [
- (r'\(', Comment, '#push'),
- (r'\)', Comment, '#pop'),
- (r'[^()]+', Comment),
- ],
- }
-
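-# A minimal usage sketch (an illustrative assumption about typical use of the
-# public pygments API, not part of the original module): the SchemeLexer above
-# plugs into highlight() like any other lexer; the formatter choice is arbitrary.
-#
-#     from pygments import highlight
-#     from pygments.lexers import SchemeLexer
-#     from pygments.formatters import HtmlFormatter
-#
-#     code = '(define (square x) (* x x))'
-#     print(highlight(code, SchemeLexer(), HtmlFormatter()))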
-
-class CommonLispLexer(RegexLexer):
- """
- A Common Lisp lexer.
-
- .. versionadded:: 0.9
- """
- name = 'Common Lisp'
- aliases = ['common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp']
- filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too
- mimetypes = ['text/x-common-lisp']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- ### couple of useful regexes
-
- # characters that are not macro-characters and can be used to begin a symbol
- nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
- constituent = nonmacro + '|[#.:]'
- terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
-
- ### symbol token, reverse-engineered from hyperspec
- # Take a deep breath...
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
-
- def __init__(self, **options):
- from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \
- SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
- BUILTIN_TYPES, BUILTIN_CLASSES
- self.builtin_function = BUILTIN_FUNCTIONS
- self.special_forms = SPECIAL_FORMS
- self.macros = MACROS
- self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
- self.declarations = DECLARATIONS
- self.builtin_types = BUILTIN_TYPES
- self.builtin_classes = BUILTIN_CLASSES
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Variable:
- if value in self.builtin_function:
- yield index, Name.Builtin, value
- continue
- if value in self.special_forms:
- yield index, Keyword, value
- continue
- if value in self.macros:
- yield index, Name.Builtin, value
- continue
- if value in self.lambda_list_keywords:
- yield index, Keyword, value
- continue
- if value in self.declarations:
- yield index, Keyword, value
- continue
- if value in self.builtin_types:
- yield index, Keyword.Type, value
- continue
- if value in self.builtin_classes:
- yield index, Name.Class, value
- continue
- yield index, token, value
-
- tokens = {
- 'root' : [
- ('', Text, 'body'),
- ],
- 'multiline-comment' : [
- (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
- (r'\|#', Comment.Multiline, '#pop'),
- (r'[^|#]+', Comment.Multiline),
- (r'[|#]', Comment.Multiline),
- ],
- 'commented-form' : [
- (r'\(', Comment.Preproc, '#push'),
- (r'\)', Comment.Preproc, '#pop'),
- (r'[^()]+', Comment.Preproc),
- ],
- 'body' : [
- # whitespace
- (r'\s+', Text),
-
- # single-line comment
- (r';.*$', Comment.Single),
-
- # multi-line comment
- (r'#\|', Comment.Multiline, 'multiline-comment'),
-
- # encoding comment (?)
- (r'#\d*Y.*$', Comment.Special),
-
- # strings and characters
- (r'"(\\.|\\\n|[^"\\])*"', String),
- # quoting
- (r":" + symbol, String.Symbol),
- (r"::" + symbol, String.Symbol),
- (r":#" + symbol, String.Symbol),
- (r"'" + symbol, String.Symbol),
- (r"'", Operator),
- (r"`", Operator),
-
- # decimal numbers
- (r'[-+]?\d+\.?' + terminated, Number.Integer),
- (r'[-+]?\d+/\d+' + terminated, Number),
- (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' \
- + terminated, Number.Float),
-
- # sharpsign strings and characters
- (r"#\\." + terminated, String.Char),
- (r"#\\" + symbol, String.Char),
-
- # vector
- (r'#\(', Operator, 'body'),
-
- # bitstring
- (r'#\d*\*[01]*', Literal.Other),
-
- # uninterned symbol
- (r'#:' + symbol, String.Symbol),
-
- # read-time and load-time evaluation
- (r'#[.,]', Operator),
-
- # function shorthand
- (r'#\'', Name.Function),
-
- # binary rational
- (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
-
- # octal rational
- (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
-
- # hex rational
- (r'#[xX][+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
-
- # radix rational
- (r'#\d+[rR][+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
-
- # complex
- (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'),
-
- # array
- (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # structure
- (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'),
-
- # path
- (r'#[pP]?"(\\.|[^"])*"', Literal.Other),
-
- # reference
- (r'#\d+=', Operator),
- (r'#\d+#', Operator),
-
- # read-time comment
- (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
-
- # read-time conditional
- (r'#[+-]', Operator),
-
- # special operators that should have been parsed already
- (r'(,@|,|\.)', Operator),
-
- # special constants
- (r'(t|nil)' + terminated, Name.Constant),
-
- # functions and variables
- (r'\*' + symbol + '\*', Name.Variable.Global),
- (symbol, Name.Variable),
-
- # parentheses
- (r'\(', Punctuation, 'body'),
- (r'\)', Punctuation, '#pop'),
- ],
- }
-
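-# Illustrative sketch (assumed typical usage, not original code): the
-# get_tokens_unprocessed() override above re-tags plain Name.Variable tokens
-# against the _clbuiltins word sets, so a macro such as 'defun' should come
-# out as Name.Builtin while ordinary symbols stay Name.Variable.
-#
-#     from pygments.lexers import CommonLispLexer
-#
-#     for token, value in CommonLispLexer().get_tokens('(defun foo (x) x)'):
-#         print(token, repr(value))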
-
-class CryptolLexer(RegexLexer):
- """
- FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 2.0
- """
- name = 'Cryptol'
- aliases = ['cryptol', 'cry']
- filenames = ['*.cry']
- mimetypes = ['text/x-cryptol']
-
- reserved = ['Arith','Bit','Cmp','False','Inf','True','else',
- 'export','extern','fin','if','import','inf','lg2',
- 'max','min','module','newtype','pragma','property',
- 'then','type','where','width']
- ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
- 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
- 'DC[1-4]','NAK','SYN','ETB','CAN',
- 'EM','SUB','ESC','[FGRU]S','SP','DEL']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- #(r'--\s*|.*$', Comment.Doc),
- (r'//.*$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_a-z][\w\']*', Name.Function),
- (r"'?[_a-z][\w']*", Name),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Text),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)',
- bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
- # import X hiding (functions)
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
- # import X (functions)
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- # import X
- (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Text),
- (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/\*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[\*/]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop'),
- ],
- }
-
- EXTRA_KEYWORDS = ['join', 'split', 'reverse', 'transpose', 'width',
- 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
- 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
- 'trace']
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Name.Builtin, value
- else:
- yield index, token, value
-
-
-class HaskellLexer(RegexLexer):
- """
- A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
-
- .. versionadded:: 0.8
- """
- name = 'Haskell'
- aliases = ['haskell', 'hs']
- filenames = ['*.hs']
- mimetypes = ['text/x-haskell']
-
- flags = re.MULTILINE | re.UNICODE
-
- reserved = ['case','class','data','default','deriving','do','else',
- 'if','in','infix[lr]?','instance',
- 'let','newtype','of','then','type','where','_']
- ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
- 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
- 'DC[1-4]','NAK','SYN','ETB','CAN',
- 'EM','SUB','ESC','[FGRU]S','SP','DEL']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- #(r'--\s*|.*$', Comment.Doc),
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- # Lexemes:
- # Identifiers
- (r'\bimport\b', Keyword.Reserved, 'import'),
- (r'\bmodule\b', Keyword.Reserved, 'module'),
- (r'\berror\b', Name.Exception),
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
- (r"'?[_" + uni.Ll + r"'][\w']*", Name),
- (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
- # Operators
- (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
- (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Character/String Literals
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- # Special
- (r'\[\]', Keyword.Type),
- (r'\(\)', Name.Builtin),
- (r'[][(),;`{}]', Punctuation),
- ],
- 'import': [
- # Import statements
- (r'\s+', Text),
- (r'"', String, 'string'),
- # after "funclist" state
- (r'\)', Punctuation, '#pop'),
- (r'qualified\b', Keyword),
- # import X as Y
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
- bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
- # import X hiding (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
- bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
- # import X (functions)
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- # import X
- (r'[\w.]+', Name.Namespace, '#pop'),
- ],
- 'module': [
- (r'\s+', Text),
- (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[' + uni.Lu + r']\w*', Keyword.Type),
- (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'{-', Comment.Multiline, '#push'),
- (r'-}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']'", String.Char, '#pop'),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][' + uni.Lu + r'@\^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop'),
- ],
- }
-
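-# Hedged sketch (illustrative only, not part of the module): the
-# 'import'/'funclist' states above tokenize an import line so that the module
-# name becomes Name.Namespace and the names inside the parenthesised list
-# become Name.Function.
-#
-#     from pygments.lexers import HaskellLexer
-#
-#     line = 'import Data.Map (fromList)\n'
-#     for token, value in HaskellLexer().get_tokens(line):
-#         print(token, repr(value))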
-
-class IdrisLexer(RegexLexer):
- """
- A lexer for the dependently typed programming language Idris.
-
- Based on the Haskell and Agda lexers.
-
- .. versionadded:: 2.0
- """
- name = 'Idris'
- aliases = ['idris', 'idr']
- filenames = ['*.idr']
- mimetypes = ['text/x-idris']
-
- reserved = ['case','class','data','default','using','do','else',
- 'if','in','infix[lr]?','instance','rewrite','auto',
- 'namespace','codata','mutual','private','public','abstract',
- 'total','partial',
- 'let','proof','of','then','static','where','_','with',
- 'pattern', 'term', 'syntax','prefix',
- 'postulate','parameters','record','dsl','impossible','implicit',
- 'tactics','intros','intro','compute','refine','exact','trivial']
-
- ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK',
- 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE',
- 'DC[1-4]','NAK','SYN','ETB','CAN',
- 'EM','SUB','ESC','[FGRU]S','SP','DEL']
-
- annotations = ['assert_total','lib','link','include','provide','access',
- 'default']
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)',
- bygroups(Text, Name.Function, Text, Operator.Word, Text)),
- # Comments
- (r'^(\s*)(%%%s)' % '|'.join(annotations),
- bygroups(Text, Keyword.Reserved)),
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
- (r"('')?[A-Z][\w\']*", Keyword.Type),
- (r'[a-z][\w\']*', Text),
- # Special Symbols
- (r'(<-|::|->|=>|=)', Operator.Word), # specials
- (r'([\(\)\{\}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s\(\)\{\}]+', Text),
- (r'\s+?', Text), # Whitespace
- ],
- 'module': [
- (r'\s+', Text),
- (r'([A-Z][\w.]*)(\s+)(\()',
- bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
- (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
- ],
- 'funclist': [
- (r'\s+', Text),
- (r'[A-Z]\w*', Keyword.Type),
- (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
- (r'--.*$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- (r',', Punctuation),
- (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
- # (HACK, but it makes sense to push two instances, believe me)
- (r'\(', Punctuation, ('funclist', 'funclist')),
- (r'\)', Punctuation, '#pop:2'),
- ],
- # NOTE: the next four states are shared in the AgdaLexer; make sure
- # any change is compatible with Agda as well or copy over and change
- 'comment': [
- # Multiline Comments
- (r'[^-{}]+', Comment.Multiline),
- (r'{-', Comment.Multiline, '#push'),
- (r'-}', Comment.Multiline, '#pop'),
- (r'[-{}]', Comment.Multiline),
- ],
- 'character': [
- # Allows multi-chars, incorrectly.
- (r"[^\\']", String.Char),
- (r"\\", String.Escape, 'escape'),
- ("'", String.Char, '#pop'),
- ],
- 'string': [
- (r'[^\\"]+', String),
- (r"\\", String.Escape, 'escape'),
- ('"', String, '#pop'),
- ],
- 'escape': [
- (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
- (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
- ('|'.join(ascii), String.Escape, '#pop'),
- (r'o[0-7]+', String.Escape, '#pop'),
- (r'x[\da-fA-F]+', String.Escape, '#pop'),
- (r'\d+', String.Escape, '#pop'),
- (r'\s+\\', String.Escape, '#pop')
- ],
- }
-
-
-class AgdaLexer(RegexLexer):
- """
- For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
- dependently typed functional programming language and proof assistant.
-
- .. versionadded:: 2.0
- """
-
- name = 'Agda'
- aliases = ['agda']
- filenames = ['*.agda']
- mimetypes = ['text/x-agda']
-
- reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
- 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
- 'infixl', 'infixr', 'let', 'open', 'pattern', 'primitive',
- 'private', 'mutual', 'quote', 'quoteGoal', 'quoteTerm',
- 'record', 'syntax', 'rewrite', 'unquote', 'using', 'where',
- 'with']
-
- tokens = {
- 'root': [
- # Declaration
- (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)',
- bygroups(Text, Name.Function, Text, Operator.Word, Text)),
- # Comments
- (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
- (r'{-', Comment.Multiline, 'comment'),
- # Holes
- (r'{!', Comment.Directive, 'hole'),
- # Lexemes:
- # Identifiers
- (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
- (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
- (r'\b(Set|Prop)\b', Keyword.Type),
- # Special Symbols
- (r'(\(|\)|\{|\})', Operator),
- (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word),
- # Numbers
- (r'\d+[eE][+-]?\d+', Number.Float),
- (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- # Strings
- (r"'", String.Char, 'character'),
- (r'"', String, 'string'),
- (r'[^\s\(\)\{\}]+', Text),
- (r'\s+?', Text), # Whitespace
- ],
- 'hole': [
- # Holes
- (r'[^!{}]+', Comment.Directive),
- (r'{!', Comment.Directive, '#push'),
- (r'!}', Comment.Directive, '#pop'),
- (r'[!{}]', Comment.Directive),
- ],
- 'module': [
- (r'{-', Comment.Multiline, 'comment'),
- (r'[a-zA-Z][\w.]*', Name, '#pop'),
- (r'[^a-zA-Z]*', Text)
- ],
- 'comment': HaskellLexer.tokens['comment'],
- 'character': HaskellLexer.tokens['character'],
- 'string': HaskellLexer.tokens['string'],
- 'escape': HaskellLexer.tokens['escape']
- }
-
-class LeanLexer(RegexLexer):
- """
- For the `Lean <https://github.com/leanprover/lean>`_
- theorem prover.
- """
- name = 'Lean'
- aliases = ['lean']
- filenames = ['*.lean']
- mimetypes = ['text/x-lean']
-
- flags = re.MULTILINE | re.UNICODE
-
- keywords1 = ['import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
- 'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
- 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
- 'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
- 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
- 'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
- 'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
- 'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends']
-
- keywords2 = [
- 'forall', 'exists', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
- 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc'
- ]
-
- keywords3 = [
- # Sorts
- 'Type', 'Prop',
- ]
-
- keywords4 = [
- # Tactics
- 'apply', 'and_then', 'or_else', 'append', 'interleave', 'par', 'fixpoint', 'repeat',
- 'at_most', 'discard', 'focus_at', 'rotate', 'try_for', 'now', 'assumption', 'eassumption',
- 'state', 'intro', 'generalize', 'exact', 'unfold', 'beta', 'trace', 'focus', 'repeat1',
- 'determ', 'destruct', 'try', 'auto', 'intros'
- ]
-
- operators = [
- '!=', '#', '&', '&&', r'\*', r'\+', '-', '/', r'#', r'@',
- r'-\.', '->', r'\.', r'\.\.', r'\.\.\.', '::', ':>', ';', ';;', '<',
- '<-', '=', '==', '>', '_', '`', r'\|', r'\|\|', '~', '=>', '<=', '>=',
- r'/\\', r'\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥', u'¬', u'⁻¹', u'⬝', u'▸',
- u'→', u'∃', u'ℕ', u'ℤ', u'≈'
- ]
-
- word_operators = ['and', 'or', 'not', 'iff', 'eq']
-
- punctuation = [ r'\(', r'\)', r':', r'{', r'}', r'\[', r'\]', u'⦃', u'⦄', r':=', r',' ]
-
- primitives = ['unit', 'int', 'bool', 'string', 'char', 'list',
- 'array', 'prod', 'sum', 'pair', 'real', 'nat', 'num', 'path']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'\b(false)\b|\b(true)\b|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'/-', Comment, 'comment'),
- (r'--.*?$', Comment.Single),
- (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
- (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
- (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
- (r'(%s)' % '|'.join(operators[::-1]), Name.Builtin.Pseudo),
- (r'\b(%s)\b' % '|'.join(word_operators), Name.Builtin.Pseudo),
- (r'(%s)' % '|'.join(punctuation[::-1]), Operator),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
- (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f][A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079\u207f-\u2089\u2090-\u209c\u2100-\u214f]*", Name),
- (r'\d[\d]*', Number.Integer),
- (r'"', String.Double, 'string'),
- (r'[~?][a-z][\w\']*:', Name.Variable)
- ],
- 'comment': [
- # Multiline Comments
- (r'[^/-]', Comment.Multiline),
- (r'/-', Comment.Multiline, '#push'),
- (r'-/', Comment.Multiline, '#pop'),
- (r'[/-]', Comment.Multiline)
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- (r'\\[n"\\]', String.Escape),
- ('"', String.Double, '#pop'),
- ],
- }
-
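-# Illustrative usage sketch (not part of the original module): the keyword
-# groups above separate namespace-level commands, term-level keywords, sorts,
-# tactics and primitive types, so the pieces of a simple definition should
-# each receive a different token type.
-#
-#     from pygments.lexers import LeanLexer
-#
-#     src = 'definition double (a : nat) : nat := a + a'
-#     for token, value in LeanLexer().get_tokens(src):
-#         print(token, repr(value))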
-
-class LiterateLexer(Lexer):
- """
- Base class for lexers of literate file formats based on LaTeX or Bird-style
- (prefixing each code line with ">").
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
- """
-
- bird_re = re.compile(r'(>[ \t]*)(.*\n)')
-
- def __init__(self, baselexer, **options):
- self.baselexer = baselexer
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- style = self.options.get('litstyle')
- if style is None:
- style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
-
- code = ''
- insertions = []
- if style == 'bird':
- # bird-style
- for match in line_re.finditer(text):
- line = match.group()
- m = self.bird_re.match(line)
- if m:
- insertions.append((len(code),
- [(0, Comment.Special, m.group(1))]))
- code += m.group(2)
- else:
- insertions.append((len(code), [(0, Text, line)]))
- else:
- # latex-style
- from pygments.lexers.text import TexLexer
- lxlexer = TexLexer(**self.options)
- codelines = 0
- latex = ''
- for match in line_re.finditer(text):
- line = match.group()
- if codelines:
- if line.lstrip().startswith('\\end{code}'):
- codelines = 0
- latex += line
- else:
- code += line
- elif line.lstrip().startswith('\\begin{code}'):
- codelines = 1
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- latex = ''
- else:
- latex += line
- insertions.append((len(code),
- list(lxlexer.get_tokens_unprocessed(latex))))
- for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
- yield item
-
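-# Hedged sketch (illustrative, not original code): thanks to the litstyle
-# autodetection above, a Bird-style document needs no option; the "> "
-# markers are emitted as Comment.Special and the rest of each code line is
-# handed to the wrapped base lexer.
-#
-#     from pygments.lexers import LiterateHaskellLexer
-#
-#     doc = 'Some prose.\n\n> main :: IO ()\n> main = putStrLn "hi"\n'
-#     for token, value in LiterateHaskellLexer().get_tokens(doc):
-#         print(token, repr(value))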
-
-class LiterateCryptolLexer(LiterateLexer):
- """
- For Literate Cryptol (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
- """
- name = 'Literate Cryptol'
- aliases = ['lcry', 'literate-cryptol', 'lcryptol']
- filenames = ['*.lcry']
- mimetypes = ['text/x-literate-cryptol']
-
- def __init__(self, **options):
- crylexer = CryptolLexer(**options)
- LiterateLexer.__init__(self, crylexer, **options)
-
-
-class LiterateHaskellLexer(LiterateLexer):
- """
- For Literate Haskell (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 0.9
- """
- name = 'Literate Haskell'
- aliases = ['lhs', 'literate-haskell', 'lhaskell']
- filenames = ['*.lhs']
- mimetypes = ['text/x-literate-haskell']
-
- def __init__(self, **options):
- hslexer = HaskellLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateIdrisLexer(LiterateLexer):
- """
- For Literate Idris (Bird-style or LaTeX) source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Idris'
- aliases = ['lidr', 'literate-idris', 'lidris']
- filenames = ['*.lidr']
- mimetypes = ['text/x-literate-idris']
-
- def __init__(self, **options):
- hslexer = IdrisLexer(**options)
- LiterateLexer.__init__(self, hslexer, **options)
-
-
-class LiterateAgdaLexer(LiterateLexer):
- """
- For Literate Agda source.
-
- Additional options accepted:
-
- `litstyle`
- If given, must be ``"bird"`` or ``"latex"``. If not given, the style
- is autodetected: if the first non-whitespace character in the source
- is a backslash or percent character, LaTeX is assumed, else Bird.
-
- .. versionadded:: 2.0
- """
- name = 'Literate Agda'
- aliases = ['lagda', 'literate-agda']
- filenames = ['*.lagda']
- mimetypes = ['text/x-literate-agda']
-
- def __init__(self, **options):
- agdalexer = AgdaLexer(**options)
- LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
-
-
-class SMLLexer(RegexLexer):
- """
- For the Standard ML language.
-
- .. versionadded:: 1.5
- """
-
- name = 'Standard ML'
- aliases = ['sml']
- filenames = ['*.sml', '*.sig', '*.fun',]
- mimetypes = ['text/x-standardml', 'application/x-standardml']
-
- alphanumid_reserved = [
- # Core
- 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
- 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
- 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
- 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
- # Modules
- 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
- 'struct', 'structure', 'where',
- ]
-
- symbolicid_reserved = [
- # Core
- ':', '\|', '=', '=>', '->', '#',
- # Modules
- ':>',
- ]
-
- nonid_reserved = [ '(', ')', '[', ']', '{', '}', ',', ';', '...', '_' ]
-
- alphanumid_re = r"[a-zA-Z][\w']*"
- symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
-
- # A character constant is a sequence of the form #s, where s is a string
- # constant denoting a string of size one character. This setup just parses
- # the entire string as either a String.Double or a String.Char (depending
- # on the argument), even if the String.Char is an erroneous
- # multiple-character string.
- def stringy (whatkind):
- return [
- (r'[^"\\]', whatkind),
- (r'\\[\\\"abtnvfr]', String.Escape),
- # Control-character notation is used for codes < 32,
- # where \^@ == \000
- (r'\\\^[\x40-\x5e]', String.Escape),
- # Docs say 'decimal digits'
- (r'\\[0-9]{3}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- (r'\\\s+\\', String.Interpol),
- (r'"', whatkind, '#pop'),
- ]
-
- # Callbacks for distinguishing tokens and reserved words
- def long_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved: token = Error
- else: token = Name.Namespace
- yield match.start(1), token, match.group(1)
- yield match.start(2), Punctuation, match.group(2)
-
- def end_id_callback(self, match):
- if match.group(1) in self.alphanumid_reserved: token = Error
- elif match.group(1) in self.symbolicid_reserved: token = Error
- else: token = Name
- yield match.start(1), token, match.group(1)
-
- def id_callback(self, match):
- str = match.group(1)
- if str in self.alphanumid_reserved: token = Keyword.Reserved
- elif str in self.symbolicid_reserved: token = Punctuation
- else: token = Name
- yield match.start(1), token, str
-
- tokens = {
- # Whitespace and comments are (almost) everywhere
- 'whitespace': [
- (r'\s+', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
-
- 'delimiters': [
- # This lexer treats these delimiters specially:
- # Delimiters define scopes, and the scope is how the meaning of
- # the `|' is resolved - is it a case/handle expression, or function
- # definition by cases? (This is not how the Definition works, but
- # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
- (r'\(|\[|{', Punctuation, 'main'),
- (r'\)|\]|}', Punctuation, '#pop'),
- (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
- (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
- (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
- ],
-
- 'core': [
- # Punctuation that doesn't overlap symbolic identifiers
- (r'(%s)' % '|'.join([re.escape(z) for z in nonid_reserved]),
- Punctuation),
-
- # Special constants: strings, floats, numbers in decimal and hex
- (r'#"', String.Char, 'char'),
- (r'"', String.Double, 'string'),
- (r'~?0x[0-9a-fA-F]+', Number.Hex),
- (r'0wx[0-9a-fA-F]+', Number.Hex),
- (r'0w\d+', Number.Integer),
- (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
- (r'~?\d+\.\d+', Number.Float),
- (r'~?\d+[eE]~?\d+', Number.Float),
- (r'~?\d+', Number.Integer),
-
- # Labels
- (r'#\s*[1-9][0-9]*', Name.Label),
- (r'#\s*(%s)' % alphanumid_re, Name.Label),
- (r'#\s+(%s)' % symbolicid_re, Name.Label),
- # Some reserved words trigger a special, local lexer state change
- (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
- (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
- (r'\b(functor|include|open|signature|structure)\b(?!\')',
- Keyword.Reserved, 'sname'),
- (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
-
- # Regular identifiers, long and otherwise
- (r'\'[\w\']*', Name.Decorator),
- (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
- (r'(%s)' % alphanumid_re, id_callback),
- (r'(%s)' % symbolicid_re, id_callback),
- ],
- 'dotted': [
- (r'(%s)(\.)' % alphanumid_re, long_id_callback),
- (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
- (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
- (r'\s+', Error),
- (r'\S+', Error),
- ],
-
-
- # Main parser (prevents errors in files that have scoping errors)
- 'root': [ default('main') ],
-
- # In this scope, I expect '|' to not be followed by a function name,
- # and I expect 'and' to be followed by a binding site
- 'main': [
- include('whitespace'),
-
- # Special behavior of val/and/fun
- (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
- (r'\b(fun)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main-fun', 'fname')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # In this scope, I expect '|' and 'and' to be followed by a function
- 'main-fun': [
- include('whitespace'),
-
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- # Special behavior of val/and/fun
- (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
- (r'\b(val)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main', 'vname')),
-
- # Special behavior of '|' and '|'-manipulating keywords
- (r'\|', Punctuation, 'fname'),
- (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
- ('#pop', 'main')),
-
- include('delimiters'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Character and string parsers
- 'char': stringy(String.Char),
- 'string': stringy(String.Double),
-
- 'breakout': [
- (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
- ],
-
- # Dealing with what comes after module system keywords
- 'sname': [
- include('whitespace'),
- include('breakout'),
-
- (r'(%s)' % alphanumid_re, Name.Namespace),
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
- 'fname': [
- include('whitespace'),
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
-
- # Ignore interesting function declarations like "fun (x + y) = ..."
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'val' (or 'and') keyword
- 'vname': [
- include('whitespace'),
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
-
- (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
- bygroups(Name.Variable, Text, Punctuation), '#pop'),
- (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
-
- # Ignore interesting patterns like 'val (x, y)'
- default('#pop'),
- ],
-
- # Dealing with what comes after the 'type' (or 'and') keyword
- 'tname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # A type binding includes most identifiers
- 'typbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Dealing with what comes after the 'datatype' (or 'and') keyword
- 'dname': [
- include('whitespace'),
- include('breakout'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (r'\(', Punctuation, 'tyvarseq'),
- (r'(=)(\s*)(datatype)',
- bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
- (r'=(?!%s)' % symbolicid_re, Punctuation,
- ('#pop', 'datbind', 'datcon')),
-
- (r'(%s)' % alphanumid_re, Keyword.Type),
- (r'(%s)' % symbolicid_re, Keyword.Type),
- (r'\S+', Error, '#pop'),
- ],
-
- # common case - A | B | C of int
- 'datbind': [
- include('whitespace'),
-
- (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
- (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- (r'(\|)(\s*)(%s)' % alphanumid_re,
- bygroups(Punctuation, Text, Name.Class)),
- (r'(\|)(\s+)(%s)' % symbolicid_re,
- bygroups(Punctuation, Text, Name.Class)),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- # Dealing with what comes after an exception
- 'ename': [
- include('whitespace'),
-
- (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
- bygroups(Keyword.Reserved, Text, Name.Class)),
- (r'\b(of)\b(?!\')', Keyword.Reserved),
-
- include('breakout'),
- include('core'),
- (r'\S+', Error),
- ],
-
- 'datcon': [
- include('whitespace'),
- (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
- (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
- (r'\S+', Error, '#pop'),
- ],
-
- # Series of type variables
- 'tyvarseq': [
- (r'\s', Text),
- (r'\(\*', Comment.Multiline, 'comment'),
-
- (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
- (alphanumid_re, Name),
- (r',', Punctuation),
- (r'\)', Punctuation, '#pop'),
- (symbolicid_re, Name),
- ],
-
- 'comment': [
- (r'[^(*)]', Comment.Multiline),
- (r'\(\*', Comment.Multiline, '#push'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[(*)]', Comment.Multiline),
- ],
- }
-
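-# Illustrative sketch (assumed usage, not part of the module): the
-# 'main'/'main-fun' scope handling described in the 'delimiters' comment
-# above is what lets '|' introduce another function clause, so both clause
-# heads of a recursive function should be tagged Name.Function.
-#
-#     from pygments.lexers import SMLLexer
-#
-#     src = 'fun fact 0 = 1\n  | fact n = n * fact (n - 1)\n'
-#     for token, value in SMLLexer().get_tokens(src):
-#         print(token, repr(value))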
-
-class OcamlLexer(RegexLexer):
- """
- For the OCaml language.
-
- .. versionadded:: 0.7
- """
-
- name = 'OCaml'
- aliases = ['ocaml']
- filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
- mimetypes = ['text/x-ocaml']
-
- keywords = [
- 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- 'downto', 'else', 'end', 'exception', 'external', 'false',
- 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
- ]
- keyopts = [
- '!=','#','&','&&','\(','\)','\*','\+',',','-',
- '-\.','->','\.','\.\.',':','::',':=',':>',';',';;','<',
- '<-','=','>','>]','>}','\?','\?\?','\[','\[<','\[>','\[\|',
- ']','_','`','{','{<','\|','\|]','}','~'
- ]
-
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array']
-
- tokens = {
- 'escape-sequence': [
- (r'\\[\\\"\'ntbr]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\b([A-Z][\w\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'\(\*(?![)])', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'\d[\d_]*', Number.Integer),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^\\"]+', String.Double),
- include('escape-sequence'),
- (r'\\\n', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z_][\w\']*', Name, '#pop'),
- ],
- }
-
-
-class ErlangLexer(RegexLexer):
- """
- For the Erlang functional programming language.
-
- Blame Jeremy Thurgood (http://jerith.za.net/).
-
- .. versionadded:: 0.9
- """
-
- name = 'Erlang'
- aliases = ['erlang']
- filenames = ['*.erl', '*.hrl', '*.es', '*.escript']
- mimetypes = ['text/x-erlang']
-
- keywords = [
- 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if',
- 'let', 'of', 'query', 'receive', 'try', 'when',
- ]
-
- builtins = [ # See erlang(3) man page
- 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list',
- 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions',
- 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module',
- 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit',
- 'float', 'float_to_list', 'fun_info', 'fun_to_list',
- 'function_exported', 'garbage_collect', 'get', 'get_keys',
- 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary',
- 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean',
- 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list',
- 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record',
- 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom',
- 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom',
- 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple',
- 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5',
- 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor',
- 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2',
- 'pid_to_list', 'port_close', 'port_command', 'port_connect',
- 'port_control', 'port_call', 'port_info', 'port_to_list',
- 'process_display', 'process_flag', 'process_info', 'purge_module',
- 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process',
- 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie',
- 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor',
- 'spawn_opt', 'split_binary', 'start_timer', 'statistics',
- 'suspend_process', 'system_flag', 'system_info', 'system_monitor',
- 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered',
- 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list',
- 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis'
- ]
-
- operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)'
- word_operators = [
- 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor',
- 'div', 'not', 'or', 'orelse', 'rem', 'xor'
- ]
-
- atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')"
-
- variable_re = r'(?:[A-Z_]\w*)'
-
- escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))'
-
- macro_re = r'(?:'+variable_re+r'|'+atom_re+r')'
-
- base_re = r'(?:[2-9]|[12][0-9]|3[0-6])'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'%.*\n', Comment),
- ('(' + '|'.join(keywords) + r')\b', Keyword),
- ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
- ('(' + '|'.join(word_operators) + r')\b', Operator.Word),
- (r'^-', Punctuation, 'directive'),
- (operators, Operator),
- (r'"', String, 'string'),
- (r'<<', Name.Label),
- (r'>>', Name.Label),
- ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)),
- ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer),
- (r'[+-]?\d+', Number.Integer),
- (r'[+-]?\d+.\d+', Number.Float),
- (r'[]\[:_@\".{}()|;,]', Punctuation),
- (variable_re, Name.Variable),
- (atom_re, Name),
- (r'\?'+macro_re, Name.Constant),
- (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char),
- (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label),
- ],
- 'string': [
- (escape_re, String.Escape),
- (r'"', String, '#pop'),
- (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol),
- (r'[^"\\~]+', String),
- (r'~', String),
- ],
- 'directive': [
- (r'(define)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'),
- (r'(record)(\s*)(\()('+macro_re+r')',
- bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'),
- (atom_re, Name.Entity, '#pop'),
- ],
- }
-
-
-class ErlangShellLexer(Lexer):
- """
- Shell sessions in erl (for Erlang code).
-
- .. versionadded:: 1.1
- """
- name = 'Erlang erl session'
- aliases = ['erl']
- filenames = ['*.erl-sh']
- mimetypes = ['text/x-erl-shellsession']
-
- _prompt_re = re.compile(r'\d+>(?=\s|\Z)')
-
- def get_tokens_unprocessed(self, text):
- erlexer = ErlangLexer(**self.options)
-
- curcode = ''
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- m = self._prompt_re.match(line)
- if m is not None:
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- if line.startswith('*'):
- yield match.start(), Generic.Traceback, line
- else:
- yield match.start(), Generic.Output, line
- if curcode:
- for item in do_insertions(insertions,
- erlexer.get_tokens_unprocessed(curcode)):
- yield item
-
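-# Hedged usage sketch (illustrative only): ErlangShellLexer splits a session
-# on the numbered "N>" prompts, runs the expressions through ErlangLexer via
-# do_insertions(), and passes other lines through as output or tracebacks.
-#
-#     from pygments.lexers import ErlangShellLexer
-#
-#     session = '1> lists:seq(1, 3).\n[1,2,3]\n'
-#     for token, value in ErlangShellLexer().get_tokens(session):
-#         print(token, repr(value))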
-
-class OpaLexer(RegexLexer):
- """
- Lexer for the Opa language (http://opalang.org).
-
- .. versionadded:: 1.5
- """
-
- name = 'Opa'
- aliases = ['opa']
- filenames = ['*.opa']
- mimetypes = ['text/x-opa']
-
- # most of these aren't strictly keywords
- # but if you color only real keywords, you might just
- # as well not color anything
- keywords = [
- 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
- 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
- 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
- 'type', 'val', 'with', 'xml_parser',
- ]
-
- # matches both stuff and `stuff`
- ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
-
- op_re = r'[.=\-<>,@~%/+?*&^!]'
- punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
- # because they are also used for inserts
-
- tokens = {
- # copied from the caml lexer, should be adapted
- 'escape-sequence': [
- (r'\\[\\\"\'ntr}]', String.Escape),
- (r'\\[0-9]{3}', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- ],
-
- # factorizing these rules, because they are inserted many times
- 'comments': [
- (r'/\*', Comment, 'nested-comment'),
- (r'//.*?$', Comment),
- ],
- 'comments-and-spaces': [
- include('comments'),
- (r'\s+', Text),
- ],
-
- 'root': [
- include('comments-and-spaces'),
- # keywords
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- # directives
- # we could parse the actual set of directives instead of anything
- # starting with @, but this is troublesome
- # because it needs to be adjusted all the time
- # and assuming we parse only sources that compile, it is useless
- (r'@'+ident_re+r'\b', Name.Builtin.Pseudo),
-
- # number literals
-            (r'-?\.[\d]+([eE][+\-]?\d+)', Number.Float),
-            (r'-?\d+\.\d*([eE][+\-]?\d+)', Number.Float),
- (r'-?\d+[eE][+\-]?\d+', Number.Float),
- (r'0[xX][\da-fA-F]+', Number.Hex),
- (r'0[oO][0-7]+', Number.Oct),
- (r'0[bB][01]+', Number.Bin),
- (r'\d+', Number.Integer),
- # color literals
- (r'#[\da-fA-F]{3,6}', Number.Integer),
-
- # string literals
- (r'"', String.Double, 'string'),
- # char literal, should be checked because this is the regexp from
- # the caml lexer
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
- String.Char),
-
- # this is meant to deal with embedded exprs in strings
- # every time we find a '}' we pop a state so that if we were
- # inside a string, we are back in the string state
- # as a consequence, we must also push a state every time we find a
- # '{' or else we will have errors when parsing {} for instance
- (r'{', Operator, '#push'),
- (r'}', Operator, '#pop'),
-
- # html literals
-            # this is much stricter than the actual parser,
- # since a<b would not be parsed as html
- # but then again, the parser is way too lax, and we can't hope
- # to have something as tolerant
- (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
-
- # db path
- # matching the '[_]' in '/a[_]' because it is a part
- # of the syntax of the db path definition
-            # unfortunately, I don't know how to match the ']' in
- # /a[1], so this is somewhat inconsistent
- (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
- # putting the same color on <- as on db path, since
- # it can be used only to mean Db.write
- (r'<-(?!'+op_re+r')', Name.Variable),
-
- # 'modules'
-            # although modules are not distinguished by their names as in caml,
-            # the standard library seems to follow the convention that only
-            # modules are capitalized
- (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
-
- # operators
- # = has a special role because this is the only
-            # way to syntactically distinguish binding constructions
- # unfortunately, this colors the equal in {x=2} too
- (r'=(?!'+op_re+r')', Keyword),
- (r'(%s)+' % op_re, Operator),
- (r'(%s)+' % punc_re, Operator),
-
- # coercions
- (r':', Operator, 'type'),
- # type variables
-            # we need this rule because we don't parse type definitions
-            # specially, so in "type t('a) = ...", "'a" is parsed by 'root'
- ("'"+ident_re, Keyword.Type),
-
- # id literal, #something, or #{expr}
- (r'#'+ident_re, String.Single),
- (r'#(?={)', String.Single),
-
- # identifiers
-            # this avoids coloring the '2' in 'a2' as an integer
- (ident_re, Text),
-
- # default, not sure if that is needed or not
- # (r'.', Text),
- ],
-
- # it is quite painful to have to parse types to know where they end
- # this is the general rule for a type
- # a type is either:
- # * -> ty
- # * type-with-slash
- # * type-with-slash -> ty
- # * type-with-slash (, type-with-slash)+ -> ty
- #
- # the code is pretty funky in here, but this code would roughly
- # translate in caml to:
- # let rec type stream =
- # match stream with
- # | [< "->"; stream >] -> type stream
- # | [< ""; stream >] ->
- # type_with_slash stream
- # type_lhs_1 stream;
- # and type_1 stream = ...
- 'type': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type),
- default(('#pop', 'type-lhs-1', 'type-with-slash')),
- ],
-
- # parses all the atomic or closed constructions in the syntax of type
- # expressions: record types, tuple types, type constructors, basic type
- # and type variables
- 'type-1': [
- include('comments-and-spaces'),
- (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (r'~?{', Keyword.Type, ('#pop', 'type-record')),
- (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
- (ident_re, Keyword.Type, '#pop'),
- ("'"+ident_re, Keyword.Type),
- # this case is not in the syntax but sometimes
- # we think we are parsing types when in fact we are parsing
- # some css, so we just pop the states until we get back into
- # the root state
- default('#pop'),
- ],
-
- # type-with-slash is either:
- # * type-1
- # * type-1 (/ type-1)+
- 'type-with-slash': [
- include('comments-and-spaces'),
- default(('#pop', 'slash-type-1', 'type-1')),
- ],
- 'slash-type-1': [
- include('comments-and-spaces'),
- ('/', Keyword.Type, ('#pop', 'type-1')),
- # same remark as above
- default('#pop'),
- ],
-
- # we go in this state after having parsed a type-with-slash
- # while trying to parse a type
- # and at this point we must determine if we are parsing an arrow
- # type (in which case we must continue parsing) or not (in which
- # case we stop)
- 'type-lhs-1': [
- include('comments-and-spaces'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
- default('#pop'),
- ],
- 'type-arrow': [
- include('comments-and-spaces'),
-            # the lookahead here allows us to parse f(x : int, y : float -> truc)
- # correctly
- (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
- (r'->', Keyword.Type, ('#pop', 'type')),
- # same remark as above
- default('#pop'),
- ],
-
- # no need to do precise parsing for tuples and records
- # because they are closed constructions, so we can simply
- # find the closing delimiter
-        # note that this would not work if the source
- # contained identifiers like `{)` (although it could be patched
- # to support it)
- 'type-tuple': [
- include('comments-and-spaces'),
- (r'[^\(\)/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'\(', Keyword.Type, '#push'),
- (r'\)', Keyword.Type, '#pop'),
- ],
- 'type-record': [
- include('comments-and-spaces'),
- (r'[^{}/*]+', Keyword.Type),
- (r'[/*]', Keyword.Type),
- (r'{', Keyword.Type, '#push'),
- (r'}', Keyword.Type, '#pop'),
- ],
-
-# 'type-tuple': [
-# include('comments-and-spaces'),
-# (r'\)', Keyword.Type, '#pop'),
-# default(('#pop', 'type-tuple-1', 'type-1')),
-# ],
-# 'type-tuple-1': [
-# include('comments-and-spaces'),
-# (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
-# (r',', Keyword.Type, 'type-1'),
-# ],
-# 'type-record':[
-# include('comments-and-spaces'),
-# (r'}', Keyword.Type, '#pop'),
-# (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
-# ],
-# 'type-record-field-expr': [
-#
-# ],
-
- 'nested-comment': [
- (r'[^/*]+', Comment),
- (r'/\*', Comment, '#push'),
- (r'\*/', Comment, '#pop'),
- (r'[/*]', Comment),
- ],
-
- # the copy pasting between string and single-string
- # is kinda sad. Is there a way to avoid that??
- 'string': [
- (r'[^\\"{]+', String.Double),
- (r'"', String.Double, '#pop'),
- (r'{', Operator, 'root'),
- include('escape-sequence'),
- ],
- 'single-string': [
- (r'[^\\\'{]+', String.Double),
- (r'\'', String.Double, '#pop'),
- (r'{', Operator, 'root'),
- include('escape-sequence'),
- ],
-
- # all the html stuff
- # can't really reuse some existing html parser
- # because we must be able to parse embedded expressions
-
- # we are in this state after someone parsed the '<' that
- # started the html literal
- 'html-open-tag': [
- (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- # we are in this state after someone parsed the '</' that
- # started the end of the closing tag
- 'html-end-tag': [
- # this is a star, because </> is allowed
- (r'[\w\-:]*>', String.Single, '#pop'),
- ],
-
- # we are in this state after having parsed '<ident(:ident)?'
- # we thus parse a possibly empty list of attributes
- 'html-attr': [
- (r'\s+', Text),
- (r'[\w\-:]+=', String.Single, 'html-attr-value'),
- (r'/>', String.Single, '#pop'),
- (r'>', String.Single, ('#pop', 'html-content')),
- ],
-
- 'html-attr-value': [
- (r"'", String.Single, ('#pop', 'single-string')),
- (r'"', String.Single, ('#pop', 'string')),
- (r'#'+ident_re, String.Single, '#pop'),
- (r'#(?={)', String.Single, ('#pop', 'root')),
- (r'[^"\'{`=<>]+', String.Single, '#pop'),
- (r'{', Operator, ('#pop', 'root')), # this is a tail call!
- ],
-
- # we should probably deal with '\' escapes here
- 'html-content': [
- (r'<!--', Comment, 'html-comment'),
- (r'</', String.Single, ('#pop', 'html-end-tag')),
- (r'<', String.Single, 'html-open-tag'),
- (r'{', Operator, 'root'),
- (r'[^<{]+', String.Single),
- ],
-
- 'html-comment': [
- (r'-->', Comment, '#pop'),
- (r'[^\-]+|-', Comment),
- ],
- }
-
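(Illustrative sketch, not part of this changeset: the brace push/pop trick described in the rules above can be observed by dumping the token stream; the toy input is not meant to be valid Opa.)

# Sketch only -- assumes the 'opa' alias above is registered.
from pygments.lexers import get_lexer_by_name

code = 'x = "hello {name}!"\n'
for token_type, value in get_lexer_by_name('opa').get_tokens(code):
    print(token_type, repr(value))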
-
-class CoqLexer(RegexLexer):
- """
- For the `Coq <http://coq.inria.fr/>`_ theorem prover.
-
- .. versionadded:: 1.5
- """
-
- name = 'Coq'
- aliases = ['coq']
- filenames = ['*.v']
- mimetypes = ['text/x-coq']
-
- keywords1 = [
- # Vernacular commands
- 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
- 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
- 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
- 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
- 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
- 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
- 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
- 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
- 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
- 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
- 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
- 'outside',
- ]
- keywords2 = [
- # Gallina
- 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
- 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
- 'for', 'of', 'nosimpl', 'with', 'as',
- ]
- keywords3 = [
- # Sorts
- 'Type', 'Prop',
- ]
- keywords4 = [
- # Tactics
- 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
- 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
- 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
- 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
- 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
- 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
- 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
- 'split', 'left', 'right', 'autorewrite',
- ]
- keywords5 = [
- # Terminators
- 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
- 'assumption', 'solve', 'contradiction', 'discriminate',
- ]
- keywords6 = [
- # Control
- 'do', 'last', 'first', 'try', 'idtac', 'repeat',
- ]
- # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
- # 'downto', 'else', 'end', 'exception', 'external', 'false',
- # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
- # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
- # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
- # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
- # 'type', 'val', 'virtual', 'when', 'while', 'with'
- keyopts = [
- '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
- r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
- '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
- r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
- r'/\\', r'\\/',
- u'Π', u'λ',
- ]
- operators = r'[!$%&*+\./:<=>?@^|~-]'
- word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or']
- prefix_syms = r'[!?~]'
- infix_syms = r'[=<>@^|&+\*/$%-]'
- primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list',
- 'array']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
- (r'\(\*', Comment, 'comment'),
- (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace),
- (r'\b(%s)\b' % '|'.join(keywords2), Keyword),
- (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type),
- (r'\b(%s)\b' % '|'.join(keywords4), Keyword),
- (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo),
- (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved),
- (r'\b([A-Z][\w\']*)(?=\s*\.)',
- Name.Namespace, 'dotted'),
- (r'\b([A-Z][\w\']*)', Name.Class),
- (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
- (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
- (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
- (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
- (r"[^\W\d][\w']*", Name),
-
- (r'\d[\d_]*', Number.Integer),
- (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
- (r'0[oO][0-7][0-7_]*', Number.Oct),
- (r'0[bB][01][01_]*', Number.Bin),
- (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-
- (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
- String.Char),
- (r"'.'", String.Char),
- (r"'", Keyword), # a stray quote is another syntax element
-
- (r'"', String.Double, 'string'),
-
- (r'[~?][a-z][\w\']*:', Name.Variable),
- ],
- 'comment': [
- (r'[^(*)]+', Comment),
- (r'\(\*', Comment, '#push'),
- (r'\*\)', Comment, '#pop'),
- (r'[(*)]', Comment),
- ],
- 'string': [
- (r'[^"]+', String.Double),
- (r'""', String.Double),
- (r'"', String.Double, '#pop'),
- ],
- 'dotted': [
- (r'\s+', Text),
- (r'\.', Punctuation),
- (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
- (r'[A-Z][\w\']*', Name.Class, '#pop'),
- (r'[a-z][a-z0-9_\']*', Name, '#pop'),
- default('#pop')
- ],
- }
-
- def analyse_text(text):
- if text.startswith('(*'):
- return True
-
-
-class NewLispLexer(RegexLexer):
- """
-    For `newLISP <http://www.newlisp.org>`_ source code (version 10.3.0).
-
- .. versionadded:: 1.5
- """
-
- name = 'NewLisp'
- aliases = ['newlisp']
- filenames = ['*.lsp', '*.nl']
- mimetypes = ['text/x-newlisp', 'application/x-newlisp']
-
- flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
-
- # list of built-in functions for newLISP version 10.3
- builtins = [
- '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
- '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
- '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
- '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
- 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file',
- 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
- 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
- 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
- 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
- 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
- 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
- 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
- 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
- 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
- 'def-new', 'default', 'define-macro', 'define-macro', 'define',
- 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
- 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
- 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
- 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
- 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
- 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
- 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
- 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
- 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
- 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
- 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
- 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
- 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
- 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
- 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
- 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
- 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
- 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
- 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
- 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
- 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
- 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
- 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
- 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
- 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
- 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
- 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
- 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
- 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
- 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
- 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
- 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
- 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
- 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
- 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
- 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
- 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
- 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
- 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
- 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
- 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
- 'write', 'write-char', 'write-file', 'write-line', 'write',
- 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
- ]
-
- # valid names
- valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
-
- tokens = {
- 'root': [
- # shebang
- (r'#!(.*?)$', Comment.Preproc),
- # comments starting with semicolon
- (r';.*$', Comment.Single),
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # whitespace
- (r'\s+', Text),
-
- # strings, symbols and characters
- (r'"(\\\\|\\"|[^"])*"', String),
-
- # braces
- (r"{", String, "bracestring"),
-
- # [text] ... [/text] delimited strings
- (r'\[text\]*', String, "tagstring"),
-
- # 'special' operators...
- (r"('|:)", Operator),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Keyword),
-
- # the remaining functions
- (r'(?<=\()' + valid_name, Name.Variable),
-
- # the remaining variables
- (valid_name, String.Symbol),
-
- # parentheses
- (r'(\(|\))', Punctuation),
- ],
-
- # braced strings...
- 'bracestring': [
- ("{", String, "#push"),
- ("}", String, "#pop"),
- ("[^{}]+", String),
- ],
-
- # tagged [text]...[/text] delimited strings...
- 'tagstring': [
- (r'(?s)(.*?)(\[/text\])', String, '#pop'),
- ],
- }
-
-
-class NixLexer(RegexLexer):
- """
- For the `Nix language <http://nixos.org/nix/>`_.
-
- .. versionadded:: 2.0
- """
-
- name = 'Nix'
- aliases = ['nixos', 'nix']
- filenames = ['*.nix']
- mimetypes = ['text/x-nix']
-
- flags = re.MULTILINE | re.UNICODE
-
- keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
- 'else', 'then', '...']
- builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
- 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
- operators = ['++', '+', '?', '.', '!', '//', '==',
- '!=', '&&', '||', '->', '=']
-
- punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
-
- tokens = {
- 'root': [
- # comments starting with #
- (r'#.*$', Comment.Single),
-
- # multiline comments
- (r'/\*', Comment.Multiline, 'comment'),
-
- # whitespace
- (r'\s+', Text),
-
- # keywords
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
-
- # highlight the builtins
- ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
- Name.Builtin),
-
- (r'\b(true|false|null)\b', Name.Constant),
-
- # operators
- ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
- Operator),
-
- # word operators
- (r'\b(or|and)\b', Operator.Word),
-
- # punctuations
- ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
-
- # integers
- (r'[0-9]+', Number.Integer),
-
- # strings
- (r'"', String.Double, 'doublequote'),
- (r"''", String.Single, 'singlequote'),
-
- # paths
- (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
- (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
-
- # urls
- (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
-
- # names of variables
- (r'[\w-]+\s*=', String.Symbol),
- (r'[a-zA-Z_][\w\'-]*', Text),
-
- ],
- 'comment': [
- (r'[^/\*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[\*/]', Comment.Multiline),
- ],
- 'singlequote': [
- (r"'''", String.Escape),
- (r"''\$\{", String.Escape),
- (r"''\n", String.Escape),
- (r"''\r", String.Escape),
- (r"''\t", String.Escape),
- (r"''", String.Single, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r"[^']", String.Single),
- ],
- 'doublequote': [
- (r'\\', String.Escape),
- (r'\\"', String.Escape),
- (r'\\${', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\$\{', String.Interpol, 'antiquote'),
- (r'[^"]', String.Double),
- ],
- 'antiquote': [
- (r"}", String.Interpol, '#pop'),
-            # TODO: we should probably also escape ''${ and \${ here
- (r"\$\{", String.Interpol, '#push'),
- include('root'),
- ],
- }
-
- def analyse_text(text):
- rv = 0.0
- # TODO: let/in
- if re.search(r'import.+?<[^>]+>', text):
- rv += 0.4
- if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
- rv += 0.4
- if re.search(r'with\s+[a-zA-Z\.]+;', text):
- rv += 0.2
- if re.search(r'inherit\s+[a-zA-Z()\.];', text):
- rv += 0.2
- if re.search(r'=\s+mkIf\s+', text):
- rv += 0.4
- if re.search(r'\{[a-zA-Z,\s]+\}:', text):
- rv += 0.1
- return rv
-
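(Illustrative sketch, not part of this changeset: analyse_text scores like the one above are consumed by pygments.lexers.guess_lexer, which picks the highest-scoring lexer, so a Nix-looking snippet should typically resolve to NixLexer.)

# Sketch only.
from pygments.lexers import guess_lexer

snippet = '{ stdenv, fetchurl }:\nstdenv.mkDerivation rec {\n  name = "demo";\n}\n'
print(guess_lexer(snippet))  # expected: a NixLexer instance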
-
-def gen_elixir_string_rules(name, symbol, token):
- states = {}
- states['string_' + name] = [
- (r'[^#%s\\]+' % (symbol,), token),
- include('escapes'),
- (r'\\.', token),
- (r'(%s)(:?)' % (symbol,), bygroups(token, Punctuation), "#pop"),
- include('interpol')
- ]
- return states
-
-def gen_elixir_sigstr_rules(term, token, interpol=True):
- if interpol:
- return [
- (r'[^#%s\\]+' % (term,), token),
- include('escapes'),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- include('interpol')
- ]
- else:
- return [
- (r'[^%s\\]+' % (term,), token),
- (r'\\.', token),
- (r'%s[a-zA-Z]*' % (term,), token, '#pop'),
- ]
-
-class ElixirLexer(RegexLexer):
- """
- For the `Elixir language <http://elixir-lang.org>`_.
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir'
- aliases = ['elixir', 'ex', 'exs']
- filenames = ['*.ex', '*.exs']
- mimetypes = ['text/x-elixir']
-
- KEYWORD = ['fn', 'do', 'end', 'after', 'else', 'rescue', 'catch']
- KEYWORD_OPERATOR = ['not', 'and', 'or', 'xor', 'when', 'in']
- BUILTIN = [
- 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise',
- 'quote', 'unquote', 'unquote_splicing', 'throw', 'super'
- ]
- BUILTIN_DECLARATION = [
- 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop',
- 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback'
- ]
-
- BUILTIN_NAMESPACE = ['import', 'require', 'use', 'alias']
- CONSTANT = ['nil', 'true', 'false']
-
- PSEUDO_VAR = ['_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__']
-
- OPERATORS3 = ['<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==']
- OPERATORS2 = [
- '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~'
- ]
- OPERATORS1 = ['<', '>', '+', '-', '*', '/', '!', '^', '&']
-
- PUNCTUATION = [
- '\\\\', '<<', '>>', '::', '->', '<-', '=>', '|', '(', ')',
- '{', '}', ';', ',', '.', '[', ']', '%', '='
- ]
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if value in self.KEYWORD:
- yield index, Keyword, value
- elif value in self.KEYWORD_OPERATOR:
- yield index, Operator.Word, value
- elif value in self.BUILTIN:
- yield index, Keyword, value
- elif value in self.BUILTIN_DECLARATION:
- yield index, Keyword.Declaration, value
- elif value in self.BUILTIN_NAMESPACE:
- yield index, Keyword.Namespace, value
- elif value in self.CONSTANT:
- yield index, Name.Constant, value
- elif value in self.PSEUDO_VAR:
- yield index, Name.Builtin.Pseudo, value
- else:
- yield index, token, value
- else:
- yield index, token, value
-
- def gen_elixir_sigil_rules():
- # these braces are balanced inside the sigil string
- braces = [
- (r'\{', r'\}', 'cb'),
- (r'\[', r'\]', 'sb'),
- (r'\(', r'\)', 'pa'),
- (r'\<', r'\>', 'ab'),
- ]
-
- # these are also valid sigil terminators, they are not balanced
- terms = [
- (r'/', 'slas'), (r'\|', 'pipe'), ('"', 'quot'), ("'", 'apos'),
- ]
-
- # heredocs have slightly different rules, they are not balanced
- triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')]
-
- token = String.Other
- states = {'sigils': []}
-
- for term, name in triquotes:
- states['sigils'] += [
- (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-intp')),
- (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc),
- (name + '-end', name + '-no-intp')),
- ]
-
- states[name +'-end'] = [(r'[a-zA-Z]*', token, '#pop')]
- states[name +'-intp'] = [
- (r'^\s*' + term, String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ]
- states[name +'-no-intp'] = [
- (r'^\s*' + term, String.Heredoc, '#pop'),
- include('heredoc_no_interpol'),
- ]
-
- for term, name in terms:
- states['sigils'] += [
- (r'~[a-z]' + term, token, name + '-intp'),
- (r'~[A-Z]' + term, token, name + '-no-intp'),
- ]
-
- # Similar states to the braced sigils, but no balancing of
- # terminators
- states[name +'-intp'] = gen_elixir_sigstr_rules(term, token)
- states[name +'-no-intp'] = \
- gen_elixir_sigstr_rules(term, token, interpol=False)
-
- for lbrace, rbrace, name in braces:
- states['sigils'] += [
- (r'~[a-z]' + lbrace, token, name + '-intp'),
- (r'~[A-Z]' + lbrace, token, name + '-no-intp')
- ]
-
- states[name +'-intp'] = [
- (r'\\.', token),
- (lbrace, token, '#push'),
- ] + gen_elixir_sigstr_rules(rbrace, token)
-
- states[name +'-no-intp'] = [
- (r'\\.', token),
- (lbrace, token, '#push'),
- ] + gen_elixir_sigstr_rules(rbrace, token, interpol=False)
-
- return states
-
- op3_re = "|".join(re.escape(s) for s in OPERATORS3)
- op2_re = "|".join(re.escape(s) for s in OPERATORS2)
- op1_re = "|".join(re.escape(s) for s in OPERATORS1)
- ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re)
- punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION)
- name_re = r'[a-z_][a-zA-Z_0-9]*[!\?]?'
- modname_re = r'[A-Z][A-Za-z_]*(?:\.[A-Z][A-Za-z_]*)*'
- complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re)
- special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})'
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single),
-
- # Various kinds of characters
- (r'(?i)(\?)(\\x{)([\da-f]+)(})',
- bygroups(String.Char,
- String.Escape, Number.Hex, String.Escape)),
- (r'(?i)(\?)(\\x[\da-f]{1,2})',
- bygroups(String.Char, String.Escape)),
- (r'(\?)(\\[0-7]{1,3})',
- bygroups(String.Char, String.Escape)),
- (r'(\?)(\\[abdefnrstv])',
- bygroups(String.Char, String.Escape)),
- (r'\?\\?.', String.Char),
-
- # atoms
- (r':' + special_atom_re, String.Symbol),
- (r':' + complex_name_re, String.Symbol),
- (r':"', String.Symbol, 'string_double_atom'),
- (r":'", String.Symbol, 'string_single_atom'),
-
- # [keywords: ...]
- (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re),
- bygroups(String.Symbol, Punctuation)),
-
- # @attributes
- (r'@' + name_re, Name.Attribute),
-
- # operators and punctuation
- (op3_re, Operator),
- (op2_re, Operator),
- (punctuation_re, Punctuation),
- (r'&\d', Name.Entity), # anon func arguments
- (op1_re, Operator),
-
- # identifiers
- (name_re, Name),
- (modname_re, Name.Class),
-
- # numbers
- (r'0[bB][01]+', Number.Bin),
- (r'0[0-7]+', Number.Oct),
- (r'(?i)0x[\da-f]+', Number.Hex),
- (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float),
- (r'\d(_?\d)*', Number.Integer),
-
- # strings and heredocs
- (r'"""\s*', String.Heredoc, 'heredoc_double'),
- (r"'''\s*$", String.Heredoc, 'heredoc_single'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single'),
-
- include('sigils'),
- ],
- 'heredoc_double': [
- (r'^\s*"""', String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_single': [
- (r"^\s*'''", String.Heredoc, '#pop'),
- include('heredoc_interpol'),
- ],
- 'heredoc_interpol': [
- (r'[^#\\\n]+', String.Heredoc),
- include('escapes'),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- include('interpol'),
- ],
- 'heredoc_no_interpol': [
- (r'[^\\\n]+', String.Heredoc),
- (r'\\.', String.Heredoc),
- (r'\n+', String.Heredoc),
- ],
- 'escapes': [
- (r'(?i)(\\x{)([\da-f]+)(})',
- bygroups(String.Escape, Number.Hex, String.Escape)),
- (r'(?i)\\x[\da-f]{1,2}', String.Escape),
- (r'\\[0-7]{1,3}', String.Escape),
- (r'\\[abdefnrstv]', String.Escape),
- ],
- 'interpol': [
- (r'#{', String.Interpol, 'interpol_string'),
- ],
- 'interpol_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- }
- tokens.update(gen_elixir_string_rules('double', '"', String.Double))
- tokens.update(gen_elixir_string_rules('single', "'", String.Single))
- tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol))
- tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol))
- tokens.update(gen_elixir_sigil_rules())
-
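(Illustrative sketch, not part of this changeset: the Name re-classification in get_tokens_unprocessed above can be seen by dumping tokens for a small module -- 'defmodule' comes out as Keyword.Declaration and 'nil' as Name.Constant.)

# Sketch only -- assumes the 'elixir' alias above is registered.
from pygments.lexers import get_lexer_by_name

source = 'defmodule Demo do\n  def hello, do: nil\nend\n'
for token_type, value in get_lexer_by_name('elixir').get_tokens(source):
    if value.strip():
        print(token_type, repr(value))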
-
-class ElixirConsoleLexer(Lexer):
- """
- For Elixir interactive console (iex) output like:
-
- .. sourcecode:: iex
-
- iex> [head | tail] = [1,2,3]
- [1,2,3]
- iex> head
- 1
- iex> tail
- [2,3]
- iex> [head | tail]
- [1,2,3]
- iex> length [head | tail]
- 3
-
- .. versionadded:: 1.5
- """
-
- name = 'Elixir iex session'
- aliases = ['iex']
- mimetypes = ['text/x-elixir-shellsession']
-
- _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ')
-
- def get_tokens_unprocessed(self, text):
- exlexer = ElixirLexer(**self.options)
-
- curcode = ''
- in_error = False
- insertions = []
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith(u'** '):
- in_error = True
- insertions.append((len(curcode),
- [(0, Generic.Error, line[:-1])]))
- curcode += line[-1:]
- else:
- m = self._prompt_re.match(line)
- if m is not None:
- in_error = False
- end = m.end()
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:end])]))
- curcode += line[end:]
- else:
- if curcode:
- for item in do_insertions(insertions,
- exlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
- token = Generic.Error if in_error else Generic.Output
- yield match.start(), token, line
- if curcode:
- for item in do_insertions(insertions,
- exlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class KokaLexer(RegexLexer):
- """
- Lexer for the `Koka <http://koka.codeplex.com>`_
- language.
-
- .. versionadded:: 1.6
- """
-
- name = 'Koka'
- aliases = ['koka']
- filenames = ['*.kk', '*.kki']
- mimetypes = ['text/x-koka']
-
- keywords = [
- 'infix', 'infixr', 'infixl',
- 'type', 'cotype', 'rectype', 'alias',
- 'struct', 'con',
- 'fun', 'function', 'val', 'var',
- 'external',
- 'if', 'then', 'else', 'elif', 'return', 'match',
- 'private', 'public', 'private',
- 'module', 'import', 'as',
- 'include', 'inline',
- 'rec',
- 'try', 'yield', 'enum',
- 'interface', 'instance',
- ]
-
- # keywords that are followed by a type
- typeStartKeywords = [
- 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
- ]
-
- # keywords valid in a type
- typekeywords = [
- 'forall', 'exists', 'some', 'with',
- ]
-
- # builtin names and special names
- builtin = [
- 'for', 'while', 'repeat',
- 'foreach', 'foreach-indexed',
- 'error', 'catch', 'finally',
- 'cs', 'js', 'file', 'ref', 'assigned',
- ]
-
- # symbols that can be in an operator
- symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+'
-
- # symbol boundary: an operator keyword should not be followed by any of these
- sboundary = '(?!'+symbols+')'
-
- # name boundary: a keyword should not be followed by any of these
- boundary = '(?![\w/])'
-
- # koka token abstractions
- tokenType = Name.Attribute
- tokenTypeDef = Name.Class
- tokenConstructor = Generic.Emph
-
- # main lexer
- tokens = {
- 'root': [
- include('whitespace'),
-
- # go into type mode
- (r'::?' + sboundary, tokenType, 'type'),
- (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'alias-type'),
- (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'struct-type'),
- ((r'(%s)' % '|'.join(typeStartKeywords)) +
- r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
- 'type'),
-
- # special sequences of tokens (we use ?: for non-capturing group as
- # required by 'bygroups')
- (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
- bygroups(Keyword, Text, Keyword, Name.Namespace)),
- (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
- r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
- r'((?:[a-z]\w*/)*[a-z]\w*))?',
- bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
- Keyword, Name.Namespace)),
-
- (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
- r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Name.Function)),
- (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
- r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
- bygroups(Keyword, Text, Keyword, Name.Function)),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
- (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
- (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
- (r'::?|:=|\->|[=\.]' + sboundary, Keyword),
-
- # names
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenConstructor)),
- (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
- (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
- bygroups(Name.Namespace, Name)),
- (r'_\w*', Name.Variable),
-
- # literal string
- (r'@"', String.Double, 'litstring'),
-
- # operators
- (symbols + "|/(?![\*/])", Operator),
- (r'`', Operator),
- (r'[\{\}\(\)\[\];,]', Punctuation),
-
- # literals. No check for literal characters with len > 1
- (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- (r"'", String.Char, 'char'),
- (r'"', String.Double, 'string'),
- ],
-
- # type started by alias
- 'alias-type': [
- (r'=',Keyword),
- include('type')
- ],
-
- # type started by struct
- 'struct-type': [
- (r'(?=\((?!,*\)))',Punctuation, '#pop'),
- include('type')
- ],
-
- # type started by colon
- 'type': [
- (r'[\(\[<]', tokenType, 'type-nested'),
- include('type-content')
- ],
-
- # type nested in brackets: can contain parameters, comma etc.
- 'type-nested': [
- (r'[\)\]>]', tokenType, '#pop'),
- (r'[\(\[<]', tokenType, 'type-nested'),
- (r',', tokenType),
- (r'([a-z]\w*)(\s*)(:)(?!:)',
- bygroups(Name, Text, tokenType)), # parameter name
- include('type-content')
- ],
-
- # shared contents of a type
- 'type-content': [
- include('whitespace'),
-
- # keywords
- (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
- (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
- Keyword, '#pop'), # need to match because names overlap...
-
- # kinds
- (r'[EPHVX]' + boundary, tokenType),
-
- # type names
- (r'[a-z][0-9]*(?![\w/])', tokenType ),
- (r'_\w*', tokenType.Variable), # Generic.Emph
- (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
- bygroups(Name.Namespace, tokenType)),
- (r'((?:[a-z]\w*/)*)([a-z]\w+)',
- bygroups(Name.Namespace, tokenType)),
-
- # type keyword operators
- (r'::|\->|[\.:|]', tokenType),
-
- #catchall
- default('#pop')
- ],
-
- # comments and literals
- 'whitespace': [
- (r'\n\s*#.*$', Comment.Preproc),
- (r'\s+', Text),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'//.*$', Comment.Single)
- ],
- 'comment': [
- (r'[^/\*]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[\*/]', Comment.Multiline),
- ],
- 'litstring': [
- (r'[^"]+', String.Double),
- (r'""', String.Escape),
- (r'"', String.Double, '#pop'),
- ],
- 'string': [
- (r'[^\\"\n]+', String.Double),
- include('escape-sequence'),
- (r'["\n]', String.Double, '#pop'),
- ],
- 'char': [
- (r'[^\\\'\n]+', String.Char),
- include('escape-sequence'),
- (r'[\'\n]', String.Char, '#pop'),
- ],
- 'escape-sequence': [
- (r'\\[nrt\\\"\']', String.Escape),
- (r'\\x[0-9a-fA-F]{2}', String.Escape),
- (r'\\u[0-9a-fA-F]{4}', String.Escape),
- # Yes, \U literals are 6 hex digits.
- (r'\\U[0-9a-fA-F]{6}', String.Escape)
- ]
- }
+__all__ = []
diff --git a/pygments/lexers/go.py b/pygments/lexers/go.py
new file mode 100644
index 00000000..11e29352
--- /dev/null
+++ b/pygments/lexers/go.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.go
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Google Go language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['GoLexer']
+
+
+class GoLexer(RegexLexer):
+ """
+ For `Go <http://golang.org>`_ source.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Go'
+ filenames = ['*.go']
+ aliases = ['go']
+ mimetypes = ['text/x-gosrc']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuations
+ (r'//(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'(import|package)\b', Keyword.Namespace),
+ (r'(var|func|struct|map|chan|type|interface|const)\b',
+ Keyword.Declaration),
+ (words((
+ 'break', 'default', 'select', 'case', 'defer', 'go',
+ 'else', 'goto', 'switch', 'fallthrough', 'if', 'range',
+ 'continue', 'for', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|iota|nil)\b', Keyword.Constant),
+ # It seems the builtin types aren't actually keywords, but
+ # can be used as functions. So we need two declarations.
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr',
+ 'print', 'println', 'panic', 'recover', 'close', 'complex',
+ 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete',
+ 'new', 'make'), suffix=r'\b(\()'),
+ bygroups(Name.Builtin, Punctuation)),
+ (words((
+ 'uint', 'uint8', 'uint16', 'uint32', 'uint64',
+ 'int', 'int8', 'int16', 'int32', 'int64',
+ 'float', 'float32', 'float64',
+ 'complex64', 'complex128', 'byte', 'rune',
+ 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'),
+ Keyword.Type),
+ # imaginary_lit
+ (r'\d+i', Number),
+ (r'\d+\.\d*([Ee][-+]\d+)?i', Number),
+ (r'\.\d+([Ee][-+]\d+)?i', Number),
+ (r'\d+[Ee][-+]\d+i', Number),
+ # float_lit
+ (r'\d+(\.\d+[eE][+\-]?\d+|'
+ r'\.\d*|[eE][+\-]?\d+)', Number.Float),
+ (r'\.\d+([eE][+\-]?\d+)?', Number.Float),
+ # int_lit
+ # -- octal_lit
+ (r'0[0-7]+', Number.Oct),
+ # -- hex_lit
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # -- decimal_lit
+ (r'(0|[1-9][0-9]*)', Number.Integer),
+ # char_lit
+ (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""",
+ String.Char),
+ # StringLiteral
+ # -- raw_string_lit
+ (r'`[^`]*`', String),
+ # -- interpreted_string_lit
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Tokens
+ (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|'
+ r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator),
+ (r'[|^<>=!()\[\]{}.,;:]', Punctuation),
+ # identifier
+ (r'[^\W\d]\w*', Name.Other),
+ ]
+ }
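(Illustrative usage sketch, not part of this changeset: the new module can be imported directly and used with any formatter.)

# Sketch only -- assumes the package layout introduced by this commit.
from pygments import highlight
from pygments.lexers.go import GoLexer
from pygments.formatters import HtmlFormatter

code = 'package main\n\nfunc main() {\n\tprintln("hi")\n}\n'
print(highlight(code, GoLexer(), HtmlFormatter()))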
diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py
index 6aa446c7..04ec3262 100644
--- a/pygments/lexers/graph.py
+++ b/pygments/lexers/graph.py
@@ -12,8 +12,8 @@
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this
-from pygments.token import Keyword, Punctuation, Text, Comment, Operator, Name,\
- String, Number, Generic, Whitespace
+from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
+ String, Number, Whitespace
__all__ = ['CypherLexer']
@@ -30,7 +30,7 @@ class CypherLexer(RegexLexer):
"""
name = 'Cypher'
aliases = ['cypher']
- filenames = ['*.cyp','*.cypher']
+ filenames = ['*.cyp', '*.cypher']
flags = re.MULTILINE | re.IGNORECASE
@@ -43,7 +43,7 @@ class CypherLexer(RegexLexer):
include('strings'),
include('whitespace'),
include('barewords'),
- ],
+ ],
'comment': [
(r'^.*//.*\n', Comment.Single),
],
@@ -77,5 +77,3 @@ class CypherLexer(RegexLexer):
(r'\d+', Number),
],
}
-
-
diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py
new file mode 100644
index 00000000..bed5f182
--- /dev/null
+++ b/pygments/lexers/graphics.py
@@ -0,0 +1,551 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.graphics
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for computer graphics and plotting related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words, include, bygroups, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, \
+ Number, Punctuation, String
+
+__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer',
+ 'PovrayLexer']
+
+
+class GLShaderLexer(RegexLexer):
+ """
+ GLSL (OpenGL Shader) lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'GLSL'
+ aliases = ['glsl']
+ filenames = ['*.vert', '*.frag', '*.geo']
+ mimetypes = ['text/x-glslsrc']
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Preproc),
+ (r'//.*', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?',
+ Operator),
+ (r'[?:]', Operator), # quick hack for ternary
+ (r'\bdefined\b', Operator),
+ (r'[;{}(),\[\]]', Punctuation),
+ # FIXME when e is present, no decimal point needed
+ (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float),
+ (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]*', Number.Hex),
+ (r'0[0-7]*', Number.Oct),
+ (r'[1-9][0-9]*', Number.Integer),
+ (words((
+ 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break',
+ 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out',
+ 'inout', 'float', 'int', 'void', 'bool', 'true', 'false',
+                'invariant', 'discard', 'return', 'mat2', 'mat3', 'mat4',
+ 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3',
+ 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4',
+ 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4',
+                'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube',
+ 'sampler1DShadow', 'sampler2DShadow', 'struct'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this',
+ 'packed', 'goto', 'switch', 'default', 'inline', 'noinline',
+ 'volatile', 'public', 'static', 'extern', 'external', 'interface',
+ 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp',
+ 'mediump', 'highp', 'precision', 'input', 'output',
+ 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4',
+ 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect',
+ 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'),
+ prefix=r'\b', suffix=r'\b'),
+ Keyword), # future use
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+ (r'\.', Punctuation),
+ (r'\s+', Text),
+ ],
+ }
+
+
+class PostScriptLexer(RegexLexer):
+ """
+ Lexer for PostScript files.
+
+ The PostScript Language Reference published by Adobe at
+ <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
+ is the authority for this.
+
+ .. versionadded:: 1.4
+ """
+ name = 'PostScript'
+ aliases = ['postscript', 'postscr']
+ filenames = ['*.ps', '*.eps']
+ mimetypes = ['application/postscript']
+
+ delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
+ delimiter_end = r'(?=[%s])' % delimiter
+
+ valid_name_chars = r'[^%s]' % delimiter
+ valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
+
+ tokens = {
+ 'root': [
+ # All comment types
+ (r'^%!.+\n', Comment.Preproc),
+ (r'%%.*\n', Comment.Special),
+ (r'(^%.*\n){2,}', Comment.Multiline),
+ (r'%.*\n', Comment.Single),
+
+ # String literals are awkward; enter separate state.
+ (r'\(', String, 'stringliteral'),
+
+ (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
+
+ # Numbers
+ (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
+ # Slight abuse: use Oct to signify any explicit base system
+ (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
+ r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
+ (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
+ + delimiter_end, Number.Float),
+ (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
+
+ # References
+ (r'\/%s' % valid_name, Name.Variable),
+
+ # Names
+ (valid_name, Name.Function), # Anything else is executed
+
+            # These keywords were taken from
+ # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
+ # Is there an authoritative list anywhere that doesn't involve
+ # trawling documentation?
+
+ (r'(false|true)' + delimiter_end, Keyword.Constant),
+
+ # Conditionals / flow control
+ (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)'
+ + delimiter_end, Keyword.Reserved),
+
+ (words((
+ 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin',
+ 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat',
+ 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix',
+ 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix',
+ 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end',
+ 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get',
+ 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv',
+ 'idtransform', 'index', 'invertmatrix', 'itransform', 'length',
+ 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto',
+ 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print',
+ 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat',
+ 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run',
+ 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray',
+ 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix',
+ 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt',
+ 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror',
+ 'transform', 'translate', 'truncate', 'typecheck', 'undefined',
+ 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end),
+ Name.Builtin),
+
+ (r'\s+', Text),
+ ],
+
+ 'stringliteral': [
+ (r'[^\(\)\\]+', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'\(', String, '#push'),
+ (r'\)', String, '#pop'),
+ ],
+
+ 'escape': [
+ (r'([0-8]{3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'),
+ ],
+ }
+
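(Illustrative sketch, not part of this changeset: per the rules above, radix literals such as 8#17 are reported as Number.Oct -- the deliberate "slight abuse" noted in the comment -- while plain reals become Number.Float.)

# Sketch only -- assumes the 'postscript' alias above is registered.
from pygments.lexers import get_lexer_by_name

for token_type, value in get_lexer_by_name('postscript').get_tokens('8#17 3.14 (text) '):
    if value.strip():
        print(token_type, repr(value))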
+
+class AsymptoteLexer(RegexLexer):
+ """
+ For `Asymptote <http://asymptote.sf.net/>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Asymptote'
+ aliases = ['asy', 'asymptote']
+ filenames = ['*.asy']
+ mimetypes = ['text/x-asymptote']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+
+ tokens = {
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
+ ],
+ 'statements': [
+ # simple string (TeX friendly)
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # C style string (with character escapes)
+ (r"'", String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.]', Punctuation),
+ (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
+ (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
+ r'return|break|continue|struct|typedef|new|access|import|'
+ r'unravel|from|include|quote|static|public|private|restricted|'
+ r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
+            # Since an asy-type-name can also be an asy-function-name,
+            # the following tests whether the string " [a-zA-Z]" follows
+            # the Keyword.Type.
+            # Of course it is not perfect!
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
+ r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
+ r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
+ r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
+ r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
+ r'path3|pen|picture|point|position|projection|real|revolution|'
+ r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
+ r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
+ r'transformation|tree|triangle|trilinear|triple|vector|'
+ r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
+            # Now the asy-type-names which are not asy-function-names
+            # (except your own!).
+            # Perhaps useless
+ (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
+ r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
+ r'picture|position|real|revolution|slice|splitface|ticksgridT|'
+ r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
+ ('[a-zA-Z_]\w*:(?!:)', Name.Label),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'root': [
+ include('whitespace'),
+ # functions
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')({)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation),
+ 'function'),
+ # function declarations
+ (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*\([^;]*?\))' # signature
+ r'(' + _ws + r')(;)',
+ bygroups(using(this), Name.Function, using(this), using(this),
+ Punctuation)),
+ ('', Text, 'statement'),
+ ],
+ 'statement': [
+ include('whitespace'),
+ include('statements'),
+ ('[{}]', Punctuation),
+ (';', Punctuation, '#pop'),
+ ],
+ 'function': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ ('{', Punctuation, '#push'),
+ ('}', Punctuation, '#pop'),
+ ],
+ 'string': [
+ (r"'", String, '#pop'),
+ (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'\n', String),
+ (r"[^\\'\n]+", String), # all other characters
+            (r'\\\n', String), # line continuation
+            (r'\\n', String),  # literal backslash and 'n'
+ (r'\\', String), # stray backslash
+ ],
+ }
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._asy_builtins import ASYFUNCNAME, ASYVARNAME
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in ASYFUNCNAME:
+ token = Name.Function
+ elif token is Name and value in ASYVARNAME:
+ token = Name.Variable
+ yield index, token, value
+
+
+def _shortened(word):
+ dpos = word.find('$')
+ return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b'
+ for i in range(len(word), dpos, -1))
+
+
+def _shortened_many(*words):
+ return '|'.join(map(_shortened, words))
+
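(Illustrative sketch, not part of this changeset: Gnuplot commands may be abbreviated, and the module-private helpers above turn a '$'-marked word into an alternation of every accepted prefix; they are imported here only for demonstration.)

# Sketch only.
from pygments.lexers.graphics import _shortened, _shortened_many

print(_shortened('bi$nd'))                # bind\b|bin\b|bi\b
print(_shortened_many('ex$it', 'q$uit'))  # exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b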
+
+class GnuplotLexer(RegexLexer):
+ """
+ For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
+
+ .. versionadded:: 0.11
+ """
+
+ name = 'Gnuplot'
+ aliases = ['gnuplot']
+ filenames = ['*.plot', '*.plt']
+ mimetypes = ['text/x-gnuplot']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ (_shortened('bi$nd'), Keyword, 'bind'),
+ (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
+ (_shortened('f$it'), Keyword, 'fit'),
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
+ (r'else\b', Keyword),
+ (_shortened('pa$use'), Keyword, 'pause'),
+ (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
+ (_shortened('sa$ve'), Keyword, 'save'),
+ (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
+ (_shortened_many('sh$ow', 'uns$et'),
+ Keyword, ('noargs', 'optionarg')),
+ (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
+ 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
+ 'pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'sy$stem', 'up$date'),
+ Keyword, 'genericargs'),
+ (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
+ 'she$ll', 'test$'),
+ Keyword, 'noargs'),
+ ('([a-zA-Z_]\w*)(\s*)(=)',
+ bygroups(Name.Variable, Text, Operator), 'genericargs'),
+ ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
+ bygroups(Name.Function, Text, Operator), 'genericargs'),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r';', Keyword),
+ ],
+ 'comment': [
+ (r'[^\\\n]', Comment),
+ (r'\\\n', Comment),
+ (r'\\', Comment),
+ # don't add the newline to the Comment token
+ ('', Comment, '#pop'),
+ ],
+ 'whitespace': [
+ ('#', Comment, 'comment'),
+ (r'[ \t\v\f]+', Text),
+ ],
+ 'noargs': [
+ include('whitespace'),
+ # semicolon and newline end the argument list
+ (r';', Punctuation, '#pop'),
+ (r'\n', Text, '#pop'),
+ ],
+ 'dqstring': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'sqstring': [
+ (r"''", String), # escaped single quote
+ (r"'", String, '#pop'),
+ (r"[^\\'\n]+", String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # normal backslash
+ (r'\n', String, '#pop'), # newline ends the string too
+ ],
+ 'genericargs': [
+ include('noargs'),
+ (r'"', String, 'dqstring'),
+ (r"'", String, 'sqstring'),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'-?\d+', Number.Integer),
+ ('[,.~!%^&*+=|?:<>/-]', Operator),
+ ('[{}()\[\]]', Punctuation),
+ (r'(eq|ne)\b', Operator.Word),
+ (r'([a-zA-Z_]\w*)(\s*)(\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'@[a-zA-Z_]\w*', Name.Constant), # macros
+ (r'\\\n', Text),
+ ],
+ 'optionarg': [
+ include('whitespace'),
+ (_shortened_many(
+ "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der",
+ "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta",
+ "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign",
+ "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid",
+ "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle",
+ "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale",
+ "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin",
+ "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot",
+ "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics",
+ "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics",
+ "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput",
+ "pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot",
+ "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze",
+ "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs",
+ "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le",
+ "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta",
+ "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel",
+ "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs",
+ "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs",
+ "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs",
+ "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs",
+ "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs",
+ "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs",
+ "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs",
+ "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange",
+ "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange",
+ "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis",
+ "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'),
+ ],
+ 'bind': [
+ ('!', Keyword, '#pop'),
+ (_shortened('all$windows'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'quit': [
+ (r'gnuplot\b', Keyword),
+ include('noargs'),
+ ],
+ 'fit': [
+ (r'via\b', Name.Builtin),
+ include('plot'),
+ ],
+ 'if': [
+ (r'\)', Punctuation, '#pop'),
+ include('genericargs'),
+ ],
+ 'pause': [
+ (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
+ (_shortened('key$press'), Name.Builtin),
+ include('genericargs'),
+ ],
+ 'plot': [
+ (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
+ 'mat$rix', 's$mooth', 'thru$', 't$itle',
+ 'not$itle', 'u$sing', 'w$ith'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ 'save': [
+ (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
+ Name.Builtin),
+ include('genericargs'),
+ ],
+ }
+
+
+class PovrayLexer(RegexLexer):
+ """
+ For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
+
+ .. versionadded:: 0.11
+ """
+ name = 'POVRay'
+ aliases = ['pov']
+ filenames = ['*.pov', '*.inc']
+ mimetypes = ['text/x-povray']
+
+ tokens = {
+ 'root': [
+ (r'/\*[\w\W]*?\*/', Comment.Multiline),
+ (r'//.*\n', Comment.Single),
+ (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
+ (words((
+ 'break', 'case', 'debug', 'declare', 'default', 'define', 'else',
+ 'elseif', 'end', 'error', 'fclose', 'fopen', 'for', 'if', 'ifdef',
+ 'ifndef', 'include', 'local', 'macro', 'range', 'read', 'render',
+ 'statistics', 'switch', 'undef', 'version', 'warning', 'while',
+ 'write'), prefix=r'#', suffix=r'\b'),
+ Comment.Preproc),
+ (words((
+ 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout',
+ 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle',
+ 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma',
+ 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation',
+ 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples',
+ 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size',
+ 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map',
+ 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock',
+ 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat',
+ 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh',
+ 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping',
+ 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction',
+ 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity',
+ 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent',
+ 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false',
+ 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor',
+ 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif',
+ 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold',
+ 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex',
+ 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate',
+ 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard',
+ 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like',
+ 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map',
+ 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level',
+ 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar',
+ 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves',
+ 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open',
+ 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3',
+ 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment',
+ 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm',
+ 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color',
+ 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow',
+ 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red',
+ 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt',
+ 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave',
+ 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere',
+ 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral',
+ 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str',
+ 'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys',
+ 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3',
+ 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold',
+ 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit',
+ 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type',
+ 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps',
+ 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength',
+ 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light',
+ 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width',
+ 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (words((
+ 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference',
+ 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe',
+ 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism',
+ 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid',
+ 'text', 'torus', 'triangle', 'union'), suffix=r'\b'),
+ Name.Builtin),
+ # TODO: <=, etc
+ (r'[\[\](){}<>;,]', Punctuation),
+ (r'[-+*/=]', Operator),
+ (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
+ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
+ (r'[0-9]+\.[0-9]*', Number.Float),
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r'\s+', Text),
+ ]
+ }
diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py
new file mode 100644
index 00000000..6c71aa97
--- /dev/null
+++ b/pygments/lexers/haskell.py
@@ -0,0 +1,839 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haskell
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haskell and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
+ default, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments import unistring as uni
+
+__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer',
+ 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer',
+ 'LiterateCryptolLexer', 'KokaLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class HaskellLexer(RegexLexer):
+ """
+ A Haskell lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 0.8
+ """
+ name = 'Haskell'
+ aliases = ['haskell', 'hs']
+ filenames = ['*.hs']
+ mimetypes = ['text/x-haskell']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance',
+ 'let', 'newtype', 'of', 'then', 'type', 'where', '_')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
+ (r'{-', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r"'[^\\]'", String.Char), # this has to come before the TH quote
+ (r'^[_' + uni.Ll + r'][\w\']*', Name.Function),
+ (r"'?[_" + uni.Ll + r"][\w']*", Name),
+ (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[\w.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[' + uni.Lu + r']\w*', Keyword.Type),
+ (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function),
+ (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
+ (r'{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'{-', Comment.Multiline, '#push'),
+ (r'-}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][' + uni.Lu + r'@\^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
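+# An illustrative HaskellLexer usage sketch; highlight() and HtmlFormatter are
+# the standard Pygments entry points (not imported by this module), and the
+# snippet being highlighted is made up:
+#
+#     from pygments import highlight
+#     from pygments.formatters import HtmlFormatter
+#
+#     code = 'main :: IO ()\nmain = putStrLn "hello"\n'
+#     print(highlight(code, HaskellLexer(), HtmlFormatter()))
+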
+
+class IdrisLexer(RegexLexer):
+ """
+ A lexer for the dependently typed programming language Idris.
+
+ Based on the Haskell and Agda Lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Idris'
+ aliases = ['idris', 'idr']
+ filenames = ['*.idr']
+ mimetypes = ['text/x-idris']
+
+ reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else',
+ 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto',
+ 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract',
+ 'total', 'partial',
+ 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with',
+ 'pattern', 'term', 'syntax', 'prefix',
+ 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit',
+ 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial')
+
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access',
+ 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language')
+
+ tokens = {
+ 'root': [
+ # Comments
+ (r'^(\s*)(%%%s)' % '|'.join(directives),
+ bygroups(Text, Keyword.Reserved)),
+ (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)),
+ (r'(\s*)({-)', bygroups(Text, Comment.Multiline), 'comment'),
+ # Declaration
+ (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ (r'[a-z][\w\']*', Text),
+ # Special Symbols
+ (r'(<-|::|->|=>|=)', Operator.Word), # specials
+ (r'([\(\)\{\}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s\(\)\{\}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][\w.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][\w.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z]\w*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ (r'--.*$', Comment.Single),
+ (r'{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ # NOTE: the next four states are shared in the AgdaLexer; make sure
+ # any change is compatible with Agda as well or copy over and change
+ 'comment': [
+ # Multiline Comments
+ (r'[^-{}]+', Comment.Multiline),
+ (r'{-', Comment.Multiline, '#push'),
+ (r'-}', Comment.Multiline, '#pop'),
+ (r'[-{}]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']", String.Char),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop')
+ ],
+ }
+
+
+class AgdaLexer(RegexLexer):
+ """
+ For the `Agda <http://wiki.portal.chalmers.se/agda/pmwiki.php>`_
+ dependently typed functional programming language and proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Agda'
+ aliases = ['agda']
+ filenames = ['*.agda']
+ mimetypes = ['text/x-agda']
+
+ reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data',
+ 'field', 'forall', 'hiding', 'in', 'inductive', 'infix',
+ 'infixl', 'infixr', 'instance', 'let', 'mutual', 'open',
+ 'pattern', 'postulate', 'primitive', 'private',
+ 'quote', 'quoteGoal', 'quoteTerm',
+ 'record', 'renaming', 'rewrite', 'syntax', 'tactic',
+ 'unquote', 'unquoteDecl', 'using', 'where', 'with']
+
+ tokens = {
+ 'root': [
+ # Declaration
+ (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)',
+ bygroups(Text, Name.Function, Text, Operator.Word, Text)),
+ # Comments
+ (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
+ (r'{-', Comment.Multiline, 'comment'),
+ # Holes
+ (r'{!', Comment.Directive, 'hole'),
+ # Lexemes:
+ # Identifiers
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'),
+ (r'\b(Set|Prop)\b', Keyword.Type),
+ # Special Symbols
+ (r'(\(|\)|\{|\})', Operator),
+ (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word),
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Strings
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ (r'[^\s\(\)\{\}]+', Text),
+ (r'\s+?', Text), # Whitespace
+ ],
+ 'hole': [
+ # Holes
+ (r'[^!{}]+', Comment.Directive),
+ (r'{!', Comment.Directive, '#push'),
+ (r'!}', Comment.Directive, '#pop'),
+ (r'[!{}]', Comment.Directive),
+ ],
+ 'module': [
+ (r'{-', Comment.Multiline, 'comment'),
+ (r'[a-zA-Z][\w.]*', Name, '#pop'),
+ (r'[^a-zA-Z]+', Text)
+ ],
+ 'comment': HaskellLexer.tokens['comment'],
+ 'character': HaskellLexer.tokens['character'],
+ 'string': HaskellLexer.tokens['string'],
+ 'escape': HaskellLexer.tokens['escape']
+ }
+
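+# The last four entries above reuse HaskellLexer's state lists directly; since
+# ``tokens`` is a plain dict of rule lists, any lexer can share states the
+# same way.  A minimal hedged sketch (MiniLexer is hypothetical):
+#
+#     class MiniLexer(RegexLexer):
+#         tokens = {
+#             'root': [
+#                 (r'{-', Comment.Multiline, 'comment'),
+#                 (r'[^{]+', Text),
+#                 (r'{', Text),
+#             ],
+#             'comment': HaskellLexer.tokens['comment'],
+#         }
+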
+
+class CryptolLexer(RegexLexer):
+ """
+ FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Cryptol'
+ aliases = ['cryptol', 'cry']
+ filenames = ['*.cry']
+ mimetypes = ['text/x-cryptol']
+
+ reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else',
+ 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2',
+ 'max', 'min', 'module', 'newtype', 'pragma', 'property',
+ 'then', 'type', 'where', 'width')
+ ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK',
+ 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE',
+ 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN',
+ 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL')
+
+ tokens = {
+ 'root': [
+ # Whitespace:
+ (r'\s+', Text),
+ # (r'--\s*|.*$', Comment.Doc),
+ (r'//.*$', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ # Lexemes:
+ # Identifiers
+ (r'\bimport\b', Keyword.Reserved, 'import'),
+ (r'\bmodule\b', Keyword.Reserved, 'module'),
+ (r'\berror\b', Name.Exception),
+ (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved),
+ (r'^[_a-z][\w\']*', Name.Function),
+ (r"'?[_a-z][\w']*", Name),
+ (r"('')?[A-Z][\w\']*", Keyword.Type),
+ # Operators
+ (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator
+ (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials
+ (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators
+ # Numbers
+ (r'\d+[eE][+-]?\d+', Number.Float),
+ (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ # Character/String Literals
+ (r"'", String.Char, 'character'),
+ (r'"', String, 'string'),
+ # Special
+ (r'\[\]', Keyword.Type),
+ (r'\(\)', Name.Builtin),
+ (r'[][(),;`{}]', Punctuation),
+ ],
+ 'import': [
+ # Import statements
+ (r'\s+', Text),
+ (r'"', String, 'string'),
+ # after "funclist" state
+ (r'\)', Punctuation, '#pop'),
+ (r'qualified\b', Keyword),
+ # import X as Y
+ (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)',
+ bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'),
+ # import X hiding (functions)
+ (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'),
+ # import X (functions)
+ (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ # import X
+ (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'),
+ ],
+ 'module': [
+ (r'\s+', Text),
+ (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()',
+ bygroups(Name.Namespace, Text, Punctuation), 'funclist'),
+ (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'),
+ ],
+ 'funclist': [
+ (r'\s+', Text),
+ (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type),
+ (r'(_[\w\']+|[a-z][\w\']*)', Name.Function),
+ (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single),
+ (r'{-', Comment.Multiline, 'comment'),
+ (r',', Punctuation),
+ (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator),
+ # (HACK, but it makes sense to push two instances, believe me)
+ (r'\(', Punctuation, ('funclist', 'funclist')),
+ (r'\)', Punctuation, '#pop:2'),
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/\*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[\*/]', Comment.Multiline),
+ ],
+ 'character': [
+ # Allows multi-chars, incorrectly.
+ (r"[^\\']'", String.Char, '#pop'),
+ (r"\\", String.Escape, 'escape'),
+ ("'", String.Char, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"]+', String),
+ (r"\\", String.Escape, 'escape'),
+ ('"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'[abfnrtv"\'&\\]', String.Escape, '#pop'),
+ (r'\^[][A-Z@\^_]', String.Escape, '#pop'),
+ ('|'.join(ascii), String.Escape, '#pop'),
+ (r'o[0-7]+', String.Escape, '#pop'),
+ (r'x[\da-fA-F]+', String.Escape, '#pop'),
+ (r'\d+', String.Escape, '#pop'),
+ (r'\s+\\', String.Escape, '#pop'),
+ ],
+ }
+
+ EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width',
+ 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const',
+ 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error',
+ 'trace'))
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Name.Builtin, value
+ else:
+ yield index, token, value
+
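+# The get_tokens_unprocessed override above post-filters the token stream,
+# promoting plain Name tokens to Name.Builtin when they name a Cryptol
+# primitive.  A hedged sketch of the same pattern for any RegexLexer subclass
+# (MyLexer and MY_BUILTINS are made up):
+#
+#     class MyLexer(RegexLexer):
+#         MY_BUILTINS = set(('foo', 'bar'))
+#         tokens = {'root': [(r'\w+', Name), (r'\s+', Text)]}
+#
+#         def get_tokens_unprocessed(self, text):
+#             for index, token, value in \
+#                     RegexLexer.get_tokens_unprocessed(self, text):
+#                 if token is Name and value in self.MY_BUILTINS:
+#                     yield index, Name.Builtin, value
+#                 else:
+#                     yield index, token, value
+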
+
+class LiterateLexer(Lexer):
+ """
+ Base class for lexers of literate file formats based on LaTeX or Bird-style
+ (prefixing each code line with ">").
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+ """
+
+ bird_re = re.compile(r'(>[ \t]*)(.*\n)')
+
+ def __init__(self, baselexer, **options):
+ self.baselexer = baselexer
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ style = self.options.get('litstyle')
+ if style is None:
+ style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird'
+
+ code = ''
+ insertions = []
+ if style == 'bird':
+ # bird-style
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self.bird_re.match(line)
+ if m:
+ insertions.append((len(code),
+ [(0, Comment.Special, m.group(1))]))
+ code += m.group(2)
+ else:
+ insertions.append((len(code), [(0, Text, line)]))
+ else:
+ # latex-style
+ from pygments.lexers.markup import TexLexer
+ lxlexer = TexLexer(**self.options)
+ codelines = 0
+ latex = ''
+ for match in line_re.finditer(text):
+ line = match.group()
+ if codelines:
+ if line.lstrip().startswith('\\end{code}'):
+ codelines = 0
+ latex += line
+ else:
+ code += line
+ elif line.lstrip().startswith('\\begin{code}'):
+ codelines = 1
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ latex = ''
+ else:
+ latex += line
+ insertions.append((len(code),
+ list(lxlexer.get_tokens_unprocessed(latex))))
+ for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)):
+ yield item
+
+
+class LiterateHaskellLexer(LiterateLexer):
+ """
+ For Literate Haskell (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Literate Haskell'
+ aliases = ['lhs', 'literate-haskell', 'lhaskell']
+ filenames = ['*.lhs']
+ mimetypes = ['text/x-literate-haskell']
+
+ def __init__(self, **options):
+ hslexer = HaskellLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
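+# An illustrative sketch of the litstyle option handled by LiterateLexer (the
+# two-line Bird-style input is made up):
+#
+#     lexer = LiterateHaskellLexer(litstyle='bird')
+#     text = "Some prose.\n> main = return ()\n"
+#     for index, token, value in lexer.get_tokens_unprocessed(text):
+#         print(index, token, value)
+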
+
+class LiterateIdrisLexer(LiterateLexer):
+ """
+ For Literate Idris (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Idris'
+ aliases = ['lidr', 'literate-idris', 'lidris']
+ filenames = ['*.lidr']
+ mimetypes = ['text/x-literate-idris']
+
+ def __init__(self, **options):
+ hslexer = IdrisLexer(**options)
+ LiterateLexer.__init__(self, hslexer, **options)
+
+
+class LiterateAgdaLexer(LiterateLexer):
+ """
+ For Literate Agda source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Agda'
+ aliases = ['lagda', 'literate-agda']
+ filenames = ['*.lagda']
+ mimetypes = ['text/x-literate-agda']
+
+ def __init__(self, **options):
+ agdalexer = AgdaLexer(**options)
+ LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options)
+
+
+class LiterateCryptolLexer(LiterateLexer):
+ """
+ For Literate Cryptol (Bird-style or LaTeX) source.
+
+ Additional options accepted:
+
+ `litstyle`
+ If given, must be ``"bird"`` or ``"latex"``. If not given, the style
+ is autodetected: if the first non-whitespace character in the source
+ is a backslash or percent character, LaTeX is assumed, else Bird.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Literate Cryptol'
+ aliases = ['lcry', 'literate-cryptol', 'lcryptol']
+ filenames = ['*.lcry']
+ mimetypes = ['text/x-literate-cryptol']
+
+ def __init__(self, **options):
+ crylexer = CryptolLexer(**options)
+ LiterateLexer.__init__(self, crylexer, **options)
+
+
+class KokaLexer(RegexLexer):
+ """
+ Lexer for the `Koka <http://koka.codeplex.com>`_
+ language.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Koka'
+ aliases = ['koka']
+ filenames = ['*.kk', '*.kki']
+ mimetypes = ['text/x-koka']
+
+ keywords = [
+ 'infix', 'infixr', 'infixl',
+ 'type', 'cotype', 'rectype', 'alias',
+ 'struct', 'con',
+ 'fun', 'function', 'val', 'var',
+ 'external',
+ 'if', 'then', 'else', 'elif', 'return', 'match',
+        'private', 'public',
+ 'module', 'import', 'as',
+ 'include', 'inline',
+ 'rec',
+ 'try', 'yield', 'enum',
+ 'interface', 'instance',
+ ]
+
+ # keywords that are followed by a type
+ typeStartKeywords = [
+ 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum',
+ ]
+
+ # keywords valid in a type
+ typekeywords = [
+ 'forall', 'exists', 'some', 'with',
+ ]
+
+ # builtin names and special names
+ builtin = [
+ 'for', 'while', 'repeat',
+ 'foreach', 'foreach-indexed',
+ 'error', 'catch', 'finally',
+ 'cs', 'js', 'file', 'ref', 'assigned',
+ ]
+
+ # symbols that can be in an operator
+ symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+'
+
+ # symbol boundary: an operator keyword should not be followed by any of these
+ sboundary = '(?!'+symbols+')'
+
+ # name boundary: a keyword should not be followed by any of these
+ boundary = '(?![\w/])'
+
+ # koka token abstractions
+ tokenType = Name.Attribute
+ tokenTypeDef = Name.Class
+ tokenConstructor = Generic.Emph
+
+ # main lexer
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # go into type mode
+ (r'::?' + sboundary, tokenType, 'type'),
+ (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'alias-type'),
+ (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'struct-type'),
+ ((r'(%s)' % '|'.join(typeStartKeywords)) +
+ r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef),
+ 'type'),
+
+ # special sequences of tokens (we use ?: for non-capturing group as
+ # required by 'bygroups')
+ (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)',
+ bygroups(Keyword, Text, Keyword, Name.Namespace)),
+ (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)'
+ r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)'
+ r'((?:[a-z]\w*/)*[a-z]\w*))?',
+ bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text,
+ Keyword, Name.Namespace)),
+
+ (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))'
+ r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?'
+ r'([a-z]\w*|\((?:' + symbols + r'|/)\))',
+ bygroups(Keyword, Text, Keyword, Name.Function)),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type),
+ (r'(%s)' % '|'.join(keywords) + boundary, Keyword),
+ (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo),
+ (r'::?|:=|\->|[=\.]' + sboundary, Keyword),
+
+ # names
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenConstructor)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)),
+ (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))',
+ bygroups(Name.Namespace, Name)),
+ (r'_\w*', Name.Variable),
+
+ # literal string
+ (r'@"', String.Double, 'litstring'),
+
+ # operators
+ (symbols + "|/(?![\*/])", Operator),
+ (r'`', Operator),
+ (r'[\{\}\(\)\[\];,]', Punctuation),
+
+ # literals. No check for literal characters with len > 1
+ (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ (r"'", String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ ],
+
+ # type started by alias
+ 'alias-type': [
+ (r'=', Keyword),
+ include('type')
+ ],
+
+ # type started by struct
+ 'struct-type': [
+ (r'(?=\((?!,*\)))', Punctuation, '#pop'),
+ include('type')
+ ],
+
+ # type started by colon
+ 'type': [
+ (r'[\(\[<]', tokenType, 'type-nested'),
+ include('type-content')
+ ],
+
+ # type nested in brackets: can contain parameters, comma etc.
+ 'type-nested': [
+ (r'[\)\]>]', tokenType, '#pop'),
+ (r'[\(\[<]', tokenType, 'type-nested'),
+ (r',', tokenType),
+ (r'([a-z]\w*)(\s*)(:)(?!:)',
+ bygroups(Name, Text, tokenType)), # parameter name
+ include('type-content')
+ ],
+
+ # shared contents of a type
+ 'type-content': [
+ include('whitespace'),
+
+ # keywords
+ (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword),
+ (r'(?=((%s)' % '|'.join(keywords) + boundary + '))',
+ Keyword, '#pop'), # need to match because names overlap...
+
+ # kinds
+ (r'[EPHVX]' + boundary, tokenType),
+
+ # type names
+ (r'[a-z][0-9]*(?![\w/])', tokenType),
+ (r'_\w*', tokenType.Variable), # Generic.Emph
+ (r'((?:[a-z]\w*/)*)([A-Z]\w*)',
+ bygroups(Name.Namespace, tokenType)),
+ (r'((?:[a-z]\w*/)*)([a-z]\w+)',
+ bygroups(Name.Namespace, tokenType)),
+
+ # type keyword operators
+ (r'::|\->|[\.:|]', tokenType),
+
+ # catchall
+ default('#pop')
+ ],
+
+ # comments and literals
+ 'whitespace': [
+ (r'\n\s*#.*$', Comment.Preproc),
+ (r'\s+', Text),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'//.*$', Comment.Single)
+ ],
+ 'comment': [
+ (r'[^/\*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[\*/]', Comment.Multiline),
+ ],
+ 'litstring': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Escape),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'string': [
+ (r'[^\\"\n]+', String.Double),
+ include('escape-sequence'),
+ (r'["\n]', String.Double, '#pop'),
+ ],
+ 'char': [
+ (r'[^\\\'\n]+', String.Char),
+ include('escape-sequence'),
+ (r'[\'\n]', String.Char, '#pop'),
+ ],
+ 'escape-sequence': [
+ (r'\\[nrt\\\"\']', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ # Yes, \U literals are 6 hex digits.
+ (r'\\U[0-9a-fA-F]{6}', String.Escape)
+ ]
+ }
diff --git a/pygments/lexers/haxe.py b/pygments/lexers/haxe.py
new file mode 100644
index 00000000..a0758ac6
--- /dev/null
+++ b/pygments/lexers/haxe.py
@@ -0,0 +1,934 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.haxe
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Haxe and related stuff.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \
+ default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+__all__ = ['HaxeLexer', 'HxmlLexer']
+
+
+class HaxeLexer(ExtendedRegexLexer):
+ """
+ For Haxe source code (http://haxe.org/).
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haxe'
+ aliases = ['hx', 'haxe', 'hxsl']
+ filenames = ['*.hx', '*.hxsl']
+ mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
+
+ # keywords extracted from lexer.mll in the haxe compiler source
+ keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
+ r'break|return|continue|extends|implements|import|'
+ r'switch|case|default|public|private|try|untyped|'
+ r'catch|new|this|throw|extern|enum|in|interface|'
+ r'cast|override|dynamic|typedef|package|'
+ r'inline|using|null|true|false|abstract)\b')
+
+ # idtype in lexer.mll
+ typeid = r'_*[A-Z]\w*'
+
+ # combined ident and dollar and idtype
+ ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + '|_+|\$\w+)'
+
+ binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
+ r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
+ r'/|\-|=>|=)')
+
+ # ident except keywords
+ ident_no_keyword = r'(?!' + keyword + ')' + ident
+
+ flags = re.DOTALL | re.MULTILINE
+
+ preproc_stack = []
+
+ def preproc_callback(self, match, ctx):
+ proc = match.group(2)
+
+ if proc == 'if':
+ # store the current stack
+ self.preproc_stack.append(ctx.stack[:])
+ elif proc in ['else', 'elseif']:
+ # restore the stack back to right before #if
+ if self.preproc_stack:
+ ctx.stack = self.preproc_stack[-1][:]
+ elif proc == 'end':
+ # remove the saved stack of previous #if
+ if self.preproc_stack:
+ self.preproc_stack.pop()
+
+        # #if and #elseif should be followed by an expr
+ if proc in ['if', 'elseif']:
+ ctx.stack.append('preproc-expr')
+
+        # #error can optionally be followed by the error msg
+ if proc in ['error']:
+ ctx.stack.append('preproc-error')
+
+ yield match.start(), Comment.Preproc, '#' + proc
+ ctx.pos = match.end()
+
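+    # The callback above drives Haxe conditional compilation: '#if' snapshots
+    # ctx.stack, '#else'/'#elseif' restore that snapshot so every branch is
+    # lexed from the same starting state, and '#end' drops it.  A hedged
+    # sketch of exercising it (the Haxe input is made up):
+    #
+    #     src = '#if js\ntrace("js");\n#elseif cpp\ntrace("cpp");\n#end\n'
+    #     for index, token, value in HaxeLexer().get_tokens_unprocessed(src):
+    #         print(index, token, value)
+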
+ tokens = {
+ 'root': [
+ include('spaces'),
+ include('meta'),
+ (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
+ (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
+ (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
+ (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
+ (r'(?:enum)\b', Keyword.Declaration, 'enum'),
+ (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
+
+ # top-level expression
+            # although it is not supported in Haxe, it is common to write
+            # expressions in web pages; the positive lookahead here is to
+            # prevent an infinite loop at the EOF
+ (r'(?=.)', Text, 'expr-statement'),
+ ],
+
+ # space/tab/comment/preproc
+ 'spaces': [
+ (r'\s+', Text),
+ (r'//[^\n\r]*', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
+ ],
+
+ 'string-single-interpol': [
+ (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
+ (r'\$\$', String.Escape),
+ (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
+ include('string-single'),
+ ],
+
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Single),
+ ],
+
+ 'string-double': [
+ (r'"', String.Double, '#pop'),
+ (r'\\.', String.Escape),
+ (r'.', String.Double),
+ ],
+
+ 'string-interpol-close': [
+ (r'\$'+ident, String.Interpol),
+ (r'\}', String.Interpol, '#pop'),
+ ],
+
+ 'package': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'import': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\*', Keyword), # wildcard import
+ (r'\.', Punctuation, 'import-ident'),
+ (r'in', Keyword.Namespace, 'ident'),
+ default('#pop'),
+ ],
+
+ 'import-ident': [
+ include('spaces'),
+ (r'\*', Keyword, '#pop'), # wildcard import
+ (ident, Name.Namespace, '#pop'),
+ ],
+
+ 'using': [
+ include('spaces'),
+ (ident, Name.Namespace),
+ (r'\.', Punctuation, 'import-ident'),
+ default('#pop'),
+ ],
+
+ 'preproc-error': [
+ (r'\s+', Comment.Preproc),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ default('#pop'),
+ ],
+
+ 'preproc-expr': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, '#pop'),
+
+ # Float
+ (r'\.[0-9]+', Number.Float),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float),
+ (r'[0-9]+\.[0-9]+', Number.Float),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ 'preproc-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\)', Comment.Preproc, '#pop'),
+            default('preproc-expr-in-parenthesis'),
+ ],
+
+ 'preproc-expr-chain': [
+ (r'\s+', Comment.Preproc),
+ (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
+ default('#pop'),
+ ],
+
+ # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
+ 'preproc-expr-in-parenthesis': [
+ (r'\s+', Comment.Preproc),
+ (r'\!', Comment.Preproc),
+ (r'\(', Comment.Preproc,
+ ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
+
+ (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
+
+ # String
+ (r"'", String.Single,
+ ('#pop', 'preproc-expr-chain', 'string-single')),
+ (r'"', String.Double,
+ ('#pop', 'preproc-expr-chain', 'string-double')),
+ ],
+
+ 'abstract': [
+ include('spaces'),
+ default(('#pop', 'abstract-body', 'abstract-relation',
+ 'abstract-opaque', 'type-param-constraint', 'type-name')),
+ ],
+
+ 'abstract-body': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ ],
+
+ 'abstract-opaque': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
+ default('#pop'),
+ ],
+
+ 'abstract-relation': [
+ include('spaces'),
+ (r'(?:to|from)', Keyword.Declaration, 'type'),
+ (r',', Punctuation),
+ default('#pop'),
+ ],
+
+ 'meta': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
+ ],
+
+ # optional colon
+ 'meta-colon': [
+ include('spaces'),
+ (r':', Name.Decorator, '#pop'),
+ default('#pop'),
+ ],
+
+ # same as 'ident' but set token as Name.Decorator instead of Name
+ 'meta-ident': [
+ include('spaces'),
+ (ident, Name.Decorator, '#pop'),
+ ],
+
+ 'meta-body': [
+ include('spaces'),
+ (r'\(', Name.Decorator, ('#pop', 'meta-call')),
+ default('#pop'),
+ ],
+
+ 'meta-call': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ default(('#pop', 'meta-call-sep', 'expr')),
+ ],
+
+ 'meta-call-sep': [
+ include('spaces'),
+ (r'\)', Name.Decorator, '#pop'),
+ (r',', Punctuation, ('#pop', 'meta-call')),
+ ],
+
+ 'typedef': [
+ include('spaces'),
+ default(('#pop', 'typedef-body', 'type-param-constraint',
+ 'type-name')),
+ ],
+
+ 'typedef-body': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
+ ],
+
+ 'enum': [
+ include('spaces'),
+ default(('#pop', 'enum-body', 'bracket-open',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'enum-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
+ ],
+
+ 'enum-member': [
+ include('spaces'),
+ (r'\(', Punctuation,
+ ('#pop', 'semicolon', 'flag', 'function-param')),
+ default(('#pop', 'semicolon', 'flag')),
+ ],
+
+ 'class': [
+ include('spaces'),
+ default(('#pop', 'class-body', 'bracket-open', 'extends',
+ 'type-param-constraint', 'type-name')),
+ ],
+
+ 'extends': [
+ include('spaces'),
+ (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
+ (r',', Punctuation), # the comma is made optional here, since haxe2
+ # requires the comma but haxe3 does not allow it
+ default('#pop'),
+ ],
+
+ 'bracket-open': [
+ include('spaces'),
+ (r'\{', Punctuation, '#pop'),
+ ],
+
+ 'bracket-close': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'class-body': [
+ include('spaces'),
+ include('meta'),
+ (r'\}', Punctuation, '#pop'),
+ (r'(?:static|public|private|override|dynamic|inline|macro)\b',
+ Keyword.Declaration),
+ default('class-member'),
+ ],
+
+ 'class-member': [
+ include('spaces'),
+ (r'(var)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'var')),
+ (r'(function)\b', Keyword.Declaration,
+ ('#pop', 'optional-semicolon', 'class-method')),
+ ],
+
+ # local function, anonymous or not
+ 'function-local': [
+ include('spaces'),
+ (r'(' + ident_no_keyword + ')?', Name.Function,
+ ('#pop', 'optional-expr', 'flag', 'function-param',
+ 'parenthesis-open', 'type-param-constraint')),
+ ],
+
+ 'optional-expr': [
+ include('spaces'),
+ include('expr'),
+ default('#pop'),
+ ],
+
+ 'class-method': [
+ include('spaces'),
+ (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
+ 'function-param', 'parenthesis-open',
+ 'type-param-constraint')),
+ ],
+
+ # function arguments
+ 'function-param': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (ident_no_keyword, Name,
+ ('#pop', 'function-param-sep', 'assign', 'flag')),
+ ],
+
+ 'function-param-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'function-param')),
+ ],
+
+ 'prop-get-set': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
+ default('#pop'),
+ ],
+
+ 'prop-get-set-opt': [
+ include('spaces'),
+ (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
+ (ident_no_keyword, Text, '#pop'), # custom getter/setter
+ ],
+
+ 'expr-statement': [
+ include('spaces'),
+ # makes semicolon optional here, just to avoid checking the last
+ # one is bracket or not.
+ default(('#pop', 'optional-semicolon', 'expr')),
+ ],
+
+ 'expr': [
+ include('spaces'),
+ (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
+ 'meta-ident', 'meta-colon')),
+ (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
+ (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
+ (r'(?:static|public|private|override|dynamic|inline)\b',
+ Keyword.Declaration),
+ (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
+ 'function-local')),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
+ (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
+ (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
+ (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
+ (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
+ (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
+ (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
+ (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
+ (r'(?:if)\b', Keyword, ('#pop', 'if')),
+ (r'(?:do)\b', Keyword, ('#pop', 'do')),
+ (r'(?:while)\b', Keyword, ('#pop', 'while')),
+ (r'(?:for)\b', Keyword, ('#pop', 'for')),
+ (r'(?:untyped|throw)\b', Keyword),
+ (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
+ (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
+ (r'(?:continue|break)\b', Keyword, '#pop'),
+ (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
+ (ident_no_keyword, Name, ('#pop', 'expr-chain')),
+
+ # Float
+ (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
+ (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
+
+ # String
+ (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
+ (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
+
+ # Array
+ (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
+ ],
+
+ 'expr-chain': [
+ include('spaces'),
+ (r'(?:\+\+|\-\-)', Operator),
+ (binop, Operator, ('#pop', 'expr')),
+ (r'(?:in)\b', Keyword, ('#pop', 'expr')),
+ (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
+ (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
+ (r'\[', Punctuation, 'array-access'),
+ (r'\(', Punctuation, 'call'),
+ default('#pop'),
+ ],
+
+ # macro reification
+ 'macro': [
+ include('spaces'),
+ include('meta'),
+ (r':', Punctuation, ('#pop', 'type')),
+
+ (r'(?:extern|private)\b', Keyword.Declaration),
+ (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
+ (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')),
+ (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
+ (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
+
+ default(('#pop', 'expr')),
+ ],
+
+ 'macro-class': [
+ (r'\{', Punctuation, ('#pop', 'class-body')),
+ include('class')
+ ],
+
+ # cast can be written as "cast expr" or "cast(expr, type)"
+ 'cast': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'parenthesis-close',
+ 'cast-type', 'expr')),
+ default(('#pop', 'expr')),
+ ],
+
+ # optionally give a type as the 2nd argument of cast()
+ 'cast-type': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ 'catch': [
+ include('spaces'),
+ (r'(?:catch)\b', Keyword, ('expr', 'function-param',
+ 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # do-while loop
+ 'do': [
+ include('spaces'),
+ default(('#pop', 'do-while', 'expr')),
+ ],
+
+ # the while after do
+ 'do-while': [
+ include('spaces'),
+ (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
+ 'parenthesis-open')),
+ ],
+
+ 'while': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'for': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
+ ],
+
+ 'if': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
+ 'parenthesis')),
+ ],
+
+ 'else': [
+ include('spaces'),
+ (r'(?:else)\b', Keyword, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ 'switch': [
+ include('spaces'),
+ default(('#pop', 'switch-body', 'bracket-open', 'expr')),
+ ],
+
+ 'switch-body': [
+ include('spaces'),
+ (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
+ (r'\}', Punctuation, '#pop'),
+ ],
+
+ 'case': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ default(('#pop', 'case-sep', 'case-guard', 'expr')),
+ ],
+
+ 'case-sep': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'case')),
+ ],
+
+ 'case-guard': [
+ include('spaces'),
+ (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
+ default('#pop'),
+ ],
+
+ # optional multiple expr under a case
+ 'case-block': [
+ include('spaces'),
+ (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
+ default('#pop'),
+ ],
+
+ 'new': [
+ include('spaces'),
+ default(('#pop', 'call', 'parenthesis-open', 'type')),
+ ],
+
+ 'array-decl': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ default(('#pop', 'array-decl-sep', 'expr')),
+ ],
+
+ 'array-decl-sep': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'array-decl')),
+ ],
+
+ 'array-access': [
+ include('spaces'),
+ default(('#pop', 'array-access-close', 'expr')),
+ ],
+
+ 'array-access-close': [
+ include('spaces'),
+ (r'\]', Punctuation, '#pop'),
+ ],
+
+ 'comma': [
+ include('spaces'),
+ (r',', Punctuation, '#pop'),
+ ],
+
+ 'colon': [
+ include('spaces'),
+ (r':', Punctuation, '#pop'),
+ ],
+
+ 'semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ ],
+
+ 'optional-semicolon': [
+ include('spaces'),
+ (r';', Punctuation, '#pop'),
+ default('#pop'),
+ ],
+
+        # identifier that CAN be a Haxe keyword
+ 'ident': [
+ include('spaces'),
+ (ident, Name, '#pop'),
+ ],
+
+ 'dollar': [
+ include('spaces'),
+ (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
+ default(('#pop', 'expr-chain')),
+ ],
+
+ 'type-name': [
+ include('spaces'),
+ (typeid, Name, '#pop'),
+ ],
+
+ 'type-full-name': [
+ include('spaces'),
+ (r'\.', Punctuation, 'ident'),
+ default('#pop'),
+ ],
+
+ 'type': [
+ include('spaces'),
+ (r'\?', Punctuation),
+ (ident, Name, ('#pop', 'type-check', 'type-full-name')),
+ (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
+ (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
+ ],
+
+ 'type-parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'type')),
+ ],
+
+ 'type-check': [
+ include('spaces'),
+ (r'->', Punctuation, ('#pop', 'type')),
+ (r'<(?!=)', Punctuation, 'type-param'),
+ default('#pop'),
+ ],
+
+ 'type-struct': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r'\?', Punctuation),
+ (r'>', Punctuation, ('comma', 'type')),
+ (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
+ include('class-body'),
+ ],
+
+ 'type-struct-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-struct')),
+ ],
+
+ # type-param can be a normal type or a constant literal...
+ 'type-param-type': [
+ # Float
+ (r'\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
+ (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
+
+ # Int
+ (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+
+ # String
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+
+ # EReg
+ (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
+
+ # Array
+ (r'\[', Operator, ('#pop', 'array-decl')),
+
+ include('type'),
+ ],
+
+ # type-param part of a type
+    # i.e. the <A,B> part in Map<A,B>
+ 'type-param': [
+ include('spaces'),
+ default(('#pop', 'type-param-sep', 'type-param-type')),
+ ],
+
+ 'type-param-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param')),
+ ],
+
+ # optional type-param that may include constraint
+    # e.g. <T:Constraint, T2:(ConstraintA,ConstraintB)>
+ 'type-param-constraint': [
+ include('spaces'),
+ (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-sep': [
+ include('spaces'),
+ (r'>', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
+ 'type-param-constraint-flag', 'type-name')),
+ ],
+
+ # the optional constraint inside type-param
+ 'type-param-constraint-flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
+ default('#pop'),
+ ],
+
+ 'type-param-constraint-flag-type': [
+ include('spaces'),
+ (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
+ 'type')),
+ default(('#pop', 'type')),
+ ],
+
+ 'type-param-constraint-flag-type-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, 'type'),
+ ],
+
+ # a parenthesis expr that contain exactly one expr
+ 'parenthesis': [
+ include('spaces'),
+ default(('#pop', 'parenthesis-close', 'flag', 'expr')),
+ ],
+
+ 'parenthesis-open': [
+ include('spaces'),
+ (r'\(', Punctuation, '#pop'),
+ ],
+
+ 'parenthesis-close': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+
+ 'var': [
+ include('spaces'),
+ (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
+ ],
+
+ # optional more var decl.
+ 'var-sep': [
+ include('spaces'),
+ (r',', Punctuation, ('#pop', 'var')),
+ default('#pop'),
+ ],
+
+ # optional assignment
+ 'assign': [
+ include('spaces'),
+ (r'=', Operator, ('#pop', 'expr')),
+ default('#pop'),
+ ],
+
+ # optional type flag
+ 'flag': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'type')),
+ default('#pop'),
+ ],
+
+ # colon as part of a ternary operator (?:)
+ 'ternary': [
+ include('spaces'),
+ (r':', Operator, '#pop'),
+ ],
+
+ # function call
+ 'call': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ default(('#pop', 'call-sep', 'expr')),
+ ],
+
+ # after a call param
+ 'call-sep': [
+ include('spaces'),
+ (r'\)', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'call')),
+ ],
+
+ # bracket can be block or object
+ 'bracket': [
+ include('spaces'),
+ (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
+ ('#pop', 'bracket-check')),
+ (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
+ (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
+ default(('#pop', 'block')),
+ ],
+
+ 'bracket-check': [
+ include('spaces'),
+ (r':', Punctuation, ('#pop', 'object-sep', 'expr')), # is object
+ default(('#pop', 'block', 'optional-semicolon', 'expr-chain')), # is block
+ ],
+
+ # code block
+ 'block': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default('expr-statement'),
+ ],
+
+ # object in key-value pairs
+ 'object': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
+ ],
+
+ # a key of an object
+ 'ident-or-string': [
+ include('spaces'),
+ (ident_no_keyword, Name, '#pop'),
+ (r"'", String.Single, ('#pop', 'string-single')),
+ (r'"', String.Double, ('#pop', 'string-double')),
+ ],
+
+ # after a key-value pair in object
+ 'object-sep': [
+ include('spaces'),
+ (r'\}', Punctuation, '#pop'),
+ (r',', Punctuation, ('#pop', 'object')),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.match(r'\w+\s*:\s*\w', text):
+ return 0.3
+
+
+class HxmlLexer(RegexLexer):
+ """
+ Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Hxml'
+ aliases = ['haxeml', 'hxml']
+ filenames = ['*.hxml']
+
+ tokens = {
+ 'root': [
+            # Separator
+ (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
+ # Compiler switches with one dash
+            (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword)),
+            # Compiler switches with two dashes
+ (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
+ r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
+ # Targets and other options that take an argument
+ (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
+ r'cp|cmd)( +)(.+)',
+ bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Options that take only numerical arguments
+ (r'(-)(swf-version)( +)(\d+)',
+             bygroups(Punctuation, Keyword, Whitespace, Number.Integer)),
+            # An option that defines the size, the fps and the background
+            # color of a Flash movie
+ (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
+ bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
+ Punctuation, Number.Integer, Punctuation, Number.Integer,
+ Punctuation, Number.Hex)),
+            # options with two dashes that take arguments
+ (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
+ r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
+ # Single line comment, multiline ones are not allowed.
+ (r'#.*', Comment.Single)
+ ]
+ }
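+
+# A hedged HxmlLexer usage sketch; the build-file contents are invented but
+# only use switches covered by the rules above, and highlight()/HtmlFormatter
+# are the standard Pygments entry points (not imported by this module):
+#
+#     from pygments import highlight
+#     from pygments.formatters import HtmlFormatter
+#
+#     hxml = "# build.hxml\n-main Main\n-cp src\n--next\n-swf-version 11\n"
+#     print(highlight(hxml, HxmlLexer(), HtmlFormatter()))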
diff --git a/pygments/lexers/html.py b/pygments/lexers/html.py
new file mode 100644
index 00000000..47c84787
--- /dev/null
+++ b/pygments/lexers/html.py
@@ -0,0 +1,589 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.html
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for HTML, XML and related markup.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.util import looks_like_xml, html_doctype_matches
+
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.jvm import ScalaLexer
+from pygments.lexers.css import CssLexer, _indentation, _starts_block
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['HtmlLexer', 'DtdLexer', 'XmlLexer', 'XsltLexer', 'HamlLexer',
+ 'ScamlLexer', 'JadeLexer']
+
+
+class HtmlLexer(RegexLexer):
+ """
+ For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
+ by the appropriate lexers.
+ """
+
+ name = 'HTML'
+ aliases = ['html']
+ filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
+ mimetypes = ['text/html', 'application/xhtml+xml']
+
+ flags = re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
+ (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
+ # note: this allows tag names not used in HTML like <x:with-dash>,
+ # this is to support yet-unknown template engines and the like
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'([\w:-]+\s*=)(\s*)', bygroups(Name.Attribute, Text), 'attr'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'script-content': [
+ (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
+ (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
+ ],
+ 'style-content': [
+ (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
+ (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
+ ],
+ 'attr': [
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if html_doctype_matches(text):
+ return 0.5
+
+
+class DtdLexer(RegexLexer):
+ """
+ A lexer for DTDs (Document Type Definitions).
+
+ .. versionadded:: 1.5
+ """
+
+ flags = re.MULTILINE | re.DOTALL
+
+ name = 'DTD'
+ aliases = ['dtd']
+ filenames = ['*.dtd']
+ mimetypes = ['application/xml-dtd']
+
+ tokens = {
+ 'root': [
+ include('common'),
+
+ (r'(<!ELEMENT)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'element'),
+ (r'(<!ATTLIST)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'attlist'),
+ (r'(<!ENTITY)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Entity), 'entity'),
+ (r'(<!NOTATION)(\s+)(\S+)',
+ bygroups(Keyword, Text, Name.Tag), 'notation'),
+ (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
+ bygroups(Keyword, Name.Entity, Text, Keyword)),
+
+ (r'(<!DOCTYPE)(\s+)([^>\s]+)',
+ bygroups(Keyword, Text, Name.Tag)),
+ (r'PUBLIC|SYSTEM', Keyword.Constant),
+ (r'[\[\]>]', Keyword),
+ ],
+
+ 'common': [
+ (r'\s+', Text),
+ (r'(%|&)[^;]*;', Name.Entity),
+ ('<!--', Comment, 'comment'),
+ (r'[(|)*,?+]', Operator),
+ (r'"[^"]*"', String.Double),
+ (r'\'[^\']*\'', String.Single),
+ ],
+
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+
+ 'element': [
+ include('common'),
+ (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
+ (r'[^>\s\|()?+*,]+', Name.Tag),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'attlist': [
+ include('common'),
+ (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
+ Keyword.Constant),
+ (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
+ (r'xml:space|xml:lang', Keyword.Reserved),
+ (r'[^>\s\|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'entity': [
+ include('common'),
+ (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
+ (r'[^>\s\|()?+*,]+', Name.Entity),
+ (r'>', Keyword, '#pop'),
+ ],
+
+ 'notation': [
+ include('common'),
+ (r'SYSTEM|PUBLIC', Keyword.Constant),
+ (r'[^>\s\|()?+*,]+', Name.Attribute),
+ (r'>', Keyword, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if not looks_like_xml(text) and \
+ ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
+ return 0.8
+
+
+class XmlLexer(RegexLexer):
+ """
+ Generic lexer for XML (eXtensible Markup Language).
+ """
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ name = 'XML'
+ aliases = ['xml']
+ filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
+ '*.wsdl', '*.wsf']
+ mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
+ 'application/rss+xml', 'application/atom+xml']
+
+ tokens = {
+ 'root': [
+ ('[^<&]+', Text),
+ (r'&\S*?;', Name.Entity),
+ (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
+ ('<!--', Comment, 'comment'),
+ (r'<\?.*?\?>', Comment.Preproc),
+ ('<![^>]*>', Comment.Preproc),
+ (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
+ (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
+ ],
+ 'comment': [
+ ('[^-]+', Comment),
+ ('-->', Comment, '#pop'),
+ ('-', Comment),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
+ (r'/?\s*>', Name.Tag, '#pop'),
+ ],
+ 'attr': [
+ ('\s+', Text),
+ ('".*?"', String, '#pop'),
+ ("'.*?'", String, '#pop'),
+ (r'[^\s>]+', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if looks_like_xml(text):
+ return 0.5
+
+
+class XsltLexer(XmlLexer):
+ """
+ A lexer for XSLT.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'XSLT'
+ aliases = ['xslt']
+ filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
+ mimetypes = ['application/xsl+xml', 'application/xslt+xml']
+
+ EXTRA_KEYWORDS = set((
+ 'apply-imports', 'apply-templates', 'attribute',
+ 'attribute-set', 'call-template', 'choose', 'comment',
+ 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
+ 'for-each', 'if', 'import', 'include', 'key', 'message',
+ 'namespace-alias', 'number', 'otherwise', 'output', 'param',
+ 'preserve-space', 'processing-instruction', 'sort',
+ 'strip-space', 'stylesheet', 'template', 'text', 'transform',
+ 'value-of', 'variable', 'when', 'with-param'
+ ))
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
+ m = re.match('</?xsl:([^>]*)/?>?', value)
+
+ if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
+ yield index, Keyword, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ if looks_like_xml(text) and '<xsl' in text:
+ return 0.8
+
+
+class HamlLexer(ExtendedRegexLexer):
+ """
+ For Haml markup.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Haml'
+ aliases = ['haml']
+ filenames = ['*.haml']
+ mimetypes = ['text/x-haml']
+
+ flags = re.IGNORECASE
+ # Haml can include " |\n" anywhere,
+ # which is ignored and used to wrap long lines.
+ # To accommodate this, use this custom faux dot instead.
+ _dot = r'(?: \|\n(?=.* \|)|.)'
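+ # Illustrative example: the physical lines '%p= "a " + |' and '"b" |'
+ # are joined into one logical line by this wrapping rule.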
+
+ # In certain places, a comma at the end of the line
+ # allows line wrapping as well.
+ _comma_dot = r'(?:,\s*\n|' + _dot + ')'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'haml-comment-block'), '#pop'),
+ (r'(-)(' + _comma_dot + r'*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
+ (r'\[' + _dot + '*?\]', using(RubyLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'haml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class ScamlLexer(ExtendedRegexLexer):
+ """
+ For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Scaml'
+ aliases = ['scaml']
+ filenames = ['*.scaml']
+ mimetypes = ['text/x-scaml']
+
+ flags = re.IGNORECASE
+ # Scaml does not yet support the " |\n" notation to
+ # wrap long lines. Once it does, use the custom faux
+ # dot instead.
+ # _dot = r'(?: \|\n(?=.* \|)|.)'
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'%[\w:-]+', Name.Tag, 'tag'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
+
+
+class JadeLexer(ExtendedRegexLexer):
+ """
+ For Jade markup.
+ Jade is a variant of Scaml, see:
+ http://scalate.fusesource.org/documentation/scaml-reference.html
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Jade'
+ aliases = ['jade']
+ filenames = ['*.jade']
+ mimetypes = ['text/x-jade']
+
+ flags = re.IGNORECASE
+ _dot = r'.'
+
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'[&!]?==', Punctuation, 'plain'),
+ (r'([&!]?[=~])(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)), 'root'),
+ default('plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
+ (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
+ bygroups(Comment, Comment.Special, Comment),
+ '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
+ '#pop'),
+ (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
+ 'scaml-comment-block'), '#pop'),
+ (r'(-@\s*)(import)?(' + _dot + r'*\n)',
+ bygroups(Punctuation, Keyword, using(ScalaLexer)),
+ '#pop'),
+ (r'(-)(' + _dot + r'*\n)',
+ bygroups(Punctuation, using(ScalaLexer)),
+ '#pop'),
+ (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
+ '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ (r'\|', Text, 'eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
+ (r'\[' + _dot + '*?\]', using(ScalaLexer)),
+ (r'\(', Text, 'html-attributes'),
+ (r'/[ \t]*\n', Punctuation, '#pop:2'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'\s+', Text),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'\)', Text, '#pop'),
+ ],
+
+ 'html-attribute-value': [
+ (r'[ \t]+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r'@\w+', Name.Variable.Instance, '#pop'),
+ (r'\$\w+', Name.Variable.Global, '#pop'),
+ (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
+ (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
+ ],
+
+ 'html-comment-block': [
+ (_dot + '+', Comment),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'scaml-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'filter-block': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
+ (r'(#\{)(' + _dot + '*?)(\})',
+ bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+ }
diff --git a/pygments/lexers/idl.py b/pygments/lexers/idl.py
new file mode 100644
index 00000000..7941028c
--- /dev/null
+++ b/pygments/lexers/idl.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.idl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for IDL.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, Number
+
+__all__ = ['IDLLexer']
+
+
+class IDLLexer(RegexLexer):
+ """
+ Pygments Lexer for IDL (Interactive Data Language).
+
+ .. versionadded:: 1.6
+ """
+ name = 'IDL'
+ aliases = ['idl']
+ filenames = ['*.pro']
+ mimetypes = ['text/idl']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ _RESERVED = (
+ 'and', 'begin', 'break', 'case', 'common', 'compile_opt',
+ 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
+ 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
+ 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
+ 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
+ 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
+ 'repeat', 'switch', 'then', 'until', 'while', 'xor')
+ """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
+
+ _BUILTIN_LIB = (
+ 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
+ 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
+ 'arg_present', 'array_equal', 'array_indices', 'arrow',
+ 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
+ 'a_correlate', 'bandpass_filter', 'bandreject_filter',
+ 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
+ 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
+ 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
+ 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
+ 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
+ 'bytscl', 'caldat', 'calendar', 'call_external',
+ 'call_function', 'call_method', 'call_procedure', 'canny',
+ 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev',
+ 'check_math',
+ 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
+ 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
+ 'cmyk_convert', 'colorbar', 'colorize_sample',
+ 'colormap_applicable', 'colormap_gradient',
+ 'colormap_rotation', 'colortable', 'color_convert',
+ 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
+ 'command_line_args', 'complex', 'complexarr', 'complexround',
+ 'compute_mesh_normals', 'cond', 'congrid', 'conj',
+ 'constrained_min', 'contour', 'convert_coord', 'convol',
+ 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
+ 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
+ 'create_view', 'crossp', 'crvlength', 'cti_test',
+ 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
+ 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
+ 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
+ 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
+ 'cw_form', 'cw_fslider', 'cw_light_editor',
+ 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
+ 'cw_palette_editor', 'cw_palette_editor_get',
+ 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
+ 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
+ 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
+ 'define_msgblk', 'define_msgblk_from_file', 'defroi',
+ 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
+ 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
+ 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
+ 'dialog_printersetup', 'dialog_printjob',
+ 'dialog_read_image', 'dialog_write_image', 'digital_filter',
+ 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
+ 'dlm_load', 'dlm_register', 'doc_library', 'double',
+ 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
+ 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
+ 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
+ 'erode', 'errorplot', 'errplot', 'estimator_filter',
+ 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
+ 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
+ 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
+ 'file_dirname', 'file_expand_path', 'file_info',
+ 'file_lines', 'file_link', 'file_mkdir', 'file_move',
+ 'file_poll_input', 'file_readlink', 'file_same',
+ 'file_search', 'file_test', 'file_which', 'findgen',
+ 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
+ 'fltarr', 'flush', 'format_axis_values', 'free_lun',
+ 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
+ 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
+ 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
+ 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
+ 'getwindows', 'get_drive_list', 'get_dxf_objects',
+ 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
+ 'greg2jul', 'grib_\w*', 'grid3', 'griddata',
+ 'grid_input', 'grid_tps', 'gs_iter',
+ 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
+ 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
+ 'hanning', 'hash', 'hdf_\w*', 'heap_free',
+ 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
+ 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
+ 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
+ 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
+ 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
+ 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
+ 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
+ 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
+ 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
+ 'image_cont', 'image_statistics', 'imaginary', 'imap',
+ 'indgen', 'intarr', 'interpol', 'interpolate',
+ 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
+ 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
+ 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
+ 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
+ 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
+ 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
+ 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
+ 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
+ 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
+ 'label_region', 'ladfit', 'laguerre', 'laplacian',
+ 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
+ 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
+ 'la_gm_linear_model', 'la_hqr', 'la_invert',
+ 'la_least_squares', 'la_least_square_equality',
+ 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
+ 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
+ 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
+ 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
+ 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
+ 'lngamma', 'lnp_test', 'loadct', 'locale_get',
+ 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
+ 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
+ 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
+ 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
+ 'map_continents', 'map_grid', 'map_image', 'map_patch',
+ 'map_proj_forward', 'map_proj_image', 'map_proj_info',
+ 'map_proj_init', 'map_proj_inverse', 'map_set',
+ 'matrix_multiply', 'matrix_power', 'max', 'md_test',
+ 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
+ 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
+ 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
+ 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
+ 'message', 'min', 'min_curve_surf', 'mk_html_help',
+ 'modifyct', 'moment', 'morph_close', 'morph_distance',
+ 'morph_gradient', 'morph_hitormiss', 'morph_open',
+ 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
+ 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
+ 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
+ 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
+ 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
+ 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
+ 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
+ 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
+ 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
+ 'polar_contour', 'polar_surface', 'poly', 'polyfill',
+ 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
+ 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
+ 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
+ 'print', 'printd', 'product', 'profile', 'profiler',
+ 'profiles', 'project_vol', 'psafm', 'pseudo',
+ 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
+ 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
+ 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
+ 'query_csv', 'query_dicom', 'query_gif', 'query_image',
+ 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
+ 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
+ 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
+ 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
+ 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
+ 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
+ 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
+ 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
+ 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
+ 'read_xwd', 'real_part', 'rebin', 'recall_commands',
+ 'recon3', 'reduce_colors', 'reform', 'region_grow',
+ 'register_cursor', 'regress', 'replicate',
+ 'replicate_inplace', 'resolve_all', 'resolve_routine',
+ 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
+ 'rot', 'rotate', 'round', 'routine_filepath',
+ 'routine_info', 'rs_test', 'r_correlate', 'r_test',
+ 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
+ 'scope_traceback', 'scope_varfetch', 'scope_varname',
+ 'search2d', 'search3d', 'sem_create', 'sem_delete',
+ 'sem_lock', 'sem_release', 'setenv', 'set_plot',
+ 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
+ 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
+ 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
+ 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
+ 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
+ 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
+ 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
+ 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
+ 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
+ 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
+ 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
+ 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
+ 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
+ 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
+ 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
+ 'tanh', 'tek_color', 'temporary', 'tetra_clip',
+ 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
+ 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
+ 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
+ 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
+ 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
+ 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
+ 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
+ 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
+ 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
+ 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
+ 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
+ 'where', 'widget_base', 'widget_button', 'widget_combobox',
+ 'widget_control', 'widget_displaycontextmen', 'widget_draw',
+ 'widget_droplist', 'widget_event', 'widget_info',
+ 'widget_label', 'widget_list', 'widget_propertysheet',
+ 'widget_slider', 'widget_tab', 'widget_table',
+ 'widget_text', 'widget_tree', 'widget_tree_move',
+ 'widget_window', 'wiener_filter', 'window', 'writeu',
+ 'write_bmp', 'write_csv', 'write_gif', 'write_image',
+ 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
+ 'write_png', 'write_ppm', 'write_spr', 'write_srf',
+ 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
+ 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
+ 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
+ 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
+ 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
+ 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
+ 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
+ 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
+ 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
+ 'xmtool', 'xobjview', 'xobjview_rotate',
+ 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
+ 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
+ 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
+ 'xyouts', 'zoom', 'zoom_24')
+ """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
+
+ tokens = {
+ 'root': [
+ (r'^\s*;.*?\n', Comment.Single),
+ (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin),
+ (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
+ (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\||\?|:', Operator),
+ (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
+ (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
+ (r'\b[0-9]+(L|B|S|UL|ULL|LL)?\b', Number),
+ (r'.', Text),
+ ]
+ }
diff --git a/pygments/lexers/igor.py b/pygments/lexers/igor.py
new file mode 100644
index 00000000..0bf9b04a
--- /dev/null
+++ b/pygments/lexers/igor.py
@@ -0,0 +1,280 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.igor
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Igor Pro.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Keyword, Name, String
+
+__all__ = ['IgorLexer']
+
+
+class IgorLexer(RegexLexer):
+ """
+ Pygments Lexer for Igor Pro procedure files (.ipf).
+ See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Igor'
+ aliases = ['igor', 'igorpro']
+ filenames = ['*.ipf']
+ mimetypes = ['text/ipf']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ flowControl = (
+ 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
+ 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
+ 'break', 'continue', 'return',
+ )
+ types = (
+ 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
+ 'STRUCT', 'dfref'
+ )
+ keywords = (
+ 'override', 'ThreadSafe', 'static', 'FuncFit', 'Proc', 'Picture',
+ 'Prompt', 'DoPrompt', 'macro', 'window', 'graph', 'function', 'end',
+ 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu',
+ )
+ operations = (
+ 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
+ 'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
+ 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
+ 'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
+ 'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
+ 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
+ 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
+ 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
+ 'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
+ 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
+ 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
+ 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
+ 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
+ 'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
+ 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
+ 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
+ 'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
+ 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
+ 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
+ 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
+ 'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
+ 'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
+ 'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
+ 'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
+ 'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
+ 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
+ 'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
+ 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
+ 'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
+ 'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
+ 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
+ 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
+ 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
+ 'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
+ 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
+ 'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
+ 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
+ 'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
+ 'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
+ 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
+ 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
+ 'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
+ 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
+ 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
+ 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
+ 'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
+ 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
+ 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
+ 'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
+ 'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
+ 'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
+ 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
+ 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
+ 'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
+ 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
+ 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
+ 'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
+ 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
+ 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
+ 'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
+ 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
+ 'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
+ 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
+ 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
+ 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
+ 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
+ 'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
+ 'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
+ 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
+ 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
+ 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
+ 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
+ 'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
+ 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
+ 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
+ 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
+ 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
+ 'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
+ 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
+ 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
+ 'sprintf', 'sscanf', 'Stack', 'StackWindows',
+ 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
+ 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
+ 'StatsCircularCorrelationTest', 'StatsCircularMeans',
+ 'StatsCircularMoments', 'StatsCircularTwoSampleTest',
+ 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
+ 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
+ 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
+ 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
+ 'StatsLinearRegression', 'StatsMultiCorrelationTest',
+ 'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
+ 'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
+ 'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
+ 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
+ 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
+ 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
+ 'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
+ 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
+ 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
+ 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
+ 'WindowFunction',
+ )
+ functions = (
+ 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
+ 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
+ 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
+ 'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
+ 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
+ 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
+ 'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
+ 'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
+ 'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
+ 'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
+ 'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
+ 'DDERequestWave', 'DDEStatus', 'DDETerminate', 'deltax', 'digamma',
+ 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
+ 'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
+ 'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
+ 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
+ 'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
+ 'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
+ 'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
+ 'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
+ 'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
+ 'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
+ 'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
+ 'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
+ 'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
+ 'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
+ 'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
+ 'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
+ 'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
+ 'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
+ 'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
+ 'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
+ 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
+ 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
+ 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
+ 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
+ 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
+ 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
+ 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
+ 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
+ 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
+ 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
+ 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
+ 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
+ 'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
+ 'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
+ 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
+ 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
+ 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
+ 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
+ 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
+ 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
+ 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
+ 'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
+ 'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
+ 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
+ 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
+ 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
+ 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
+ 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
+ 'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
+ 'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
+ 'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
+ 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
+ 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
+ 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
+ 'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
+ 'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
+ 'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
+ 'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
+ 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
+ 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
+ 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
+ 'WinType', 'WNoise', 'x', 'x2pnt', 'xcsr', 'y', 'z', 'zcsr', 'ZernikeR',
+ )
+ functions += (
+ 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
+ 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
+ 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
+ 'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
+ 'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
+ 'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
+ 'GetDimLabel', 'GetErrMessage', 'GetFormula',
+ 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
+ 'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
+ 'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
+ 'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
+ 'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
+ 'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
+ 'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
+ 'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
+ 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
+ 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
+ 'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
+ 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
+ 'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
+ 'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
+ 'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
+ 'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
+ 'WinList', 'WinName', 'WinRecreation', 'XWaveName',
+ 'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
+ 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
+ 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
+ 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
+ )
+
+ tokens = {
+ 'root': [
+ (r'//.*$', Comment.Single),
+ (r'"([^"\\]|\\.)*"', String),
+ # Flow Control.
+ (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword),
+ # Types.
+ (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ # Keywords.
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ # Built-in operations.
+ (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class),
+ # Built-in functions.
+ (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function),
+ # Compiler directives.
+ (r'^#(include|pragma|define|ifdef|ifndef|endif)',
+ Name.Decorator),
+ (r'[^a-z"/]+$', Text),
+ (r'.', Text),
+ ],
+ }
diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py
new file mode 100644
index 00000000..54369da1
--- /dev/null
+++ b/pygments/lexers/installers.py
@@ -0,0 +1,322 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.installers
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for installer/packager DSLs and formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation, Generic, Number, Whitespace
+
+__all__ = ['NSISLexer', 'RPMSpecLexer', 'SourcesListLexer',
+ 'DebianControlLexer']
+
+
+class NSISLexer(RegexLexer):
+ """
+ For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
+
+ .. versionadded:: 1.6
+ """
+ name = 'NSIS'
+ aliases = ['nsis', 'nsi', 'nsh']
+ filenames = ['*.nsi', '*.nsh']
+ mimetypes = ['text/x-nsis']
+
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'[;\#].*\n', Comment),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'str_double'),
+ (r'`', String.Backtick, 'str_backtick'),
+ include('macro'),
+ include('interpol'),
+ include('basic'),
+ (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
+ (r'/[a-z_]\w*', Name.Attribute),
+ ('.', Text),
+ ],
+ 'basic': [
+ (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
+ bygroups(Text, Keyword, Text, Name.Function)),
+ (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
+ bygroups(Keyword.Namespace, Punctuation, Name.Function)),
+ (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
+ (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator),
+ (r'[|+-]', Operator),
+ (r'\\', Punctuation),
+ (r'\b(Abort|Add(?:BrandingImage|Size)|'
+ r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
+ r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
+ r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
+ r'ComponentText|CopyFiles|CRCCheck|'
+ r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
+ r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
+ r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
+ r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
+ r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
+ r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
+ r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
+ r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
+ r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
+ r'InstDirError|LabelAddress|TempFileName)|'
+ r'Goto|HideWindow|Icon|'
+ r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
+ r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
+ r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
+ r'IsWindow|LangString(?:UP)?|'
+ r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
+ r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
+ r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
+ r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
+ r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
+ r'Return|RMDir|SearchPath|Section(?:Divider|End|'
+ r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
+ r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
+ r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
+ r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
+ r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
+ r'Silent|StaticBkColor)|'
+ r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
+ r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
+ r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
+ r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
+ r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
+ r'XPStyle)\b', Keyword),
+ (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
+ r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
+ r'HK(CC|CR|CU|DD|LM|PD|U)|'
+ r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
+ r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
+ r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
+ r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
+ r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
+ r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
+ r'YESNO(?:CANCEL)?)|SET|SHCTX|'
+ r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
+ r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
+ r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
+ r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
+ r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
+ r'true|try|user|zlib)\b', Name.Constant),
+ ],
+ 'macro': [
+ (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
+ r'delfilefile|echo(?:message)?|else|endif|error|execute|'
+ r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
+ r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
+ r'warning)\b', Comment.Preproc),
+ ],
+ 'interpol': [
+ (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
+ (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
+ r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
+ r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
+ r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
+ r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
+ r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
+ Name.Builtin),
+ (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
+ (r'\$[a-z_]\w*', Name.Variable),
+ ],
+ 'str_double': [
+ (r'"', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'str_backtick': [
+ (r'`', String, '#pop'),
+ (r'\$(\\[nrt"]|\$)', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ }
+
+
+class RPMSpecLexer(RegexLexer):
+ """
+ For RPM ``.spec`` files.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'RPMSpec'
+ aliases = ['spec']
+ filenames = ['*.spec']
+ mimetypes = ['text/x-rpm-spec']
+
+ _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
+ 'post[a-z]*|trigger[a-z]*|files)')
+
+ tokens = {
+ 'root': [
+ (r'#.*\n', Comment),
+ include('basic'),
+ ],
+ 'description': [
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'changelog': [
+ (r'\*.*\n', Generic.Subheading),
+ (r'^(%' + _directives + ')(.*)$',
+ bygroups(Name.Decorator, Text), '#pop'),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ include('interpol'),
+ (r'.', String.Double),
+ ],
+ 'basic': [
+ include('macro'),
+ (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
+ r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
+ r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
+ r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
+ bygroups(Generic.Heading, Punctuation, using(this))),
+ (r'^%description', Name.Decorator, 'description'),
+ (r'^%changelog', Name.Decorator, 'changelog'),
+ (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
+ (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
+ r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
+ Keyword),
+ include('interpol'),
+ (r"'.*?'", String.Single),
+ (r'"', String.Double, 'string'),
+ (r'.', Text),
+ ],
+ 'macro': [
+ (r'%define.*\n', Comment.Preproc),
+ (r'%\{\!\?.*%define.*\}', Comment.Preproc),
+ (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
+ bygroups(Comment.Preproc, Text)),
+ ],
+ 'interpol': [
+ (r'%\{?__[a-z_]+\}?', Name.Function),
+ (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
+ (r'%\{\?\w+\}', Name.Variable),
+ (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
+ (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
+ ]
+ }
+
+
+class SourcesListLexer(RegexLexer):
+ """
+ Lexer that highlights Debian sources.list files.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Debian Sourcelist'
+ aliases = ['sourceslist', 'sources.list', 'debsources']
+ filenames = ['sources.list']
+ mimetypes = ['application/x-debian-sourceslist']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment),
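+ # a source entry, e.g. "deb http://archive.example.org/debian stable main"
+ # (illustrative line)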
+ (r'^(deb(?:-src)?)(\s+)',
+ bygroups(Keyword, Text), 'distribution')
+ ],
+ 'distribution': [
+ (r'#.*?$', Comment, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\s$[]+', String),
+ (r'\[', String.Other, 'escaped-distribution'),
+ (r'\$', String),
+ (r'\s+', Text, 'components')
+ ],
+ 'escaped-distribution': [
+ (r'\]', String.Other, '#pop'),
+ (r'\$\(ARCH\)', Name.Variable),
+ (r'[^\]$]+', String.Other),
+ (r'\$', String.Other)
+ ],
+ 'components': [
+ (r'#.*?$', Comment, '#pop:2'),
+ (r'$', Text, '#pop:2'),
+ (r'\s+', Text),
+ (r'\S+', Keyword.Pseudo),
+ ]
+ }
+
+ def analyse_text(text):
+ for line in text.splitlines():
+ line = line.strip()
+ if line.startswith('deb ') or line.startswith('deb-src '):
+ return True
+
+
+class DebianControlLexer(RegexLexer):
+ """
+ Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Debian Control file'
+ aliases = ['control', 'debcontrol']
+ filenames = ['control']
+
+ tokens = {
+ 'root': [
+ (r'^(Description)', Keyword, 'description'),
+ (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
+ (r'^((Build-)?Depends)', Keyword, 'depends'),
+ (r'^((?:Python-)?Version)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
+ bygroups(Keyword, Text, Number)),
+ (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
+ bygroups(Keyword, Whitespace, String)),
+ ],
+ 'maintainer': [
+ (r'<[^>]+>', Generic.Strong),
+ (r'<[^>]+>$', Generic.Strong, '#pop'),
+ (r',\n?', Text),
+ (r'.', Text),
+ ],
+ 'description': [
+ (r'(.*)(Homepage)(: )(\S+)',
+ bygroups(Text, String, Name, Name.Class)),
+ (r':.*\n', Generic.Strong),
+ (r' .*\n', Text),
+ ('', Text, '#pop'),
+ ],
+ 'depends': [
+ (r':\s*', Text),
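+ # substitution variables such as ${shlibs:Depends} (illustrative)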
+ (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
+ (r'\(', Text, 'depend_vers'),
+ (r',', Text),
+ (r'\|', Operator),
+ (r'[\s]+', Text),
+ (r'[}\)]\s*$', Text, '#pop'),
+ (r'}', Text),
+ (r'[^,]$', Name.Function, '#pop'),
+ (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
+ (r'\[.*?\]', Name.Entity),
+ ],
+ 'depend_vers': [
+ (r'\),', Text, '#pop'),
+ (r'\)[^,]', Text, '#pop:2'),
+ (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number))
+ ]
+ }
diff --git a/pygments/lexers/int_fiction.py b/pygments/lexers/int_fiction.py
new file mode 100644
index 00000000..95f4437c
--- /dev/null
+++ b/pygments/lexers/int_fiction.py
@@ -0,0 +1,724 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.int_fiction
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for interactive fiction languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+
+__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer']
+
+
+class Inform6Lexer(RegexLexer):
+ """
+ For `Inform 6 <http://inform-fiction.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6'
+ aliases = ['inform6', 'i6']
+ filenames = ['*.inf']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _name = r'[a-zA-Z_][a-zA-Z_0-9]*'
+
+ # Inform 7 maps these four character classes to their ASCII
+ # equivalents. To support Inform 6 inclusions within Inform 7,
+ # Inform6Lexer maps them too.
+ _dash = u'\\-\u2010-\u2014'
+ _dquote = u'"\u201c\u201d'
+ _squote = u"'\u2018\u2019"
+ _newline = u'\\n\u0085\u2028\u2029'
+
+ tokens = {
+ 'root': [
+ (r'(\A(!%%[^%s]*[%s])+)?' % (_newline, _newline), Comment.Preproc,
+ 'directive')
+ ],
+ '_whitespace': [
+ (r'\s+', Text),
+ (r'![^%s]*' % _newline, Comment.Single)
+ ],
+ 'default': [
+ include('_whitespace'),
+ (r'\[', Punctuation, 'many-values'), # Array initialization
+ (r':|(?=;)', Punctuation, '#pop'),
+ (r'<', Punctuation), # Second angle bracket in an action statement
+ default(('expression', '_expression'))
+ ],
+
+ # Expressions
+ '_expression': [
+ include('_whitespace'),
+ (r'(?=sp\b)', Text, '#pop'),
+ (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text,
+ ('#pop', 'value')),
+ (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator),
+ (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop')
+ ],
+ 'expression': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('expression', '_expression')),
+ (r'\)', Punctuation, '#pop'),
+ (r'\[', Punctuation, ('#pop', 'statements', 'locals')),
+ (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation),
+ (r'\+\+|[%s]{2}(?!>)' % _dash, Operator),
+ (r',', Punctuation, '_expression'),
+ (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash,
+ Operator, '_expression'),
+ (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word,
+ '_expression'),
+ (r'sp\b', Name),
+ (r'\?~?', Name.Label, 'label?'),
+ (r'[@{]', Error),
+ default('#pop')
+ ],
+ '_assembly-expression': [
+ (r'\(', Punctuation, ('#push', '_expression')),
+ (r'[\[\]]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, '_expression'),
+ (r'sp\b', Keyword.Pseudo),
+ (r';', Punctuation, '#pop:3'),
+ include('expression')
+ ],
+ '_for-expression': [
+ (r'\)', Punctuation, '#pop:2'),
+ (r':', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_keyword-expression': [
+ (r'(from|near|to)\b', Keyword, '_expression'),
+ include('expression')
+ ],
+ '_list-expression': [
+ (r',', Punctuation, '#pop'),
+ include('expression')
+ ],
+ '_object-expression': [
+ (r'has\b', Keyword.Declaration, '#pop'),
+ include('_list-expression')
+ ],
+
+ # Values
+ 'value': [
+ include('_whitespace'),
+ # Strings
+ (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'),
+ (r'([%s])(@{[0-9a-fA-F]{1,4}})([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'([%s])(@..)([%s])' % (_squote, _squote),
+ bygroups(String.Char, String.Escape, String.Char), '#pop'),
+ (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'string')),
+ # Numbers
+ (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' % (_dash, _dash),
+ Number.Float, '#pop'),
+ (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'),
+ (r'\$\$[01]+', Number.Bin, '#pop'),
+ (r'[0-9]+', Number.Integer, '#pop'),
+ # Values prefixed by hashes
+ (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'),
+ (r'(#g\$)(%s)' % _name,
+ bygroups(Operator, Name.Variable.Global), '#pop'),
+ (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')),
+ (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'),
+ (r'#', Name.Builtin, ('#pop', 'system-constant')),
+ # System functions
+ (words((
+ 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass',
+ 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Metaclasses
+ (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'),
+ # Veneer routines
+ (words((
+ 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms',
+ 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String',
+ 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__',
+ 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr',
+ 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process',
+ 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA',
+ 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR',
+ 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr',
+ 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other built-in symbols
+ (words((
+ 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE',
+ 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false',
+ 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN',
+ 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT',
+ 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START',
+ 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX',
+ 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print',
+ 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE',
+ 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag',
+ 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3',
+ 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'),
+ prefix='(?i)', suffix=r'\b'),
+ Name.Builtin, '#pop'),
+ # Other values
+ (_name, Name, '#pop')
+ ],
+ # Strings
+ 'dictionary-word': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _squote, String.Single),
+ (r'[({]', String.Single),
+ (r'@{[0-9a-fA-F]{,4}}', String.Escape),
+ (r'@..', String.Escape),
+ (r'[%s]' % _squote, String.Single, '#pop')
+ ],
+ 'string': [
+ (r'[~^]+', String.Escape),
+ (r'[^~^\\@({%s]+' % _dquote, String.Double),
+ (r'[({]', String.Double),
+ (r'\\', String.Escape),
+ (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' %
+ (_newline, _newline), String.Escape),
+ (r'@(\\\s*[%s]\s*)*{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}'
+ r'(\\\s*[%s]\s*)*}' % (_newline, _newline, _newline),
+ String.Escape),
+ (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' % (_newline, _newline),
+ String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ 'plain-string': [
+ (r'[^~^\\({\[\]%s]+' % _dquote, String.Double),
+ (r'[~^({\[\]]', String.Double),
+ (r'\\', String.Escape),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ # Names
+ '_constant': [
+ include('_whitespace'),
+ (_name, Name.Constant, '#pop'),
+ include('value')
+ ],
+ '_global': [
+ include('_whitespace'),
+ (_name, Name.Variable.Global, '#pop'),
+ include('value')
+ ],
+ 'label?': [
+ include('_whitespace'),
+ (r'(%s)?' % _name, Name.Label, '#pop')
+ ],
+ 'variable?': [
+ include('_whitespace'),
+ (r'(%s)?' % _name, Name.Variable, '#pop')
+ ],
+ # Values after hashes
+ 'obsolete-dictionary-word': [
+ (r'\S[a-zA-Z_0-9]*', String.Other, '#pop')
+ ],
+ 'system-constant': [
+ include('_whitespace'),
+ (_name, Name.Builtin, '#pop')
+ ],
+
+ # Directives
+ 'directive': [
+ include('_whitespace'),
+ (r'#', Punctuation),
+ (r';', Punctuation, '#pop'),
+ (r'\[', Punctuation,
+ ('default', 'statements', 'locals', 'routine-name?')),
+ (words((
+ 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot',
+ 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file',
+ 'version'), prefix='(?i)', suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(?i)(array|global)\b', Keyword,
+ ('default', 'directive-keyword?', '_global')),
+ (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')),
+ (r'(?i)class\b', Keyword,
+ ('object-body', 'duplicates', 'class-name')),
+ (r'(?i)(constant|default)\b', Keyword,
+ ('default', 'expression', '_constant')),
+ (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)),
+ (r'(?i)(extend|verb)\b', Keyword, 'grammar'),
+ (r'(?i)fake_action\b', Keyword, ('default', '_constant')),
+ (r'(?i)import\b', Keyword, 'manifest'),
+ (r'(?i)(include|link)\b', Keyword,
+ ('default', 'before-plain-string')),
+ (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')),
+ (r'(?i)message\b', Keyword, ('default', 'diagnostic')),
+ (r'(?i)(nearby|object)\b', Keyword,
+ ('object-body', '_object-head')),
+ (r'(?i)property\b', Keyword,
+ ('default', 'alias?', '_constant', 'property-keyword*')),
+ (r'(?i)replace\b', Keyword,
+ ('default', 'routine-name?', 'routine-name?')),
+ (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')),
+ (r'(?i)stub\b', Keyword, ('default', 'routine-name?')),
+ (r'(?i)trace\b', Keyword,
+ ('default', 'trace-keyword?', 'trace-keyword?')),
+ (r'(?i)zcharacter\b', Keyword,
+ ('default', 'directive-keyword?', 'directive-keyword?')),
+ (_name, Name.Class, ('object-body', '_object-head'))
+ ],
+ # [, Replace, Stub
+ 'routine-name?': [
+ include('_whitespace'),
+ (r'(%s)?' % _name, Name.Function, '#pop')
+ ],
+ 'locals': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'\*', Punctuation),
+ (_name, Name.Variable)
+ ],
+ # Array
+ 'many-values': [
+ include('_whitespace'),
+ (r';', Punctuation),
+ (r'\]', Punctuation, '#pop'),
+ (r':', Error),
+ default(('expression', '_expression'))
+ ],
+ # Attribute, Property
+ 'alias?': [
+ include('_whitespace'),
+ (r'alias\b', Keyword, ('#pop', '_constant')),
+ default('#pop')
+ ],
+ # Class, Object, Nearby
+ 'class-name': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class, '#pop')
+ ],
+ 'duplicates': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'expression', '_expression')),
+ default('#pop')
+ ],
+ '_object-head': [
+ (r'[%s]>' % _dash, Punctuation),
+ (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'),
+ include('_global')
+ ],
+ 'object-body': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop:2'),
+ (r',', Punctuation),
+ (r'class\b', Keyword.Declaration, 'class-segment'),
+ (r'(has|private|with)\b', Keyword.Declaration),
+ (r':', Error),
+ default(('_object-expression', '_expression'))
+ ],
+ 'class-segment': [
+ include('_whitespace'),
+ (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'),
+ (_name, Name.Class),
+ default('value')
+ ],
+ # Extend, Verb
+ 'grammar': [
+ include('_whitespace'),
+ (r'=', Punctuation, ('#pop', 'default')),
+ (r'\*', Punctuation, ('#pop', 'grammar-line')),
+ default('_directive-keyword')
+ ],
+ 'grammar-line': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r'[/*]', Punctuation),
+ (r'[%s]>' % _dash, Punctuation, 'value'),
+ (r'(noun|scope)\b', Keyword, '=routine'),
+ default('_directive-keyword')
+ ],
+ '=routine': [
+ include('_whitespace'),
+ (r'=', Punctuation, 'routine-name?'),
+ default('#pop')
+ ],
+ # Import
+ 'manifest': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r',', Punctuation),
+ (r'(?i)(global\b)?', Keyword, '_global')
+ ],
+ # Include, Link, Message
+ 'diagnostic': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')),
+ default(('#pop', 'before-plain-string', 'directive-keyword?'))
+ ],
+ 'before-plain-string': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string'))
+ ],
+ 'message-string': [
+ (r'[~^]+', String.Escape),
+ include('plain-string')
+ ],
+
+ # Keywords used in directives
+ '_directive-keyword!': [
+ include('_whitespace'),
+ (words((
+ 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror',
+ 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi',
+ 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private',
+ 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating',
+ 'time', 'topic', 'warning', 'with'), suffix=r'\b'),
+ Keyword, '#pop'),
+ (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop')
+ ],
+ '_directive-keyword': [
+ include('_directive-keyword!'),
+ include('value')
+ ],
+ 'directive-keyword?': [
+ include('_directive-keyword!'),
+ default('#pop')
+ ],
+ 'property-keyword*': [
+ include('_whitespace'),
+ (r'(additive|long)\b', Keyword),
+ default('#pop')
+ ],
+ 'trace-keyword?': [
+ include('_whitespace'),
+ (words((
+ 'assembly', 'dictionary', 'expressions', 'lines', 'linker',
+ 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'),
+ Keyword, '#pop'),
+ default('#pop')
+ ],
+
+ # Statements
+ 'statements': [
+ include('_whitespace'),
+ (r'\]', Punctuation, '#pop'),
+ (r'[;{}]', Punctuation),
+ (words((
+ 'box', 'break', 'continue', 'default', 'give', 'inversion',
+ 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue',
+ 'spaces', 'string', 'until'), suffix=r'\b'),
+ Keyword, 'default'),
+ (r'(do|else)\b', Keyword),
+ (r'(font|style)\b', Keyword,
+ ('default', 'miscellaneous-keyword?')),
+ (r'for\b', Keyword, ('for', '(?')),
+ (r'(if|switch|while)', Keyword,
+ ('expression', '_expression', '(?')),
+ (r'(jump|save|restore)\b', Keyword, ('default', 'label?')),
+ (r'objectloop\b', Keyword,
+ ('_keyword-expression', 'variable?', '(?')),
+ (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'),
+ (r'\.', Name.Label, 'label?'),
+ (r'@', Keyword, 'opcode'),
+ (r'#(?![agrnw]\$|#)', Punctuation, 'directive'),
+ (r'<', Punctuation, 'default'),
+ (r'(move\b)?', Keyword,
+ ('default', '_keyword-expression', '_expression'))
+ ],
+ 'miscellaneous-keyword?': [
+ include('_whitespace'),
+ (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b',
+ Keyword, '#pop'),
+ (r'(a|A|an|address|char|name|number|object|property|string|the|'
+ r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo,
+ '#pop'),
+ (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function,
+ '#pop'),
+ default('#pop')
+ ],
+ '(?': [
+ include('_whitespace'),
+ (r'\(?', Punctuation, '#pop')
+ ],
+ 'for': [
+ include('_whitespace'),
+ (r';?', Punctuation, ('_for-expression', '_expression'))
+ ],
+ 'print-list': [
+ include('_whitespace'),
+ (r';', Punctuation, '#pop'),
+ (r':', Error),
+ default(('_list-expression', '_expression', '_list-expression', 'form'))
+ ],
+ 'form': [
+ include('_whitespace'),
+ (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')),
+ default('#pop')
+ ],
+
+ # Assembly
+ 'opcode': [
+ include('_whitespace'),
+ (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')),
+ (_name, Keyword, 'operands')
+ ],
+ 'operands': [
+ (r':', Error),
+ default(('_assembly-expression', '_expression'))
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # 'in' is either a keyword or an operator.
+ # If the token two tokens after 'in' is ')', 'in' is a keyword:
+ # objectloop(a in b)
+ # Otherwise, it is an operator:
+ # objectloop(a in b && true)
+ objectloop_queue = []
+ objectloop_token_count = -1
+ previous_token = None
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self,
+ text):
+ if previous_token is Name.Variable and value == 'in':
+ objectloop_queue = [[index, token, value]]
+ objectloop_token_count = 2
+ elif objectloop_token_count > 0:
+ if token not in Comment and token not in Text:
+ objectloop_token_count -= 1
+ objectloop_queue.append((index, token, value))
+ else:
+ if objectloop_token_count == 0:
+ if objectloop_queue[-1][2] == ')':
+ objectloop_queue[0][1] = Keyword
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+ objectloop_token_count = -1
+ yield index, token, value
+ if token not in Comment and token not in Text:
+ previous_token = token
+ while objectloop_queue:
+ yield objectloop_queue.pop(0)
+
+
+class Inform7Lexer(RegexLexer):
+ """
+ For `Inform 7 <http://inform7.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 7'
+ aliases = ['inform7', 'i7']
+ filenames = ['*.ni', '*.i7x']
+
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ _dash = Inform6Lexer._dash
+ _dquote = Inform6Lexer._dquote
+ _newline = Inform6Lexer._newline
+ _start = r'\A|(?<=[%s])' % _newline
+
+ # There are three variants of Inform 7, differing in how to
+ # interpret at signs and braces in I6T. In top-level inclusions, at
+ # signs in the first column are inweb syntax. In phrase definitions
+ # and use options, tokens in braces are treated as I7. Use options
+ # also interpret "{N}".
+ tokens = {}
+ token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option']
+
+ for level in token_variants:
+ tokens[level] = {
+ '+i6-root': list(Inform6Lexer.tokens['root']),
+ '+i6t-root': [ # For Inform6TemplateLexer
+ (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc,
+ ('directive', '+p'))
+ ],
+ 'root': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]' % _dquote, Generic.Heading,
+ ('+main', '+titling', '+titling-string')),
+ default(('+main', '+heading?'))
+ ],
+ '+titling-string': [
+ (r'[^%s]+' % _dquote, Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '#pop')
+ ],
+ '+titling': [
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading),
+ (r'[%s]' % _dquote, Generic.Heading, '+titling-string'),
+ (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote),
+ Text, ('#pop', '+heading?')),
+ (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'),
+ (r'[|%s]' % _newline, Generic.Heading)
+ ],
+ '+main': [
+ (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text),
+ (r'[%s]' % _dquote, String.Double, '+text'),
+ (r':', Text, '+phrase-definition'),
+ (r'(?i)\bas\b', Text, '+use-option'),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-not-inline'), Punctuation)),
+ (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' %
+ (_start, _dquote, _newline), Text, '+heading?'),
+ (r'(?i)[a(|%s]' % _newline, Text)
+ ],
+ '+phrase-definition': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive',
+ 'default', 'statements'),
+ i6t='+i6t-inline'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+use-option': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash),
+ bygroups(Punctuation,
+ using(this, state=('+i6-root', 'directive'),
+ i6t='+i6t-use-option'), Punctuation), '#pop'),
+ default('#pop')
+ ],
+ '+comment': [
+ (r'[^\[\]]+', Comment.Multiline),
+ (r'\[', Comment.Multiline, '#push'),
+ (r'\]', Comment.Multiline, '#pop')
+ ],
+ '+text': [
+ (r'[^\[%s]+' % _dquote, String.Double),
+ (r'\[.*?\]', String.Interpol),
+ (r'[%s]' % _dquote, String.Double, '#pop')
+ ],
+ '+heading?': [
+ (r'(\|?\s)+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'),
+ (r'[%s]{1,3}' % _dash, Text),
+ (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline,
+ Generic.Heading, '#pop'),
+ default('#pop')
+ ],
+ '+documentation-heading': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'(?i)documentation\s+', Text, '+documentation-heading2'),
+ default('#pop')
+ ],
+ '+documentation-heading2': [
+ (r'\s+', Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ (r'[%s]{4}\s' % _dash, Text, '+documentation'),
+ default('#pop:2')
+ ],
+ '+documentation': [
+ (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' %
+ (_start, _newline), Generic.Heading),
+ (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline),
+ Generic.Subheading),
+ (r'((%s)\t.*?[%s])+' % (_start, _newline),
+ using(this, state='+main')),
+ (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text),
+ (r'\[', Comment.Multiline, '+comment'),
+ ],
+ '+i6t-not-inline': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p')
+ ],
+ '+i6t-use-option': [
+ include('+i6t-not-inline'),
+ (r'({)(N)(})', bygroups(Punctuation, Text, Punctuation))
+ ],
+ '+i6t-inline': [
+ (r'({)(\S[^}]*)?(})',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+i6t': [
+ (r'({[%s])(![^}]*)(}?)' % _dash,
+ bygroups(Punctuation, Comment.Single, Punctuation)),
+ (r'({[%s])(lines)(:)([^}]*)(}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation), '+lines'),
+ (r'({[%s])([^:}]*)(:?)([^}]*)(}?)' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation, Text,
+ Punctuation)),
+ (r'(\(\+)(.*?)(\+\)|\Z)',
+ bygroups(Punctuation, using(this, state='+main'),
+ Punctuation))
+ ],
+ '+p': [
+ (r'[^@]+', Comment.Preproc),
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc, '#pop'),
+ (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading),
+ (r'@', Comment.Preproc)
+ ],
+ '+lines': [
+ (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline),
+ Comment.Preproc),
+ (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline),
+ Comment.Preproc),
+ (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline),
+ Generic.Heading, '+p'),
+ (r'(%s)@[a-zA-Z_0-9]*[ %s]' % (_start, _newline), Keyword),
+ (r'![^%s]*' % _newline, Comment.Single),
+ (r'({)([%s]endlines)(})' % _dash,
+ bygroups(Punctuation, Keyword, Punctuation), '#pop'),
+ (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text)
+ ]
+ }
+ # Inform 7 can include snippets of Inform 6 template language,
+ # so all of Inform6Lexer's states are copied here, with
+ # modifications to account for template syntax. Inform7Lexer's
+ # own states begin with '+' to avoid name conflicts. Some of
+ # Inform6Lexer's states begin with '_': these are not modified.
+ # They deal with template syntax either by including modified
+ # states, or by matching r'' then pushing to modified states.
+ for token in Inform6Lexer.tokens:
+ if token == 'root':
+ continue
+ tokens[level][token] = list(Inform6Lexer.tokens[token])
+ if not token.startswith('_'):
+ tokens[level][token][:0] = [include('+i6t'), include(level)]
+
+ def __init__(self, **options):
+ level = options.get('i6t', '+i6t-not-inline')
+ if level not in self._all_tokens:
+ self._tokens = self.__class__.process_tokendef(level)
+ else:
+ self._tokens = self._all_tokens[level]
+ RegexLexer.__init__(self, **options)
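+
+    # Usage sketch: the token-set variant is selected through the 'i6t'
+    # lexer option read above, e.g. ``Inform7Lexer(i6t='+i6t-inline')``;
+    # when the option is omitted, '+i6t-not-inline' is used.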
+
+
+class Inform6TemplateLexer(Inform7Lexer):
+ """
+ For `Inform 6 template
+ <http://inform7.com/sources/src/i6template/Woven/index.html>`_ code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Inform 6 template'
+ aliases = ['i6t']
+ filenames = ['*.i6t']
+
+ def get_tokens_unprocessed(self, text, stack=('+i6t-root',)):
+ return Inform7Lexer.get_tokens_unprocessed(self, text, stack)
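+
+
+if __name__ == '__main__':
+    # Minimal usage sketch (the sample routine below is hypothetical): dump
+    # the raw token stream for one line of Inform 6, which also exercises
+    # the 'in' keyword/operator disambiguation in Inform6Lexer above.
+    sample = '[ Main; objectloop (x in y) print "ok"; ];'
+    for index, token, value in Inform6Lexer().get_tokens_unprocessed(sample):
+        print(index, token, repr(value))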
diff --git a/pygments/lexers/iolang.py b/pygments/lexers/iolang.py
new file mode 100644
index 00000000..0bf86f56
--- /dev/null
+++ b/pygments/lexers/iolang.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.iolang
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Io language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+
+__all__ = ['IoLexer']
+
+
+class IoLexer(RegexLexer):
+ """
+ For `Io <http://iolanguage.com/>`_ (a small, prototype-based
+ programming language) source.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Io'
+ filenames = ['*.io']
+ aliases = ['io']
+ mimetypes = ['text/x-iosrc']
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Comments
+ (r'//(.*?)\n', Comment.Single),
+ (r'#(.*?)\n', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'/\+', Comment.Multiline, 'nestedcomment'),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # Operators
+ (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}',
+ Operator),
+ # keywords
+ (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b',
+ Keyword),
+ # constants
+ (r'(nil|false|true)\b', Name.Constant),
+ # names
+ (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b',
+ Name.Builtin),
+ ('[a-zA-Z_]\w*', Name),
+ # numbers
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ],
+ 'nestedcomment': [
+ (r'[^+/]+', Comment.Multiline),
+ (r'/\+', Comment.Multiline, '#push'),
+ (r'\+/', Comment.Multiline, '#pop'),
+ (r'[+/]', Comment.Multiline),
+ ]
+ }
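+
+
+if __name__ == '__main__':
+    # Usage sketch with a made-up snippet: highlight a little Io on the
+    # terminal to eyeball the token classes assigned above.
+    from pygments import highlight
+    from pygments.formatters import TerminalFormatter
+    code = 'greet := method("hello world" println)\n'
+    print(highlight(code, IoLexer(), TerminalFormatter()))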
diff --git a/pygments/lexers/javascript.py b/pygments/lexers/javascript.py
new file mode 100644
index 00000000..8b5f9c76
--- /dev/null
+++ b/pygments/lexers/javascript.py
@@ -0,0 +1,1199 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.javascript
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for JavaScript and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, \
+ using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
+from pygments.util import get_bool_opt, iteritems
+import pygments.unistring as uni
+
+__all__ = ['JavascriptLexer', 'KalLexer', 'LiveScriptLexer', 'DartLexer',
+ 'TypeScriptLexer', 'LassoLexer', 'ObjectiveJLexer',
+ 'CoffeeScriptLexer', 'MaskLexer']
+
+JS_IDENT_START = ('(?:[$_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Lo + uni.Nl
+ + ']|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT_PART = ('(?:[$_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Lo + uni.Nl
+ + uni.Mn + uni.Mc + uni.Nd + uni.Pc
+ + u'\u200c\u200d]|\\\\u[a-fA-F0-9]{4})')
+JS_IDENT = JS_IDENT_START + '(?:' + JS_IDENT_PART + ')*'
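+
+# For reference, the ASCII core of JS_IDENT is roughly
+#     (?:[$_a-zA-Z]|\\u[a-fA-F0-9]{4})(?:[$_a-zA-Z0-9]|\\u[a-fA-F0-9]{4})*
+# with the Unicode letter/digit/connector categories above widening each
+# class, so identifiers like _foo, $, café and the escaped form \u0041bc
+# all match.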
+
+
+class JavascriptLexer(RegexLexer):
+ """
+ For JavaScript source code.
+ """
+
+ name = 'JavaScript'
+ aliases = ['js', 'javascript']
+ filenames = ['*.js', ]
+ mimetypes = ['application/javascript', 'application/x-javascript',
+ 'text/x-javascript', 'text/javascript', ]
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (JS_IDENT, Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
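+
+    # Usage sketch: this lexer is normally obtained by alias, e.g.
+    #
+    #     from pygments.lexers import get_lexer_by_name
+    #     tokens = list(get_lexer_by_name('js').get_tokens('var x = /ab+c/g;'))
+    #
+    # where the regex literal is tokenized via the 'slashstartsregex' state
+    # entered after the '=' operator.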
+
+
+class KalLexer(RegexLexer):
+ """
+ For `Kal`_ source code.
+
+ .. _Kal: http://rzimmerman.github.io/kal
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Kal'
+ aliases = ['kal']
+ filenames = ['*.kal']
+ mimetypes = ['text/kal', 'application/kal']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'functiondef': [
+ (r'[$a-zA-Z_][\w\$]*\s*', Name.Function, '#pop'),
+ include('commentsandwhitespace'),
+ ],
+ 'classdef': [
+ (r'\binherits\s+from\b', Keyword),
+ (r'[$a-zA-Z_][\w\$]*\s*\n', Name.Class, '#pop'),
+ (r'[$a-zA-Z_][\w\$]*\s*', Name.Class),
+ include('commentsandwhitespace'),
+ ],
+ 'listcomprehension': [
+ (r'\]', Punctuation, '#pop'),
+ (r'\b(property|value)\b', Keyword),
+ include('root'),
+ ],
+ 'waitfor': [
+ (r'\n', Punctuation, '#pop'),
+ (r'\bfrom\b', Keyword),
+ include('root'),
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex),
+ (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
+ Operator),
+ (r'\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|'
+ r'\bbut\b|\bbitwise\b|\bmod\b|\^|\bxor\b|\bexists\b|\bdoesnt\s+exist\b',
+ Operator.Word),
+ (r'(?:\([^()]+\))?\s*>', Name.Function),
+ (r'[{(]', Punctuation),
+ (r'\[', Punctuation, 'listcomprehension'),
+ (r'[})\]\.\,]', Punctuation),
+ (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
+ (r'\bclass\b', Keyword.Declaration, 'classdef'),
+ (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
+ (r'\b(me|this)(\.[$a-zA-Z_][\w\.\$]*)?\b', Name.Variable.Instance),
+ (r'(?<![\.\$])(for(\s+(parallel|series))?|in|of|while|until|'
+ r'break|return|continue|'
+ r'when|if|unless|else|otherwise|except\s+when|'
+ r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
+ r'typeof|instanceof|super|run\s+in\s+parallel|'
+ r'inherits\s+from)\b', Keyword),
+ (r'(?<![\.\$])(true|false|yes|no|on|off|null|nothing|none|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
+ r'print)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w\.\$]*\s*(:|[\+\-\*\/]?\=)?\b', Name.Variable),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all kal strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#{', String.Interpol, "interpoling_string"),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+            (r'#|\\.|"', String), # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#{', String.Interpol, "interpoling_string"),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class LiveScriptLexer(RegexLexer):
+ """
+ For `LiveScript`_ source code.
+
+ .. _LiveScript: http://gkz.github.com/LiveScript/
+
+    .. versionadded:: 1.6
+ """
+
+ name = 'LiveScript'
+ aliases = ['live-script', 'livescript']
+ filenames = ['*.ls']
+ mimetypes = ['text/livescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'#.*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ include('commentsandwhitespace'),
+ (r'//([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'/', String.Regex),
+ (r'[^/#]+', String.Regex)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'//', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop'),
+ ],
+ 'root': [
+ # this next expr leads to infinite loops root -> slashstartsregex
+ # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
+ r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
+ (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
+ r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
+ r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
+ r'[+*`%&\|\^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
+ r'return|continue|switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by|const|var|to|til)\b', Keyword,
+ 'slashstartsregex'),
+ (r'(?<![\.\$])(true|false|yes|no|on|off|'
+ r'null|NaN|Infinity|undefined|void)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w\.\-:\$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w\-]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
+ (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ (r'\\\S+', String),
+ (r'<\[.*?\]>', String),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+            # note that all LiveScript strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+            (r'#|\\.|"', String), # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
+class DartLexer(RegexLexer):
+ """
+ For `Dart <http://dartlang.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Dart'
+ aliases = ['dart']
+ filenames = ['*.dart']
+ mimetypes = ['text/x-dart']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ include('string_literal'),
+ (r'#!(.*?)$', Comment.Preproc),
+ (r'\b(import|export)\b', Keyword, 'import_decl'),
+ (r'\b(library|source|part of|part)\b', Keyword),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'\b(class)\b(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
+ r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
+ Keyword),
+ (r'\b(abstract|const|extends|factory|final|get|implements|'
+ r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
+ (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
+ (r'\b(false|null|true)\b', Keyword.Constant),
+ (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
+ (r'[a-zA-Z_$]\w*:', Name.Label),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'[(){}\[\],.;]', Punctuation),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
+ (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
+ (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
+ (r'\n', Text)
+ # pseudo-keyword negate intentionally left out
+ ],
+ 'class': [
+ (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
+ ],
+ 'import_decl': [
+ include('string_literal'),
+ (r'\s+', Text),
+ (r'\b(as|show|hide)\b', Keyword),
+ (r'[a-zA-Z_$]\w*', Name),
+ (r'\,', Punctuation),
+ (r'\;', Punctuation, '#pop')
+ ],
+ 'string_literal': [
+ # Raw strings.
+ (r'r"""([\s|\S]*?)"""', String.Double),
+ (r"r'''([\s|\S]*?)'''", String.Single),
+ (r'r"(.*?)"', String.Double),
+ (r"r'(.*?)'", String.Single),
+ # Normal Strings.
+ (r'"""', String.Double, 'string_double_multiline'),
+ (r"'''", String.Single, 'string_single_multiline'),
+ (r'"', String.Double, 'string_double'),
+ (r"'", String.Single, 'string_single')
+ ],
+ 'string_common': [
+ (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
+ String.Escape),
+ (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
+ (r'(\$\{)(.*?)(\})',
+ bygroups(String.Interpol, using(this), String.Interpol))
+ ],
+ 'string_double': [
+ (r'"', String.Double, '#pop'),
+ (r'[^\"$\\\n]+', String.Double),
+ include('string_common'),
+ (r'\$+', String.Double)
+ ],
+ 'string_double_multiline': [
+ (r'"""', String.Double, '#pop'),
+ (r'[^\"$\\]+', String.Double),
+ include('string_common'),
+ (r'(\$|\")+', String.Double)
+ ],
+ 'string_single': [
+ (r"'", String.Single, '#pop'),
+ (r"[^\'$\\\n]+", String.Single),
+ include('string_common'),
+ (r'\$+', String.Single)
+ ],
+ 'string_single_multiline': [
+ (r"'''", String.Single, '#pop'),
+ (r'[^\'$\\]+', String.Single),
+ include('string_common'),
+ (r'(\$|\')+', String.Single)
+ ]
+ }
+
+
+class TypeScriptLexer(RegexLexer):
+ """
+ For `TypeScript <http://typescriptlang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'TypeScript'
+ aliases = ['ts']
+ filenames = ['*.ts']
+ mimetypes = ['text/x-typescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ # Match stuff like: module name {...}
+ (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
+ bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
+ # Match variable type keywords
+ (r'\b(string|bool|number)\b', Keyword.Type),
+ # Match stuff like: constructor
+ (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
+ # Match stuff like: super(argument, list)
+            (r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
+             bygroups(Keyword.Reserved, Text, Text), 'slashstartsregex'),
+ # Match stuff like: function() {...}
+ (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
+ # Match stuff like: (function: return type)
+ (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
+ bygroups(Name.Other, Text, Keyword.Type)),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class LassoLexer(RegexLexer):
+ """
+ For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
+ syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
+ HTML, use the `LassoHtmlLexer`.
+
+ Additional options accepted:
+
+ `builtinshighlighting`
+ If given and ``True``, highlight builtin types, traits, methods, and
+ members (default: ``True``).
+ `requiredelimiters`
+ If given and ``True``, only highlight code between delimiters as Lasso
+ (default: ``False``).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Lasso'
+ aliases = ['lasso', 'lassoscript']
+ filenames = ['*.lasso', '*.lasso[89]']
+ alias_filenames = ['*.incl', '*.inc', '*.las']
+ mimetypes = ['text/x-lasso']
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
+ (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
+ ('delimiters', 'anglebrackets')),
+ (r'<(!--.*?-->)?', Other, 'delimiters'),
+ (r'\s+', Other),
+ default(('delimiters', 'lassofile')),
+ ],
+ 'delimiters': [
+ (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
+ (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
+ (r'\[', Comment.Preproc, 'squarebrackets'),
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<(!--.*?-->)?', Other),
+ (r'[^[<]+', Other),
+ ],
+ 'nosquarebrackets': [
+ (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
+ (r'<', Other),
+ (r'[^<]+', Other),
+ ],
+ 'noprocess': [
+ (r'\[/noprocess\]', Comment.Preproc, '#pop'),
+ (r'\[', Other),
+ (r'[^[]', Other),
+ ],
+ 'squarebrackets': [
+ (r'\]', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'anglebrackets': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'lassofile': [
+ (r'\]|\?>', Comment.Preproc, '#pop'),
+ include('lasso'),
+ ],
+ 'whitespacecomments': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*\*!.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ ],
+ 'lasso': [
+ # whitespace/comments
+ include('whitespacecomments'),
+
+ # literals
+ (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
+ (r'0x[\da-f]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
+ (r"'", String.Single, 'singlestring'),
+ (r'"', String.Double, 'doublestring'),
+ (r'`[^`]*`', String.Backtick),
+
+ # names
+ (r'\$[a-z_][\w.]*', Name.Variable),
+ (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
+ (r"(\.)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
+ (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
+ bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
+ (r'(\.\.?)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
+ (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
+ bygroups(Operator, Name.Other.Member)),
+ (r'(self|inherited)\b', Name.Builtin.Pseudo),
+ (r'-[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
+ r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
+ r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
+ r'Error_InvalidDatabase|Error_InvalidPassword|'
+ r'Error_InvalidUsername|Error_ModuleNotFound|'
+ r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
+ r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
+ r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
+ r'Error_UpdateError)\b', Name.Exception),
+
+ # definitions
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
+ (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
+ bygroups(Keyword.Declaration, Text, Name.Class, Operator,
+ Name.Function), 'signature'),
+ (r'(define)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
+ (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
+ 'signature'),
+ (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
+ bygroups(Keyword, Text, Name.Function)),
+
+ # keywords
+ (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
+ (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
+ (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
+ r'null|bytes|list|queue|set|stack|staticarray|tie)\b', Keyword.Type),
+ (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
+ (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
+ (r'require\b', Keyword, 'requiresection'),
+ (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
+ (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
+ r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
+ r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
+ r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
+ r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
+ r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
+ r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
+ r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
+ r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
+ r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
+ r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
+ r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
+ r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
+ r'Tag_Name|ascending|average|by|define|descending|do|equals|'
+ r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
+ r'min|on|order|parent|protected|provide|public|require|returnhome|'
+ r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
+ r'yield|yieldhome)\b',
+ bygroups(Punctuation, Keyword)),
+
+ # other
+ (r',', Punctuation, 'commamember'),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
+ bygroups(Name, Name.Label, Operator)),
+ (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
+ (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
+ bygroups(Operator, Operator.Word)),
+ (r':=|[-+*/%=<>&|!?\\]+', Operator),
+ (r'[{}():;,@^]', Punctuation),
+ ],
+ 'singlestring': [
+ (r"'", String.Single, '#pop'),
+ (r"[^'\\]+", String.Single),
+ include('escape'),
+ (r"\\", String.Single),
+ ],
+ 'doublestring': [
+ (r'"', String.Double, '#pop'),
+ (r'[^"\\]+', String.Double),
+ include('escape'),
+ (r'\\', String.Double),
+ ],
+ 'escape': [
+ (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
+ r'[abefnrtv?\"\'\\]|$)', String.Escape),
+ ],
+ 'signature': [
+ (r'=>', Operator, '#pop'),
+ (r'\)', Punctuation, '#pop'),
+ (r'[(,]', Punctuation, 'parameter'),
+ include('lasso'),
+ ],
+ 'parameter': [
+ (r'\)', Punctuation, '#pop'),
+ (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ include('lasso'),
+ ],
+ 'requiresection': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
+ (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
+ (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r',', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'requiresignature': [
+ (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
+ (r'\)', Punctuation, '#pop:2'),
+ (r'-?[a-z_][\w.]*', Name.Attribute),
+ (r'::\s*[a-z_][\w.]*', Name.Label),
+ (r'\.\.\.', Name.Builtin.Pseudo),
+ (r'[(,]', Punctuation),
+ include('whitespacecomments'),
+ ],
+ 'commamember': [
+ (r'(([a-z_][\w.]*=?|[-+*/%])'
+ r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
+ Name.Function, 'signature'),
+ include('whitespacecomments'),
+ default('#pop'),
+ ],
+ }
+
+ def __init__(self, **options):
+ self.builtinshighlighting = get_bool_opt(
+ options, 'builtinshighlighting', True)
+ self.requiredelimiters = get_bool_opt(
+ options, 'requiredelimiters', False)
+
+ self._builtins = set()
+ self._members = set()
+ if self.builtinshighlighting:
+ from pygments.lexers._lasso_builtins import BUILTINS, MEMBERS
+ for key, value in iteritems(BUILTINS):
+ self._builtins.update(value)
+ for key, value in iteritems(MEMBERS):
+ self._members.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.requiredelimiters:
+ stack.append('delimiters')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if (token is Name.Other and value.lower() in self._builtins or
+ token is Name.Other.Member and
+ value.lower().rstrip('=') in self._members):
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
+ def analyse_text(text):
+ rv = 0.0
+ if 'bin/lasso9' in text:
+ rv += 0.8
+ if re.search(r'<\?(=|lasso)|\A\[', text, re.I):
+ rv += 0.4
+ if re.search(r'local\(', text, re.I):
+ rv += 0.4
+ if '?>' in text:
+ rv += 0.1
+ return rv
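+
+    # Usage sketch for the two options documented in the class docstring
+    # (the values shown are the defaults):
+    #
+    #     lexer = LassoLexer(builtinshighlighting=True,
+    #                        requiredelimiters=False)
+    #     for token, value in lexer.get_tokens('local(x = 1)'):
+    #         ...
+    #
+    # With requiredelimiters=True, text outside [...] or <?lasso ... ?>
+    # delimiters is emitted as Other instead of being lexed as Lasso.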
+
+
+class ObjectiveJLexer(RegexLexer):
+ """
+ For Objective-J source code with preprocessor directives.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Objective-J'
+ aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
+ filenames = ['*.j']
+ mimetypes = ['text/x-objective-j']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+
+ # function definition
+ (r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' + _ws + '{)',
+ bygroups(using(this), using(this, state='function_signature'),
+ using(this))),
+
+ # class definition
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ 'classname'),
+ (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
+ 'forward_classname'),
+ (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
+
+ include('statements'),
+ ('[{\(\)}]', Punctuation),
+ (';', Punctuation),
+ ],
+ 'whitespace': [
+ (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
+ bygroups(Comment.Preproc, Text, String.Double)),
+ (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
+ bygroups(Comment.Preproc, Text, String.Double)),
+
+ (r'#if\s+0', Comment.Preproc, 'if0'),
+ (r'#', Comment.Preproc, 'macro'),
+
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
+ (r'<!--', Comment),
+ ],
+ 'slashstartsregex': [
+ include('whitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop'),
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop'),
+ ],
+ 'statements': [
+ (r'(L|@)?"', String, 'string'),
+ (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'0[0-7]+[Ll]?', Number.Oct),
+ (r'\d+[Ll]?', Number.Integer),
+
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
+ r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
+
+ (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+
+ (r'(@selector|@private|@protected|@public|@encode|'
+ r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
+ r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
+
+ (r'(int|long|float|short|double|char|unsigned|signed|void|'
+ r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
+ Keyword.Type),
+
+ (r'(self|super)\b', Name.Builtin),
+
+ (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
+ r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
+ r'SQRT2)\b', Keyword.Constant),
+
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+
+ (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
+ bygroups(Name.Function, using(this))),
+
+ (r'[$a-zA-Z_]\w*', Name),
+ ],
+ 'classname': [
+ # interface definition that inherits
+ (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
+ r')([a-zA-Z_]\w*)?',
+ bygroups(Name.Class, using(this), Name.Class), '#pop'),
+ # interface definition for a category
+ (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
+ bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
+ # simple interface / implementation
+ (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
+ ],
+ 'forward_classname': [
+ (r'([a-zA-Z_]\w*)(\s*,\s*)',
+ bygroups(Name.Class, Text), '#push'),
+ (r'([a-zA-Z_]\w*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop'),
+ ],
+ 'function_signature': [
+ include('whitespace'),
+
+ # start of a selector w/ parameters
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), 'function_parameters'),
+
+ # no-param function
+ (r'(\(' + _ws + r')' # open paren
+ r'([a-zA-Z_]\w+)' # return type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # function name
+ bygroups(using(this), Keyword.Type, using(this),
+ Name.Function), "#pop"),
+
+ # no return type given, start of a selector w/ parameters
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ bygroups(Name.Function), 'function_parameters'),
+
+ # no return type given, no-param function
+ (r'([$a-zA-Z_]\w+)', # function name
+ bygroups(Name.Function), "#pop"),
+
+ ('', Text, '#pop'),
+ ],
+ 'function_parameters': [
+ include('whitespace'),
+
+ # parameters
+ (r'(\(' + _ws + ')' # open paren
+ r'([^\)]+)' # type
+ r'(' + _ws + r'\)' + _ws + r')' # close paren
+ r'([$a-zA-Z_]\w+)', # param name
+ bygroups(using(this), Keyword.Type, using(this), Text)),
+
+ # one piece of a selector name
+ (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
+ Name.Function),
+
+ # smallest possible selector piece
+ (r'(:)', Name.Function),
+
+ # var args
+ (r'(,' + _ws + r'\.\.\.)', using(this)),
+
+ # param name
+ (r'([$a-zA-Z_]\w+)', Text),
+ ],
+ 'expression': [
+ (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
+ Punctuation)),
+ (r'(\))', Punctuation, "#pop"),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/[*](.|\n)*?[*]/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
+ # special directive found in most Objective-J files
+ return True
+ return False
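+
+    # For example, a file beginning with
+    #
+    #     @import <Foundation/Foundation.j>
+    #
+    # is claimed unconditionally (True); anything else is declined.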
+
+
+class CoffeeScriptLexer(RegexLexer):
+ """
+ For `CoffeeScript`_ source code.
+
+ .. _CoffeeScript: http://coffeescript.org
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'CoffeeScript'
+ aliases = ['coffee-script', 'coffeescript', 'coffee']
+ filenames = ['*.coffee']
+ mimetypes = ['text/coffeescript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'###[^#].*?###', Comment.Multiline),
+ (r'#(?!##[^#]).*?\n', Comment.Single),
+ ],
+ 'multilineregex': [
+ (r'[^/#]+', String.Regex),
+ (r'///([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'#{', String.Interpol, 'interpoling_string'),
+ (r'[/#]', String.Regex),
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'///', String.Regex, ('#pop', 'multilineregex')),
+ (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop'),
+ ],
+ 'root': [
+ # this next expr leads to infinite loops root -> slashstartsregex
+ # (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
+ r'\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
+ Operator, 'slashstartsregex'),
+ (r'(?:\([^()]*\))?\s*[=-]>', Name.Function),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(?<![\.\$])(for|own|in|of|while|until|'
+ r'loop|break|return|continue|'
+ r'switch|when|then|if|unless|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
+ r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
+ (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
+ r'NaN|Infinity|undefined)\b',
+ Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
+ Name.Builtin),
+ (r'[$a-zA-Z_][\w\.:\$]*\s*[:=]\s', Name.Variable,
+ 'slashstartsregex'),
+ (r'@[$a-zA-Z_][\w\.:\$]*\s*[:=]\s', Name.Variable.Instance,
+ 'slashstartsregex'),
+ (r'@', Name.Other, 'slashstartsregex'),
+ (r'@?[$a-zA-Z_][\w\$]*', Name.Other, 'slashstartsregex'),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ ('"""', String, 'tdqs'),
+ ("'''", String, 'tsqs'),
+ ('"', String, 'dqs'),
+ ("'", String, 'sqs'),
+ ],
+ 'strings': [
+ (r'[^#\\\'"]+', String),
+ # note that all coffee script strings are multi-line.
+ # hashmarks, quotes and backslashes must be parsed one at a time
+ ],
+ 'interpoling_string': [
+ (r'}', String.Interpol, "#pop"),
+ include('root')
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\.|\'', String), # double-quoted strings don't need ' escapes
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r'#|\\.|"', String), # single-quoted strings don't need " escapes
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ (r'\\.|\'|"', String), # no need to escape quotes in triple-string
+ (r'#{', String.Interpol, "interpoling_string"),
+ (r'#', String),
+ include('strings'),
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
+ include('strings')
+ ],
+ }
+
+
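The string states above deliberately consume hashmarks, quotes and backslashes one character at a time so that #{ can push 'interpoling_string', which re-lexes the embedded expression through 'root'. A small usage sketch (the snippet is made up; the API is the standard Pygments one):

    from pygments.lexers import CoffeeScriptLexer

    src = 'greet = (name) -> "Hello #{name}!"\n'
    for tok, value in CoffeeScriptLexer().get_tokens(src):
        print(tok, repr(value))
    # '#{' and '}' should come out as String.Interpol, with 'name' between
    # them lexed via the normal 'root' rules.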
+class MaskLexer(RegexLexer):
+ """
+ For `Mask <http://github.com/atmajs/MaskJS>`__ markup.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Mask'
+ aliases = ['mask']
+ filenames = ['*.mask']
+ mimetypes = ['text/x-mask']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'[\{\};>]', Punctuation),
+ (r"'''", String, 'string-trpl-single'),
+ (r'"""', String, 'string-trpl-double'),
+ (r"'", String, 'string-single'),
+ (r'"', String, 'string-double'),
+ (r'([\w-]+)', Name.Tag, 'node'),
+ (r'([^\.#;{>\s]+)', Name.Class, 'node'),
+ (r'(#[\w-]+)', Name.Function, 'node'),
+ (r'(\.[\w-]+)', Name.Variable.Class, 'node')
+ ],
+ 'string-base': [
+ (r'\\.', String.Escape),
+ (r'~\[', String.Interpol, 'interpolation'),
+ (r'.', String.Single),
+ ],
+ 'string-single': [
+ (r"'", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-double': [
+ (r'"', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-single': [
+ (r"'''", String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'string-trpl-double': [
+ (r'"""', String.Single, '#pop'),
+ include('string-base')
+ ],
+ 'interpolation': [
+ (r'\]', String.Interpol, '#pop'),
+ (r'\s*:', String.Interpol, 'expression'),
+ (r'\s*\w+:', Name.Other),
+ (r'[^\]]+', String.Interpol)
+ ],
+ 'expression': [
+ (r'[^\]]+', using(JavascriptLexer), '#pop')
+ ],
+ 'node': [
+ (r'\s+', Text),
+ (r'\.', Name.Variable.Class, 'node-class'),
+ (r'\#', Name.Function, 'node-id'),
+ (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
+ (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
+ (r'[\w:-]+', Name.Attribute),
+ (r'[>{;]', Punctuation, '#pop')
+ ],
+ 'node-class': [
+ (r'[\w-]+', Name.Variable.Class),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-id': [
+ (r'[\w-]+', Name.Function),
+ (r'~\[', String.Interpol, 'interpolation'),
+ default('#pop')
+ ],
+ 'node-attr-value': [
+ (r'\s+', Text),
+ (r'\w+', Name.Variable, '#pop'),
+ (r"'", String, 'string-single-pop2'),
+ (r'"', String, 'string-double-pop2'),
+ default('#pop')
+ ],
+ 'node-attr-style-value': [
+ (r'\s+', Text),
+ (r"'", String.Single, 'css-single-end'),
+ (r'"', String.Single, 'css-double-end'),
+ include('node-attr-value')
+ ],
+ 'css-base': [
+ (r'\s+', Text),
+ (r";", Punctuation),
+ (r"[\w\-]+\s*:", Name.Builtin)
+ ],
+ 'css-single-end': [
+ include('css-base'),
+ (r"'", String.Single, '#pop:2'),
+ (r"[^;']+", Name.Entity)
+ ],
+ 'css-double-end': [
+ include('css-base'),
+ (r'"', String.Single, '#pop:2'),
+ (r"[^;\"]+", Name.Entity)
+ ],
+ 'string-single-pop2': [
+ (r"'", String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ 'string-double-pop2': [
+ (r'"', String.Single, '#pop:2'),
+ include('string-base')
+ ],
+ }
diff --git a/pygments/lexers/julia.py b/pygments/lexers/julia.py
new file mode 100644
index 00000000..a9836ee2
--- /dev/null
+++ b/pygments/lexers/julia.py
@@ -0,0 +1,194 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.julia
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Julia language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, combined, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['JuliaLexer', 'JuliaConsoleLexer']
+
+
+class JuliaLexer(RegexLexer):
+ """
+ For `Julia <http://julialang.org/>`_ source code.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia'
+ aliases = ['julia', 'jl']
+ filenames = ['*.jl']
+ mimetypes = ['text/x-julia', 'application/x-julia']
+
+ builtins = [
+ 'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple',
+ 'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype',
+ 'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type',
+ 'method_exists', 'applicable', 'invoke', 'dlopen', 'dlsym', 'system',
+ 'error', 'throw', 'assert', 'new', 'Inf', 'NaN', 'pi', 'im',
+ ]
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'#=', Comment.Multiline, "blockcomment"),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[@]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+
+ # keywords
+ (r'(begin|while|for|in|return|break|continue|'
+ r'macro|quote|let|if|elseif|else|try|catch|end|'
+ r'bitstype|ccall|do|using|module|import|export|'
+ r'importall|baremodule|immutable)\b', Keyword),
+ (r'(local|global|const)\b', Keyword.Declaration),
+ (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
+ r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
+ Keyword.Type),
+
+ # functions
+ (r'(function)((?:\s|\\\s)+)',
+ bygroups(Keyword, Name.Function), 'funcname'),
+
+ # types
+ (r'(type|typealias|abstract)((?:\s|\\\s)+)',
+ bygroups(Keyword, Name.Class), 'typename'),
+
+ # operators
+ (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
+ (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
+
+ # builtins
+ ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
+
+ # backticks
+ (r'`(?s).*?`', String.Backtick),
+
+ # chars
+ (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
+ r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
+
+ # try to match trailing transpose
+ (r'(?<=[.\w\)\]])\'+', Operator),
+
+ # strings
+ (r'(?:[IL])"', String, 'string'),
+ (r'[E]?"', String, combined('stringescape', 'string')),
+
+ # names
+ (r'@[\w.]+', Name.Decorator),
+ (r'[a-zA-Z_]\w*', Name),
+
+ # numbers
+ (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'0b[01]+(_[01]+)+', Number.Bin),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'\d+(_\d+)+', Number.Integer),
+ (r'\d+', Number.Integer)
+ ],
+
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop'),
+ ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
+ ('[^\s\w{]{1,2}', Operator, '#pop'),
+ ],
+
+ 'typename': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ "blockcomment": [
+ (r'[^=#]', Comment.Multiline),
+ (r'#=', Comment.Multiline, '#push'),
+ (r'=#', Comment.Multiline, '#pop'),
+ (r'[=#]', Comment.Multiline),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ (r'\$(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
+ String.Interpol),
+ (r'[^\\"$]+', String),
+ # quotes, dollar signs, and backslashes must be parsed one at a time
+ (r'["\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'julia')
+
+
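shebang_matches() (imported from pygments.util above) checks whether the first line is a shebang whose interpreter matches the given regex, so Julia scripts are recognised by content even without a *.jl filename. A quick sketch:

    from pygments.util import shebang_matches

    print(shebang_matches('#!/usr/bin/env julia\nprintln(1)\n', r'julia'))  # True
    print(shebang_matches('#!/bin/sh\necho hi\n', r'julia'))                # False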
+line_re = re.compile('.*?\n')
+
+
+class JuliaConsoleLexer(Lexer):
+ """
+ For Julia console sessions. Modeled after MatlabSessionLexer.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Julia console'
+ aliases = ['jlcon']
+
+ def get_tokens_unprocessed(self, text):
+ jllexer = JuliaLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('julia>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:6])]))
+ curcode += line[6:]
+
+ elif line.startswith(' '):
+
+ idx = len(curcode)
+
+ # without the extra newline, the error would be shown on the same line as the preceding input
+ line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
+
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
+ if curcode: # or item:
+ for item in do_insertions(
+ insertions, jllexer.get_tokens_unprocessed(curcode)):
+ yield item
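The console lexer above works line by line: a 'julia>' prefix becomes Generic.Prompt, the code after it is buffered and later run through JuliaLexer via do_insertions(), indented continuation/traceback lines are attached as Generic.Traceback, and everything else is emitted as Generic.Output. A rough sketch of the intended behaviour (the session text is made up):

    from pygments.lexers import get_lexer_by_name

    session = 'julia> 1 + 1\n2\n'
    for _, tok, value in get_lexer_by_name('jlcon').get_tokens_unprocessed(session):
        print(tok, repr(value))
    # 'julia>' should be Generic.Prompt, '1 + 1' tokenized as Julia source,
    # and the bare '2' yielded as Generic.Output.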
diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py
index fa05b7ed..136a0fd7 100644
--- a/pygments/lexers/jvm.py
+++ b/pygments/lexers/jvm.py
@@ -12,12 +12,12 @@
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
- this, combined, default
+ this, combined, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Punctuation
+ Number, Punctuation
+from pygments.util import shebang_matches
from pygments import unistring as uni
-
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
@@ -42,9 +42,9 @@ class JavaLexer(RegexLexer):
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
# method names
- (r'((?:(?:[^\W\d]|\$)[\w\.\[\]\$<>]*\s+)+?)' # return arguments
- r'((?:[^\W\d]|\$)[\w\$]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'((?:(?:[^\W\d]|\$)[\w\.\[\]\$<>]*\s+)+?)' # return arguments
+ r'((?:[^\W\d]|\$)[\w\$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'@[^\W\d][\w\.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
@@ -62,7 +62,7 @@ class JavaLexer(RegexLexer):
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w\$]*)', bygroups(Operator, Name.Attribute)),
- (r'([^\W\d]|\$)[\w\$]*:', Name.Label),
+ (r'^\s*([^\W\d]|\$)[\w\$]*:', Name.Label),
(r'([^\W\d]|\$)[\w\$]*', Name),
(r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
@@ -91,7 +91,7 @@ class AspectJLexer(JavaLexer):
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
- aj_keywords = [
+ aj_keywords = set((
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
@@ -101,9 +101,9 @@ class AspectJLexer(JavaLexer):
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
- ]
- aj_inter_type = ['parents:', 'warning:', 'error:', 'soft:', 'precedence:']
- aj_inter_type_annotation = ['@type', '@method', '@constructor', '@field']
+ ))
+ aj_inter_type = set(('parents:', 'warning:', 'error:', 'soft:', 'precedence:'))
+ aj_inter_type_annotation = set(('@type', '@method', '@constructor', '@field'))
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
@@ -244,6 +244,7 @@ class ScalaLexer(RegexLexer):
u'\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]')
idrest = u'%s(?:%s|[0-9])*(?:(?<=_)%s)?' % (letter, letter, op)
+ letter_letter_digit = u'%s(?:%s|\d)*' % (letter, letter)
tokens = {
'root': [
@@ -268,8 +269,11 @@ class ScalaLexer(RegexLexer):
(r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
-# (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
-# Name.Attribute)),
+ (r'[fs]"""', String, 'interptriplestring'), # interpolated strings
+ (r'[fs]"', String, 'interpstring'), # interpolated strings
+ (r'raw"(\\\\|\\"|[^"])*"', String), # raw strings
+ # (ur'(\.)(%s|%s|`[^`]+`)' % (idrest, op), bygroups(Operator,
+ # Name.Attribute)),
(idrest, Name),
(r'`[^`]+`', Name),
(r'\[', Operator, 'typeparam'),
@@ -320,6 +324,27 @@ class ScalaLexer(RegexLexer):
'import': [
(u'(%s|\\.)+' % idrest, Name.Namespace, '#pop')
],
+ 'interpstringcommon': [
+ (r'[^"$\\]+', String),
+ (r'\$\$', String),
+ (r'\$' + letter_letter_digit, String.Interpol),
+ (r'\$\{', String.Interpol, 'interpbrace'),
+ (r'\\.', String),
+ ],
+ 'interptriplestring': [
+ (r'"""(?!")', String, '#pop'),
+ (r'"', String),
+ include('interpstringcommon'),
+ ],
+ 'interpstring': [
+ (r'"', String, '#pop'),
+ include('interpstringcommon'),
+ ],
+ 'interpbrace': [
+ (r'\}', String.Interpol, '#pop'),
+ (r'\{', String.Interpol, '#push'),
+ include('root'),
+ ],
}
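The new interp* states add highlighting for Scala's s"..." and f"..." string interpolators: a bare $name becomes String.Interpol, and ${...} re-enters 'root' between the braces. A sketch of the effect (made-up snippet, standard Pygments API):

    from pygments.lexers import ScalaLexer

    for tok, value in ScalaLexer().get_tokens('val msg = s"sum: ${a + b}"\n'):
        print(tok, repr(value))
    # '${' and '}' should come out as String.Interpol, and 'a + b' inside
    # the braces should be lexed with the ordinary 'root' rules.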
@@ -340,9 +365,9 @@ class GosuLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # modifiers etc.
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # modifiers etc.
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
@@ -376,28 +401,28 @@ class GosuLexer(RegexLexer):
(r'\n', Text)
],
'templateText': [
- (r'(\\<)|(\\\$)', String),
- (r'(<%@\s+)(extends|params)',
- bygroups(Operator, Name.Decorator), 'stringTemplate'),
- (r'<%!--.*?--%>', Comment.Multiline),
- (r'(<%)|(<%=)', Operator, 'stringTemplate'),
- (r'\$\{', Operator, 'stringTemplateShorthand'),
- (r'.', String)
+ (r'(\\<)|(\\\$)', String),
+ (r'(<%@\s+)(extends|params)',
+ bygroups(Operator, Name.Decorator), 'stringTemplate'),
+ (r'<%!--.*?--%>', Comment.Multiline),
+ (r'(<%)|(<%=)', Operator, 'stringTemplate'),
+ (r'\$\{', Operator, 'stringTemplateShorthand'),
+ (r'.', String)
],
'string': [
- (r'"', String, '#pop'),
- include('templateText')
+ (r'"', String, '#pop'),
+ include('templateText')
],
'stringTemplate': [
- (r'"', String, 'string'),
- (r'%>', Operator, '#pop'),
- include('root')
+ (r'"', String, 'string'),
+ (r'%>', Operator, '#pop'),
+ include('root')
],
'stringTemplateShorthand': [
- (r'"', String, 'string'),
- (r'\{', Operator, 'stringTemplateShorthand'),
- (r'\}', Operator, '#pop'),
- include('root')
+ (r'"', String, 'string'),
+ (r'\{', Operator, 'stringTemplateShorthand'),
+ (r'\}', Operator, '#pop'),
+ include('root')
],
}
@@ -413,11 +438,11 @@ class GosuTemplateLexer(Lexer):
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
- lexer = GosuLexer()
def get_tokens_unprocessed(self, text):
+ lexer = GosuLexer()
stack = ['templateText']
- for item in self.lexer.get_tokens_unprocessed(text, stack):
+ for item in lexer.get_tokens_unprocessed(text, stack):
yield item
@@ -443,9 +468,9 @@ class GroovyLexer(RegexLexer):
],
'base': [
# method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
@@ -506,44 +531,44 @@ class IokeLexer(RegexLexer):
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
(r'#{', Punctuation, 'textInterpolationRoot')
- ],
+ ],
'text': [
(r'(?<!\\)"', String, '#pop'),
include('interpolatableText'),
(r'[^"]', String)
- ],
+ ],
'documentation': [
(r'(?<!\\)"', String.Doc, '#pop'),
include('interpolatableText'),
(r'[^"]', String.Doc)
- ],
+ ],
'textInterpolationRoot': [
(r'}', Punctuation, '#pop'),
include('root')
- ],
+ ],
'slashRegexp': [
(r'(?<!\\)/[oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\/', String.Regex),
(r'[^/]', String.Regex)
- ],
+ ],
'squareRegexp': [
(r'(?<!\\)][oxpniums]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\]', String.Regex),
(r'[^\]]', String.Regex)
- ],
+ ],
'squareText': [
(r'(?<!\\)]', String, '#pop'),
include('interpolatableText'),
(r'[^\]]', String)
- ],
+ ],
'root': [
(r'\n', Text),
@@ -553,28 +578,28 @@ class IokeLexer(RegexLexer):
(r';(.*?)\n', Comment),
(r'\A#!(.*?)\n', Comment),
- #Regexps
+ # Regexps
(r'#/', String.Regex, 'slashRegexp'),
(r'#r\[', String.Regex, 'squareRegexp'),
- #Symbols
+ # Symbols
(r':[\w!:?]+', String.Symbol),
(r'[\w!:?]+:(?![\w!?])', String.Other),
(r':"(\\\\|\\"|[^"])*"', String.Symbol),
- #Documentation
+ # Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
- #Text
+ # Text
(r'"', String, 'text'),
(r'#\[', String, 'squareText'),
- #Mimic
+ # Mimic
(r'\w[a-zA-Z0-9!?_:]+(?=\s*=.*mimic\s)', Name.Entity),
- #Assignment
+ # Assignment
(r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
Name.Variable),
@@ -594,20 +619,20 @@ class IokeLexer(RegexLexer):
# Ground
(r'(stackTraceAsText)(?![a-zA-Z0-9!:_?])', Keyword),
- #DefaultBehaviour Literals
+ # DefaultBehaviour Literals
(r'(dict|list|message|set)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
- #DefaultBehaviour Case
+ # DefaultBehaviour Case
(r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
r'case:otherwise|case:xor)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
- #DefaultBehaviour Reflection
+ # DefaultBehaviour Reflection
(r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
- #DefaultBehaviour Aspects
+ # DefaultBehaviour Aspects
(r'(after|around|before)(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
# DefaultBehaviour
@@ -615,18 +640,18 @@ class IokeLexer(RegexLexer):
r'(?![a-zA-Z0-9!:_?])', Keyword),
(r'(use|destructuring)', Keyword.Reserved),
- #DefaultBehavior BaseBehavior
+ # DefaultBehavior BaseBehavior
(r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
r'documentation|identity|removeCell!|undefineCell)'
r'(?![a-zA-Z0-9!:_?])', Keyword),
- #DefaultBehavior Internal
+ # DefaultBehavior Internal
(r'(internal:compositeRegexp|internal:concatenateText|'
r'internal:createDecimal|internal:createNumber|'
r'internal:createRegexp|internal:createText)'
r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
- #DefaultBehaviour Conditions
+ # DefaultBehaviour Conditions
(r'(availableRestarts|bind|error\!|findRestart|handle|'
r'invokeRestart|rescue|restart|signal\!|warn\!)'
r'(?![a-zA-Z0-9!:_?])', Keyword.Reserved),
@@ -658,7 +683,7 @@ class IokeLexer(RegexLexer):
(r'#\(', Punctuation),
- # Operators
+ # Operators
(r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
@@ -672,10 +697,10 @@ class IokeLexer(RegexLexer):
# Punctuation
(r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|{|})', Punctuation),
- #kinds
+ # kinds
(r'[A-Z][\w!:?]*', Name.Class),
- #default cellnames
+ # default cellnames
(r'[a-z_][\w!:?]*', Name)
]
}
@@ -692,19 +717,19 @@ class ClojureLexer(RegexLexer):
filenames = ['*.clj']
mimetypes = ['text/x-clojure', 'application/x-clojure']
- special_forms = [
+ special_forms = (
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
- ]
+ )
# It's safe to consider 'ns' a declaration thing because it defines a new
# namespace.
- declarations = [
+ declarations = (
'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
'defstruct', 'defonce', 'declare', 'definline', 'definterface',
'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
- ]
+ )
- builtins = [
+ builtins = (
'*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
@@ -753,7 +778,7 @@ class ClojureLexer(RegexLexer):
'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
'vector?', 'when', 'when-first', 'when-let', 'when-not',
'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
- 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper']
+ 'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
# valid names for identifiers
# well, names can only not consist fully of numbers
@@ -763,9 +788,6 @@ class ClojureLexer(RegexLexer):
# but that's hard, so just pretend / is part of the name
valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries))
-
tokens = {
'root': [
# the comments - always starting with semicolon
@@ -792,17 +814,17 @@ class ClojureLexer(RegexLexer):
(r'~@|[`\'#^~&@]', Operator),
# highlight the special forms
- (_multi_escape(special_forms), Keyword),
+ (words(special_forms, suffix=' '), Keyword),
# Technically, only the special forms are 'keywords'. The problem
# is that only treating them as keywords means that things like
# 'defn' and 'ns' need to be highlighted as builtins. This is ugly
# and weird for most styles. So, as a compromise we're going to
# highlight them as Keyword.Declarations.
- (_multi_escape(declarations), Keyword.Declaration),
+ (words(declarations, suffix=' '), Keyword.Declaration),
# highlight the builtins
- (_multi_escape(builtins), Name.Builtin),
+ (words(builtins, suffix=' '), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),
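Throughout this patch, hand-written alternations and the removed _multi_escape() helper are replaced by words() from pygments.lexer, which builds an optimised alternation wrapped with the given prefix and suffix. A minimal sketch of what such a rule expands to (illustrative word list; the .get() call is roughly what RegexLexer does internally when compiling the rule):

    import re
    from pygments.lexer import words

    rule = words(('def', 'do', 'fn'), suffix=' ')
    pattern = re.compile(rule.get())
    print(bool(pattern.match('def x')))    # True  - 'def' plus the space suffix
    print(bool(pattern.match('(defn x')))  # False - no match at position 0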
@@ -848,9 +870,9 @@ class TeaLangLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
@@ -902,9 +924,9 @@ class CeylonLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_]\w*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
@@ -986,7 +1008,7 @@ class KotlinLexer(RegexLexer):
'root': [
(r'^\s*\[.*?\]', Name.Attribute),
(r'[^\S\n]+', Text),
- (r'\\\n', Text), # line continuation
+ (r'\\\n', Text), # line continuation
(r'//.*?\n', Comment.Single),
(r'/[*].*?[*]/', Comment.Multiline),
(r'\n', Text),
@@ -1042,9 +1064,9 @@ class XtendLexer(RegexLexer):
tokens = {
'root': [
# method names
- (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
- r'([a-zA-Z_$][\w$]*)' # method name
- r'(\s*)(\()', # signature start
+ (r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
+ r'([a-zA-Z_$][\w$]*)' # method name
+ r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
@@ -1089,6 +1111,7 @@ class XtendLexer(RegexLexer):
],
}
+
class PigLexer(RegexLexer):
"""
For `Pig Latin <https://pig.apache.org/>`_ source code.
@@ -1124,7 +1147,7 @@ class PigLexer(RegexLexer):
bygroups(Name.Function, Text, Punctuation)),
(r'[()#:]', Text),
(r'[^(:#\'\")\s]+', Text),
- (r'\S+\s+', Text) # TODO: make tests pass without \s+
+ (r'\S+\s+', Text) # TODO: make tests pass without \s+
],
'keywords': [
(r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
@@ -1219,6 +1242,7 @@ class GoloLexer(RegexLexer):
(r'-?\d[\d_]*', Number.Integer),
('`?[a-zA-Z_][\w$]*', Name),
+ (r'@[a-zA-Z_][\w$._]*', Name.Decorator),
(r'"""', String, combined('stringescape', 'triplestring')),
(r'"', String, combined('stringescape', 'doublestring')),
@@ -1327,41 +1351,44 @@ class JasminLexer(RegexLexer):
(r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
# Instructions
- (r'(aaload|aastore|aconst_null|aload|aload_0|aload_1|aload_2|'
- r'aload_3|aload_w|areturn|arraylength|astore|astore_0|astore_1|'
- r'astore_2|astore_3|astore_w|athrow|baload|bastore|bipush|'
- r'breakpoint|caload|castore|d2f|d2i|d2l|dadd|daload|dastore|'
- r'dcmpg|dcmpl|dconst_0|dconst_1|ddiv|dload|dload_0|dload_1|'
- r'dload_2|dload_3|dload_w|dmul|dneg|drem|dreturn|dstore|dstore_0|'
- r'dstore_1|dstore_2|dstore_3|dstore_w|dsub|dup|dup2|dup2_x1|'
- r'dup2_x2|dup_x1|dup_x2|f2d|f2i|f2l|fadd|faload|fastore|fcmpg|'
- r'fcmpl|fconst_0|fconst_1|fconst_2|fdiv|fload|fload_0|fload_1|'
- r'fload_2|fload_3|fload_w|fmul|fneg|frem|freturn|fstore|fstore_0|'
- r'fstore_1|fstore_2|fstore_3|fstore_w|fsub|i2b|i2c|i2d|i2f|i2l|'
- r'i2s|iadd|iaload|iand|iastore|iconst_0|iconst_1|iconst_2|'
- r'iconst_3|iconst_4|iconst_5|iconst_m1|idiv|iinc|iinc_w|iload|'
- r'iload_0|iload_1|iload_2|iload_3|iload_w|imul|ineg|int2byte|'
- r'int2char|int2short|ior|irem|ireturn|ishl|ishr|istore|istore_0|'
- r'istore_1|istore_2|istore_3|istore_w|isub|iushr|ixor|l2d|l2f|'
- r'l2i|ladd|laload|land|lastore|lcmp|lconst_0|lconst_1|ldc2_w|'
- r'ldiv|lload|lload_0|lload_1|lload_2|lload_3|lload_w|lmul|lneg|'
- r'lookupswitch|lor|lrem|lreturn|lshl|lshr|lstore|lstore_0|'
- r'lstore_1|lstore_2|lstore_3|lstore_w|lsub|lushr|lxor|'
- r'monitorenter|monitorexit|nop|pop|pop2|ret|ret_w|return|saload|'
- r'sastore|sipush|swap)%s' % _break, Keyword.Reserved),
+ (words((
+ 'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
+ 'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
+ 'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
+ 'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
+ 'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
+ 'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
+ 'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
+ 'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
+ 'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
+ 'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
+ 'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
+ 'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
+ 'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
+ 'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
+ 'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
+ 'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
+ 'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
+ 'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
+ 'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
+ 'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
+ 'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
+ 'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
(r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
Keyword.Reserved, 'class/no-dots'),
- (r'(invokedynamic|invokeinterface|invokenonvirtual|invokespecial|'
- r'invokestatic|invokevirtual)%s' % _break, Keyword.Reserved,
+ (r'invoke(dynamic|interface|nonvirtual|special|'
+ r'static|virtual)%s' % _break, Keyword.Reserved,
'invocation'),
(r'(getfield|putfield)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'field')),
(r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'static')),
- (r'(goto|goto_w|if_acmpeq|if_acmpne|if_icmpeq|if_icmpge|if_icmpgt|'
- r'if_icmple|if_icmplt|if_icmpne|ifeq|ifge|ifgt|ifle|iflt|ifne|'
- r'ifnonnull|ifnull|jsr|jsr_w)%s' % _break, Keyword.Reserved,
- 'label'),
+ (words((
+ 'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
+ 'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
+ 'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
+ 'ifnull', 'jsr', 'jsr_w'), suffix=_break),
+ Keyword.Reserved, 'label'),
(r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
'descriptor/convert-dots'),
(r'tableswitch%s' % _break, Keyword.Reserved, 'table')
diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py
new file mode 100644
index 00000000..a0a08efe
--- /dev/null
+++ b/pygments/lexers/lisp.py
@@ -0,0 +1,1480 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.lisp
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Lispy languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, words, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal, Error
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
+ 'NewLispLexer']
+
+
+class SchemeLexer(RegexLexer):
+ """
+ A Scheme lexer, parsing a stream and outputting the tokens
+ needed to highlight Scheme code.
+ This lexer could most probably be subclassed fairly easily to parse
+ other Lisp dialects such as Common Lisp, Emacs Lisp or AutoLisp.
+
+ This lexer is checked against pastes from the Lisp pastebin
+ at http://paste.lisp.org/ to cover as much syntax as possible.
+
+ It supports the full Scheme syntax as defined in R5RS.
+
+ .. versionadded:: 0.6
+ """
+ name = 'Scheme'
+ aliases = ['scheme', 'scm']
+ filenames = ['*.scm', '*.ss']
+ mimetypes = ['text/x-scheme', 'application/x-scheme']
+
+ # list of known keywords and builtins taken from vim 6.4 scheme.vim
+ # syntax file.
+ keywords = (
+ 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
+ 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
+ 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
+ 'let-syntax', 'letrec-syntax', 'syntax-rules'
+ )
+ builtins = (
+ '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
+ 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
+ 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
+ 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
+ 'cadr', 'call-with-current-continuation', 'call-with-input-file',
+ 'call-with-output-file', 'call-with-values', 'call/cc', 'car',
+ 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+ 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
+ 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
+ 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
+ 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
+ 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
+ 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
+ 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
+ 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
+ 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
+ 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
+ 'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
+ 'integer?', 'interaction-environment', 'lcm', 'length', 'list',
+ 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
+ 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
+ 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
+ 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
+ 'null?', 'number->string', 'number?', 'numerator', 'odd?',
+ 'open-input-file', 'open-output-file', 'output-port?', 'pair?',
+ 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
+ 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
+ 'remainder', 'reverse', 'round', 'scheme-report-environment',
+ 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
+ 'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
+ 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
+ 'string-copy', 'string-fill!', 'string-length', 'string-ref',
+ 'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
+ 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
+ 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
+ 'vector', 'vector->list', 'vector-fill!', 'vector-length',
+ 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
+ 'with-output-to-file', 'write', 'write-char', 'zero?'
+ )
+
+ # valid names for identifiers
+ # well, names just can't consist entirely of numbers
+ # but this should be good enough for now
+ valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+'
+
+ tokens = {
+ 'root': [
+ # the comments
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+ # commented form (entire sexpr following)
+ (r'#;\s*\(', Comment, 'commented-form'),
+ # signifies that the program text that follows is written with the
+ # lexical and datum syntax described in r6rs
+ (r'#!r6rs', Comment),
+
+ # whitespaces - usually not relevant
+ (r'\s+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ # support for uncommon kinds of numbers -
+ # have to figure out what the characters mean
+ # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char),
+
+ # constants
+ (r'(#t|#f)', Name.Constant),
+
+ # special operators
+ (r"('|#|`|,@|,|\.)", Operator),
+
+ # highlight the keywords
+ ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
+ Keyword),
+
+ # first variable in a quoted string like
+ # '(this is syntactic sugar)
+ (r"(?<='\()" + valid_name, Name.Variable),
+ (r"(?<=#\()" + valid_name, Name.Variable),
+
+ # highlight the builtins
+ ("(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
+ Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+ (r'(\[|\])', Punctuation),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment, '#push'),
+ (r'\)', Comment, '#pop'),
+ (r'[^()]+', Comment),
+ ],
+ }
+
+
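In the SchemeLexer root state above, the same valid_name pattern ends up tagged several different ways: keywords and builtins through the joined alternations, anything directly after an opening parenthesis as Name.Function via the (?<=\() lookbehind, and the rest as Name.Variable. A quick sketch (made-up snippet, standard Pygments API):

    from pygments.lexers import SchemeLexer

    for tok, value in SchemeLexer().get_tokens('(define (square x) (* x x))'):
        print(tok, repr(value))
    # 'define' should come out as Keyword, 'square' as Name.Function,
    # '*' as Name.Builtin and the plain 'x' occurrences as Name.Variable.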
+class CommonLispLexer(RegexLexer):
+ """
+ A Common Lisp lexer.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Common Lisp'
+ aliases = ['common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp']
+ filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too
+ mimetypes = ['text/x-common-lisp']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ # couple of useful regexes
+
+ # characters that are not macro-characters and can be used to begin a symbol
+ nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
+ constituent = nonmacro + '|[#.:]'
+ terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters
+
+ # symbol token, reverse-engineered from hyperspec
+ # Take a deep breath...
+ symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
+
+ def __init__(self, **options):
+ from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
+ SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
+ BUILTIN_TYPES, BUILTIN_CLASSES
+ self.builtin_function = BUILTIN_FUNCTIONS
+ self.special_forms = SPECIAL_FORMS
+ self.macros = MACROS
+ self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
+ self.declarations = DECLARATIONS
+ self.builtin_types = BUILTIN_TYPES
+ self.builtin_classes = BUILTIN_CLASSES
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Variable:
+ if value in self.builtin_function:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.special_forms:
+ yield index, Keyword, value
+ continue
+ if value in self.macros:
+ yield index, Name.Builtin, value
+ continue
+ if value in self.lambda_list_keywords:
+ yield index, Keyword, value
+ continue
+ if value in self.declarations:
+ yield index, Keyword, value
+ continue
+ if value in self.builtin_types:
+ yield index, Keyword.Type, value
+ continue
+ if value in self.builtin_classes:
+ yield index, Name.Class, value
+ continue
+ yield index, token, value
+
+ tokens = {
+ 'root': [
+ ('', Text, 'body'),
+ ],
+ 'multiline-comment': [
+ (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19)
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^|#]+', Comment.Multiline),
+ (r'[|#]', Comment.Multiline),
+ ],
+ 'commented-form': [
+ (r'\(', Comment.Preproc, '#push'),
+ (r'\)', Comment.Preproc, '#pop'),
+ (r'[^()]+', Comment.Preproc),
+ ],
+ 'body': [
+ # whitespace
+ (r'\s+', Text),
+
+ # single-line comment
+ (r';.*$', Comment.Single),
+
+ # multi-line comment
+ (r'#\|', Comment.Multiline, 'multiline-comment'),
+
+ # encoding comment (?)
+ (r'#\d*Y.*$', Comment.Special),
+
+ # strings and characters
+ (r'"(\\.|\\\n|[^"\\])*"', String),
+ # quoting
+ (r":" + symbol, String.Symbol),
+ (r"::" + symbol, String.Symbol),
+ (r":#" + symbol, String.Symbol),
+ (r"'" + symbol, String.Symbol),
+ (r"'", Operator),
+ (r"`", Operator),
+
+ # decimal numbers
+ (r'[-+]?\d+\.?' + terminated, Number.Integer),
+ (r'[-+]?\d+/\d+' + terminated, Number),
+ (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)'
+ + terminated, Number.Float),
+
+ # sharpsign strings and characters
+ (r"#\\." + terminated, String.Char),
+ (r"#\\" + symbol, String.Char),
+
+ # vector
+ (r'#\(', Operator, 'body'),
+
+ # bitstring
+ (r'#\d*\*[01]*', Literal.Other),
+
+ # uninterned symbol
+ (r'#:' + symbol, String.Symbol),
+
+ # read-time and load-time evaluation
+ (r'#[.,]', Operator),
+
+ # function shorthand
+ (r'#\'', Name.Function),
+
+ # binary rational
+ (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin),
+
+ # octal rational
+ (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct),
+
+ # hex rational
+ (r'#[xX][+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
+
+ # radix rational
+ (r'#\d+[rR][+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
+
+ # complex
+ (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'),
+
+ # array
+ (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # structure
+ (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'),
+
+ # path
+ (r'#[pP]?"(\\.|[^"])*"', Literal.Other),
+
+ # reference
+ (r'#\d+=', Operator),
+ (r'#\d+#', Operator),
+
+ # read-time comment
+ (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'),
+
+ # read-time conditional
+ (r'#[+-]', Operator),
+
+ # special operators that should have been parsed already
+ (r'(,@|,|\.)', Operator),
+
+ # special constants
+ (r'(t|nil)' + terminated, Name.Constant),
+
+ # functions and variables
+ (r'\*' + symbol + '\*', Name.Variable.Global),
+ (symbol, Name.Variable),
+
+ # parentheses
+ (r'\(', Punctuation, 'body'),
+ (r'\)', Punctuation, '#pop'),
+ ],
+ }
+
+
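The get_tokens_unprocessed() override above is a post-processing pass: the regex states tag every symbol as Name.Variable, and the override then re-tags it according to membership in the sets imported from _cl_builtins. A rough sketch of the effect (made-up snippet; 'defun' is assumed to be listed in MACROS):

    from pygments.lexers import CommonLispLexer

    code = '(defun square (x) (* x x))'
    for _, tok, value in CommonLispLexer().get_tokens_unprocessed(code):
        print(tok, repr(value))
    # 'defun' starts out as Name.Variable in the 'body' state and should be
    # re-emitted as Name.Builtin because it appears in MACROS.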
+class HyLexer(RegexLexer):
+ """
+ Lexer for `Hy <http://hylang.org/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Hy'
+ aliases = ['hylang']
+ filenames = ['*.hy']
+ mimetypes = ['text/x-hy', 'application/x-hy']
+
+ special_forms = (
+ 'cond', 'for', '->', '->>', 'car',
+ 'cdr', 'first', 'rest', 'let', 'when', 'unless',
+ 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
+ ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
+ 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
+ 'foreach', 'while',
+ 'eval-and-compile', 'eval-when-compile'
+ )
+
+ declarations = (
+ 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
+ )
+
+ hy_builtins = ()
+
+ hy_core = (
+ 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
+ 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
+ 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
+ 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
+ )
+
+ builtins = hy_builtins + hy_core
+
+ # valid names for identifiers
+ # well, names just can't consist entirely of numbers
+ # but this should be good enough for now
+ valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+'
+
+ def _multi_escape(entries):
+ return words(entries, suffix=' ')
+
+ tokens = {
+ 'root': [
+ # the comments - always starting with semicolon
+ # and going to the end of the line
+ (r';.*$', Comment.Single),
+
+ # whitespaces - usually not relevant
+ (r'[,\s]+', Text),
+
+ # numbers
+ (r'-?\d+\.\d+', Number.Float),
+ (r'-?\d+', Number.Integer),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'" + valid_name, String.Symbol),
+ (r"\\(.|[a-z]+)", String.Char),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+
+ # keywords
+ (r'::?' + valid_name, String.Symbol),
+
+ # special operators
+ (r'~@|[`\'#^~&@]', Operator),
+
+ include('py-keywords'),
+ include('py-builtins'),
+
+ # highlight the special forms
+ (_multi_escape(special_forms), Keyword),
+
+ # Technically, only the special forms are 'keywords'. The problem
+ # is that only treating them as keywords means that things like
+ # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
+ # and weird for most styles. So, as a compromise we're going to
+ # highlight them as Keyword.Declarations.
+ (_multi_escape(declarations), Keyword.Declaration),
+
+ # highlight the builtins
+ (_multi_escape(builtins), Name.Builtin),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Function),
+
+ # find the remaining variables
+ (valid_name, Name.Variable),
+
+ # Hy accepts vector notation
+ (r'(\[|\])', Punctuation),
+
+ # Hy accepts map notation
+ (r'(\{|\})', Punctuation),
+
+ # the famous parentheses!
+ (r'(\(|\))', Punctuation),
+
+ ],
+ 'py-keywords': PythonLexer.tokens['keywords'],
+ 'py-builtins': PythonLexer.tokens['builtins'],
+ }
+
+ def analyse_text(text):
+ if '(import ' in text or '(defn ' in text:
+ return 0.9
+
+
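The 'py-keywords' and 'py-builtins' entries above are not new rule lists: they point straight at PythonLexer.tokens, and the include() calls in 'root' splice them in, so Hy inherits Python's keyword and builtin highlighting for free. A small sketch (made-up snippet, standard Pygments API):

    from pygments.lexers import HyLexer

    for tok, value in HyLexer().get_tokens('(defn add [a b] (+ a b))'):
        print(tok, repr(value))
    # 'defn' should come out as Keyword.Declaration, '+' as Name.Function
    # (it follows an opening paren), and the bare symbols as Name.Variable.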
+class RacketLexer(RegexLexer):
+ """
+ Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
+ known as PLT Scheme).
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Racket'
+ aliases = ['racket', 'rkt']
+ filenames = ['*.rkt', '*.rktd', '*.rktl']
+ mimetypes = ['text/x-racket', 'application/x-racket']
+
+ # Generated by example.rkt
+ _keywords = (
+ '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin',
+ '#%plain-app', '#%plain-lambda', '#%plain-module-begin',
+ '#%printing-module-begin', '#%provide', '#%require',
+ '#%stratified-body', '#%top', '#%top-interaction',
+ '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i',
+ '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract',
+ 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*',
+ 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin',
+ 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m',
+ 'case-lambda', 'class', 'class*', 'class-field-accessor',
+ 'class-field-mutator', 'class/c', 'class/derived', 'combine-in',
+ 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer',
+ 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted',
+ 'define', 'define-compound-unit', 'define-compound-unit/infer',
+ 'define-contract-struct', 'define-custom-hash-types',
+ 'define-custom-set-types', 'define-for-syntax',
+ 'define-local-member-name', 'define-logger', 'define-match-expander',
+ 'define-member-name', 'define-module-boundary-contract',
+ 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax',
+ 'define-serializable-class', 'define-serializable-class*',
+ 'define-signature', 'define-signature-form', 'define-struct',
+ 'define-struct/contract', 'define-struct/derived', 'define-syntax',
+ 'define-syntax-rule', 'define-syntaxes', 'define-unit',
+ 'define-unit-binding', 'define-unit-from-context',
+ 'define-unit/contract', 'define-unit/new-import-export',
+ 'define-unit/s', 'define-values', 'define-values-for-export',
+ 'define-values-for-syntax', 'define-values/invoke-unit',
+ 'define-values/invoke-unit/infer', 'define/augment',
+ 'define/augment-final', 'define/augride', 'define/contract',
+ 'define/final-prop', 'define/match', 'define/overment',
+ 'define/override', 'define/override-final', 'define/private',
+ 'define/public', 'define/public-final', 'define/pubment',
+ 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name',
+ 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except',
+ 'except-in', 'except-out', 'export', 'extends', 'failure-cont',
+ 'false', 'false/c', 'field', 'field-bound?', 'file',
+ 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and',
+ 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash',
+ 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists',
+ 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv',
+ 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv',
+ 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq',
+ 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax',
+ 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived',
+ 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list',
+ 'for/lists', 'for/mutable-set', 'for/mutable-seteq',
+ 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq',
+ 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set',
+ 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict',
+ 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field',
+ 'if', 'implies', 'import', 'include', 'include-at/relative-to',
+ 'include-at/relative-to/reader', 'include/reader', 'inherit',
+ 'inherit-field', 'inherit/inner', 'inherit/super', 'init',
+ 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect',
+ 'instantiate', 'interface', 'interface*', 'invoke-unit',
+ 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values',
+ 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec',
+ 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values',
+ 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug',
+ 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*',
+ 'match*/derived', 'match-define', 'match-define-values',
+ 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let',
+ 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec',
+ 'match/derived', 'match/values', 'member-name-key', 'method-contract?',
+ 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor',
+ 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in',
+ 'open', 'opt/c', 'or', 'overment', 'overment*', 'override',
+ 'override*', 'override-final', 'override-final*', 'parameterize',
+ 'parameterize*', 'parameterize-break', 'parametric->/c', 'place',
+ 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private',
+ 'private*', 'prompt-tag/c', 'protect-out', 'provide',
+ 'provide-signature-elements', 'provide/contract', 'public', 'public*',
+ 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote',
+ 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax',
+ 'quote-syntax/prune', 'recontract-out', 'recursive-contract',
+ 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out',
+ 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic',
+ 'send/apply', 'send/keyword-apply', 'set!', 'set!-values',
+ 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*',
+ 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
+ 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
+ 'super-make-object', 'super-new', 'syntax', 'syntax-case',
+ 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
+ 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
+ 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
+ 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
+ 'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
+ 'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
+ 'with-syntax', u'λ'
+ )
+
+ # Generated by example.rkt
+ _builtins = (
+ '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
+ '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
+ 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
+ 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
+ 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
+ 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
+ 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
+ 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
+ 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
+ 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
+ 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
+ 'blame-add-context', 'blame-add-missing-party',
+ 'blame-add-nth-arg-context', 'blame-add-or-context',
+ 'blame-add-range-context', 'blame-add-unknown-context',
+ 'blame-context', 'blame-contract', 'blame-fmt->-string',
+ 'blame-negative', 'blame-original?', 'blame-positive',
+ 'blame-replace-negative', 'blame-source', 'blame-swap',
+ 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
+ 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
+ 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
+ 'build-chaperone-contract-property', 'build-compound-type-name',
+ 'build-contract-property', 'build-flat-contract-property',
+ 'build-list', 'build-path', 'build-path/convention-type',
+ 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
+ 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
+ 'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
+ 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
+ 'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
+ 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
+ 'bytes-converter?', 'bytes-copy', 'bytes-copy!',
+ 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
+ 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
+ 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
+ 'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
+ 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
+ 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
+ 'call-in-nested-thread', 'call-with-atomic-output-file',
+ 'call-with-break-parameterization',
+ 'call-with-composable-continuation', 'call-with-continuation-barrier',
+ 'call-with-continuation-prompt', 'call-with-current-continuation',
+ 'call-with-default-reading-parameterization',
+ 'call-with-escape-continuation', 'call-with-exception-handler',
+ 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
+ 'call-with-input-bytes', 'call-with-input-file',
+ 'call-with-input-file*', 'call-with-input-string',
+ 'call-with-output-bytes', 'call-with-output-file',
+ 'call-with-output-file*', 'call-with-output-string',
+ 'call-with-parameterization', 'call-with-semaphore',
+ 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
+ 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
+ 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
+ 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
+ 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
+ 'channel?', 'chaperone-box', 'chaperone-channel',
+ 'chaperone-continuation-mark-key', 'chaperone-contract-property?',
+ 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
+ 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
+ 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
+ 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
+ 'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
+ 'char-downcase', 'char-foldcase', 'char-general-category',
+ 'char-graphic?', 'char-iso-control?', 'char-lower-case?',
+ 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
+ 'char-title-case?', 'char-titlecase', 'char-upcase',
+ 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
+ 'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
+ 'check-duplicate-identifier', 'checked-procedure-check-and-extract',
+ 'choice-evt', 'class->interface', 'class-info', 'class?',
+ 'cleanse-path', 'close-input-port', 'close-output-port',
+ 'coerce-chaperone-contract', 'coerce-chaperone-contracts',
+ 'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
+ 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
+ 'collection-file-path', 'collection-path', 'compile',
+ 'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
+ 'compile-enforce-module-constants', 'compile-syntax',
+ 'compiled-expression?', 'compiled-module-expression?',
+ 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
+ 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
+ 'continuation-mark-key?', 'continuation-mark-set->context',
+ 'continuation-mark-set->list', 'continuation-mark-set->list*',
+ 'continuation-mark-set-first', 'continuation-mark-set?',
+ 'continuation-marks', 'continuation-prompt-available?',
+ 'continuation-prompt-tag?', 'continuation?',
+ 'contract-continuation-mark-key', 'contract-first-order',
+ 'contract-first-order-passes?', 'contract-name', 'contract-proc',
+ 'contract-projection', 'contract-property?',
+ 'contract-random-generate', 'contract-stronger?',
+ 'contract-struct-exercise', 'contract-struct-generate',
+ 'contract-val-first-projection', 'contract?', 'convert-stream',
+ 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
+ 'count', 'current-blame-format', 'current-break-parameterization',
+ 'current-code-inspector', 'current-command-line-arguments',
+ 'current-compile', 'current-compiled-file-roots',
+ 'current-continuation-marks', 'current-contract-region',
+ 'current-custodian', 'current-directory', 'current-directory-for-user',
+ 'current-drive', 'current-environment-variables', 'current-error-port',
+ 'current-eval', 'current-evt-pseudo-random-generator',
+ 'current-future', 'current-gc-milliseconds',
+ 'current-get-interaction-input-port', 'current-inexact-milliseconds',
+ 'current-input-port', 'current-inspector',
+ 'current-library-collection-links', 'current-library-collection-paths',
+ 'current-load', 'current-load-extension',
+ 'current-load-relative-directory', 'current-load/use-compiled',
+ 'current-locale', 'current-logger', 'current-memory-use',
+ 'current-milliseconds', 'current-module-declare-name',
+ 'current-module-declare-source', 'current-module-name-resolver',
+ 'current-module-path-for-load', 'current-namespace',
+ 'current-output-port', 'current-parameterization',
+ 'current-preserved-thread-cell-values', 'current-print',
+ 'current-process-milliseconds', 'current-prompt-read',
+ 'current-pseudo-random-generator', 'current-read-interaction',
+ 'current-reader-guard', 'current-readtable', 'current-seconds',
+ 'current-security-guard', 'current-subprocess-custodian-mode',
+ 'current-thread', 'current-thread-group',
+ 'current-thread-initial-stack-size',
+ 'current-write-relative-directory', 'curry', 'curryr',
+ 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory',
+ 'custodian-managed-list', 'custodian-memory-accounting-available?',
+ 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?',
+ 'custom-print-quotable-accessor', 'custom-print-quotable?',
+ 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?',
+ 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?',
+ 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month',
+ 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year',
+ 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal',
+ 'default-continuation-prompt-tag', 'degrees->radians',
+ 'delete-directory', 'delete-directory/files', 'delete-file',
+ 'denominator', 'dict->list', 'dict-can-functional-set?',
+ 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy',
+ 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?',
+ 'dict-implements/c', 'dict-implements?', 'dict-iter-contract',
+ 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next',
+ 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map',
+ 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove',
+ 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!',
+ 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values',
+ 'dict?', 'directory-exists?', 'directory-list', 'display',
+ 'display-lines', 'display-lines-to-file', 'display-to-file',
+ 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf',
+ 'dropf-right', 'dump-memory-stats', 'dup-input-port',
+ 'dup-output-port', 'dynamic-get-field', 'dynamic-place',
+ 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax',
+ 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth',
+ 'empty', 'empty-sequence', 'empty-stream', 'empty?',
+ 'environment-variables-copy', 'environment-variables-names',
+ 'environment-variables-ref', 'environment-variables-set!',
+ 'environment-variables?', 'eof', 'eof-evt', 'eof-object?',
+ 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val',
+ 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val',
+ 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code',
+ 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error',
+ 'error-display-handler', 'error-escape-handler',
+ 'error-print-context-length', 'error-print-source-location',
+ 'error-print-width', 'error-value->string-handler', 'eval',
+ 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?',
+ 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?',
+ 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round',
+ 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit',
+ 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message',
+ 'exn:break', 'exn:break-continuation', 'exn:break:hang-up',
+ 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?',
+ 'exn:break?', 'exn:fail', 'exn:fail:contract',
+ 'exn:fail:contract:arity', 'exn:fail:contract:arity?',
+ 'exn:fail:contract:blame', 'exn:fail:contract:blame-object',
+ 'exn:fail:contract:blame?', 'exn:fail:contract:continuation',
+ 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero',
+ 'exn:fail:contract:divide-by-zero?',
+ 'exn:fail:contract:non-fixnum-result',
+ 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable',
+ 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?',
+ 'exn:fail:contract?', 'exn:fail:filesystem',
+ 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno',
+ 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists',
+ 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module',
+ 'exn:fail:filesystem:missing-module-path',
+ 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version',
+ 'exn:fail:filesystem:version?', 'exn:fail:filesystem?',
+ 'exn:fail:network', 'exn:fail:network:errno',
+ 'exn:fail:network:errno-errno', 'exn:fail:network:errno?',
+ 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?',
+ 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read',
+ 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?',
+ 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?',
+ 'exn:fail:syntax', 'exn:fail:syntax-exprs',
+ 'exn:fail:syntax:missing-module',
+ 'exn:fail:syntax:missing-module-path',
+ 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound',
+ 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported',
+ 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?',
+ 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor',
+ 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?',
+ 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once',
+ 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path',
+ 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names',
+ 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines',
+ 'file->list', 'file->string', 'file->value', 'file-exists?',
+ 'file-name-from-path', 'file-or-directory-identity',
+ 'file-or-directory-modify-seconds', 'file-or-directory-permissions',
+ 'file-position', 'file-position*', 'file-size',
+ 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate',
+ 'filename-extension', 'filesystem-change-evt',
+ 'filesystem-change-evt-cancel', 'filesystem-change-evt?',
+ 'filesystem-root-list', 'filter', 'filter-map', 'filter-not',
+ 'filter-read-input-port', 'find-executable-path', 'find-files',
+ 'find-library-collection-links', 'find-library-collection-paths',
+ 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?',
+ 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?',
+ 'flat-contract?', 'flat-named-contract', 'flatten',
+ 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output',
+ 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format',
+ 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?',
+ 'free-template-identifier=?', 'free-transformer-identifier=?',
+ 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?',
+ 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?',
+ 'futures-enabled?', 'gcd', 'generate-member-key',
+ 'generate-temporaries', 'generic-set?', 'generic?', 'gensym',
+ 'get-output-bytes', 'get-output-string', 'get-preference',
+ 'get/build-val-first-projection', 'getenv',
+ 'global-port-print-handler', 'group-execute-bit', 'group-read-bit',
+ 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?',
+ 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!',
+ 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?',
+ 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each',
+ 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key',
+ 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map',
+ 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove',
+ 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!',
+ 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c',
+ 'hash?', 'hasheq', 'hasheqv', 'identifier-binding',
+ 'identifier-binding-symbol', 'identifier-label-binding',
+ 'identifier-prune-lexical-context',
+ 'identifier-prune-to-source-module',
+ 'identifier-remove-from-definition-context',
+ 'identifier-template-binding', 'identifier-transformer-binding',
+ 'identifier?', 'identity', 'imag-part', 'immutable?',
+ 'impersonate-box', 'impersonate-channel',
+ 'impersonate-continuation-mark-key', 'impersonate-hash',
+ 'impersonate-procedure', 'impersonate-prompt-tag',
+ 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?',
+ 'impersonator-ephemeron', 'impersonator-of?',
+ 'impersonator-prop:application-mark', 'impersonator-prop:contracted',
+ 'impersonator-property-accessor-procedure?', 'impersonator-property?',
+ 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes',
+ 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys',
+ 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash',
+ 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed',
+ 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list',
+ 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port',
+ 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream',
+ 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence',
+ 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?',
+ 'infinite?', 'input-port-append', 'input-port?', 'inspector?',
+ 'instanceof/c', 'integer->char', 'integer->integer-bytes',
+ 'integer-bytes->integer', 'integer-in', 'integer-length',
+ 'integer-sqrt', 'integer-sqrt/remainder', 'integer?',
+ 'interface->method-names', 'interface-extension?', 'interface?',
+ 'internal-definition-context-seal', 'internal-definition-context?',
+ 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keyword<?',
+ 'keyword?', 'keywords-match', 'kill-thread', 'last', 'last-pair',
+ 'lcm', 'length', 'liberal-define-context?', 'link-exists?', 'list',
+ 'list*', 'list->bytes', 'list->mutable-set', 'list->mutable-seteq',
+ 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv',
+ 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq',
+ 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?',
+ 'listof', 'load', 'load-extension', 'load-on-demand-enabled',
+ 'load-relative', 'load-relative-extension', 'load/cd',
+ 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts',
+ 'local-transformer-expand', 'local-transformer-expand/capture-lifts',
+ 'locale-string-encoding', 'log', 'log-level?', 'log-max-level',
+ 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude',
+ 'make-arity-at-least', 'make-base-empty-namespace',
+ 'make-base-namespace', 'make-bytes', 'make-channel',
+ 'make-chaperone-contract', 'make-continuation-mark-key',
+ 'make-continuation-prompt-tag', 'make-contract', 'make-custodian',
+ 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types',
+ 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*',
+ 'make-derived-parameter', 'make-directory', 'make-directory*',
+ 'make-do-sequence', 'make-empty-namespace',
+ 'make-environment-variables', 'make-ephemeron', 'make-exn',
+ 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate',
+ 'make-exn:fail', 'make-exn:fail:contract',
+ 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame',
+ 'make-exn:fail:contract:continuation',
+ 'make-exn:fail:contract:divide-by-zero',
+ 'make-exn:fail:contract:non-fixnum-result',
+ 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem',
+ 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists',
+ 'make-exn:fail:filesystem:missing-module',
+ 'make-exn:fail:filesystem:version', 'make-exn:fail:network',
+ 'make-exn:fail:network:errno', 'make-exn:fail:object',
+ 'make-exn:fail:out-of-memory', 'make-exn:fail:read',
+ 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char',
+ 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module',
+ 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported',
+ 'make-exn:fail:user', 'make-file-or-directory-link',
+ 'make-flat-contract', 'make-fsemaphore', 'make-generic',
+ 'make-handle-get-preference-locked', 'make-hash',
+ 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder',
+ 'make-hasheqv', 'make-hasheqv-placeholder',
+ 'make-immutable-custom-hash', 'make-immutable-hash',
+ 'make-immutable-hasheq', 'make-immutable-hasheqv',
+ 'make-impersonator-property', 'make-input-port',
+ 'make-input-port/read-to-peek', 'make-inspector',
+ 'make-keyword-procedure', 'make-known-char-range-list',
+ 'make-limited-input-port', 'make-list', 'make-lock-file-name',
+ 'make-log-receiver', 'make-logger', 'make-mixin-contract',
+ 'make-mutable-custom-set', 'make-none/c', 'make-object',
+ 'make-output-port', 'make-parameter', 'make-phantom-bytes',
+ 'make-pipe', 'make-pipe-with-specials', 'make-placeholder',
+ 'make-polar', 'make-prefab-struct', 'make-primitive-class',
+ 'make-proj-contract', 'make-pseudo-random-generator',
+ 'make-reader-graph', 'make-readtable', 'make-rectangular',
+ 'make-rename-transformer', 'make-resolved-module-path',
+ 'make-security-guard', 'make-semaphore', 'make-set!-transformer',
+ 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment',
+ 'make-srcloc', 'make-string', 'make-struct-field-accessor',
+ 'make-struct-field-mutator', 'make-struct-type',
+ 'make-struct-type-property', 'make-syntax-delta-introducer',
+ 'make-syntax-introducer', 'make-temporary-file',
+ 'make-tentative-pretty-print-output-port', 'make-thread-cell',
+ 'make-thread-group', 'make-vector', 'make-weak-box',
+ 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash',
+ 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map',
+ 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr',
+ 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?',
+ 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input',
+ 'method-in-interface?', 'min', 'mixin-contract', 'module->exports',
+ 'module->imports', 'module->language-info', 'module->namespace',
+ 'module-compiled-cross-phase-persistent?', 'module-compiled-exports',
+ 'module-compiled-imports', 'module-compiled-language-info',
+ 'module-compiled-name', 'module-compiled-submodules',
+ 'module-declared?', 'module-path-index-join',
+ 'module-path-index-resolve', 'module-path-index-split',
+ 'module-path-index-submodule', 'module-path-index?', 'module-path?',
+ 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?',
+ 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th',
+ 'nack-guard-evt', 'namespace-anchor->empty-namespace',
+ 'namespace-anchor->namespace', 'namespace-anchor?',
+ 'namespace-attach-module', 'namespace-attach-module-declaration',
+ 'namespace-base-phase', 'namespace-mapped-symbols',
+ 'namespace-module-identifier', 'namespace-module-registry',
+ 'namespace-require', 'namespace-require/constant',
+ 'namespace-require/copy', 'namespace-require/expansion-time',
+ 'namespace-set-variable-value!', 'namespace-symbol->identifier',
+ 'namespace-syntax-introduce', 'namespace-undefine-variable!',
+ 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?',
+ 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt',
+ u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof',
+ 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path',
+ 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string',
+ 'number?', 'numerator', 'object%', 'object->vector', 'object-info',
+ 'object-interface', 'object-method-arity-includes?', 'object-name',
+ 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes',
+ 'open-input-file', 'open-input-output-file', 'open-input-string',
+ 'open-output-bytes', 'open-output-file', 'open-output-nowhere',
+ 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap',
+ 'other-execute-bit', 'other-read-bit', 'other-write-bit',
+ 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c',
+ 'parameter?', 'parameterization?', 'parse-command-line', 'partition',
+ 'path->bytes', 'path->complete-path', 'path->directory-path',
+ 'path->string', 'path-add-suffix', 'path-convention-type',
+ 'path-element->bytes', 'path-element->string', 'path-element?',
+ 'path-for-some-system?', 'path-list-string->path-list', 'path-only',
+ 'path-replace-suffix', 'path-string?', 'path<?', 'path?',
+ 'pathlist-closure', 'peek-byte', 'peek-byte-or-special', 'peek-bytes',
+ 'peek-bytes!', 'peek-bytes!-evt', 'peek-bytes-avail!',
+ 'peek-bytes-avail!*', 'peek-bytes-avail!-evt',
+ 'peek-bytes-avail!/enable-break', 'peek-bytes-evt', 'peek-char',
+ 'peek-char-or-special', 'peek-string', 'peek-string!',
+ 'peek-string!-evt', 'peek-string-evt', 'peeking-input-port',
+ 'permutations', 'phantom-bytes?', 'pi', 'pi.f', 'pipe-content-length',
+ 'place-break', 'place-channel', 'place-channel-get',
+ 'place-channel-put', 'place-channel-put/get', 'place-channel?',
+ 'place-dead-evt', 'place-enabled?', 'place-kill', 'place-location?',
+ 'place-message-allowed?', 'place-sleep', 'place-wait', 'place?',
+ 'placeholder-get', 'placeholder-set!', 'placeholder?',
+ 'poll-guard-evt', 'port->bytes', 'port->bytes-lines', 'port->lines',
+ 'port->list', 'port->string', 'port-closed-evt', 'port-closed?',
+ 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled',
+ 'port-counts-lines?', 'port-display-handler', 'port-file-identity',
+ 'port-file-unlock', 'port-next-location', 'port-print-handler',
+ 'port-progress-evt', 'port-provides-progress-evts?',
+ 'port-read-handler', 'port-try-file-lock?', 'port-write-handler',
+ 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?',
+ 'predicate/c', 'prefab-key->struct-type', 'prefab-key?',
+ 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp',
+ 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print',
+ 'pretty-print-.-symbol-without-bars',
+ 'pretty-print-abbreviate-read-macros', 'pretty-print-columns',
+ 'pretty-print-current-style-table', 'pretty-print-depth',
+ 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table',
+ 'pretty-print-handler', 'pretty-print-newline',
+ 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook',
+ 'pretty-print-print-hook', 'pretty-print-print-line',
+ 'pretty-print-remap-stylable', 'pretty-print-show-inexactness',
+ 'pretty-print-size-hook', 'pretty-print-style-table?',
+ 'pretty-printing', 'pretty-write', 'primitive-closure?',
+ 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression',
+ 'print-boolean-long-form', 'print-box', 'print-graph',
+ 'print-hash-table', 'print-mpair-curly-braces',
+ 'print-pair-curly-braces', 'print-reader-abbreviations',
+ 'print-struct', 'print-syntax-width', 'print-unreadable',
+ 'print-vector-length', 'printable/c', 'printable<%>', 'printf',
+ 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c',
+ 'procedure-arity-includes?', 'procedure-arity?',
+ 'procedure-closure-contents-eq?', 'procedure-extract-target',
+ 'procedure-keywords', 'procedure-reduce-arity',
+ 'procedure-reduce-keyword-arity', 'procedure-rename',
+ 'procedure-struct-type?', 'procedure?', 'process', 'process*',
+ 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?',
+ 'promise-forced?', 'promise-running?', 'promise/c', 'promise?',
+ 'prop:arity-string', 'prop:chaperone-contract',
+ 'prop:checked-procedure', 'prop:contract', 'prop:contracted',
+ 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict',
+ 'prop:dict/contract', 'prop:equal+hash', 'prop:evt',
+ 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract',
+ 'prop:impersonator-of', 'prop:input-port',
+ 'prop:liberal-define-context', 'prop:opt-chaperone-contract',
+ 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?',
+ 'prop:output-port', 'prop:place-location', 'prop:procedure',
+ 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer',
+ 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector',
+ 'pseudo-random-generator-vector?', 'pseudo-random-generator?',
+ 'put-preferences', 'putenv', 'quotient', 'quotient/remainder',
+ 'radians->degrees', 'raise', 'raise-argument-error',
+ 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error',
+ 'raise-contract-error', 'raise-mismatch-error',
+ 'raise-not-cons-blame-error', 'raise-range-error',
+ 'raise-result-error', 'raise-syntax-error', 'raise-type-error',
+ 'raise-user-error', 'random', 'random-seed', 'range', 'rational?',
+ 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box',
+ 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph',
+ 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote',
+ 'read-accept-reader', 'read-byte', 'read-byte-or-special',
+ 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!',
+ 'read-bytes-avail!*', 'read-bytes-avail!-evt',
+ 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line',
+ 'read-bytes-line-evt', 'read-case-sensitive', 'read-char',
+ 'read-char-or-special', 'read-curly-brace-as-paren',
+ 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language',
+ 'read-line', 'read-line-evt', 'read-on-demand-source',
+ 'read-square-bracket-as-paren', 'read-string', 'read-string!',
+ 'read-string!-evt', 'read-string-evt', 'read-syntax',
+ 'read-syntax/recursive', 'read/recursive', 'readtable-mapping',
+ 'readtable?', 'real->decimal-string', 'real->double-flonum',
+ 'real->floating-point-bytes', 'real->single-flonum', 'real-in',
+ 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port',
+ 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt',
+ 'regexp-match-exact?', 'regexp-match-peek',
+ 'regexp-match-peek-immediate', 'regexp-match-peek-positions',
+ 'regexp-match-peek-positions*',
+ 'regexp-match-peek-positions-immediate',
+ 'regexp-match-peek-positions-immediate/end',
+ 'regexp-match-peek-positions/end', 'regexp-match-positions',
+ 'regexp-match-positions*', 'regexp-match-positions/end',
+ 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind',
+ 'regexp-quote', 'regexp-replace', 'regexp-replace*',
+ 'regexp-replace-quote', 'regexp-replaces', 'regexp-split',
+ 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port',
+ 'relocate-output-port', 'remainder', 'remove', 'remove*',
+ 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*',
+ 'rename-file-or-directory', 'rename-transformer-target',
+ 'rename-transformer?', 'reroot-path', 'resolve-path',
+ 'resolved-module-path-name', 'resolved-module-path?', 'rest',
+ 'reverse', 'round', 'second', 'seconds->date', 'security-guard?',
+ 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post',
+ 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break',
+ 'semaphore?', 'sequence->list', 'sequence->stream',
+ 'sequence-add-between', 'sequence-andmap', 'sequence-append',
+ 'sequence-count', 'sequence-filter', 'sequence-fold',
+ 'sequence-for-each', 'sequence-generate', 'sequence-generate*',
+ 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref',
+ 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure',
+ 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!',
+ 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear',
+ 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?',
+ 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?',
+ 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!',
+ 'set-member?', 'set-mutable?', 'set-phantom-bytes!',
+ 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest',
+ 'set-subtract', 'set-subtract!', 'set-symmetric-difference',
+ 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?',
+ 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn',
+ 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle',
+ 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh',
+ 'sixth', 'skip-projection-wrapper?', 'sleep',
+ 'some-system-path->string', 'sort', 'special-comment-value',
+ 'special-comment?', 'special-filter-input-port', 'split-at',
+ 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr',
+ 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line',
+ 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?',
+ 'stop-after', 'stop-before', 'stream->list', 'stream-add-between',
+ 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?',
+ 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each',
+ 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref',
+ 'stream-rest', 'stream-tail', 'stream?', 'string',
+ 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8',
+ 'string->immutable-string', 'string->keyword', 'string->list',
+ 'string->number', 'string->path', 'string->path-element',
+ 'string->some-system-path', 'string->symbol',
+ 'string->uninterned-symbol', 'string->unreadable-symbol',
+ 'string-append', 'string-append*', 'string-ci<=?', 'string-ci<?',
+ 'string-ci=?', 'string-ci>=?', 'string-ci>?', 'string-copy',
+ 'string-copy!', 'string-downcase', 'string-environment-variable-name?',
+ 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c',
+ 'string-length', 'string-locale-ci<?', 'string-locale-ci=?',
+ 'string-locale-ci>?', 'string-locale-downcase', 'string-locale-upcase',
+ 'string-locale<?', 'string-locale=?', 'string-locale>?',
+ 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd',
+ 'string-normalize-nfkc', 'string-normalize-nfkd',
+ 'string-normalize-spaces', 'string-ref', 'string-replace',
+ 'string-set!', 'string-split', 'string-titlecase', 'string-trim',
+ 'string-upcase', 'string-utf-8-length', 'string<=?', 'string<?',
+ 'string=?', 'string>=?', 'string>?', 'string?', 'struct->vector',
+ 'struct-accessor-procedure?', 'struct-constructor-procedure?',
+ 'struct-info', 'struct-mutator-procedure?',
+ 'struct-predicate-procedure?', 'struct-type-info',
+ 'struct-type-make-constructor', 'struct-type-make-predicate',
+ 'struct-type-property-accessor-procedure?', 'struct-type-property/c',
+ 'struct-type-property?', 'struct-type?', 'struct:arity-at-least',
+ 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break',
+ 'struct:exn:break:hang-up', 'struct:exn:break:terminate',
+ 'struct:exn:fail', 'struct:exn:fail:contract',
+ 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame',
+ 'struct:exn:fail:contract:continuation',
+ 'struct:exn:fail:contract:divide-by-zero',
+ 'struct:exn:fail:contract:non-fixnum-result',
+ 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem',
+ 'struct:exn:fail:filesystem:errno',
+ 'struct:exn:fail:filesystem:exists',
+ 'struct:exn:fail:filesystem:missing-module',
+ 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network',
+ 'struct:exn:fail:network:errno', 'struct:exn:fail:object',
+ 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read',
+ 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char',
+ 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module',
+ 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported',
+ 'struct:exn:fail:user', 'struct:srcloc',
+ 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes',
+ 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled',
+ 'subprocess-kill', 'subprocess-pid', 'subprocess-status',
+ 'subprocess-wait', 'subprocess?', 'subset?', 'substring',
+ 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symbol<?',
+ 'symbol=?', 'symbol?', 'symbols', 'sync', 'sync/enable-break',
+ 'sync/timeout', 'sync/timeout/enable-break', 'syntax->datum',
+ 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm',
+ 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes',
+ 'syntax-local-certifier', 'syntax-local-context',
+ 'syntax-local-expand-expression', 'syntax-local-get-shadower',
+ 'syntax-local-introduce', 'syntax-local-lift-context',
+ 'syntax-local-lift-expression',
+ 'syntax-local-lift-module-end-declaration',
+ 'syntax-local-lift-provide', 'syntax-local-lift-require',
+ 'syntax-local-lift-values-expression',
+ 'syntax-local-make-definition-context',
+ 'syntax-local-make-delta-introducer',
+ 'syntax-local-module-defined-identifiers',
+ 'syntax-local-module-exports',
+ 'syntax-local-module-required-identifiers', 'syntax-local-name',
+ 'syntax-local-phase-level', 'syntax-local-submodules',
+ 'syntax-local-transforming-module-provides?', 'syntax-local-value',
+ 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position',
+ 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect',
+ 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level',
+ 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint',
+ 'syntax-tainted?', 'syntax-track-origin',
+ 'syntax-transforming-module-expression?', 'syntax-transforming?',
+ 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code',
+ 'system-big-endian?', 'system-idle-evt', 'system-language+country',
+ 'system-library-subpath', 'system-path-convention-type', 'system-type',
+ 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef',
+ 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept',
+ 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break',
+ 'tcp-addresses', 'tcp-close', 'tcp-connect',
+ 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?',
+ 'tentative-pretty-print-port-cancel',
+ 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?',
+ 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref',
+ 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?',
+ 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive',
+ 'thread-receive-evt', 'thread-resume', 'thread-resume-evt',
+ 'thread-rewind-receive', 'thread-running?', 'thread-send',
+ 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive',
+ 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply',
+ 'touch', 'transplant-input-port', 'transplant-output-port', 'true',
+ 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close',
+ 'udp-connect!', 'udp-connected?', 'udp-multicast-interface',
+ 'udp-multicast-join-group!', 'udp-multicast-leave-group!',
+ 'udp-multicast-loopback?', 'udp-multicast-set-interface!',
+ 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!',
+ 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!',
+ 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break',
+ 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt',
+ 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt',
+ 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox',
+ 'uncaught-exception-handler', 'unit?', 'unspecified-dom',
+ 'unsupplied-arg?', 'use-collection-link-paths',
+ 'use-compiled-file-paths', 'use-user-specific-search-paths',
+ 'user-execute-bit', 'user-read-bit', 'user-write-bit',
+ 'value-contract', 'values', 'variable-reference->empty-namespace',
+ 'variable-reference->module-base-phase',
+ 'variable-reference->module-declaration-inspector',
+ 'variable-reference->module-path-index',
+ 'variable-reference->module-source', 'variable-reference->namespace',
+ 'variable-reference->phase',
+ 'variable-reference->resolved-module-path',
+ 'variable-reference-constant?', 'variable-reference?', 'vector',
+ 'vector->immutable-vector', 'vector->list',
+ 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!',
+ 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin',
+ 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop',
+ 'vector-drop-right', 'vector-fill!', 'vector-filter',
+ 'vector-filter-not', 'vector-immutable', 'vector-immutable/c',
+ 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!',
+ 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref',
+ 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!',
+ 'vector-split-at', 'vector-split-at-right', 'vector-take',
+ 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version',
+ 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set',
+ 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?',
+ 'will-register', 'will-try-execute', 'with-input-from-bytes',
+ 'with-input-from-file', 'with-input-from-string',
+ 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string',
+ 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow',
+ 'wrapped-extra-arg-arrow-extra-neg-party-argument',
+ 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?',
+ 'writable<%>', 'write', 'write-byte', 'write-bytes',
+ 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt',
+ 'write-bytes-avail/enable-break', 'write-char', 'write-special',
+ 'write-special-avail*', 'write-special-evt', 'write-string',
+ 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r',
+ '~s', '~v'
+ )
+
+ _opening_parenthesis = r'[([{]'
+ _closing_parenthesis = r'[)\]}]'
+ _delimiters = r'()[\]{}",\'`;\s'
+ _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
+ _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
+ _exponent = r'(?:[defls][-+]?\d+)'
+ _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
+ _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
+ r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
+ _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
+ _exponent)
+ _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
+ _inexact_special = r'(?:(?:inf|nan)\.[0f])'
+ _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
+ _inexact_special)
+ _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
+
+ tokens = {
+ 'root': [
+ (_closing_parenthesis, Error),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'datum': [
+ (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment),
+ (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
+ (r'#\|', Comment.Multiline, 'block-comment'),
+
+ # Whitespaces
+ (r'(?u)\s+', Text),
+
+ # Numbers: Keep in mind Racket reader hash prefixes, which
+ # can denote the base or the type. These don't map neatly
+ # onto Pygments token types; some judgment calls here.
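+            # For example, #x1f is a hex integer, #b101 binary and #o17
+            # octal; #e/#i force an exact or inexact reading, and #d marks
+            # plain decimal notation.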
+
+ # #d or no prefix
+ (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
+ Number.Integer, '#pop'),
+ (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
+ (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
+ (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
+ (_exact_decimal_prefix, _inexact_normal_no_hashes,
+ _inexact_normal_no_hashes, _inexact_normal_no_hashes,
+ _delimiters), Number, '#pop'),
+
+ # Inexact without explicit #i
+ (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
+ (_inexact_real, _inexact_unsigned, _inexact_unsigned,
+ _inexact_real, _inexact_real, _delimiters), Number.Float,
+ '#pop'),
+
+ # The remaining extflonums
+ (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
+ (_inexact_simple, _delimiters), Number.Float, '#pop'),
+
+ # #b
+ (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
+
+ # #o
+ (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
+
+ # #x
+ (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
+
+ # #i is always inexact, i.e. float
+ (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
+
+ # Strings and characters
+ (r'#?"', String.Double, ('#pop', 'string')),
+ (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
+ (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
+ (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
+ (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
+
+ # Constants
+ (r'#(true|false|[tTfF])', Name.Constant, '#pop'),
+
+ # Keyword argument names (e.g. #:keyword)
+ (r'#:%s' % _symbol, Keyword.Declaration, '#pop'),
+
+ # Reader extensions
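+            # e.g. "#lang racket/base" or "#!r6rs"; "#reader" hands the
+            # datum that follows it to the quoted-datum state.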
+ (r'(#lang |#!)(\S+)',
+ bygroups(Keyword.Namespace, Name.Namespace)),
+ (r'#reader', Keyword.Namespace, 'quoted-datum'),
+
+ # Other syntax
+ (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
+ (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
+ Operator, ('#pop', 'quoted-datum'))
+ ],
+ 'datum*': [
+ (r'`|,@?', Operator),
+ (_symbol, String.Symbol, '#pop'),
+ (r'[|\\]', Error),
+ default('#pop')
+ ],
+ 'list': [
+ (_closing_parenthesis, Punctuation, '#pop')
+ ],
+ 'unquoted-datum': [
+ include('datum'),
+ (r'quote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quoted-datum')),
+ (r'`', Operator, ('#pop', 'quasiquoted-datum')),
+ (r'quasiquote(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'quasiquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
+ (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Keyword, '#pop'),
+ (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
+ Name.Builtin, '#pop'),
+ (_symbol, Name, '#pop'),
+ include('datum*')
+ ],
+ 'unquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'unquoted-datum')
+ ],
+ 'quasiquoted-datum': [
+ include('datum'),
+ (r',@?', Operator, ('#pop', 'unquoted-datum')),
+ (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
+ ('#pop', 'unquoted-datum')),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
+ include('datum*')
+ ],
+ 'quasiquoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quasiquoted-datum')
+ ],
+ 'quoted-datum': [
+ include('datum'),
+ (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
+ include('datum*')
+ ],
+ 'quoted-list': [
+ include('list'),
+ (r'(?!\Z)', Text, 'quoted-datum')
+ ],
+ 'block-comment': [
+ (r'#\|', Comment.Multiline, '#push'),
+ (r'\|#', Comment.Multiline, '#pop'),
+ (r'[^#|]+|.', Comment.Multiline)
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
+ r'U[\da-fA-F]{1,8}|.)', String.Escape),
+ (r'[^\\"]+', String.Double)
+ ]
+ }
+
+
+class NewLispLexer(RegexLexer):
+ """
+    For `newLISP <www.newlisp.org>`_ source code (version 10.3.0).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'NewLisp'
+ aliases = ['newlisp']
+ filenames = ['*.lsp', '*.nl']
+ mimetypes = ['text/x-newlisp', 'application/x-newlisp']
+
+ flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
+
+ # list of built-in functions for newLISP version 10.3
+ builtins = (
+ '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
+ '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
+ '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
+ '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
+ 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file',
+ 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
+ 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
+ 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin',
+ 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case',
+ 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
+ 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant',
+ 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
+ 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
+ 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
+ 'def-new', 'default', 'define-macro', 'define-macro', 'define',
+ 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
+ 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
+ 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
+ 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
+ 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
+ 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
+ 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
+ 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
+ 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
+ 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
+ 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
+ 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
+ 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn',
+ 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup',
+ 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
+ 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
+ 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
+ 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
+ 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
+ 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
+ 'net-send-to', 'net-send-udp', 'net-send', 'net-service',
+ 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
+ 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
+ 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
+ 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
+ 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
+ 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
+ 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
+ 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event',
+ 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
+ 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
+ 'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
+ 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
+ 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
+ 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
+ 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
+ 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
+ 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
+ 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
+ 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
+ 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
+ 'write', 'write-char', 'write-file', 'write-line', 'write',
+ 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
+ )
+
+ # valid names
+ valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
+
+ tokens = {
+ 'root': [
+ # shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ # comments starting with semicolon
+ (r';.*$', Comment.Single),
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # strings, symbols and characters
+ (r'"(\\\\|\\"|[^"])*"', String),
+
+ # braces
+ (r"{", String, "bracestring"),
+
+ # [text] ... [/text] delimited strings
+            (r'\[text\]', String, "tagstring"),
+
+ # 'special' operators...
+ (r"('|:)", Operator),
+
+ # highlight the builtins
+ (words(builtins, suffix=r'\b'),
+ Keyword),
+
+ # the remaining functions
+ (r'(?<=\()' + valid_name, Name.Variable),
+
+ # the remaining variables
+ (valid_name, String.Symbol),
+
+ # parentheses
+ (r'(\(|\))', Punctuation),
+ ],
+
+ # braced strings...
+ 'bracestring': [
+ ("{", String, "#push"),
+ ("}", String, "#pop"),
+ ("[^{}]+", String),
+ ],
+
+ # tagged [text]...[/text] delimited strings...
+ 'tagstring': [
+ (r'(?s)(.*?)(\[/text\])', String, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/make.py b/pygments/lexers/make.py
new file mode 100644
index 00000000..76ca7376
--- /dev/null
+++ b/pygments/lexers/make.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.make
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Makefiles and similar.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
+ do_insertions, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Punctuation
+from pygments.lexers.shell import BashLexer
+
+__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']
+
+
+class MakefileLexer(Lexer):
+ """
+    Lexer for BSD and GNU make extensions (lenient enough to handle both,
+    even in the same file).
+
+ *Rewritten in Pygments 0.10.*
+ """
+
+ name = 'Makefile'
+ aliases = ['make', 'makefile', 'mf', 'bsdmake']
+ filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
+ mimetypes = ['text/x-makefile']
+
+ r_special = re.compile(
+ r'^(?:'
+ # BSD Make
+ r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
+ # GNU Make
+ r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath))(?=\s)')
+ r_comment = re.compile(r'^\s*@?#')
+
+ def get_tokens_unprocessed(self, text):
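+        # Sort the lines into two streams: directive and comment lines are
+        # recorded as Comment tokens at their offsets, everything else is
+        # concatenated and lexed by BaseMakefileLexer; do_insertions() then
+        # merges both streams back into one correctly positioned sequence.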
+ ins = []
+ lines = text.splitlines(True)
+ done = ''
+ lex = BaseMakefileLexer(**self.options)
+ backslashflag = False
+ for line in lines:
+ if self.r_special.match(line) or backslashflag:
+ ins.append((len(done), [(0, Comment.Preproc, line)]))
+ backslashflag = line.strip().endswith('\\')
+ elif self.r_comment.match(line):
+ ins.append((len(done), [(0, Comment, line)]))
+ else:
+ done += line
+ for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
+ yield item
+
+ def analyse_text(text):
+ # Many makefiles have $(BIG_CAPS) style variables
+ if re.search(r'\$\([A-Z_]+\)', text):
+ return 0.1
+
+
+class BaseMakefileLexer(RegexLexer):
+ """
+ Lexer for simple Makefiles (no preprocessing).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Base Makefile'
+ aliases = ['basemake']
+ filenames = []
+ mimetypes = []
+
+ tokens = {
+ 'root': [
+            # recipes: normally tab-indented, but leading spaces must be
+            # allowed too because the input may have had its tabs expanded
+ (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
+ # special variables
+ (r'\$[<@$+%?|*]', Keyword),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment),
+ (r'(export)(\s+)(?=[\w${}\t -]+\n)',
+ bygroups(Keyword, Text), 'export'),
+ (r'export\s+', Keyword),
+ # assignment
+ (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
+ bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
+ # strings
+ (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
+ (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
+ # targets
+ (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
+ 'block-header'),
+ # expansions
+ (r'\$\(', Keyword, 'expansion'),
+ ],
+ 'expansion': [
+ (r'[^$a-zA-Z_)]+', Text),
+ (r'[a-zA-Z_]+', Name.Variable),
+ (r'\$', Keyword),
+ (r'\(', Keyword, '#push'),
+ (r'\)', Keyword, '#pop'),
+ ],
+ 'export': [
+ (r'[\w${}-]+', Name.Variable),
+ (r'\n', Text, '#pop'),
+ (r'\s+', Text),
+ ],
+ 'block-header': [
+ (r'[,|]', Punctuation),
+ (r'#.*?\n', Comment, '#pop'),
+ (r'\\\n', Text), # line continuation
+ (r'\$\(', Keyword, 'expansion'),
+ (r'[a-zA-Z_]+', Name),
+ (r'\n', Text, '#pop'),
+ (r'.', Text),
+ ],
+ }
+
+
+class CMakeLexer(RegexLexer):
+ """
+ Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
+
+ .. versionadded:: 1.2
+ """
+ name = 'CMake'
+ aliases = ['cmake']
+ filenames = ['*.cmake', 'CMakeLists.txt']
+ mimetypes = ['text/x-cmake']
+
+ tokens = {
+ 'root': [
+ # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
+ # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
+ # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
+ # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
+ # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
+ # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
+ # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
+ # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
+ # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
+ # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
+ # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
+ # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
+ # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
+ # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
+ # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
+ # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
+ # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
+ # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
+ # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
+ # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
+ # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
+ # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
+ # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
+ # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
+ # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
+ # r'COUNTARGS)\b', Name.Builtin, 'args'),
+ (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
+ Punctuation), 'args'),
+ include('keywords'),
+ include('ws')
+ ],
+ 'args': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)),
+ (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
+ (r'(?s)".*?"', String.Double),
+ (r'\\\S+', String),
+ (r'[^\)$"# \t\n]+', String),
+ (r'\n', Text), # explicitly legal
+ include('keywords'),
+ include('ws')
+ ],
+ 'string': [
+
+ ],
+ 'keywords': [
+ (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
+ r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
+ ],
+ 'ws': [
+ (r'[ \t]+', Text),
+ (r'#.*\n', Comment),
+ ]
+ }
+
+ def analyse_text(text):
+ exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
+ if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
+ return 0.8
+ return 0.0
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py
new file mode 100644
index 00000000..71a9aa82
--- /dev/null
+++ b/pygments/lexers/markup.py
@@ -0,0 +1,380 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.markup
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for non-HTML markup languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, this, \
+ do_insertions, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+from pygments.util import get_bool_opt, ClassNotFound
+
+__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer']
+
+
+class BBCodeLexer(RegexLexer):
+ """
+ A lexer that highlights BBCode(-like) syntax.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'BBCode'
+ aliases = ['bbcode']
+ mimetypes = ['text/x-bbcode']
+
+ tokens = {
+ 'root': [
+ (r'[^[]+', Text),
+ # tag/end tag begin
+ (r'\[/?\w+', Keyword, 'tag'),
+ # stray bracket
+ (r'\[', Text),
+ ],
+ 'tag': [
+ (r'\s+', Text),
+ # attribute with value
+ (r'(\w+)(=)("?[^\s"\]]+"?)',
+ bygroups(Name.Attribute, Operator, String)),
+ # tag argument (a la [color=green])
+ (r'(=)("?[^\s"\]]+"?)',
+ bygroups(Operator, String)),
+ # tag end
+ (r'\]', Keyword, '#pop'),
+ ],
+ }
+
+
+class MoinWikiLexer(RegexLexer):
+ """
+ For MoinMoin (and Trac) Wiki markup.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'MoinMoin/Trac Wiki markup'
+ aliases = ['trac-wiki', 'moin']
+ filenames = []
+ mimetypes = ['text/x-trac-wiki']
+ flags = re.MULTILINE | re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'^#.*$', Comment),
+ (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
+ # Titles
+ (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
+ bygroups(Generic.Heading, using(this), Generic.Heading, String)),
+ # Literal code blocks, with optional shebang
+ (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
+ (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
+ # Lists
+ (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
+ (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
+ # Other Formatting
+ (r'\[\[\w+.*?\]\]', Keyword), # Macro
+ (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
+ bygroups(Keyword, String, Keyword)), # Link
+ (r'^----+$', Keyword), # Horizontal rules
+ (r'[^\n\'\[{!_~^,|]+', Text),
+ (r'\n', Text),
+ (r'.', Text),
+ ],
+ 'codeblock': [
+ (r'}}}', Name.Builtin, '#pop'),
+ # these blocks are allowed to be nested in Trac, but not MoinMoin
+ (r'{{{', Text, '#push'),
+ (r'[^{}]+', Comment.Preproc), # slurp boring text
+ (r'.', Comment.Preproc), # allow loose { or }
+ ],
+ }
+
+
+class RstLexer(RegexLexer):
+ """
+ For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
+
+ .. versionadded:: 0.7
+
+ Additional options accepted:
+
+ `handlecodeblocks`
+ Highlight the contents of ``.. sourcecode:: language``,
+ ``.. code:: language`` and ``.. code-block:: language``
+ directives with a lexer for the given language (default:
+ ``True``).
+
+ .. versionadded:: 0.8
+ """
+ name = 'reStructuredText'
+ aliases = ['rst', 'rest', 'restructuredtext']
+ filenames = ['*.rst', '*.rest']
+ mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
+ flags = re.MULTILINE
+
+ def _handle_sourcecode(self, match):
+ from pygments.lexers import get_lexer_by_name
+
+ # section header
+ yield match.start(1), Punctuation, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator.Word, match.group(3)
+ yield match.start(4), Punctuation, match.group(4)
+ yield match.start(5), Text, match.group(5)
+ yield match.start(6), Keyword, match.group(6)
+ yield match.start(7), Text, match.group(7)
+
+        # look up the lexer, if requested and available
+ lexer = None
+ if self.handlecodeblocks:
+ try:
+ lexer = get_lexer_by_name(match.group(6).strip())
+ except ClassNotFound:
+ pass
+ indention = match.group(8)
+ indention_size = len(indention)
+ code = (indention + match.group(9) + match.group(10) + match.group(11))
+
+ # no lexer for this language. handle it like it was a code block
+ if lexer is None:
+ yield match.start(8), String, code
+ return
+
+ # highlight the lines with the lexer.
+ ins = []
+ codelines = code.splitlines(True)
+ code = ''
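+        # Strip the directive's indentation from each line before lexing it
+        # with the sub-lexer; do_insertions() re-inserts the stripped
+        # prefixes as plain Text at the original offsets afterwards.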
+ for line in codelines:
+ if len(line) > indention_size:
+ ins.append((len(code), [(0, Text, line[:indention_size])]))
+ code += line[indention_size:]
+ else:
+ code += line
+ for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
+ yield item
+
+ # from docutils.parsers.rst.states
+ closers = u'\'")]}>\u2019\u201d\xbb!?'
+ unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
+ end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
+ % (re.escape(unicode_delimiters),
+ re.escape(closers)))
+
+ tokens = {
+ 'root': [
+ # Heading with overline
+ (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
+ r'(.+)(\n)(\1)(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading,
+ Text, Generic.Heading, Text)),
+ # Plain heading
+ (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
+ r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
+ bygroups(Generic.Heading, Text, Generic.Heading, Text)),
+ # Bulleted lists
+ (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered lists
+ (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Numbered, but keep words at BOL from becoming lists
+ (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
+ bygroups(Text, Number, using(this, state='inline'))),
+ # Line blocks
+ (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
+ bygroups(Text, Operator, using(this, state='inline'))),
+ # Sourcecode directives
+ (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
+ r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
+ _handle_sourcecode),
+ # A directive
+ (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
+ using(this, state='inline'))),
+ # A reference target
+ (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A footnote/citation target
+ (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
+ bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
+ # A substitution def
+ (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
+ bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
+ Punctuation, Text, using(this, state='inline'))),
+ # Comments
+ (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
+ # Field list
+ (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
+ (r'^( *)(:.*?:)([ \t]+)(.*?)$',
+ bygroups(Text, Name.Class, Text, Name.Function)),
+ # Definition list
+ (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
+ bygroups(using(this, state='inline'), using(this, state='inline'))),
+ # Code blocks
+ (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
+ bygroups(String.Escape, Text, String, String, Text, String)),
+ include('inline'),
+ ],
+ 'inline': [
+ (r'\\.', Text), # escape
+ (r'``', String, 'literal'), # code
+ (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
+ bygroups(String, String.Interpol, String)),
+ (r'`.+?`__?', String), # reference
+ (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
+ bygroups(Name.Variable, Name.Attribute)), # role
+ (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
+ bygroups(Name.Attribute, Name.Variable)), # role (content first)
+ (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
+ (r'\*.+?\*', Generic.Emph), # Emphasis
+ (r'\[.*?\]_', String), # Footnote or citation
+ (r'<.+?>', Name.Tag), # Hyperlink
+ (r'[^\\\n\[*`:]+', Text),
+ (r'.', Text),
+ ],
+ 'literal': [
+ (r'[^`]+', String),
+ (r'``' + end_string_suffix, String, '#pop'),
+ (r'`', String),
+ ]
+ }
+
+ def __init__(self, **options):
+ self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
+ RegexLexer.__init__(self, **options)
+
+ def analyse_text(text):
+ if text[:2] == '..' and text[2:3] != '.':
+ return 0.3
+ p1 = text.find("\n")
+ p2 = text.find("\n", p1 + 1)
+ if (p2 > -1 and # has two lines
+ p1 * 2 + 1 == p2 and # they are the same length
+                text[p1+1] in '-=' and       # the second line is a "-" or "=" underline
+                text[p1+1] == text[p2-1]):   # drawn with a single repeated character
+ return 0.5
+
+
+class TexLexer(RegexLexer):
+ """
+ Lexer for the TeX and LaTeX typesetting languages.
+ """
+
+ name = 'TeX'
+ aliases = ['tex', 'latex']
+ filenames = ['*.tex', '*.aux', '*.toc']
+ mimetypes = ['text/x-tex', 'text/x-latex']
+
+ tokens = {
+ 'general': [
+ (r'%.*?\n', Comment),
+ (r'[{}]', Name.Builtin),
+ (r'[&_^]', Name.Builtin),
+ ],
+ 'root': [
+ (r'\\\[', String.Backtick, 'displaymath'),
+ (r'\\\(', String, 'inlinemath'),
+ (r'\$\$', String.Backtick, 'displaymath'),
+ (r'\$', String, 'inlinemath'),
+ (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
+ (r'\\$', Keyword),
+ include('general'),
+ (r'[^\\$%&_^{}]+', Text),
+ ],
+ 'math': [
+ (r'\\([a-zA-Z]+|.)', Name.Variable),
+ include('general'),
+ (r'[0-9]+', Number),
+ (r'[-=!+*/()\[\]]', Operator),
+ (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
+ ],
+ 'inlinemath': [
+ (r'\\\)', String, '#pop'),
+ (r'\$', String, '#pop'),
+ include('math'),
+ ],
+ 'displaymath': [
+ (r'\\\]', String, '#pop'),
+ (r'\$\$', String, '#pop'),
+ (r'\$', Name.Builtin),
+ include('math'),
+ ],
+ 'command': [
+ (r'\[.*?\]', Name.Attribute),
+ (r'\*', Keyword),
+ default('#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ for start in ("\\documentclass", "\\input", "\\documentstyle",
+ "\\relax"):
+ if text[:len(start)] == start:
+ return True
+
+
+class GroffLexer(RegexLexer):
+ """
+ Lexer for the (g)roff typesetting language, supporting groff
+ extensions. Mainly useful for highlighting manpage sources.
+
+ .. versionadded:: 0.6
+ """
+
+ name = 'Groff'
+ aliases = ['groff', 'nroff', 'man']
+ filenames = ['*.[1234567]', '*.man']
+ mimetypes = ['application/x-troff', 'text/troff']
+
+ tokens = {
+ 'root': [
+ (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
+ (r'\.', Punctuation, 'request'),
+ # Regular characters, slurp till we find a backslash or newline
+ (r'[^\\\n]*', Text, 'textline'),
+ ],
+ 'textline': [
+ include('escapes'),
+ (r'[^\\\n]+', Text),
+ (r'\n', Text, '#pop'),
+ ],
+ 'escapes': [
+ # groff has many ways to write escapes.
+ (r'\\"[^\n]*', Comment),
+ (r'\\[fn]\w', String.Escape),
+ (r'\\\(.{2}', String.Escape),
+ (r'\\.\[.*\]', String.Escape),
+ (r'\\.', String.Escape),
+ (r'\\\n', Text, 'request'),
+ ],
+ 'request': [
+ (r'\n', Text, '#pop'),
+ include('escapes'),
+ (r'"[^\n"]+"', String.Double),
+ (r'\d+', Number),
+ (r'\S+', String),
+ (r'\s+', Text),
+ ],
+ }
+
+ def analyse_text(text):
+ if text[:1] != '.':
+ return False
+ if text[:3] == '.\\"':
+ return True
+ if text[:4] == '.TH ':
+ return True
+ if text[1:3].isalnum() and text[3:4].isspace(): # slice avoids IndexError on very short input
+ return 0.9
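A minimal usage sketch for the lexers added above (illustrative only; this
snippet is not part of the patch and simply drives the new classes through the
public Pygments API, using the 'tex' and 'groff' aliases they register):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    tex_sample = r"\documentclass{article} $e^{i\pi}+1=0$"
    man_sample = '.TH FOO 1\n.SH NAME\nfoo \\- do nothing\n'

    # Highlight a TeX fragment and a manpage fragment with the new lexers.
    print(highlight(tex_sample, get_lexer_by_name('tex'), TerminalFormatter()))
    print(highlight(man_sample, get_lexer_by_name('groff'), TerminalFormatter()))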
diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py
index b324c614..4abdfdc2 100644
--- a/pygments/lexers/math.py
+++ b/pygments/lexers/math.py
@@ -3,2284 +3,19 @@
pygments.lexers.math
~~~~~~~~~~~~~~~~~~~~
- Lexers for math languages.
+ Just export lexers that were previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from __future__ import print_function
+from pygments.lexers.python import NumPyLexer
+from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
+ OctaveLexer, ScilabLexer
+from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
+from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
+from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
+from pygments.lexers.idl import IDLLexer
+from pygments.lexers.algebra import MuPADLexer
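The re-exports above keep the old import location working while the code moves
to per-language modules; a small compatibility sketch (illustrative only, not
part of the patch):

    # Assumption for illustration: both paths resolve to the same class
    # once this change is applied.
    from pygments.lexers.math import MatlabLexer as from_old_module
    from pygments.lexers.matlab import MatlabLexer as from_new_module
    assert from_old_module is from_new_module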
-import re
-
-from pygments.util import shebang_matches
-from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
- combined, do_insertions
-from pygments.token import Comment, String, Punctuation, Keyword, Name, \
- Operator, Number, Text, Generic
-
-from pygments.lexers.agile import PythonLexer
-from pygments.lexers import _scilab_builtins
-from pygments.lexers import _stan_builtins
-
-__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
- 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
- 'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
- 'IDLLexer', 'RdLexer', 'IgorLexer', 'MathematicaLexer', 'GAPLexer']
-
-
-class JuliaLexer(RegexLexer):
- """
- For `Julia <http://julialang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
- name = 'Julia'
- aliases = ['julia','jl']
- filenames = ['*.jl']
- mimetypes = ['text/x-julia','application/x-julia']
-
- builtins = [
- 'exit','whos','edit','load','is','isa','isequal','typeof','tuple',
- 'ntuple','uid','hash','finalizer','convert','promote','subtype',
- 'typemin','typemax','realmin','realmax','sizeof','eps','promote_type',
- 'method_exists','applicable','invoke','dlopen','dlsym','system',
- 'error','throw','assert','new','Inf','Nan','pi','im',
- ]
-
- tokens = {
- 'root': [
- (r'\n', Text),
- (r'[^\S\n]+', Text),
- (r'#.*$', Comment),
- (r'[]{}:(),;[@]', Punctuation),
- (r'\\\n', Text),
- (r'\\', Text),
-
- # keywords
- (r'(begin|while|for|in|return|break|continue|'
- r'macro|quote|let|if|elseif|else|try|catch|end|'
- r'bitstype|ccall|do|using|module|import|export|'
- r'importall|baremodule|immutable)\b', Keyword),
- (r'(local|global|const)\b', Keyword.Declaration),
- (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
- r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
- Keyword.Type),
-
- # functions
- (r'(function)((?:\s|\\\s)+)',
- bygroups(Keyword,Name.Function), 'funcname'),
-
- # types
- (r'(type|typealias|abstract)((?:\s|\\\s)+)',
- bygroups(Keyword,Name.Class), 'typename'),
-
- # operators
- (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
- (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
-
- # builtins
- ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
-
- # backticks
- (r'`(?s).*?`', String.Backtick),
-
- # chars
- (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
- r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
-
- # try to match trailing transpose
- (r'(?<=[.\w\)\]])\'+', Operator),
-
- # strings
- (r'(?:[IL])"', String, 'string'),
- (r'[E]?"', String, combined('stringescape', 'string')),
-
- # names
- (r'@[\w.]+', Name.Decorator),
- (r'[a-zA-Z_]\w*', Name),
-
- # numbers
- (r'(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+(_\d+)+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'0b[01]+(_[01]+)+', Number.Bin),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+(_[0-7]+)+', Number.Oct),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+', Number.Hex),
- (r'0x[a-fA-F0-9]+', Number.Hex),
- (r'\d+(_\d+)+', Number.Integer),
- (r'\d+', Number.Integer)
- ],
-
- 'funcname': [
- ('[a-zA-Z_]\w*', Name.Function, '#pop'),
- ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
- ('[^\s\w{]{1,2}', Operator, '#pop'),
- ],
-
- 'typename': [
- ('[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
-
- 'stringescape': [
- (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
- r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
- ],
-
- 'string': [
- (r'"', String, '#pop'),
- (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
- (r'\$(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
- String.Interpol),
- (r'[^\\"$]+', String),
- # quotes, dollar signs, and backslashes must be parsed one at a time
- (r'["\\]', String),
- # unhandled string formatting sign
- (r'\$', String)
- ],
- }
-
- def analyse_text(text):
- return shebang_matches(text, r'julia')
-
-
-line_re = re.compile('.*?\n')
-
-class JuliaConsoleLexer(Lexer):
- """
- For Julia console sessions. Modeled after MatlabSessionLexer.
-
- .. versionadded:: 1.6
- """
- name = 'Julia console'
- aliases = ['jlcon']
-
- def get_tokens_unprocessed(self, text):
- jllexer = JuliaLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('julia>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:6])]))
- curcode += line[6:]
-
- elif line.startswith(' '):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
- else:
- if curcode:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- if curcode: # or item:
- for item in do_insertions(
- insertions, jllexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class MuPADLexer(RegexLexer):
- """
- A `MuPAD <http://www.mupad.com>`_ lexer.
- Contributed by Christopher Creutzig <christopher@creutzig.de>.
-
- .. versionadded:: 0.8
- """
- name = 'MuPAD'
- aliases = ['mupad']
- filenames = ['*.mu']
-
- tokens = {
- 'root' : [
- (r'//.*?$', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- next|break|end|
- axiom|end_axiom|category|end_category|domain|end_domain|inherits|
- if|%if|then|elif|else|end_if|
- case|of|do|otherwise|end_case|
- while|end_while|
- repeat|until|end_repeat|
- for|from|to|downto|step|end_for|
- proc|local|option|save|begin|end_proc|
- delete|frame
- )\b''', Keyword),
- (r'''(?x)\b(?:
- DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
- DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
- DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
- DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
- )\b''', Name.Class),
- (r'''(?x)\b(?:
- PI|EULER|E|CATALAN|
- NIL|FAIL|undefined|infinity|
- TRUE|FALSE|UNKNOWN
- )\b''',
- Name.Constant),
- (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
- (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
- (r'''(?x)\b(?:
- and|or|not|xor|
- assuming|
- div|mod|
- union|minus|intersect|in|subset
- )\b''',
- Operator.Word),
- (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
- #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
- (r'''(?x)
- ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
- (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''',
- bygroups(Name.Function, Text, Punctuation)),
- (r'''(?x)
- (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
- (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ],
- 'comment' : [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ]
- }
-
-
-class MatlabLexer(RegexLexer):
- """
- For Matlab source code.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab'
- aliases = ['matlab']
- filenames = ['*.m']
- mimetypes = ['text/matlab']
-
- #
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # for f in elfun specfun elmat; do
- # echo -n "$f = "
- # matlab -nojvm -r "help $f;exit;" | perl -ne \
- # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
- # done
- #
- # elfun: Elementary math functions
- # specfun: Special Math functions
- # elmat: Elementary matrices and matrix manipulation
- #
- # taken from Matlab version 7.4.0.336 (R2007a)
- #
- elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh",
- "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2",
- "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd",
- "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd",
- "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2",
- "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs",
- "angle","complex","conj","imag","real","unwrap","isreal","cplxpair",
- "fix","floor","ceil","round","mod","rem","sign"]
- specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta",
- "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx",
- "erfinv","expint","gamma","gammainc","gammaln","psi","legendre",
- "cross","dot","factor","isprime","primes","gcd","lcm","rat",
- "rats","perms","nchoosek","factorial","cart2sph","cart2pol",
- "pol2cart","sph2cart","hsv2rgb","rgb2hsv"]
- elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace",
- "freqspace","meshgrid","accumarray","size","length","ndims","numel",
- "disp","isempty","isequal","isequalwithequalnans","cat","reshape",
- "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90",
- "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute",
- "ipermute","shiftdim","circshift","squeeze","isscalar","isvector",
- "ans","eps","realmax","realmin","pi","i","inf","nan","isnan",
- "isinf","isfinite","j","why","compan","gallery","hadamard","hankel",
- "hilb","invhilb","magic","pascal","rosser","toeplitz","vander",
- "wilkinson"]
-
- tokens = {
- 'root': [
- # line starting with '!' is sent as a system command. not sure what
- # label to use...
- (r'^!.*', String.Other),
- (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
- (r'%.*$', Comment),
- (r'^\s*function', Keyword, 'deffunc'),
-
- # from 'iskeyword' on version 7.11 (R2010):
- (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
- r'events|for|function|global|if|methods|otherwise|parfor|'
- r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
-
- ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin),
-
- # line continuation with following comment:
- (r'\.\.\..*$', Comment),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\].])\'+', Operator),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- (r'(?<![\w\)\].])\'', String, 'string'),
- ('[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r'[^\']*\'', String, '#pop')
- ],
- 'blockcomment': [
- (r'^\s*%\}', Comment.Multiline, '#pop'),
- (r'^.*\n', Comment.Multiline),
- (r'.', Comment.Multiline),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.match('^\s*%', text, re.M): # comment
- return 0.2
- elif re.match('^!\w+', text, re.M): # system cmd
- return 0.2
-
-
-line_re = re.compile('.*?\n')
-
-class MatlabSessionLexer(Lexer):
- """
- For Matlab sessions. Modeled after PythonConsoleLexer.
- Contributed by Ken Schutte <kschutte@csail.mit.edu>.
-
- .. versionadded:: 0.10
- """
- name = 'Matlab session'
- aliases = ['matlabsession']
-
- def get_tokens_unprocessed(self, text):
- mlexer = MatlabLexer(**self.options)
-
- curcode = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
-
- if line.startswith('>> '):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:3])]))
- curcode += line[3:]
-
- elif line.startswith('>>'):
- insertions.append((len(curcode),
- [(0, Generic.Prompt, line[:2])]))
- curcode += line[2:]
-
- elif line.startswith('???'):
-
- idx = len(curcode)
-
- # without is showing error on same line as before...?
- #line = "\n" + line
- token = (0, Generic.Traceback, line)
- insertions.append((idx, [token]))
-
- else:
- if curcode:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
- curcode = ''
- insertions = []
-
- yield match.start(), Generic.Output, line
-
- print(insertions)
- if curcode: # or item:
- for item in do_insertions(
- insertions, mlexer.get_tokens_unprocessed(curcode)):
- yield item
-
-
-class OctaveLexer(RegexLexer):
- """
- For GNU Octave source code.
-
- .. versionadded:: 1.5
- """
- name = 'Octave'
- aliases = ['octave']
- filenames = ['*.m']
- mimetypes = ['text/octave']
-
- # These lists are generated automatically.
- # Run the following in bash shell:
- #
- # First dump all of the Octave manual into a plain text file:
- #
- # $ info octave --subnodes -o octave-manual
- #
- # Now grep through it:
-
- # for i in \
- # "Built-in Function" "Command" "Function File" \
- # "Loadable Function" "Mapping Function";
- # do
- # perl -e '@name = qw('"$i"');
- # print lc($name[0]),"_kw = [\n"';
- #
- # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
- # octave-manual | sort | uniq ;
- # echo "]" ;
- # echo;
- # done
-
- # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
- builtin_kw = [ "addlistener", "addpath", "addproperty", "all",
- "and", "any", "argnames", "argv", "assignin",
- "atexit", "autoload",
- "available_graphics_toolkits", "beep_on_error",
- "bitand", "bitmax", "bitor", "bitshift", "bitxor",
- "cat", "cell", "cellstr", "char", "class", "clc",
- "columns", "command_line_path",
- "completion_append_char", "completion_matches",
- "complex", "confirm_recursive_rmdir", "cputime",
- "crash_dumps_octave_core", "ctranspose", "cumprod",
- "cumsum", "debug_on_error", "debug_on_interrupt",
- "debug_on_warning", "default_save_options",
- "dellistener", "diag", "diff", "disp",
- "doc_cache_file", "do_string_escapes", "double",
- "drawnow", "e", "echo_executing_commands", "eps",
- "eq", "errno", "errno_list", "error", "eval",
- "evalin", "exec", "exist", "exit", "eye", "false",
- "fclear", "fclose", "fcntl", "fdisp", "feof",
- "ferror", "feval", "fflush", "fgetl", "fgets",
- "fieldnames", "file_in_loadpath", "file_in_path",
- "filemarker", "filesep", "find_dir_in_path",
- "fixed_point_format", "fnmatch", "fopen", "fork",
- "formula", "fprintf", "fputs", "fread", "freport",
- "frewind", "fscanf", "fseek", "fskipl", "ftell",
- "functions", "fwrite", "ge", "genpath", "get",
- "getegid", "getenv", "geteuid", "getgid",
- "getpgrp", "getpid", "getppid", "getuid", "glob",
- "gt", "gui_mode", "history_control",
- "history_file", "history_size",
- "history_timestamp_format_string", "home",
- "horzcat", "hypot", "ifelse",
- "ignore_function_time_stamp", "inferiorto",
- "info_file", "info_program", "inline", "input",
- "intmax", "intmin", "ipermute",
- "is_absolute_filename", "isargout", "isbool",
- "iscell", "iscellstr", "ischar", "iscomplex",
- "isempty", "isfield", "isfloat", "isglobal",
- "ishandle", "isieee", "isindex", "isinteger",
- "islogical", "ismatrix", "ismethod", "isnull",
- "isnumeric", "isobject", "isreal",
- "is_rooted_relative_filename", "issorted",
- "isstruct", "isvarname", "kbhit", "keyboard",
- "kill", "lasterr", "lasterror", "lastwarn",
- "ldivide", "le", "length", "link", "linspace",
- "logical", "lstat", "lt", "make_absolute_filename",
- "makeinfo_program", "max_recursion_depth", "merge",
- "methods", "mfilename", "minus", "mislocked",
- "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
- "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
- "munlock", "nargin", "nargout",
- "native_float_format", "ndims", "ne", "nfields",
- "nnz", "norm", "not", "numel", "nzmax",
- "octave_config_info", "octave_core_file_limit",
- "octave_core_file_name",
- "octave_core_file_options", "ones", "or",
- "output_max_field_width", "output_precision",
- "page_output_immediately", "page_screen_output",
- "path", "pathsep", "pause", "pclose", "permute",
- "pi", "pipe", "plus", "popen", "power",
- "print_empty_dimensions", "printf",
- "print_struct_array_contents", "prod",
- "program_invocation_name", "program_name",
- "putenv", "puts", "pwd", "quit", "rats", "rdivide",
- "readdir", "readlink", "read_readline_init_file",
- "realmax", "realmin", "rehash", "rename",
- "repelems", "re_read_readline_init_file", "reset",
- "reshape", "resize", "restoredefaultpath",
- "rethrow", "rmdir", "rmfield", "rmpath", "rows",
- "save_header_format_string", "save_precision",
- "saving_history", "scanf", "set", "setenv",
- "shell_cmd", "sighup_dumps_octave_core",
- "sigterm_dumps_octave_core", "silent_functions",
- "single", "size", "size_equal", "sizemax",
- "sizeof", "sleep", "source", "sparse_auto_mutate",
- "split_long_rows", "sprintf", "squeeze", "sscanf",
- "stat", "stderr", "stdin", "stdout", "strcmp",
- "strcmpi", "string_fill_char", "strncmp",
- "strncmpi", "struct", "struct_levels_to_print",
- "strvcat", "subsasgn", "subsref", "sum", "sumsq",
- "superiorto", "suppress_verbose_help_message",
- "symlink", "system", "tic", "tilde_expand",
- "times", "tmpfile", "tmpnam", "toc", "toupper",
- "transpose", "true", "typeinfo", "umask", "uminus",
- "uname", "undo_string_escapes", "unlink", "uplus",
- "upper", "usage", "usleep", "vec", "vectorize",
- "vertcat", "waitpid", "warning", "warranty",
- "whos_line_format", "yes_or_no", "zeros",
- "inf", "Inf", "nan", "NaN"]
-
- command_kw = [ "close", "load", "who", "whos", ]
-
- function_kw = [ "accumarray", "accumdim", "acosd", "acotd",
- "acscd", "addtodate", "allchild", "ancestor",
- "anova", "arch_fit", "arch_rnd", "arch_test",
- "area", "arma_rnd", "arrayfun", "ascii", "asctime",
- "asecd", "asind", "assert", "atand",
- "autoreg_matrix", "autumn", "axes", "axis", "bar",
- "barh", "bartlett", "bartlett_test", "beep",
- "betacdf", "betainv", "betapdf", "betarnd",
- "bicgstab", "bicubic", "binary", "binocdf",
- "binoinv", "binopdf", "binornd", "bitcmp",
- "bitget", "bitset", "blackman", "blanks",
- "blkdiag", "bone", "box", "brighten", "calendar",
- "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
- "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
- "chisquare_test_homogeneity",
- "chisquare_test_independence", "circshift", "cla",
- "clabel", "clf", "clock", "cloglog", "closereq",
- "colon", "colorbar", "colormap", "colperm",
- "comet", "common_size", "commutation_matrix",
- "compan", "compare_versions", "compass",
- "computer", "cond", "condest", "contour",
- "contourc", "contourf", "contrast", "conv",
- "convhull", "cool", "copper", "copyfile", "cor",
- "corrcoef", "cor_test", "cosd", "cotd", "cov",
- "cplxpair", "cross", "cscd", "cstrcat", "csvread",
- "csvwrite", "ctime", "cumtrapz", "curl", "cut",
- "cylinder", "date", "datenum", "datestr",
- "datetick", "datevec", "dblquad", "deal",
- "deblank", "deconv", "delaunay", "delaunayn",
- "delete", "demo", "detrend", "diffpara", "diffuse",
- "dir", "discrete_cdf", "discrete_inv",
- "discrete_pdf", "discrete_rnd", "display",
- "divergence", "dlmwrite", "dos", "dsearch",
- "dsearchn", "duplication_matrix", "durbinlevinson",
- "ellipsoid", "empirical_cdf", "empirical_inv",
- "empirical_pdf", "empirical_rnd", "eomday",
- "errorbar", "etime", "etreeplot", "example",
- "expcdf", "expinv", "expm", "exppdf", "exprnd",
- "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
- "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
- "factorial", "fail", "fcdf", "feather", "fftconv",
- "fftfilt", "fftshift", "figure", "fileattrib",
- "fileparts", "fill", "findall", "findobj",
- "findstr", "finv", "flag", "flipdim", "fliplr",
- "flipud", "fpdf", "fplot", "fractdiff", "freqz",
- "freqz_plot", "frnd", "fsolve",
- "f_test_regression", "ftp", "fullfile", "fzero",
- "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
- "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
- "geoinv", "geopdf", "geornd", "getfield", "ginput",
- "glpk", "gls", "gplot", "gradient",
- "graphics_toolkit", "gray", "grid", "griddata",
- "griddatan", "gtext", "gunzip", "gzip", "hadamard",
- "hamming", "hankel", "hanning", "hggroup",
- "hidden", "hilb", "hist", "histc", "hold", "hot",
- "hotelling_test", "housh", "hsv", "hurst",
- "hygecdf", "hygeinv", "hygepdf", "hygernd",
- "idivide", "ifftshift", "image", "imagesc",
- "imfinfo", "imread", "imshow", "imwrite", "index",
- "info", "inpolygon", "inputname", "interpft",
- "interpn", "intersect", "invhilb", "iqr", "isa",
- "isdefinite", "isdir", "is_duplicate_entry",
- "isequal", "isequalwithequalnans", "isfigure",
- "ishermitian", "ishghandle", "is_leap_year",
- "isletter", "ismac", "ismember", "ispc", "isprime",
- "isprop", "isscalar", "issquare", "isstrprop",
- "issymmetric", "isunix", "is_valid_file_id",
- "isvector", "jet", "kendall",
- "kolmogorov_smirnov_cdf",
- "kolmogorov_smirnov_test", "kruskal_wallis_test",
- "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
- "laplace_pdf", "laplace_rnd", "legend", "legendre",
- "license", "line", "linkprop", "list_primes",
- "loadaudio", "loadobj", "logistic_cdf",
- "logistic_inv", "logistic_pdf", "logistic_rnd",
- "logit", "loglog", "loglogerr", "logm", "logncdf",
- "logninv", "lognpdf", "lognrnd", "logspace",
- "lookfor", "ls_command", "lsqnonneg", "magic",
- "mahalanobis", "manova", "matlabroot",
- "mcnemar_test", "mean", "meansq", "median", "menu",
- "mesh", "meshc", "meshgrid", "meshz", "mexext",
- "mget", "mkpp", "mode", "moment", "movefile",
- "mpoles", "mput", "namelengthmax", "nargchk",
- "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
- "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
- "nonzeros", "normcdf", "normest", "norminv",
- "normpdf", "normrnd", "now", "nthroot", "null",
- "ocean", "ols", "onenormest", "optimget",
- "optimset", "orderfields", "orient", "orth",
- "pack", "pareto", "parseparams", "pascal", "patch",
- "pathdef", "pcg", "pchip", "pcolor", "pcr",
- "peaks", "periodogram", "perl", "perms", "pie",
- "pink", "planerot", "playaudio", "plot",
- "plotmatrix", "plotyy", "poisscdf", "poissinv",
- "poisspdf", "poissrnd", "polar", "poly",
- "polyaffine", "polyarea", "polyderiv", "polyfit",
- "polygcd", "polyint", "polyout", "polyreduce",
- "polyval", "polyvalm", "postpad", "powerset",
- "ppder", "ppint", "ppjumps", "ppplot", "ppval",
- "pqpnonneg", "prepad", "primes", "print",
- "print_usage", "prism", "probit", "qp", "qqplot",
- "quadcc", "quadgk", "quadl", "quadv", "quiver",
- "qzhess", "rainbow", "randi", "range", "rank",
- "ranks", "rat", "reallog", "realpow", "realsqrt",
- "record", "rectangle_lw", "rectangle_sw",
- "rectint", "refresh", "refreshdata",
- "regexptranslate", "repmat", "residue", "ribbon",
- "rindex", "roots", "rose", "rosser", "rotdim",
- "rref", "run", "run_count", "rundemos", "run_test",
- "runtests", "saveas", "saveaudio", "saveobj",
- "savepath", "scatter", "secd", "semilogx",
- "semilogxerr", "semilogy", "semilogyerr",
- "setaudio", "setdiff", "setfield", "setxor",
- "shading", "shift", "shiftdim", "sign_test",
- "sinc", "sind", "sinetone", "sinewave", "skewness",
- "slice", "sombrero", "sortrows", "spaugment",
- "spconvert", "spdiags", "spearman", "spectral_adf",
- "spectral_xdf", "specular", "speed", "spencer",
- "speye", "spfun", "sphere", "spinmap", "spline",
- "spones", "sprand", "sprandn", "sprandsym",
- "spring", "spstats", "spy", "sqp", "stairs",
- "statistics", "std", "stdnormal_cdf",
- "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
- "stem", "stft", "strcat", "strchr", "strjust",
- "strmatch", "strread", "strsplit", "strtok",
- "strtrim", "strtrunc", "structfun", "studentize",
- "subplot", "subsindex", "subspace", "substr",
- "substruct", "summer", "surf", "surface", "surfc",
- "surfl", "surfnorm", "svds", "swapbytes",
- "sylvester_matrix", "symvar", "synthesis", "table",
- "tand", "tar", "tcdf", "tempdir", "tempname",
- "test", "text", "textread", "textscan", "tinv",
- "title", "toeplitz", "tpdf", "trace", "trapz",
- "treelayout", "treeplot", "triangle_lw",
- "triangle_sw", "tril", "trimesh", "triplequad",
- "triplot", "trisurf", "triu", "trnd", "tsearchn",
- "t_test", "t_test_regression", "type", "unidcdf",
- "unidinv", "unidpdf", "unidrnd", "unifcdf",
- "unifinv", "unifpdf", "unifrnd", "union", "unique",
- "unix", "unmkpp", "unpack", "untabify", "untar",
- "unwrap", "unzip", "u_test", "validatestring",
- "vander", "var", "var_test", "vech", "ver",
- "version", "view", "voronoi", "voronoin",
- "waitforbuttonpress", "wavread", "wavwrite",
- "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
- "welch_test", "what", "white", "whitebg",
- "wienrnd", "wilcoxon_test", "wilkinson", "winter",
- "xlabel", "xlim", "ylabel", "yulewalker", "zip",
- "zlabel", "z_test", ]
-
- loadable_kw = [ "airy", "amd", "balance", "besselh", "besseli",
- "besselj", "besselk", "bessely", "bitpack",
- "bsxfun", "builtin", "ccolamd", "cellfun",
- "cellslices", "chol", "choldelete", "cholinsert",
- "cholinv", "cholshift", "cholupdate", "colamd",
- "colloc", "convhulln", "convn", "csymamd",
- "cummax", "cummin", "daspk", "daspk_options",
- "dasrt", "dasrt_options", "dassl", "dassl_options",
- "dbclear", "dbdown", "dbstack", "dbstatus",
- "dbstop", "dbtype", "dbup", "dbwhere", "det",
- "dlmread", "dmperm", "dot", "eig", "eigs",
- "endgrent", "endpwent", "etree", "fft", "fftn",
- "fftw", "filter", "find", "full", "gcd",
- "getgrent", "getgrgid", "getgrnam", "getpwent",
- "getpwnam", "getpwuid", "getrusage", "givens",
- "gmtime", "gnuplot_binary", "hess", "ifft",
- "ifftn", "inv", "isdebugmode", "issparse", "kron",
- "localtime", "lookup", "lsode", "lsode_options",
- "lu", "luinc", "luupdate", "matrix_type", "max",
- "min", "mktime", "pinv", "qr", "qrdelete",
- "qrinsert", "qrshift", "qrupdate", "quad",
- "quad_options", "qz", "rand", "rande", "randg",
- "randn", "randp", "randperm", "rcond", "regexp",
- "regexpi", "regexprep", "schur", "setgrent",
- "setpwent", "sort", "spalloc", "sparse", "spparms",
- "sprank", "sqrtm", "strfind", "strftime",
- "strptime", "strrep", "svd", "svd_driver", "syl",
- "symamd", "symbfact", "symrcm", "time", "tsearch",
- "typecast", "urlread", "urlwrite", ]
-
- mapping_kw = [ "abs", "acos", "acosh", "acot", "acoth", "acsc",
- "acsch", "angle", "arg", "asec", "asech", "asin",
- "asinh", "atan", "atanh", "beta", "betainc",
- "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
- "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
- "erfcx", "erfinv", "exp", "finite", "fix", "floor",
- "fmod", "gamma", "gammainc", "gammaln", "imag",
- "isalnum", "isalpha", "isascii", "iscntrl",
- "isdigit", "isfinite", "isgraph", "isinf",
- "islower", "isna", "isnan", "isprint", "ispunct",
- "isspace", "isupper", "isxdigit", "lcm", "lgamma",
- "log", "lower", "mod", "real", "rem", "round",
- "roundb", "sec", "sech", "sign", "sin", "sinh",
- "sqrt", "tan", "tanh", "toascii", "tolower", "xor",
- ]
-
- builtin_consts = [ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
- "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
- "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
- "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
- "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
- "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
- "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
- "WSTOPSIG", "WTERMSIG", "WUNTRACED", ]
-
- tokens = {
- 'root': [
- #We should look into multiline comments
- (r'[%#].*$', Comment),
- (r'^\s*function', Keyword, 'deffunc'),
-
- # from 'iskeyword' on hg changeset 8cc154f45e37
- (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
- r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
- r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
- r'endswitch|endwhile|events|for|function|get|global|if|methods|'
- r'otherwise|persistent|properties|return|set|static|switch|try|'
- r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
- ("(" + "|".join( builtin_kw + command_kw
- + function_kw + loadable_kw
- + mapping_kw) + r')\b', Name.Builtin),
-
- ("(" + "|".join(builtin_consts) + r')\b', Name.Constant),
-
- # operators in Octave but not Matlab:
- (r'-=|!=|!|/=|--', Operator),
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators in Octave but not Matlab requiring escape for re:
- (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*',Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
- # punctuation:
- (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
- (r'=|:|;', Punctuation),
-
- (r'"[^"]*"', String),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\].])\'+', Operator),
- (r'(?<![\w\)\].])\'', String, 'string'),
-
- ('[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
-
-class ScilabLexer(RegexLexer):
- """
- For Scilab source code.
-
- .. versionadded:: 1.5
- """
- name = 'Scilab'
- aliases = ['scilab']
- filenames = ['*.sci', '*.sce', '*.tst']
- mimetypes = ['text/scilab']
-
- tokens = {
- 'root': [
- (r'//.*?$', Comment.Single),
- (r'^\s*function', Keyword, 'deffunc'),
-
- (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
- r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
- r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
- r'endswitch|endwhile|events|for|function|get|global|if|methods|'
- r'otherwise|persistent|properties|return|set|static|switch|try|'
- r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
-
- ("(" + "|".join(_scilab_builtins.functions_kw +
- _scilab_builtins.commands_kw +
- _scilab_builtins.macros_kw
- ) + r')\b', Name.Builtin),
-
- (r'(%s)\b' % "|".join(map(re.escape, _scilab_builtins.builtin_consts)),
- Name.Constant),
-
- # operators:
- (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
- # operators requiring escape for re:
- (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
- # punctuation:
- (r'[\[\](){}@.,=:;]', Punctuation),
-
- (r'"[^"]*"', String),
-
- # quote can be transpose, instead of string:
- # (not great, but handles common cases...)
- (r'(?<=[\w\)\].])\'+', Operator),
- (r'(?<![\w\)\].])\'', String, 'string'),
-
- (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eEf][+-]?[0-9]+', Number.Float),
- (r'\d+', Number.Integer),
-
- ('[a-zA-Z_]\w*', Name),
- (r'.', Text),
- ],
- 'string': [
- (r"[^']*'", String, '#pop'),
- (r'.', String, '#pop'),
- ],
- 'deffunc': [
- (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
- bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
- Text.Whitespace, Name.Function, Punctuation, Text,
- Punctuation, Text.Whitespace), '#pop'),
- ],
- }
-
-
-class NumPyLexer(PythonLexer):
- """
- A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
- """
-
- name = 'NumPy'
- aliases = ['numpy']
-
- # override the mimetypes to not inherit them from python
- mimetypes = []
- filenames = []
-
- EXTRA_KEYWORDS = set([
- 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
- 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
- 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
- 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
- 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
- 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
- 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
- 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
- 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
- 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
- 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
- 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
- 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
- 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
- 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
- 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
- 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
- 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
- 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
- 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
- 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
- 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
- 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
- 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
- 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
- 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
- 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
- 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
- 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
- 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
- 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
- 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
- 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
- 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
- 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
- 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
- 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
- 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
- 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
- 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
- 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
- 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
- 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
- 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
- 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
- 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
- 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
- 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
- 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
- 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
- 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
- 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
- 'set_numeric_ops', 'set_printoptions', 'set_string_function',
- 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
- 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
- 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
- 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
- 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
- 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
- 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
- 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
- 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
- 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
- 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
- ])
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- PythonLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in self.EXTRA_KEYWORDS:
- yield index, Keyword.Pseudo, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or
- 'import ' in text[:1000]) \
- and ('import numpy' in text or 'from numpy import' in text)
-
-
-class RConsoleLexer(Lexer):
- """
- For R console transcripts or R CMD BATCH output files.
- """
-
- name = 'RConsole'
- aliases = ['rconsole', 'rout']
- filenames = ['*.Rout']
-
- def get_tokens_unprocessed(self, text):
- slexer = SLexer(**self.options)
-
- current_code_block = ''
- insertions = []
-
- for match in line_re.finditer(text):
- line = match.group()
- if line.startswith('>') or line.startswith('+'):
- # Colorize the prompt as such,
- # then put rest of line into current_code_block
- insertions.append((len(current_code_block),
- [(0, Generic.Prompt, line[:2])]))
- current_code_block += line[2:]
- else:
- # We have reached a non-prompt line!
- # If we have stored prompt lines, need to process them first.
- if current_code_block:
- # Weave together the prompts and highlight code.
- for item in do_insertions(insertions,
- slexer.get_tokens_unprocessed(current_code_block)):
- yield item
- # Reset vars for next code block.
- current_code_block = ''
- insertions = []
- # Now process the actual line itself, this is output from R.
- yield match.start(), Generic.Output, line
-
- # If we happen to end on a code block with nothing after it, need to
- # process the last code block. This is neither elegant nor DRY so
- # should be changed.
- if current_code_block:
- for item in do_insertions(insertions,
- slexer.get_tokens_unprocessed(current_code_block)):
- yield item
-
-
-class SLexer(RegexLexer):
- """
- For S, S-plus, and R source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'S'
- aliases = ['splus', 's', 'r']
- filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
- mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
- 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
- builtins_base = [
- 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
- 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
- 'La\.svd', 'Map', 'Math\.Date', 'Math\.POSIXt', 'Math\.data\.frame',
- 'Math\.difftime', 'Math\.factor', 'Mod', 'NA_character_',
- 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN',
- 'Negate', 'NextMethod', 'Ops\.Date', 'Ops\.POSIXt', 'Ops\.data\.frame',
- 'Ops\.difftime', 'Ops\.factor', 'Ops\.numeric_version', 'Ops\.ordered',
- 'Position', 'R\.Version', 'R\.home', 'R\.version', 'R\.version\.string',
- 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
- 'Reduce', 'Summary\.Date', 'Summary\.POSIXct', 'Summary\.POSIXlt',
- 'Summary\.data\.frame', 'Summary\.difftime', 'Summary\.factor',
- 'Summary\.numeric_version', 'Summary\.ordered', 'Sys\.Date',
- 'Sys\.chmod', 'Sys\.getenv', 'Sys\.getlocale', 'Sys\.getpid',
- 'Sys\.glob', 'Sys\.info', 'Sys\.localeconv', 'Sys\.readlink',
- 'Sys\.setFileTime', 'Sys\.setenv', 'Sys\.setlocale', 'Sys\.sleep',
- 'Sys\.time', 'Sys\.timezone', 'Sys\.umask', 'Sys\.unsetenv',
- 'Sys\.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
- 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
- 'all', 'all\.equal', 'all\.equal\.POSIXct', 'all\.equal\.character',
- 'all\.equal\.default', 'all\.equal\.factor', 'all\.equal\.formula',
- 'all\.equal\.language', 'all\.equal\.list', 'all\.equal\.numeric',
- 'all\.equal\.raw', 'all\.names', 'all\.vars', 'any', 'anyDuplicated',
- 'anyDuplicated\.array', 'anyDuplicated\.data\.frame',
- 'anyDuplicated\.default', 'anyDuplicated\.matrix', 'aperm',
- 'aperm\.default', 'aperm\.table', 'append', 'apply', 'args',
- 'arrayInd', 'as\.Date', 'as\.Date\.POSIXct', 'as\.Date\.POSIXlt',
- 'as\.Date\.character', 'as\.Date\.date', 'as\.Date\.dates',
- 'as\.Date\.default', 'as\.Date\.factor', 'as\.Date\.numeric',
- 'as\.POSIXct', 'as\.POSIXct\.Date', 'as\.POSIXct\.POSIXlt',
- 'as\.POSIXct\.date', 'as\.POSIXct\.dates', 'as\.POSIXct\.default',
- 'as\.POSIXct\.numeric', 'as\.POSIXlt', 'as\.POSIXlt\.Date',
- 'as\.POSIXlt\.POSIXct', 'as\.POSIXlt\.character', 'as\.POSIXlt\.date',
- 'as\.POSIXlt\.dates', 'as\.POSIXlt\.default', 'as\.POSIXlt\.factor',
- 'as\.POSIXlt\.numeric', 'as\.array', 'as\.array\.default', 'as\.call',
- 'as\.character', 'as\.character\.Date', 'as\.character\.POSIXt',
- 'as\.character\.condition', 'as\.character\.default',
- 'as\.character\.error', 'as\.character\.factor', 'as\.character\.hexmode',
- 'as\.character\.numeric_version', 'as\.character\.octmode',
- 'as\.character\.srcref', 'as\.complex', 'as\.data\.frame',
- 'as\.data\.frame\.AsIs', 'as\.data\.frame\.Date', 'as\.data\.frame\.POSIXct',
- 'as\.data\.frame\.POSIXlt', 'as\.data\.frame\.array',
- 'as\.data\.frame\.character', 'as\.data\.frame\.complex',
- 'as\.data\.frame\.data\.frame', 'as\.data\.frame\.default',
- 'as\.data\.frame\.difftime', 'as\.data\.frame\.factor',
- 'as\.data\.frame\.integer', 'as\.data\.frame\.list',
- 'as\.data\.frame\.logical', 'as\.data\.frame\.matrix',
- 'as\.data\.frame\.model\.matrix', 'as\.data\.frame\.numeric',
- 'as\.data\.frame\.numeric_version', 'as\.data\.frame\.ordered',
- 'as\.data\.frame\.raw', 'as\.data\.frame\.table', 'as\.data\.frame\.ts',
- 'as\.data\.frame\.vector', 'as\.difftime', 'as\.double',
- 'as\.double\.POSIXlt', 'as\.double\.difftime', 'as\.environment',
- 'as\.expression', 'as\.expression\.default', 'as\.factor',
- 'as\.function', 'as\.function\.default', 'as\.hexmode', 'as\.integer',
- 'as\.list', 'as\.list\.Date', 'as\.list\.POSIXct', 'as\.list\.data\.frame',
- 'as\.list\.default', 'as\.list\.environment', 'as\.list\.factor',
- 'as\.list\.function', 'as\.list\.numeric_version', 'as\.logical',
- 'as\.logical\.factor', 'as\.matrix', 'as\.matrix\.POSIXlt',
- 'as\.matrix\.data\.frame', 'as\.matrix\.default', 'as\.matrix\.noquote',
- 'as\.name', 'as\.null', 'as\.null\.default', 'as\.numeric',
- 'as\.numeric_version', 'as\.octmode', 'as\.ordered',
- 'as\.package_version', 'as\.pairlist', 'as\.qr', 'as\.raw', 'as\.single',
- 'as\.single\.default', 'as\.symbol', 'as\.table', 'as\.table\.default',
- 'as\.vector', 'as\.vector\.factor', 'asNamespace', 'asS3', 'asS4',
- 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
- 'attachNamespace', 'attr', 'attr\.all\.equal', 'attributes',
- 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
- 'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
- 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
- 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
- 'bquote', 'browser', 'browserCondition', 'browserSetDebug',
- 'browserText', 'builtins', 'by', 'by\.data\.frame', 'by\.default',
- 'bzfile', 'c\.Date', 'c\.POSIXct', 'c\.POSIXlt', 'c\.noquote',
- 'c\.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
- 'cat', 'category', 'cbind', 'cbind\.data\.frame', 'ceiling',
- 'char\.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
- 'chol', 'chol\.default', 'chol2inv', 'choose', 'class',
- 'clearPushBack', 'close', 'close\.connection', 'close\.srcfile',
- 'close\.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
- 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
- 'conditionCall', 'conditionCall\.condition', 'conditionMessage',
- 'conditionMessage\.condition', 'conflicts', 'contributors', 'cos',
- 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
- 'cut\.Date', 'cut\.POSIXt', 'cut\.default', 'dQuote', 'data\.class',
- 'data\.matrix', 'date', 'debug', 'debugonce',
- 'default\.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
- 'determinant', 'determinant\.matrix', 'dget', 'diag', 'diff',
- 'diff\.Date', 'diff\.POSIXt', 'diff\.default', 'difftime', 'digamma',
- 'dim', 'dim\.data\.frame', 'dimnames', 'dimnames\.data\.frame', 'dir',
- 'dir\.create', 'dirname', 'do\.call', 'dput', 'drop', 'droplevels',
- 'droplevels\.data\.frame', 'droplevels\.factor', 'dump', 'duplicated',
- 'duplicated\.POSIXlt', 'duplicated\.array', 'duplicated\.data\.frame',
- 'duplicated\.default', 'duplicated\.matrix',
- 'duplicated\.numeric_version', 'dyn\.load', 'dyn\.unload', 'eapply',
- 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
- 'encodeString', 'enquote', 'env\.profile', 'environment',
- 'environmentIsLocked', 'environmentName', 'eval', 'eval\.parent',
- 'evalq', 'exists', 'exp', 'expand\.grid', 'expm1', 'expression',
- 'factor', 'factorial', 'fifo', 'file', 'file\.access', 'file\.append',
- 'file\.choose', 'file\.copy', 'file\.create', 'file\.exists',
- 'file\.info', 'file\.link', 'file\.path', 'file\.remove', 'file\.rename',
- 'file\.show', 'file\.symlink', 'find\.package', 'findInterval',
- 'findPackageEnv', 'findRestart', 'floor', 'flush',
- 'flush\.connection', 'force', 'formals', 'format',
- 'format\.AsIs', 'format\.Date', 'format\.POSIXct', 'format\.POSIXlt',
- 'format\.data\.frame', 'format\.default', 'format\.difftime',
- 'format\.factor', 'format\.hexmode', 'format\.info',
- 'format\.libraryIQR', 'format\.numeric_version', 'format\.octmode',
- 'format\.packageInfo', 'format\.pval', 'format\.summaryDefault',
- 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc\.time',
- 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
- 'getCallingDLL', 'getCallingDLLe', 'getConnection',
- 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines\.DLLInfo',
- 'getDLLRegisteredRoutines\.character', 'getElement',
- 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
- 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
- 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
- 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
- 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
- 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
- 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
- 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
- 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
- 'intersect', 'inverse\.rle', 'invisible', 'invokeRestart',
- 'invokeRestartInteractively', 'is\.R', 'is\.array', 'is\.atomic',
- 'is\.call', 'is\.character', 'is\.complex', 'is\.data\.frame',
- 'is\.double', 'is\.element', 'is\.environment', 'is\.expression',
- 'is\.factor', 'is\.finite', 'is\.function', 'is\.infinite',
- 'is\.integer', 'is\.language', 'is\.list', 'is\.loaded', 'is\.logical',
- 'is\.matrix', 'is\.na', 'is\.na\.POSIXlt', 'is\.na\.data\.frame',
- 'is\.na\.numeric_version', 'is\.name', 'is\.nan', 'is\.null',
- 'is\.numeric', 'is\.numeric\.Date', 'is\.numeric\.POSIXt',
- 'is\.numeric\.difftime', 'is\.numeric_version', 'is\.object',
- 'is\.ordered', 'is\.package_version', 'is\.pairlist', 'is\.primitive',
- 'is\.qr', 'is\.raw', 'is\.recursive', 'is\.single', 'is\.symbol',
- 'is\.table', 'is\.unsorted', 'is\.vector', 'isBaseNamespace',
- 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
- 'isSeekable', 'isSymmetric', 'isSymmetric\.matrix', 'isTRUE',
- 'isatty', 'isdebugged', 'jitter', 'julian', 'julian\.Date',
- 'julian\.POSIXt', 'kappa', 'kappa\.default', 'kappa\.lm', 'kappa\.qr',
- 'kronecker', 'l10n_info', 'labels', 'labels\.default', 'lapply',
- 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
- 'length', 'length\.POSIXlt', 'letters', 'levels', 'levels\.default',
- 'lfactorial', 'lgamma', 'library\.dynam', 'library\.dynam\.unload',
- 'licence', 'license', 'list\.dirs', 'list\.files', 'list2env', 'load',
- 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
- 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
- 'log2', 'logb', 'lower\.tri', 'ls', 'make\.names', 'make\.unique',
- 'makeActiveBinding', 'mapply', 'margin\.table', 'mat\.or\.vec',
- 'match', 'match\.arg', 'match\.call', 'match\.fun', 'max', 'max\.col',
- 'mean', 'mean\.Date', 'mean\.POSIXct', 'mean\.POSIXlt', 'mean\.default',
- 'mean\.difftime', 'mem\.limits', 'memCompress', 'memDecompress',
- 'memory\.profile', 'merge', 'merge\.data\.frame', 'merge\.default',
- 'message', 'mget', 'min', 'missing', 'mode', 'month\.abb',
- 'month\.name', 'months', 'months\.Date', 'months\.POSIXt',
- 'months\.abb', 'months\.nameletters', 'names', 'names\.POSIXlt',
- 'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
- 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
- 'ncol', 'new\.env', 'ngettext', 'nlevels', 'noquote', 'norm',
- 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
- 'oldClass', 'on\.exit', 'open', 'open\.connection', 'open\.srcfile',
- 'open\.srcfilealias', 'open\.srcfilecopy', 'options', 'order',
- 'ordered', 'outer', 'packBits', 'packageEvent',
- 'packageHasNamespace', 'packageStartupMessage', 'package_version',
- 'pairlist', 'parent\.env', 'parent\.frame', 'parse',
- 'parseNamespaceFile', 'paste', 'paste0', 'path\.expand',
- 'path\.package', 'pipe', 'pmatch', 'pmax', 'pmax\.int', 'pmin',
- 'pmin\.int', 'polyroot', 'pos\.to\.env', 'pretty', 'pretty\.default',
- 'prettyNum', 'print', 'print\.AsIs', 'print\.DLLInfo',
- 'print\.DLLInfoList', 'print\.DLLRegisteredRoutines', 'print\.Date',
- 'print\.NativeRoutineList', 'print\.POSIXct', 'print\.POSIXlt',
- 'print\.by', 'print\.condition', 'print\.connection',
- 'print\.data\.frame', 'print\.default', 'print\.difftime',
- 'print\.factor', 'print\.function', 'print\.hexmode',
- 'print\.libraryIQR', 'print\.listof', 'print\.noquote',
- 'print\.numeric_version', 'print\.octmode', 'print\.packageInfo',
- 'print\.proc_time', 'print\.restart', 'print\.rle',
- 'print\.simple\.list', 'print\.srcfile', 'print\.srcref',
- 'print\.summary\.table', 'print\.summaryDefault', 'print\.table',
- 'print\.warnings', 'prmatrix', 'proc\.time', 'prod', 'prop\.table',
- 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
- 'qr', 'qr\.Q', 'qr\.R', 'qr\.X', 'qr\.coef', 'qr\.default', 'qr\.fitted',
- 'qr\.qty', 'qr\.qy', 'qr\.resid', 'qr\.solve', 'quarters',
- 'quarters\.Date', 'quarters\.POSIXt', 'quit', 'quote', 'range',
- 'range\.default', 'rank', 'rapply', 'raw', 'rawConnection',
- 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
- 'rbind\.data\.frame', 'rcond', 'read\.dcf', 'readBin', 'readChar',
- 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg\.finalizer',
- 'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
- 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep\.Date',
- 'rep\.POSIXct', 'rep\.POSIXlt', 'rep\.factor', 'rep\.int',
- 'rep\.numeric_version', 'rep_len', 'replace', 'replicate',
- 'requireNamespace', 'restartDescription', 'restartFormals',
- 'retracemem', 'rev', 'rev\.default', 'rle', 'rm', 'round',
- 'round\.Date', 'round\.POSIXt', 'row', 'row\.names',
- 'row\.names\.data\.frame', 'row\.names\.default', 'rowMeans', 'rowSums',
- 'rownames', 'rowsum', 'rowsum\.data\.frame', 'rowsum\.default',
- 'sQuote', 'sample', 'sample\.int', 'sapply', 'save', 'save\.image',
- 'saveRDS', 'scale', 'scale\.default', 'scan', 'search',
- 'searchpaths', 'seek', 'seek\.connection', 'seq', 'seq\.Date',
- 'seq\.POSIXt', 'seq\.default', 'seq\.int', 'seq_along', 'seq_len',
- 'sequence', 'serialize', 'set\.seed', 'setHook', 'setNamespaceInfo',
- 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
- 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
- 'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
- 'simpleWarning', 'simplify2array', 'sin', 'single',
- 'sinh', 'sink', 'sink\.number', 'slice\.index', 'socketConnection',
- 'socketSelect', 'solve', 'solve\.default', 'solve\.qr', 'sort',
- 'sort\.POSIXlt', 'sort\.default', 'sort\.int', 'sort\.list', 'split',
- 'split\.Date', 'split\.POSIXct', 'split\.data\.frame', 'split\.default',
- 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
- 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
- 'stopifnot', 'storage\.mode', 'strftime', 'strptime', 'strsplit',
- 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
- 'subset\.data\.frame', 'subset\.default', 'subset\.matrix',
- 'substitute', 'substr', 'substring', 'sum', 'summary',
- 'summary\.Date', 'summary\.POSIXct', 'summary\.POSIXlt',
- 'summary\.connection', 'summary\.data\.frame', 'summary\.default',
- 'summary\.factor', 'summary\.matrix', 'summary\.proc_time',
- 'summary\.srcfile', 'summary\.srcref', 'summary\.table',
- 'suppressMessages', 'suppressPackageStartupMessages',
- 'suppressWarnings', 'svd', 'sweep', 'sys\.call', 'sys\.calls',
- 'sys\.frame', 'sys\.frames', 'sys\.function', 'sys\.load\.image',
- 'sys\.nframe', 'sys\.on\.exit', 'sys\.parent', 'sys\.parents',
- 'sys\.save\.image', 'sys\.source', 'sys\.status', 'system',
- 'system\.file', 'system\.time', 'system2', 't', 't\.data\.frame',
- 't\.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
- 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
- 'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
- 'toString', 'toString\.default', 'tolower', 'topenv', 'toupper',
- 'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
- 'transform\.data\.frame', 'transform\.default', 'trigamma', 'trunc',
- 'trunc\.Date', 'trunc\.POSIXt', 'truncate', 'truncate\.connection',
- 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
- 'unique', 'unique\.POSIXlt', 'unique\.array', 'unique\.data\.frame',
- 'unique\.default', 'unique\.matrix', 'unique\.numeric_version',
- 'units', 'units\.difftime', 'unix\.time', 'unlink', 'unlist',
- 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
- 'unsplit', 'untrace', 'untracemem', 'unz', 'upper\.tri', 'url',
- 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
- 'weekdays\.Date', 'weekdays\.POSIXt', 'which', 'which\.max',
- 'which\.min', 'with', 'with\.default', 'withCallingHandlers',
- 'withRestarts', 'withVisible', 'within', 'within\.data\.frame',
- 'within\.list', 'write', 'write\.dcf', 'writeBin', 'writeChar',
- 'writeLines', 'xor', 'xor\.hexmode', 'xor\.octmode',
- 'xpdrows\.data\.frame', 'xtfrm', 'xtfrm\.AsIs', 'xtfrm\.Date',
- 'xtfrm\.POSIXct', 'xtfrm\.POSIXlt', 'xtfrm\.Surv', 'xtfrm\.default',
- 'xtfrm\.difftime', 'xtfrm\.factor', 'xtfrm\.numeric_version', 'xzfile',
- 'zapsmall'
- ]
-
- tokens = {
- 'comments': [
- (r'#.*$', Comment.Single),
- ],
- 'valid_name': [
- (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
- # can begin with ., but not if that is followed by a digit
- (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
- ],
- 'punctuation': [
- (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
- ],
- 'keywords': [
- (r'(' + r'|'.join(builtins_base) + r')'
- r'(?![\w\. =])',
- Keyword.Pseudo),
- (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
- r'(?![\w\.])',
- Keyword.Reserved),
- (r'(array|category|character|complex|double|function|integer|list|'
- r'logical|matrix|numeric|vector|data.frame|c)'
- r'(?![\w\.])',
- Keyword.Type),
- (r'(library|require|attach|detach|source)'
- r'(?![\w\.])',
- Keyword.Namespace)
- ],
- 'operators': [
- (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
- (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
- ],
- 'builtin_symbols': [
- (r'(NULL|NA(_(integer|real|complex|character)_)?|'
- r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
- r'(?![0-9a-zA-Z\._])',
- Keyword.Constant),
- (r'(T|F)\b', Name.Builtin.Pseudo),
- ],
- 'numbers': [
- # hex number
- (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
- # decimal number
- (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
- Number),
- ],
- 'statements': [
- include('comments'),
- # whitespaces
- (r'\s+', Text),
- (r'`.*?`', String.Backtick),
- (r'\'', String, 'string_squote'),
- (r'\"', String, 'string_dquote'),
- include('builtin_symbols'),
- include('numbers'),
- include('keywords'),
- include('punctuation'),
- include('operators'),
- include('valid_name'),
- ],
- 'root': [
- include('statements'),
- # blocks:
- (r'\{|\}', Punctuation),
- #(r'\{', Punctuation, 'block'),
- (r'.', Text),
- ],
- #'block': [
- # include('statements'),
- # ('\{', Punctuation, '#push'),
- # ('\}', Punctuation, '#pop')
- #],
- 'string_squote': [
- (r'([^\'\\]|\\.)*\'', String, '#pop'),
- ],
- 'string_dquote': [
- (r'([^"\\]|\\.)*"', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
- return 0.11
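For reference, a minimal sketch of the assignment heuristic above (plain re, no Pygments needed; the sample strings are hypothetical):

import re

assign = re.compile(r'[a-z0-9_\])\s]<-(?!-)')
print(bool(assign.search('x <- c(1, 2, 3)')))   # True: looks like an R assignment
print(bool(assign.search('y <-- 1')))           # False: '<--' is rejected by the lookahead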
-
-
-class BugsLexer(RegexLexer):
- """
- Pygments Lexer for `OpenBUGS <http://www.openbugs.info/w/>`_ and WinBUGS
- models.
-
- .. versionadded:: 1.6
- """
-
- name = 'BUGS'
- aliases = ['bugs', 'winbugs', 'openbugs']
- filenames = ['*.bug']
-
- _FUNCTIONS = [
- # Scalar functions
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
- 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
- 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
- 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
- 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
- 'trunc',
- # Vector functions
- 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
- 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
- 'sd', 'sort', 'sum',
- ## Special
- 'D', 'I', 'F', 'T', 'C']
- """ OpenBUGS built-in functions
-
- From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
- This also includes
-
- - T, C, I : Truncation and censoring.
- ``T`` and ``C`` are used in OpenBUGS; ``I`` is used in WinBUGS.
- - D : ODE
- - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
- """
-
- _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
- 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
- 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
- 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
- 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
- 'dmt', 'dwish']
- """ OpenBUGS built-in distributions
-
- Distribution names from
- http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
- """
-
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'comments' : [
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model)(\s+)({)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
- # Built-in Functions
- (r'(%s)(?=\s*\()'
- % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
- Name.Builtin),
- # Regular variable names
- (r'[A-Za-z][\w.]*', Name),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- # Punctuation
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators
- (r'\+|-|\*|/', Operator),
- # Block
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r"^\s*model\s*{", text, re.M):
- return 0.7
- else:
- return 0.0
-
-class JagsLexer(RegexLexer):
- """
- Pygments Lexer for JAGS.
-
- .. versionadded:: 1.6
- """
-
- name = 'JAGS'
- aliases = ['jags']
- filenames = ['*.jag', '*.bug']
-
- ## JAGS
- _FUNCTIONS = [
- 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
- 'cos', 'cosh', 'cloglog',
- 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
- 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
- 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
- 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
- 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
- # Truncation/Censoring (unclear whether these should be included)
- 'T', 'I']
- # Distributions with density, probability and quantile functions
- _DISTRIBUTIONS = ['[dpq]%s' % x for x in
- ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
- 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
- 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']]
- # Other distributions without density and probability
- _OTHER_DISTRIBUTIONS = [
- 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
- 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
- 'dnbinom', 'dweibull', 'ddirich']
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'names' : [
- # Regular variable names
- (r'[a-zA-Z][\w.]*\b', Name),
- ],
- 'comments' : [
- # do not use stateful comments
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'#.*$', Comment.Single),
- ],
- 'root': [
- # Comments
- include('comments'),
- include('whitespace'),
- # Block start
- (r'(model|data)(\s+)({)',
- bygroups(Keyword.Namespace, Text, Punctuation)),
- (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
- # Reserved Words
- (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
- # Builtins
- # Need to use lookahead because . is a valid char
- (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
- + _DISTRIBUTIONS
- + _OTHER_DISTRIBUTIONS),
- Name.Builtin),
- # Names
- include('names'),
- # Number Literals
- (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
- (r'\[|\]|\(|\)|:|,|;', Punctuation),
- # Assignment operators
- (r'<-|~', Operator),
- # JAGS includes many more operators than OpenBUGS
- (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*model\s*\{', text, re.M):
- if re.search(r'^\s*data\s*\{', text, re.M):
- return 0.9
- elif re.search(r'^\s*var', text, re.M):
- return 0.9
- else:
- return 0.3
- else:
- return 0
-
-class StanLexer(RegexLexer):
- """Pygments Lexer for Stan models.
-
- The Stan modeling language is specified in the *Stan Modeling Language User's Guide and Reference Manual, v2.2.0*,
- `pdf <https://github.com/stan-dev/stan/releases/download/v2.2.0/stan-reference-2.2.0.pdf>`__.
-
- .. versionadded:: 1.6
- """
-
- name = 'Stan'
- aliases = ['stan']
- filenames = ['*.stan']
-
- tokens = {
- 'whitespace' : [
- (r"\s+", Text),
- ],
- 'comments' : [
- (r'(?s)/\*.*?\*/', Comment.Multiline),
- # Comments
- (r'(//|#).*$', Comment.Single),
- ],
- 'root': [
- # Stan is more restrictive on strings than this regex
- (r'"[^"]*"', String),
- # Comments
- include('comments'),
- # block start
- include('whitespace'),
- # Block start
- (r'(%s)(\s*)({)' %
- r'|'.join(('data', r'transformed\s+?data',
- 'parameters', r'transformed\s+parameters',
- 'model', r'generated\s+quantities')),
- bygroups(Keyword.Namespace, Text, Punctuation)),
- # Reserved Words
- (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
- # Truncation
- (r'T(?=\s*\[)', Keyword),
- # Data types
- (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
- # Punctuation
- (r"[;:,\[\]()]", Punctuation),
- # Builtin
- (r'(%s)(?=\s*\()'
- % r'|'.join(_stan_builtins.FUNCTIONS
- + _stan_builtins.DISTRIBUTIONS),
- Name.Builtin),
- # Special names ending in __, like lp__
- (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
- (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
- # Regular variable names
- (r'[A-Za-z]\w*\b', Name),
- # Real Literals
- (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
- (r'-?[0-9]*\.[0-9]*', Number.Float),
- # Integer Literals
- (r'-?[0-9]+', Number.Integer),
- # Assignment operators
- # SLexer makes these tokens Operators.
- (r'<-|~', Operator),
- # Infix and prefix operators (and = )
- (r"\+|-|\.?\*|\.?/|\\|'|==?|!=?|<=?|>=?|\|\||&&", Operator),
- # Block delimiters
- (r'[{}]', Punctuation),
- ]
- }
-
- def analyse_text(text):
- if re.search(r'^\s*parameters\s*\{', text, re.M):
- return 1.0
- else:
- return 0.0
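A minimal sketch of how these analyse_text scores are consumed: guess_lexer calls analyse_text on every registered lexer and returns the best match, so the hypothetical model below should favour BugsLexer (score 0.7) over JagsLexer (0.3) and StanLexer (0.0):

from pygments.lexers import guess_lexer

source = "model {\n    for (i in 1:N) { y[i] ~ dnorm(mu, tau) }\n}\n"
print(guess_lexer(source).name)   # expected: 'BUGS', per the scores above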
-
-
-class IDLLexer(RegexLexer):
- """
- Pygments Lexer for IDL (Interactive Data Language).
-
- .. versionadded:: 1.6
- """
- name = 'IDL'
- aliases = ['idl']
- filenames = ['*.pro']
- mimetypes = ['text/idl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- _RESERVED = ['and', 'begin', 'break', 'case', 'common', 'compile_opt',
- 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
- 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
- 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
- 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
- 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
- 'repeat', 'switch', 'then', 'until', 'while', 'xor']
- """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""
-
- _BUILTIN_LIB = ['abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
- 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
- 'arg_present', 'array_equal', 'array_indices', 'arrow',
- 'ascii_template', 'asin', 'assoc', 'atan', 'axis',
- 'a_correlate', 'bandpass_filter', 'bandreject_filter',
- 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
- 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
- 'binomial', 'bin_date', 'bit_ffs', 'bit_population',
- 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
- 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
- 'bytscl', 'caldat', 'calendar', 'call_external',
- 'call_function', 'call_method', 'call_procedure', 'canny',
- 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev',
- 'check_math',
- 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
- 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
- 'cmyk_convert', 'colorbar', 'colorize_sample',
- 'colormap_applicable', 'colormap_gradient',
- 'colormap_rotation', 'colortable', 'color_convert',
- 'color_exchange', 'color_quan', 'color_range_map', 'comfit',
- 'command_line_args', 'complex', 'complexarr', 'complexround',
- 'compute_mesh_normals', 'cond', 'congrid', 'conj',
- 'constrained_min', 'contour', 'convert_coord', 'convol',
- 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
- 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
- 'create_view', 'crossp', 'crvlength', 'cti_test',
- 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
- 'cw_animate', 'cw_animate_getp', 'cw_animate_load',
- 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
- 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
- 'cw_form', 'cw_fslider', 'cw_light_editor',
- 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
- 'cw_palette_editor', 'cw_palette_editor_get',
- 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
- 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
- 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
- 'define_msgblk', 'define_msgblk_from_file', 'defroi',
- 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
- 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
- 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
- 'dialog_printersetup', 'dialog_printjob',
- 'dialog_read_image', 'dialog_write_image', 'digital_filter',
- 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
- 'dlm_load', 'dlm_register', 'doc_library', 'double',
- 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
- 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
- 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx',
- 'erode', 'errorplot', 'errplot', 'estimator_filter',
- 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
- 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
- 'file_basename', 'file_chmod', 'file_copy', 'file_delete',
- 'file_dirname', 'file_expand_path', 'file_info',
- 'file_lines', 'file_link', 'file_mkdir', 'file_move',
- 'file_poll_input', 'file_readlink', 'file_same',
- 'file_search', 'file_test', 'file_which', 'findgen',
- 'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
- 'fltarr', 'flush', 'format_axis_values', 'free_lun',
- 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
- 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
- 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
- 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
- 'getwindows', 'get_drive_list', 'get_dxf_objects',
- 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
- 'greg2jul', 'grib_\w*', 'grid3', 'griddata',
- 'grid_input', 'grid_tps', 'gs_iter',
- 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close',
- 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
- 'hanning', 'hash', 'hdf_\w*', 'heap_free',
- 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
- 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
- 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
- 'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
- 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
- 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
- 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
- 'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
- 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
- 'image_cont', 'image_statistics', 'imaginary', 'imap',
- 'indgen', 'intarr', 'interpol', 'interpolate',
- 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
- 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
- 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
- 'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
- 'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
- 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
- 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
- 'json_serialize', 'jul2greg', 'julday', 'keyword_set',
- 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
- 'label_region', 'ladfit', 'laguerre', 'laplacian',
- 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
- 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
- 'la_gm_linear_model', 'la_hqr', 'la_invert',
- 'la_least_squares', 'la_least_square_equality',
- 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
- 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
- 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
- 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
- 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
- 'lngamma', 'lnp_test', 'loadct', 'locale_get',
- 'logical_and', 'logical_or', 'logical_true', 'lon64arr',
- 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
- 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
- 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
- 'map_continents', 'map_grid', 'map_image', 'map_patch',
- 'map_proj_forward', 'map_proj_image', 'map_proj_info',
- 'map_proj_init', 'map_proj_inverse', 'map_set',
- 'matrix_multiply', 'matrix_power', 'max', 'md_test',
- 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
- 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
- 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
- 'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
- 'message', 'min', 'min_curve_surf', 'mk_html_help',
- 'modifyct', 'moment', 'morph_close', 'morph_distance',
- 'morph_gradient', 'morph_hitormiss', 'morph_open',
- 'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
- 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick',
- 'noise_scatter', 'noise_slur', 'norm', 'n_elements',
- 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
- 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
- 'online_help', 'on_error', 'open', 'oplot', 'oploterr',
- 'parse_url', 'particle_trace', 'path_cache', 'path_sep',
- 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
- 'plot_field', 'pnt_line', 'point_lun', 'polarplot',
- 'polar_contour', 'polar_surface', 'poly', 'polyfill',
- 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
- 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
- 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
- 'print', 'printd', 'product', 'profile', 'profiler',
- 'profiles', 'project_vol', 'psafm', 'pseudo',
- 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
- 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
- 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
- 'query_csv', 'query_dicom', 'query_gif', 'query_image',
- 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
- 'query_png', 'query_ppm', 'query_srf', 'query_tiff',
- 'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
- 'rdpix', 'read', 'reads', 'readu', 'read_ascii',
- 'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
- 'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
- 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
- 'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
- 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
- 'read_xwd', 'real_part', 'rebin', 'recall_commands',
- 'recon3', 'reduce_colors', 'reform', 'region_grow',
- 'register_cursor', 'regress', 'replicate',
- 'replicate_inplace', 'resolve_all', 'resolve_routine',
- 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
- 'rot', 'rotate', 'round', 'routine_filepath',
- 'routine_info', 'rs_test', 'r_correlate', 'r_test',
- 'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
- 'scope_traceback', 'scope_varfetch', 'scope_varname',
- 'search2d', 'search3d', 'sem_create', 'sem_delete',
- 'sem_lock', 'sem_release', 'setenv', 'set_plot',
- 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
- 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
- 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
- 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
- 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
- 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
- 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
- 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
- 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
- 'streamline', 'stregex', 'stretch', 'string', 'strjoin',
- 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
- 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
- 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
- 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
- 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
- 'tanh', 'tek_color', 'temporary', 'tetra_clip',
- 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
- 'timegen', 'time_test2', 'tm_test', 'total', 'trace',
- 'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
- 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
- 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
- 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
- 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
- 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
- 'value_locate', 'variance', 'vector', 'vector_field', 'vel',
- 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
- 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
- 'where', 'widget_base', 'widget_button', 'widget_combobox',
- 'widget_control', 'widget_displaycontextmen', 'widget_draw',
- 'widget_droplist', 'widget_event', 'widget_info',
- 'widget_label', 'widget_list', 'widget_propertysheet',
- 'widget_slider', 'widget_tab', 'widget_table',
- 'widget_text', 'widget_tree', 'widget_tree_move',
- 'widget_window', 'wiener_filter', 'window', 'writeu',
- 'write_bmp', 'write_csv', 'write_gif', 'write_image',
- 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
- 'write_png', 'write_ppm', 'write_spr', 'write_srf',
- 'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
- 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
- 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
- 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
- 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
- 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
- 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
- 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
- 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
- 'xmtool', 'xobjview', 'xobjview_rotate',
- 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
- 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
- 'xvolume', 'xvolume_rotate', 'xvolume_write_image',
- 'xyouts', 'zoom', 'zoom_24']
- """Functions from: http://www.exelisvis.com/docs/routines-1.html"""
-
- tokens = {
- 'root': [
- (r'^\s*;.*?\n', Comment.Singleline),
- (r'\b(' + '|'.join(_RESERVED) + r')\b', Keyword),
- (r'\b(' + '|'.join(_BUILTIN_LIB) + r')\b', Name.Builtin),
- (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
- (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator),
- (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
- (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
- (r'\b[0-9]+(L|B|S|UL|ULL|LL)?\b', Number),
- (r'.', Text),
- ]
- }
-
-
-class RdLexer(RegexLexer):
- """
- Pygments Lexer for R documentation (Rd) files
-
- This is a very minimal implementation, highlighting little more
- than the macros. A description of Rd syntax is found in `Writing R
- Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
- and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
-
- .. versionadded:: 1.6
- """
- name = 'Rd'
- aliases = ['rd']
- filenames = ['*.Rd']
- mimetypes = ['text/x-r-doc']
-
- # Accounting for the verbatim, LaTeX-like, and R-like regions
- # would require actual parsing.
- tokens = {
- 'root' : [
- # catch escaped brackets and percent sign
- (r'\\[\\{}%]', String.Escape),
- # comments
- (r'%.*$', Comment),
- # special macros with no arguments
- (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
- # macros
- (r'\\[a-zA-Z]+\b', Keyword),
- # special preprocessor macros
- (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
- # non-escaped brackets
- (r'[{}]', Name.Builtin),
- # everything else
- (r'[^\\%\n{}]+', Text),
- (r'.', Text),
- ]
- }
-
-
-class IgorLexer(RegexLexer):
- """
- Pygments Lexer for Igor Pro procedure files (.ipf).
- See http://www.wavemetrics.com/ and http://www.igorexchange.com/.
-
- .. versionadded:: 2.0
- """
-
- name = 'Igor'
- aliases = ['igor', 'igorpro']
- filenames = ['*.ipf']
- mimetypes = ['text/ipf']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- flowControl = [
- 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch',
- 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry',
- 'break', 'continue', 'return',
- ]
- types = [
- 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE',
- 'STRUCT', 'dfref'
- ]
- keywords = [
- 'override', 'ThreadSafe', 'static', 'FuncFit', 'Proc', 'Picture',
- 'Prompt', 'DoPrompt', 'macro', 'window', 'graph', 'function', 'end',
- 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu',
- ]
- operations = [
- 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio',
- 'AddMovieFrame', 'APMath', 'Append', 'AppendImage',
- 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText',
- 'AppendToGraph', 'AppendToLayout', 'AppendToTable', 'AppendXYZContour',
- 'AutoPositionWindow', 'BackgroundInfo', 'Beep', 'BoundingBall',
- 'BrowseURL', 'BuildMenu', 'Button', 'cd', 'Chart', 'CheckBox',
- 'CheckDisplayed', 'ChooseColor', 'Close', 'CloseMovie', 'CloseProc',
- 'ColorScale', 'ColorTab2Wave', 'Concatenate', 'ControlBar',
- 'ControlInfo', 'ControlUpdate', 'ConvexHull', 'Convolve', 'CopyFile',
- 'CopyFolder', 'CopyScales', 'Correlate', 'CreateAliasShortcut', 'Cross',
- 'CtrlBackground', 'CtrlFIFO', 'CtrlNamedBackground', 'Cursor',
- 'CurveFit', 'CustomControl', 'CWT', 'Debugger', 'DebuggerOptions',
- 'DefaultFont', 'DefaultGuiControls', 'DefaultGuiFont', 'DefineGuide',
- 'DelayUpdate', 'DeleteFile', 'DeleteFolder', 'DeletePoints',
- 'Differentiate', 'dir', 'Display', 'DisplayHelpTopic',
- 'DisplayProcedure', 'DoAlert', 'DoIgorMenu', 'DoUpdate', 'DoWindow',
- 'DoXOPIdle', 'DrawAction', 'DrawArc', 'DrawBezier', 'DrawLine',
- 'DrawOval', 'DrawPICT', 'DrawPoly', 'DrawRect', 'DrawRRect', 'DrawText',
- 'DSPDetrend', 'DSPPeriodogram', 'Duplicate', 'DuplicateDataFolder',
- 'DWT', 'EdgeStats', 'Edit', 'ErrorBars', 'Execute', 'ExecuteScriptText',
- 'ExperimentModified', 'Extract', 'FastGaussTransform', 'FastOp',
- 'FBinRead', 'FBinWrite', 'FFT', 'FIFO2Wave', 'FIFOStatus', 'FilterFIR',
- 'FilterIIR', 'FindLevel', 'FindLevels', 'FindPeak', 'FindPointsInPoly',
- 'FindRoots', 'FindSequence', 'FindValue', 'FPClustering', 'fprintf',
- 'FReadLine', 'FSetPos', 'FStatus', 'FTPDelete', 'FTPDownload',
- 'FTPUpload', 'FuncFit', 'FuncFitMD', 'GetAxis', 'GetFileFolderInfo',
- 'GetLastUserMenuInfo', 'GetMarquee', 'GetSelection', 'GetWindow',
- 'GraphNormal', 'GraphWaveDraw', 'GraphWaveEdit', 'Grep', 'GroupBox',
- 'Hanning', 'HideIgorMenus', 'HideInfo', 'HideProcedures', 'HideTools',
- 'HilbertTransform', 'Histogram', 'IFFT', 'ImageAnalyzeParticles',
- 'ImageBlend', 'ImageBoundaryToMask', 'ImageEdgeDetection',
- 'ImageFileInfo', 'ImageFilter', 'ImageFocus', 'ImageGenerateROIMask',
- 'ImageHistModification', 'ImageHistogram', 'ImageInterpolate',
- 'ImageLineProfile', 'ImageLoad', 'ImageMorphology', 'ImageRegistration',
- 'ImageRemoveBackground', 'ImageRestore', 'ImageRotate', 'ImageSave',
- 'ImageSeedFill', 'ImageSnake', 'ImageStats', 'ImageThreshold',
- 'ImageTransform', 'ImageUnwrapPhase', 'ImageWindow', 'IndexSort',
- 'InsertPoints', 'Integrate', 'IntegrateODE', 'Interp3DPath',
- 'Interpolate3D', 'KillBackground', 'KillControl', 'KillDataFolder',
- 'KillFIFO', 'KillFreeAxis', 'KillPath', 'KillPICTs', 'KillStrings',
- 'KillVariables', 'KillWaves', 'KillWindow', 'KMeans', 'Label', 'Layout',
- 'Legend', 'LinearFeedbackShiftRegister', 'ListBox', 'LoadData',
- 'LoadPackagePreferences', 'LoadPICT', 'LoadWave', 'Loess',
- 'LombPeriodogram', 'Make', 'MakeIndex', 'MarkPerfTestTime',
- 'MatrixConvolve', 'MatrixCorr', 'MatrixEigenV', 'MatrixFilter',
- 'MatrixGaussJ', 'MatrixInverse', 'MatrixLinearSolve',
- 'MatrixLinearSolveTD', 'MatrixLLS', 'MatrixLUBkSub', 'MatrixLUD',
- 'MatrixMultiply', 'MatrixOP', 'MatrixSchur', 'MatrixSolve',
- 'MatrixSVBkSub', 'MatrixSVD', 'MatrixTranspose', 'MeasureStyledText',
- 'Modify', 'ModifyContour', 'ModifyControl', 'ModifyControlList',
- 'ModifyFreeAxis', 'ModifyGraph', 'ModifyImage', 'ModifyLayout',
- 'ModifyPanel', 'ModifyTable', 'ModifyWaterfall', 'MoveDataFolder',
- 'MoveFile', 'MoveFolder', 'MoveString', 'MoveSubwindow', 'MoveVariable',
- 'MoveWave', 'MoveWindow', 'NeuralNetworkRun', 'NeuralNetworkTrain',
- 'NewDataFolder', 'NewFIFO', 'NewFIFOChan', 'NewFreeAxis', 'NewImage',
- 'NewLayout', 'NewMovie', 'NewNotebook', 'NewPanel', 'NewPath',
- 'NewWaterfall', 'Note', 'Notebook', 'NotebookAction', 'Open',
- 'OpenNotebook', 'Optimize', 'ParseOperationTemplate', 'PathInfo',
- 'PauseForUser', 'PauseUpdate', 'PCA', 'PlayMovie', 'PlayMovieAction',
- 'PlaySnd', 'PlaySound', 'PopupContextualMenu', 'PopupMenu',
- 'Preferences', 'PrimeFactors', 'Print', 'printf', 'PrintGraphs',
- 'PrintLayout', 'PrintNotebook', 'PrintSettings', 'PrintTable',
- 'Project', 'PulseStats', 'PutScrapText', 'pwd', 'Quit',
- 'RatioFromNumber', 'Redimension', 'Remove', 'RemoveContour',
- 'RemoveFromGraph', 'RemoveFromLayout', 'RemoveFromTable', 'RemoveImage',
- 'RemoveLayoutObjects', 'RemovePath', 'Rename', 'RenameDataFolder',
- 'RenamePath', 'RenamePICT', 'RenameWindow', 'ReorderImages',
- 'ReorderTraces', 'ReplaceText', 'ReplaceWave', 'Resample',
- 'ResumeUpdate', 'Reverse', 'Rotate', 'Save', 'SaveData',
- 'SaveExperiment', 'SaveGraphCopy', 'SaveNotebook',
- 'SavePackagePreferences', 'SavePICT', 'SaveTableCopy',
- 'SetActiveSubwindow', 'SetAxis', 'SetBackground', 'SetDashPattern',
- 'SetDataFolder', 'SetDimLabel', 'SetDrawEnv', 'SetDrawLayer',
- 'SetFileFolderInfo', 'SetFormula', 'SetIgorHook', 'SetIgorMenuMode',
- 'SetIgorOption', 'SetMarquee', 'SetProcessSleep', 'SetRandomSeed',
- 'SetScale', 'SetVariable', 'SetWaveLock', 'SetWindow', 'ShowIgorMenus',
- 'ShowInfo', 'ShowTools', 'Silent', 'Sleep', 'Slider', 'Smooth',
- 'SmoothCustom', 'Sort', 'SoundInRecord', 'SoundInSet',
- 'SoundInStartChart', 'SoundInStatus', 'SoundInStopChart',
- 'SphericalInterpolate', 'SphericalTriangulate', 'SplitString',
- 'sprintf', 'sscanf', 'Stack', 'StackWindows',
- 'StatsAngularDistanceTest', 'StatsANOVA1Test', 'StatsANOVA2NRTest',
- 'StatsANOVA2RMTest', 'StatsANOVA2Test', 'StatsChiTest',
- 'StatsCircularCorrelationTest', 'StatsCircularMeans',
- 'StatsCircularMoments', 'StatsCircularTwoSampleTest',
- 'StatsCochranTest', 'StatsContingencyTable', 'StatsDIPTest',
- 'StatsDunnettTest', 'StatsFriedmanTest', 'StatsFTest',
- 'StatsHodgesAjneTest', 'StatsJBTest', 'StatsKendallTauTest',
- 'StatsKSTest', 'StatsKWTest', 'StatsLinearCorrelationTest',
- 'StatsLinearRegression', 'StatsMultiCorrelationTest',
- 'StatsNPMCTest', 'StatsNPNominalSRTest', 'StatsQuantiles',
- 'StatsRankCorrelationTest', 'StatsResample', 'StatsSample',
- 'StatsScheffeTest', 'StatsSignTest', 'StatsSRTest', 'StatsTTest',
- 'StatsTukeyTest', 'StatsVariancesTest', 'StatsWatsonUSquaredTest',
- 'StatsWatsonWilliamsTest', 'StatsWheelerWatsonTest',
- 'StatsWilcoxonRankTest', 'StatsWRCorrelationTest', 'String',
- 'StructGet', 'StructPut', 'TabControl', 'Tag', 'TextBox', 'Tile',
- 'TileWindows', 'TitleBox', 'ToCommandLine', 'ToolsGrid',
- 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv',
- 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform',
- 'WindowFunction',
- ]
- functions = [
- 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog',
- 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh',
- 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi',
- 'bessj', 'bessk', 'bessy', 'beta', 'betai', 'BinarySearch',
- 'BinarySearchInterp', 'binomial', 'binomialln', 'binomialNoise', 'cabs',
- 'CaptureHistoryStart', 'ceil', 'cequal', 'char2num', 'chebyshev',
- 'chebyshevU', 'CheckName', 'cmplx', 'cmpstr', 'conj', 'ContourZ', 'cos',
- 'cosh', 'cot', 'CountObjects', 'CountObjectsDFR', 'cpowi',
- 'CreationDate', 'csc', 'DataFolderExists', 'DataFolderRefsEqual',
- 'DataFolderRefStatus', 'date2secs', 'datetime', 'DateToJulian',
- 'Dawson', 'DDEExecute', 'DDEInitiate', 'DDEPokeString', 'DDEPokeWave',
- 'DDERequestWave', 'DDEStatus', 'DDETerminate', 'deltax', 'digamma',
- 'DimDelta', 'DimOffset', 'DimSize', 'ei', 'enoise', 'equalWaves', 'erf',
- 'erfc', 'exists', 'exp', 'expInt', 'expNoise', 'factorial', 'fakedata',
- 'faverage', 'faverageXY', 'FindDimLabel', 'FindListItem', 'floor',
- 'FontSizeHeight', 'FontSizeStringWidth', 'FresnelCos', 'FresnelSin',
- 'gamma', 'gammaInc', 'gammaNoise', 'gammln', 'gammp', 'gammq', 'Gauss',
- 'Gauss1D', 'Gauss2D', 'gcd', 'GetDefaultFontSize',
- 'GetDefaultFontStyle', 'GetKeyState', 'GetRTError', 'gnoise',
- 'GrepString', 'hcsr', 'hermite', 'hermiteGauss', 'HyperG0F1',
- 'HyperG1F1', 'HyperG2F1', 'HyperGNoise', 'HyperGPFQ', 'IgorVersion',
- 'ilim', 'imag', 'Inf', 'Integrate1D', 'interp', 'Interp2D', 'Interp3D',
- 'inverseERF', 'inverseERFC', 'ItemsInList', 'jlim', 'Laguerre',
- 'LaguerreA', 'LaguerreGauss', 'leftx', 'LegendreA', 'limit', 'ln',
- 'log', 'logNormalNoise', 'lorentzianNoise', 'magsqr', 'MandelbrotPoint',
- 'MarcumQ', 'MatrixDet', 'MatrixDot', 'MatrixRank', 'MatrixTrace', 'max',
- 'mean', 'min', 'mod', 'ModDate', 'NaN', 'norm', 'NumberByKey',
- 'numpnts', 'numtype', 'NumVarOrDefault', 'NVAR_Exists', 'p2rect',
- 'ParamIsDefault', 'pcsr', 'Pi', 'PixelFromAxisVal', 'pnt2x',
- 'poissonNoise', 'poly', 'poly2D', 'PolygonArea', 'qcsr', 'r2polar',
- 'real', 'rightx', 'round', 'sawtooth', 'ScreenResolution', 'sec',
- 'SelectNumber', 'sign', 'sin', 'sinc', 'sinh', 'SphericalBessJ',
- 'SphericalBessJD', 'SphericalBessY', 'SphericalBessYD',
- 'SphericalHarmonics', 'sqrt', 'StartMSTimer', 'StatsBetaCDF',
- 'StatsBetaPDF', 'StatsBinomialCDF', 'StatsBinomialPDF',
- 'StatsCauchyCDF', 'StatsCauchyPDF', 'StatsChiCDF', 'StatsChiPDF',
- 'StatsCMSSDCDF', 'StatsCorrelation', 'StatsDExpCDF', 'StatsDExpPDF',
- 'StatsErlangCDF', 'StatsErlangPDF', 'StatsErrorPDF', 'StatsEValueCDF',
- 'StatsEValuePDF', 'StatsExpCDF', 'StatsExpPDF', 'StatsFCDF',
- 'StatsFPDF', 'StatsFriedmanCDF', 'StatsGammaCDF', 'StatsGammaPDF',
- 'StatsGeometricCDF', 'StatsGeometricPDF', 'StatsHyperGCDF',
- 'StatsHyperGPDF', 'StatsInvBetaCDF', 'StatsInvBinomialCDF',
- 'StatsInvCauchyCDF', 'StatsInvChiCDF', 'StatsInvCMSSDCDF',
- 'StatsInvDExpCDF', 'StatsInvEValueCDF', 'StatsInvExpCDF',
- 'StatsInvFCDF', 'StatsInvFriedmanCDF', 'StatsInvGammaCDF',
- 'StatsInvGeometricCDF', 'StatsInvKuiperCDF', 'StatsInvLogisticCDF',
- 'StatsInvLogNormalCDF', 'StatsInvMaxwellCDF', 'StatsInvMooreCDF',
- 'StatsInvNBinomialCDF', 'StatsInvNCChiCDF', 'StatsInvNCFCDF',
- 'StatsInvNormalCDF', 'StatsInvParetoCDF', 'StatsInvPoissonCDF',
- 'StatsInvPowerCDF', 'StatsInvQCDF', 'StatsInvQpCDF',
- 'StatsInvRayleighCDF', 'StatsInvRectangularCDF', 'StatsInvSpearmanCDF',
- 'StatsInvStudentCDF', 'StatsInvTopDownCDF', 'StatsInvTriangularCDF',
- 'StatsInvUsquaredCDF', 'StatsInvVonMisesCDF', 'StatsInvWeibullCDF',
- 'StatsKuiperCDF', 'StatsLogisticCDF', 'StatsLogisticPDF',
- 'StatsLogNormalCDF', 'StatsLogNormalPDF', 'StatsMaxwellCDF',
- 'StatsMaxwellPDF', 'StatsMedian', 'StatsMooreCDF', 'StatsNBinomialCDF',
- 'StatsNBinomialPDF', 'StatsNCChiCDF', 'StatsNCChiPDF', 'StatsNCFCDF',
- 'StatsNCFPDF', 'StatsNCTCDF', 'StatsNCTPDF', 'StatsNormalCDF',
- 'StatsNormalPDF', 'StatsParetoCDF', 'StatsParetoPDF', 'StatsPermute',
- 'StatsPoissonCDF', 'StatsPoissonPDF', 'StatsPowerCDF',
- 'StatsPowerNoise', 'StatsPowerPDF', 'StatsQCDF', 'StatsQpCDF',
- 'StatsRayleighCDF', 'StatsRayleighPDF', 'StatsRectangularCDF',
- 'StatsRectangularPDF', 'StatsRunsCDF', 'StatsSpearmanRhoCDF',
- 'StatsStudentCDF', 'StatsStudentPDF', 'StatsTopDownCDF',
- 'StatsTriangularCDF', 'StatsTriangularPDF', 'StatsTrimmedMean',
- 'StatsUSquaredCDF', 'StatsVonMisesCDF', 'StatsVonMisesNoise',
- 'StatsVonMisesPDF', 'StatsWaldCDF', 'StatsWaldPDF', 'StatsWeibullCDF',
- 'StatsWeibullPDF', 'StopMSTimer', 'str2num', 'stringCRC', 'stringmatch',
- 'strlen', 'strsearch', 'StudentA', 'StudentT', 'sum', 'SVAR_Exists',
- 'TagVal', 'tan', 'tanh', 'ThreadGroupCreate', 'ThreadGroupRelease',
- 'ThreadGroupWait', 'ThreadProcessorCount', 'ThreadReturnValue', 'ticks',
- 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists',
- 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem',
- 'WinType', 'WNoise', 'x', 'x2pnt', 'xcsr', 'y', 'z', 'zcsr', 'ZernikeR',
- ]
- functions += [
- 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo',
- 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName',
- 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo',
- 'CsrWave', 'CsrXWave', 'CTabList', 'DataFolderDir', 'date',
- 'DDERequestString', 'FontList', 'FuncRefInfo', 'FunctionInfo',
- 'FunctionList', 'FunctionPath', 'GetDataFolder', 'GetDefaultFont',
- 'GetDimLabel', 'GetErrMessage', 'GetFormula',
- 'GetIndependentModuleName', 'GetIndexedObjName', 'GetIndexedObjNameDFR',
- 'GetRTErrMessage', 'GetRTStackInfo', 'GetScrapText', 'GetUserData',
- 'GetWavesDataFolder', 'GrepList', 'GuideInfo', 'GuideNameList', 'Hash',
- 'IgorInfo', 'ImageInfo', 'ImageNameList', 'IndexedDir', 'IndexedFile',
- 'JulianToDate', 'LayoutInfo', 'ListMatch', 'LowerStr', 'MacroList',
- 'NameOfWave', 'note', 'num2char', 'num2istr', 'num2str',
- 'OperationList', 'PadString', 'ParseFilePath', 'PathList', 'PICTInfo',
- 'PICTList', 'PossiblyQuoteName', 'ProcedureText', 'RemoveByKey',
- 'RemoveEnding', 'RemoveFromList', 'RemoveListItem',
- 'ReplaceNumberByKey', 'ReplaceString', 'ReplaceStringByKey',
- 'Secs2Date', 'Secs2Time', 'SelectString', 'SortList',
- 'SpecialCharacterInfo', 'SpecialCharacterList', 'SpecialDirPath',
- 'StringByKey', 'StringFromList', 'StringList', 'StrVarOrDefault',
- 'TableInfo', 'TextFile', 'ThreadGroupGetDF', 'time', 'TraceFromPixel',
- 'TraceInfo', 'TraceNameList', 'UniqueName', 'UnPadString', 'UpperStr',
- 'VariableList', 'WaveInfo', 'WaveList', 'WaveName', 'WaveUnits',
- 'WinList', 'WinName', 'WinRecreation', 'XWaveName',
- 'ContourNameToWaveRef', 'CsrWaveRef', 'CsrXWaveRef',
- 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef',
- 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR',
- 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR',
- ]
-
- tokens = {
- 'root': [
- (r'//.*$', Comment.Single),
- (r'"([^"\\]|\\.)*"', String),
- # Flow Control.
- (r'\b(%s)\b' % '|'.join(flowControl), Keyword),
- # Types.
- (r'\b(%s)\b' % '|'.join(types), Keyword.Type),
- # Keywords.
- (r'\b(%s)\b' % '|'.join(keywords), Keyword.Reserved),
- # Built-in operations.
- (r'\b(%s)\b' % '|'.join(operations), Name.Class),
- # Built-in functions.
- (r'\b(%s)\b' % '|'.join(functions), Name.Function),
- # Compiler directives.
- (r'^#(include|pragma|define|ifdef|ifndef|endif)',
- Name.Decorator),
- (r'[^a-z"/]+$', Text),
- (r'.', Text),
- ],
- }
-
-
-class MathematicaLexer(RegexLexer):
- """
- Lexer for `Mathematica <http://www.wolfram.com/mathematica/>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'Mathematica'
- aliases = ['mathematica', 'mma', 'nb']
- filenames = ['*.nb', '*.cdf', '*.nbp', '*.ma']
- mimetypes = ['application/mathematica',
- 'application/vnd.wolfram.mathematica',
- 'application/vnd.wolfram.mathematica.package',
- 'application/vnd.wolfram.cdf']
-
- # http://reference.wolfram.com/mathematica/guide/Syntax.html
- operators = [
- ";;", "=", "=.", "!=" "==", ":=", "->", ":>", "/.", "+", "-", "*", "/",
- "^", "&&", "||", "!", "<>", "|", "/;", "?", "@", "//", "/@", "@@",
- "@@@", "~~", "===", "&"]
- operators.sort(reverse=True)
-
- punctuation = [",", ";", "(", ")", "[", "]", "{", "}"]
-
- def _multi_escape(entries):
- return '(%s)' % ('|'.join(re.escape(entry) for entry in entries))
-
- tokens = {
- 'root': [
- (r'(?s)\(\*.*?\*\)', Comment),
-
- (r'([a-zA-Z]+[A-Za-z0-9]*`)', Name.Namespace),
- (r'([A-Za-z0-9]*_+[A-Za-z0-9]*)', Name.Variable),
- (r'#\d*', Name.Variable),
- (r'([a-zA-Z]+[a-zA-Z0-9]*)', Name),
-
- (r'-?[0-9]+\.[0-9]*', Number.Float),
- (r'-?[0-9]*\.[0-9]+', Number.Float),
- (r'-?[0-9]+', Number.Integer),
-
- (_multi_escape(operators), Operator),
- (_multi_escape(punctuation), Punctuation),
- (r'".*?"', String),
- (r'\s+', Text.Whitespace),
- ],
- }
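For illustration, a minimal sketch of what the reverse sort and the `_multi_escape` helper above produce for a hypothetical three-operator subset:

import re

ops = ["=", "==", "==="]
ops.sort(reverse=True)        # ['===', '==', '='] -- longer variants of a shared prefix sort first
pattern = '(%s)' % '|'.join(re.escape(op) for op in ops)
print(re.match(pattern, '=== 1').group(1))   # '===', because alternatives are tried left to right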
-
-class GAPLexer(RegexLexer):
- """
- For `GAP <http://www.gap-system.org>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'GAP'
- aliases = ['gap']
- filenames = ['*.g', '*.gd', '*.gi', '*.gap']
-
- tokens = {
- 'root' : [
- (r'#.*$', Comment.Single),
- (r'"(?:[^"\\]|\\.)*"', String),
- (r'\(|\)|\[|\]|\{|\}', Punctuation),
- (r'''(?x)\b(?:
- if|then|elif|else|fi|
- for|while|do|od|
- repeat|until|
- break|continue|
- function|local|return|end|
- rec|
- quit|QUIT|
- IsBound|Unbind|
- TryNextMethod|
- Info|Assert
- )\b''', Keyword),
- (r'''(?x)\b(?:
- true|false|fail|infinity
- )\b''',
- Name.Constant),
- (r'''(?x)\b(?:
- (Declare|Install)([A-Z][A-Za-z]+)|
- BindGlobal|BIND_GLOBAL
- )\b''',
- Name.Builtin),
- (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator),
- (r'''(?x)\b(?:
- and|or|not|mod|in
- )\b''',
- Operator.Word),
- (r'''(?x)
- (?:[a-zA-Z_0-9]+|`[^`]*`)
- (?:::[a-zA-Z_0-9]+|`[^`]*`)*''', Name.Variable),
- (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
- (r'\.[0-9]+(?:e[0-9]+)?', Number),
- (r'.', Text)
- ]
- }
+__all__ = []
diff --git a/pygments/lexers/matlab.py b/pygments/lexers/matlab.py
new file mode 100644
index 00000000..b7436cd1
--- /dev/null
+++ b/pygments/lexers/matlab.py
@@ -0,0 +1,663 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.matlab
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Matlab and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Whitespace
+
+from pygments.lexers import _scilab_builtins
+
+__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
+
+
+class MatlabLexer(RegexLexer):
+ """
+ For Matlab source code.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab'
+ aliases = ['matlab']
+ filenames = ['*.m']
+ mimetypes = ['text/matlab']
+
+ #
+ # These lists are generated automatically.
+ # Run the following in a bash shell:
+ #
+ # for f in elfun specfun elmat; do
+ # echo -n "$f = "
+ # matlab -nojvm -r "help $f;exit;" | perl -ne \
+ # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
+ # done
+ #
+ # elfun: Elementary math functions
+ # specfun: Special Math functions
+ # elmat: Elementary matrices and matrix manipulation
+ #
+ # taken from Matlab version 7.4.0.336 (R2007a)
+ #
+ elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh",
+ "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2",
+ "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd",
+ "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd",
+ "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2",
+ "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs",
+ "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair",
+ "fix", "floor", "ceil", "round", "mod", "rem", "sign")
+ specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta",
+ "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx",
+ "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre",
+ "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat",
+ "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol",
+ "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv")
+ elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace",
+ "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel",
+ "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape",
+ "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", "flipdim", "rot90",
+ "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute",
+ "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector",
+ "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan",
+ "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel",
+ "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander",
+ "wilkinson")
+
+ tokens = {
+ 'root': [
+ # A line starting with '!' is sent to the system as a command; it is not
+ # obvious which token label fits best, so String.Other is used.
+ (r'^!.*', String.Other),
+ (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
+ (r'%.*$', Comment),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ # from 'iskeyword' on version 7.11 (R2010b):
+ (words((
+ 'break', 'case', 'catch', 'classdef', 'continue', 'else', 'elseif',
+ 'end', 'enumerated', 'events', 'for', 'function', 'global', 'if',
+ 'methods', 'otherwise', 'parfor', 'persistent', 'properties',
+ 'return', 'spmd', 'switch', 'try', 'while'), suffix=r'\b'),
+ Keyword),
+
+ ("(" + "|".join(elfun + specfun + elmat) + r')\b', Name.Builtin),
+
+ # line continuation with following comment:
+ (r'\.\.\..*$', Comment),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
+ (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w\)\].])\'+', Operator),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'(?<![\w\)\].])\'', String, 'string'),
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r'[^\']*\'', String, '#pop')
+ ],
+ 'blockcomment': [
+ (r'^\s*%\}', Comment.Multiline, '#pop'),
+ (r'^.*\n', Comment.Multiline),
+ (r'.', Comment.Multiline),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.match('^\s*%', text, re.M): # comment
+ return 0.2
+ elif re.match('^!\w+', text, re.M): # system cmd
+ return 0.2
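A minimal usage sketch for the lexer defined above (module path as introduced by this file; the code snippet is hypothetical):

from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers.matlab import MatlabLexer

code = "function y = square(x)\n    y = x.^2;  % elementwise square\nend\n"
print(highlight(code, MatlabLexer(), HtmlFormatter()))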
+
+
+line_re = re.compile('.*?\n')
+
+
+class MatlabSessionLexer(Lexer):
+ """
+ For Matlab sessions. Modeled after PythonConsoleLexer.
+ Contributed by Ken Schutte <kschutte@csail.mit.edu>.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Matlab session'
+ aliases = ['matlabsession']
+
+ def get_tokens_unprocessed(self, text):
+ mlexer = MatlabLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+
+ if line.startswith('>> '):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:3])]))
+ curcode += line[3:]
+
+ elif line.startswith('>>'):
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:2])]))
+ curcode += line[2:]
+
+ elif line.startswith('???'):
+
+ idx = len(curcode)
+
+ # without a leading newline the error seems to show on the same line as the previous output(?)
+ # line = "\n" + line
+ token = (0, Generic.Traceback, line)
+ insertions.append((idx, [token]))
+
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+
+ yield match.start(), Generic.Output, line
+
+ if curcode: # or item:
+ for item in do_insertions(
+ insertions, mlexer.get_tokens_unprocessed(curcode)):
+ yield item
+
+
+class OctaveLexer(RegexLexer):
+ """
+ For GNU Octave source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Octave'
+ aliases = ['octave']
+ filenames = ['*.m']
+ mimetypes = ['text/octave']
+
+ # These lists are generated automatically.
+ # Run the following in a bash shell:
+ #
+ # First dump all of the Octave manual into a plain text file:
+ #
+ # $ info octave --subnodes -o octave-manual
+ #
+ # Now grep through it:
+
+ # for i in \
+ # "Built-in Function" "Command" "Function File" \
+ # "Loadable Function" "Mapping Function";
+ # do
+ # perl -e '@name = qw('"$i"');
+ # print lc($name[0]),"_kw = [\n"';
+ #
+ # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
+ # octave-manual | sort | uniq ;
+ # echo "]" ;
+ # echo;
+ # done
+
+ # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
+
+ builtin_kw = (
+ "addlistener", "addpath", "addproperty", "all",
+ "and", "any", "argnames", "argv", "assignin",
+ "atexit", "autoload",
+ "available_graphics_toolkits", "beep_on_error",
+ "bitand", "bitmax", "bitor", "bitshift", "bitxor",
+ "cat", "cell", "cellstr", "char", "class", "clc",
+ "columns", "command_line_path",
+ "completion_append_char", "completion_matches",
+ "complex", "confirm_recursive_rmdir", "cputime",
+ "crash_dumps_octave_core", "ctranspose", "cumprod",
+ "cumsum", "debug_on_error", "debug_on_interrupt",
+ "debug_on_warning", "default_save_options",
+ "dellistener", "diag", "diff", "disp",
+ "doc_cache_file", "do_string_escapes", "double",
+ "drawnow", "e", "echo_executing_commands", "eps",
+ "eq", "errno", "errno_list", "error", "eval",
+ "evalin", "exec", "exist", "exit", "eye", "false",
+ "fclear", "fclose", "fcntl", "fdisp", "feof",
+ "ferror", "feval", "fflush", "fgetl", "fgets",
+ "fieldnames", "file_in_loadpath", "file_in_path",
+ "filemarker", "filesep", "find_dir_in_path",
+ "fixed_point_format", "fnmatch", "fopen", "fork",
+ "formula", "fprintf", "fputs", "fread", "freport",
+ "frewind", "fscanf", "fseek", "fskipl", "ftell",
+ "functions", "fwrite", "ge", "genpath", "get",
+ "getegid", "getenv", "geteuid", "getgid",
+ "getpgrp", "getpid", "getppid", "getuid", "glob",
+ "gt", "gui_mode", "history_control",
+ "history_file", "history_size",
+ "history_timestamp_format_string", "home",
+ "horzcat", "hypot", "ifelse",
+ "ignore_function_time_stamp", "inferiorto",
+ "info_file", "info_program", "inline", "input",
+ "intmax", "intmin", "ipermute",
+ "is_absolute_filename", "isargout", "isbool",
+ "iscell", "iscellstr", "ischar", "iscomplex",
+ "isempty", "isfield", "isfloat", "isglobal",
+ "ishandle", "isieee", "isindex", "isinteger",
+ "islogical", "ismatrix", "ismethod", "isnull",
+ "isnumeric", "isobject", "isreal",
+ "is_rooted_relative_filename", "issorted",
+ "isstruct", "isvarname", "kbhit", "keyboard",
+ "kill", "lasterr", "lasterror", "lastwarn",
+ "ldivide", "le", "length", "link", "linspace",
+ "logical", "lstat", "lt", "make_absolute_filename",
+ "makeinfo_program", "max_recursion_depth", "merge",
+ "methods", "mfilename", "minus", "mislocked",
+ "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
+ "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
+ "munlock", "nargin", "nargout",
+ "native_float_format", "ndims", "ne", "nfields",
+ "nnz", "norm", "not", "numel", "nzmax",
+ "octave_config_info", "octave_core_file_limit",
+ "octave_core_file_name",
+ "octave_core_file_options", "ones", "or",
+ "output_max_field_width", "output_precision",
+ "page_output_immediately", "page_screen_output",
+ "path", "pathsep", "pause", "pclose", "permute",
+ "pi", "pipe", "plus", "popen", "power",
+ "print_empty_dimensions", "printf",
+ "print_struct_array_contents", "prod",
+ "program_invocation_name", "program_name",
+ "putenv", "puts", "pwd", "quit", "rats", "rdivide",
+ "readdir", "readlink", "read_readline_init_file",
+ "realmax", "realmin", "rehash", "rename",
+ "repelems", "re_read_readline_init_file", "reset",
+ "reshape", "resize", "restoredefaultpath",
+ "rethrow", "rmdir", "rmfield", "rmpath", "rows",
+ "save_header_format_string", "save_precision",
+ "saving_history", "scanf", "set", "setenv",
+ "shell_cmd", "sighup_dumps_octave_core",
+ "sigterm_dumps_octave_core", "silent_functions",
+ "single", "size", "size_equal", "sizemax",
+ "sizeof", "sleep", "source", "sparse_auto_mutate",
+ "split_long_rows", "sprintf", "squeeze", "sscanf",
+ "stat", "stderr", "stdin", "stdout", "strcmp",
+ "strcmpi", "string_fill_char", "strncmp",
+ "strncmpi", "struct", "struct_levels_to_print",
+ "strvcat", "subsasgn", "subsref", "sum", "sumsq",
+ "superiorto", "suppress_verbose_help_message",
+ "symlink", "system", "tic", "tilde_expand",
+ "times", "tmpfile", "tmpnam", "toc", "toupper",
+ "transpose", "true", "typeinfo", "umask", "uminus",
+ "uname", "undo_string_escapes", "unlink", "uplus",
+ "upper", "usage", "usleep", "vec", "vectorize",
+ "vertcat", "waitpid", "warning", "warranty",
+ "whos_line_format", "yes_or_no", "zeros",
+ "inf", "Inf", "nan", "NaN")
+
+ command_kw = ("close", "load", "who", "whos")
+
+ function_kw = (
+ "accumarray", "accumdim", "acosd", "acotd",
+ "acscd", "addtodate", "allchild", "ancestor",
+ "anova", "arch_fit", "arch_rnd", "arch_test",
+ "area", "arma_rnd", "arrayfun", "ascii", "asctime",
+ "asecd", "asind", "assert", "atand",
+ "autoreg_matrix", "autumn", "axes", "axis", "bar",
+ "barh", "bartlett", "bartlett_test", "beep",
+ "betacdf", "betainv", "betapdf", "betarnd",
+ "bicgstab", "bicubic", "binary", "binocdf",
+ "binoinv", "binopdf", "binornd", "bitcmp",
+ "bitget", "bitset", "blackman", "blanks",
+ "blkdiag", "bone", "box", "brighten", "calendar",
+ "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
+ "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
+ "chisquare_test_homogeneity",
+ "chisquare_test_independence", "circshift", "cla",
+ "clabel", "clf", "clock", "cloglog", "closereq",
+ "colon", "colorbar", "colormap", "colperm",
+ "comet", "common_size", "commutation_matrix",
+ "compan", "compare_versions", "compass",
+ "computer", "cond", "condest", "contour",
+ "contourc", "contourf", "contrast", "conv",
+ "convhull", "cool", "copper", "copyfile", "cor",
+ "corrcoef", "cor_test", "cosd", "cotd", "cov",
+ "cplxpair", "cross", "cscd", "cstrcat", "csvread",
+ "csvwrite", "ctime", "cumtrapz", "curl", "cut",
+ "cylinder", "date", "datenum", "datestr",
+ "datetick", "datevec", "dblquad", "deal",
+ "deblank", "deconv", "delaunay", "delaunayn",
+ "delete", "demo", "detrend", "diffpara", "diffuse",
+ "dir", "discrete_cdf", "discrete_inv",
+ "discrete_pdf", "discrete_rnd", "display",
+ "divergence", "dlmwrite", "dos", "dsearch",
+ "dsearchn", "duplication_matrix", "durbinlevinson",
+ "ellipsoid", "empirical_cdf", "empirical_inv",
+ "empirical_pdf", "empirical_rnd", "eomday",
+ "errorbar", "etime", "etreeplot", "example",
+ "expcdf", "expinv", "expm", "exppdf", "exprnd",
+ "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
+ "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
+ "factorial", "fail", "fcdf", "feather", "fftconv",
+ "fftfilt", "fftshift", "figure", "fileattrib",
+ "fileparts", "fill", "findall", "findobj",
+ "findstr", "finv", "flag", "flipdim", "fliplr",
+ "flipud", "fpdf", "fplot", "fractdiff", "freqz",
+ "freqz_plot", "frnd", "fsolve",
+ "f_test_regression", "ftp", "fullfile", "fzero",
+ "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
+ "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
+ "geoinv", "geopdf", "geornd", "getfield", "ginput",
+ "glpk", "gls", "gplot", "gradient",
+ "graphics_toolkit", "gray", "grid", "griddata",
+ "griddatan", "gtext", "gunzip", "gzip", "hadamard",
+ "hamming", "hankel", "hanning", "hggroup",
+ "hidden", "hilb", "hist", "histc", "hold", "hot",
+ "hotelling_test", "housh", "hsv", "hurst",
+ "hygecdf", "hygeinv", "hygepdf", "hygernd",
+ "idivide", "ifftshift", "image", "imagesc",
+ "imfinfo", "imread", "imshow", "imwrite", "index",
+ "info", "inpolygon", "inputname", "interpft",
+ "interpn", "intersect", "invhilb", "iqr", "isa",
+ "isdefinite", "isdir", "is_duplicate_entry",
+ "isequal", "isequalwithequalnans", "isfigure",
+ "ishermitian", "ishghandle", "is_leap_year",
+ "isletter", "ismac", "ismember", "ispc", "isprime",
+ "isprop", "isscalar", "issquare", "isstrprop",
+ "issymmetric", "isunix", "is_valid_file_id",
+ "isvector", "jet", "kendall",
+ "kolmogorov_smirnov_cdf",
+ "kolmogorov_smirnov_test", "kruskal_wallis_test",
+ "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
+ "laplace_pdf", "laplace_rnd", "legend", "legendre",
+ "license", "line", "linkprop", "list_primes",
+ "loadaudio", "loadobj", "logistic_cdf",
+ "logistic_inv", "logistic_pdf", "logistic_rnd",
+ "logit", "loglog", "loglogerr", "logm", "logncdf",
+ "logninv", "lognpdf", "lognrnd", "logspace",
+ "lookfor", "ls_command", "lsqnonneg", "magic",
+ "mahalanobis", "manova", "matlabroot",
+ "mcnemar_test", "mean", "meansq", "median", "menu",
+ "mesh", "meshc", "meshgrid", "meshz", "mexext",
+ "mget", "mkpp", "mode", "moment", "movefile",
+ "mpoles", "mput", "namelengthmax", "nargchk",
+ "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
+ "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
+ "nonzeros", "normcdf", "normest", "norminv",
+ "normpdf", "normrnd", "now", "nthroot", "null",
+ "ocean", "ols", "onenormest", "optimget",
+ "optimset", "orderfields", "orient", "orth",
+ "pack", "pareto", "parseparams", "pascal", "patch",
+ "pathdef", "pcg", "pchip", "pcolor", "pcr",
+ "peaks", "periodogram", "perl", "perms", "pie",
+ "pink", "planerot", "playaudio", "plot",
+ "plotmatrix", "plotyy", "poisscdf", "poissinv",
+ "poisspdf", "poissrnd", "polar", "poly",
+ "polyaffine", "polyarea", "polyderiv", "polyfit",
+ "polygcd", "polyint", "polyout", "polyreduce",
+ "polyval", "polyvalm", "postpad", "powerset",
+ "ppder", "ppint", "ppjumps", "ppplot", "ppval",
+ "pqpnonneg", "prepad", "primes", "print",
+ "print_usage", "prism", "probit", "qp", "qqplot",
+ "quadcc", "quadgk", "quadl", "quadv", "quiver",
+ "qzhess", "rainbow", "randi", "range", "rank",
+ "ranks", "rat", "reallog", "realpow", "realsqrt",
+ "record", "rectangle_lw", "rectangle_sw",
+ "rectint", "refresh", "refreshdata",
+ "regexptranslate", "repmat", "residue", "ribbon",
+ "rindex", "roots", "rose", "rosser", "rotdim",
+ "rref", "run", "run_count", "rundemos", "run_test",
+ "runtests", "saveas", "saveaudio", "saveobj",
+ "savepath", "scatter", "secd", "semilogx",
+ "semilogxerr", "semilogy", "semilogyerr",
+ "setaudio", "setdiff", "setfield", "setxor",
+ "shading", "shift", "shiftdim", "sign_test",
+ "sinc", "sind", "sinetone", "sinewave", "skewness",
+ "slice", "sombrero", "sortrows", "spaugment",
+ "spconvert", "spdiags", "spearman", "spectral_adf",
+ "spectral_xdf", "specular", "speed", "spencer",
+ "speye", "spfun", "sphere", "spinmap", "spline",
+ "spones", "sprand", "sprandn", "sprandsym",
+ "spring", "spstats", "spy", "sqp", "stairs",
+ "statistics", "std", "stdnormal_cdf",
+ "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
+ "stem", "stft", "strcat", "strchr", "strjust",
+ "strmatch", "strread", "strsplit", "strtok",
+ "strtrim", "strtrunc", "structfun", "studentize",
+ "subplot", "subsindex", "subspace", "substr",
+ "substruct", "summer", "surf", "surface", "surfc",
+ "surfl", "surfnorm", "svds", "swapbytes",
+ "sylvester_matrix", "symvar", "synthesis", "table",
+ "tand", "tar", "tcdf", "tempdir", "tempname",
+ "test", "text", "textread", "textscan", "tinv",
+ "title", "toeplitz", "tpdf", "trace", "trapz",
+ "treelayout", "treeplot", "triangle_lw",
+ "triangle_sw", "tril", "trimesh", "triplequad",
+ "triplot", "trisurf", "triu", "trnd", "tsearchn",
+ "t_test", "t_test_regression", "type", "unidcdf",
+ "unidinv", "unidpdf", "unidrnd", "unifcdf",
+ "unifinv", "unifpdf", "unifrnd", "union", "unique",
+ "unix", "unmkpp", "unpack", "untabify", "untar",
+ "unwrap", "unzip", "u_test", "validatestring",
+ "vander", "var", "var_test", "vech", "ver",
+ "version", "view", "voronoi", "voronoin",
+ "waitforbuttonpress", "wavread", "wavwrite",
+ "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
+ "welch_test", "what", "white", "whitebg",
+ "wienrnd", "wilcoxon_test", "wilkinson", "winter",
+ "xlabel", "xlim", "ylabel", "yulewalker", "zip",
+ "zlabel", "z_test")
+
+ loadable_kw = (
+ "airy", "amd", "balance", "besselh", "besseli",
+ "besselj", "besselk", "bessely", "bitpack",
+ "bsxfun", "builtin", "ccolamd", "cellfun",
+ "cellslices", "chol", "choldelete", "cholinsert",
+ "cholinv", "cholshift", "cholupdate", "colamd",
+ "colloc", "convhulln", "convn", "csymamd",
+ "cummax", "cummin", "daspk", "daspk_options",
+ "dasrt", "dasrt_options", "dassl", "dassl_options",
+ "dbclear", "dbdown", "dbstack", "dbstatus",
+ "dbstop", "dbtype", "dbup", "dbwhere", "det",
+ "dlmread", "dmperm", "dot", "eig", "eigs",
+ "endgrent", "endpwent", "etree", "fft", "fftn",
+ "fftw", "filter", "find", "full", "gcd",
+ "getgrent", "getgrgid", "getgrnam", "getpwent",
+ "getpwnam", "getpwuid", "getrusage", "givens",
+ "gmtime", "gnuplot_binary", "hess", "ifft",
+ "ifftn", "inv", "isdebugmode", "issparse", "kron",
+ "localtime", "lookup", "lsode", "lsode_options",
+ "lu", "luinc", "luupdate", "matrix_type", "max",
+ "min", "mktime", "pinv", "qr", "qrdelete",
+ "qrinsert", "qrshift", "qrupdate", "quad",
+ "quad_options", "qz", "rand", "rande", "randg",
+ "randn", "randp", "randperm", "rcond", "regexp",
+ "regexpi", "regexprep", "schur", "setgrent",
+ "setpwent", "sort", "spalloc", "sparse", "spparms",
+ "sprank", "sqrtm", "strfind", "strftime",
+ "strptime", "strrep", "svd", "svd_driver", "syl",
+ "symamd", "symbfact", "symrcm", "time", "tsearch",
+ "typecast", "urlread", "urlwrite")
+
+ mapping_kw = (
+ "abs", "acos", "acosh", "acot", "acoth", "acsc",
+ "acsch", "angle", "arg", "asec", "asech", "asin",
+ "asinh", "atan", "atanh", "beta", "betainc",
+ "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
+ "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
+ "erfcx", "erfinv", "exp", "finite", "fix", "floor",
+ "fmod", "gamma", "gammainc", "gammaln", "imag",
+ "isalnum", "isalpha", "isascii", "iscntrl",
+ "isdigit", "isfinite", "isgraph", "isinf",
+ "islower", "isna", "isnan", "isprint", "ispunct",
+ "isspace", "isupper", "isxdigit", "lcm", "lgamma",
+ "log", "lower", "mod", "real", "rem", "round",
+ "roundb", "sec", "sech", "sign", "sin", "sinh",
+ "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
+
+ builtin_consts = (
+ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
+ "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
+ "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
+ "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
+ "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
+ "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
+ "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
+ "WSTOPSIG", "WTERMSIG", "WUNTRACED")
+
+ tokens = {
+ 'root': [
+ # We should look into multiline comments
+ (r'[%#].*$', Comment),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ # from 'iskeyword' on hg changeset 8cc154f45e37
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
+ suffix=r'\b'), Name.Builtin),
+
+ (words(builtin_consts, suffix=r'\b'), Name.Constant),
+
+ # operators in Octave but not Matlab:
+ (r'-=|!=|!|/=|--', Operator),
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators in Octave but not Matlab requiring escape for re:
+ (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+
+ # punctuation:
+ (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
+ (r'=|:|;', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w\)\].])\'+', Operator),
+ (r'(?<![\w\)\].])\'', String, 'string'),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
+
+
+class ScilabLexer(RegexLexer):
+ """
+ For Scilab source code.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Scilab'
+ aliases = ['scilab']
+ filenames = ['*.sci', '*.sce', '*.tst']
+ mimetypes = ['text/scilab']
+
+ tokens = {
+ 'root': [
+ (r'//.*?$', Comment.Single),
+ (r'^\s*function', Keyword, 'deffunc'),
+
+ (words((
+ '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else',
+ 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef',
+ 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties',
+ 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods',
+ 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try',
+ 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'),
+ Keyword),
+
+ (words(_scilab_builtins.functions_kw +
+ _scilab_builtins.commands_kw +
+ _scilab_builtins.macros_kw, suffix=r'\b'), Name.Builtin),
+
+ (words(_scilab_builtins.variables_kw, suffix=r'\b'), Name.Constant),
+
+ # operators:
+ (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
+ # operators requiring escape for re:
+ (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
+
+ # punctuation:
+ (r'[\[\](){}@.,=:;]', Punctuation),
+
+ (r'"[^"]*"', String),
+
+ # quote can be transpose, instead of string:
+ # (not great, but handles common cases...)
+ (r'(?<=[\w\)\].])\'+', Operator),
+ (r'(?<![\w\)\].])\'', String, 'string'),
+
+ (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eEf][+-]?[0-9]+', Number.Float),
+ (r'\d+', Number.Integer),
+
+ (r'[a-zA-Z_]\w*', Name),
+ (r'.', Text),
+ ],
+ 'string': [
+ (r"[^']*'", String, '#pop'),
+ (r'.', String, '#pop'),
+ ],
+ 'deffunc': [
+ (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
+ bygroups(Whitespace, Text, Whitespace, Punctuation,
+ Whitespace, Name.Function, Punctuation, Text,
+ Punctuation, Whitespace), '#pop'),
+ # function with no args
+ (r'(\s*)([a-zA-Z_]\w*)', bygroups(Text, Name.Function), '#pop'),
+ ],
+ }
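Editorial note: the quote-handling rules in the Octave/Scilab lexers above hinge on a lookbehind: a ' that follows an identifier, a closing bracket/paren or a dot is lexed as the transpose operator, while any other ' opens a string. A minimal sketch of how that plays out, assuming the lexers are registered under the aliases shown in the patch ('scilab'; the Octave lexer earlier in this diff presumably keeps its 'octave' alias):

from pygments.lexers import get_lexer_by_name

lexer = get_lexer_by_name('scilab')

# A' -- the quote follows an identifier, so it becomes the transpose
# Operator; 'abc' starts after ", ", so it is lexed as a String.
for tok, value in lexer.get_tokens("disp(A', 'abc')"):
    if value.strip():
        print(tok, repr(value))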
diff --git a/pygments/lexers/ml.py b/pygments/lexers/ml.py
new file mode 100644
index 00000000..81de39b9
--- /dev/null
+++ b/pygments/lexers/ml.py
@@ -0,0 +1,768 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ml
+ ~~~~~~~~~~~~~~~~~~
+
+ Lexers for ML family languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer']
+
+
+class SMLLexer(RegexLexer):
+ """
+ For the Standard ML language.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Standard ML'
+ aliases = ['sml']
+ filenames = ['*.sml', '*.sig', '*.fun']
+ mimetypes = ['text/x-standardml', 'application/x-standardml']
+
+ alphanumid_reserved = set((
+ # Core
+ 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else',
+ 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix',
+ 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse',
+ 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while',
+ # Modules
+ 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature',
+ 'struct', 'structure', 'where',
+ ))
+
+ symbolicid_reserved = set((
+ # Core
+ ':', '\|', '=', '=>', '->', '#',
+ # Modules
+ ':>',
+ ))
+
+ nonid_reserved = set(('(', ')', '[', ']', '{', '}', ',', ';', '...', '_'))
+
+ alphanumid_re = r"[a-zA-Z][\w']*"
+ symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+"
+
+ # A character constant is a sequence of the form #s, where s is a string
+ # constant denoting a string of size one character. This setup just parses
+ # the entire string as either a String.Double or a String.Char (depending
+ # on the argument), even if the String.Char is an erroneous
+ # multiple-character string.
+ def stringy(whatkind):
+ return [
+ (r'[^"\\]', whatkind),
+ (r'\\[\\\"abtnvfr]', String.Escape),
+ # Control-character notation is used for codes < 32,
+ # where \^@ == \000
+ (r'\\\^[\x40-\x5e]', String.Escape),
+ # Docs say 'decimal digits'
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\u[0-9a-fA-F]{4}', String.Escape),
+ (r'\\\s+\\', String.Interpol),
+ (r'"', whatkind, '#pop'),
+ ]
+
+ # Callbacks for distinguishing tokens and reserved words
+ def long_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ else:
+ token = Name.Namespace
+ yield match.start(1), token, match.group(1)
+ yield match.start(2), Punctuation, match.group(2)
+
+ def end_id_callback(self, match):
+ if match.group(1) in self.alphanumid_reserved:
+ token = Error
+ elif match.group(1) in self.symbolicid_reserved:
+ token = Error
+ else:
+ token = Name
+ yield match.start(1), token, match.group(1)
+
+ def id_callback(self, match):
+ str = match.group(1)
+ if str in self.alphanumid_reserved:
+ token = Keyword.Reserved
+ elif str in self.symbolicid_reserved:
+ token = Punctuation
+ else:
+ token = Name
+ yield match.start(1), token, str
+
+ tokens = {
+ # Whitespace and comments are (almost) everywhere
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+
+ 'delimiters': [
+ # This lexer treats these delimiters specially:
+ # Delimiters define scopes, and the scope is how the meaning of
+ # the `|' is resolved - is it a case/handle expression, or function
+ # definition by cases? (This is not how the Definition works, but
+ # it's how MLton behaves, see http://mlton.org/SMLNJDeviations)
+ (r'\(|\[|{', Punctuation, 'main'),
+ (r'\)|\]|}', Punctuation, '#pop'),
+ (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')),
+ (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'),
+ (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'),
+ ],
+
+ 'core': [
+ # Punctuation that doesn't overlap symbolic identifiers
+ (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved),
+ Punctuation),
+
+ # Special constants: strings, floats, numbers in decimal and hex
+ (r'#"', String.Char, 'char'),
+ (r'"', String.Double, 'string'),
+ (r'~?0x[0-9a-fA-F]+', Number.Hex),
+ (r'0wx[0-9a-fA-F]+', Number.Hex),
+ (r'0w\d+', Number.Integer),
+ (r'~?\d+\.\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+\.\d+', Number.Float),
+ (r'~?\d+[eE]~?\d+', Number.Float),
+ (r'~?\d+', Number.Integer),
+
+ # Labels
+ (r'#\s*[1-9][0-9]*', Name.Label),
+ (r'#\s*(%s)' % alphanumid_re, Name.Label),
+ (r'#\s+(%s)' % symbolicid_re, Name.Label),
+ # Some reserved words trigger a special, local lexer state change
+ (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'),
+ (r'(?=\b(exception)\b(?!\'))', Text, ('ename')),
+ (r'\b(functor|include|open|signature|structure)\b(?!\')',
+ Keyword.Reserved, 'sname'),
+ (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'),
+
+ # Regular identifiers, long and otherwise
+ (r'\'[\w\']*', Name.Decorator),
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"),
+ (r'(%s)' % alphanumid_re, id_callback),
+ (r'(%s)' % symbolicid_re, id_callback),
+ ],
+ 'dotted': [
+ (r'(%s)(\.)' % alphanumid_re, long_id_callback),
+ (r'(%s)' % alphanumid_re, end_id_callback, "#pop"),
+ (r'(%s)' % symbolicid_re, end_id_callback, "#pop"),
+ (r'\s+', Error),
+ (r'\S+', Error),
+ ],
+
+
+ # Main parser (prevents errors in files that have scoping errors)
+ 'root': [
+ default('main')
+ ],
+
+ # In this scope, I expect '|' to not be followed by a function name,
+ # and I expect 'and' to be followed by a binding site
+ 'main': [
+ include('whitespace'),
+
+ # Special behavior of val/and/fun
+ (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'),
+ (r'\b(fun)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main-fun', 'fname')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # In this scope, I expect '|' and 'and' to be followed by a function
+ 'main-fun': [
+ include('whitespace'),
+
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ # Special behavior of val/and/fun
+ (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'),
+ (r'\b(val)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main', 'vname')),
+
+ # Special behavior of '|' and '|'-manipulating keywords
+ (r'\|', Punctuation, 'fname'),
+ (r'\b(case|handle)\b(?!\')', Keyword.Reserved,
+ ('#pop', 'main')),
+
+ include('delimiters'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Character and string parsers
+ 'char': stringy(String.Char),
+ 'string': stringy(String.Double),
+
+ 'breakout': [
+ (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'),
+ ],
+
+ # Dealing with what comes after module system keywords
+ 'sname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'(%s)' % alphanumid_re, Name.Namespace),
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'fun' (or 'and' or '|') keyword
+ 'fname': [
+ include('whitespace'),
+ (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)' % alphanumid_re, Name.Function, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Function, '#pop'),
+
+ # Ignore interesting function declarations like "fun (x + y) = ..."
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'val' (or 'and') keyword
+ 'vname': [
+ include('whitespace'),
+ (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+
+ (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re),
+ bygroups(Name.Variable, Text, Punctuation), '#pop'),
+ (r'(%s)' % alphanumid_re, Name.Variable, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Variable, '#pop'),
+
+ # Ignore interesting patterns like 'val (x, y)'
+ default('#pop'),
+ ],
+
+ # Dealing with what comes after the 'type' (or 'and') keyword
+ 'tname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # A type binding includes most identifiers
+ 'typbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Dealing with what comes after the 'datatype' (or 'and') keyword
+ 'dname': [
+ include('whitespace'),
+ include('breakout'),
+
+ (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
+ (r'\(', Punctuation, 'tyvarseq'),
+ (r'(=)(\s*)(datatype)',
+ bygroups(Punctuation, Text, Keyword.Reserved), '#pop'),
+ (r'=(?!%s)' % symbolicid_re, Punctuation,
+ ('#pop', 'datbind', 'datcon')),
+
+ (r'(%s)' % alphanumid_re, Keyword.Type),
+ (r'(%s)' % symbolicid_re, Keyword.Type),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # common case - A | B | C of int
+ 'datbind': [
+ include('whitespace'),
+
+ (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')),
+ (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ (r'(\|)(\s*)(%s)' % alphanumid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+ (r'(\|)(\s+)(%s)' % symbolicid_re,
+ bygroups(Punctuation, Text, Name.Class)),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ # Dealing with what comes after an exception
+ 'ename': [
+ include('whitespace'),
+
+ (r'(exception|and)\b(\s+)(%s)' % alphanumid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'(exception|and)\b(\s*)(%s)' % symbolicid_re,
+ bygroups(Keyword.Reserved, Text, Name.Class)),
+ (r'\b(of)\b(?!\')', Keyword.Reserved),
+
+ include('breakout'),
+ include('core'),
+ (r'\S+', Error),
+ ],
+
+ 'datcon': [
+ include('whitespace'),
+ (r'(%s)' % alphanumid_re, Name.Class, '#pop'),
+ (r'(%s)' % symbolicid_re, Name.Class, '#pop'),
+ (r'\S+', Error, '#pop'),
+ ],
+
+ # Series of type variables
+ 'tyvarseq': [
+ (r'\s', Text),
+ (r'\(\*', Comment.Multiline, 'comment'),
+
+ (r'\'[0-9a-zA-Z_\']*', Name.Decorator),
+ (alphanumid_re, Name),
+ (r',', Punctuation),
+ (r'\)', Punctuation, '#pop'),
+ (symbolicid_re, Name),
+ ],
+
+ 'comment': [
+ (r'[^(*)]', Comment.Multiline),
+ (r'\(\*', Comment.Multiline, '#push'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[(*)]', Comment.Multiline),
+ ],
+ }
+
+
+class OcamlLexer(RegexLexer):
+ """
+ For the OCaml language.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'OCaml'
+ aliases = ['ocaml']
+ filenames = ['*.ml', '*.mli', '*.mll', '*.mly']
+ mimetypes = ['text/x-ocaml']
+
+ keywords = (
+ 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ 'downto', 'else', 'end', 'exception', 'external', 'false',
+ 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ 'type', 'value', 'val', 'virtual', 'when', 'while', 'with',
+ )
+ keyopts = (
+ '!=', '#', '&', '&&', '\(', '\)', '\*', '\+', ',', '-',
+ '-\.', '->', '\.', '\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
+ '<-', '=', '>', '>]', '>}', '\?', '\?\?', '\[', '\[<', '\[>', '\[\|',
+ ']', '_', '`', '{', '{<', '\|', '\|]', '}', '~'
+ )
+
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
+
+ tokens = {
+ 'escape-sequence': [
+ (r'\\[\\\"\'ntbr]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'\(\*(?![)])', Comment, 'comment'),
+ (r'\b(%s)\b' % '|'.join(keywords), Keyword),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'\d[\d_]*', Number.Integer),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ include('escape-sequence'),
+ (r'\\\n', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z_][\w\']*', Name, '#pop'),
+ ],
+ }
+
+
+class OpaLexer(RegexLexer):
+ """
+ Lexer for the Opa language (http://opalang.org).
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Opa'
+ aliases = ['opa']
+ filenames = ['*.opa']
+ mimetypes = ['text/x-opa']
+
+ # most of these aren't strictly keywords
+ # but if you color only real keywords, you might just
+ # as well not color anything
+ keywords = (
+ 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
+ 'else', 'end', 'external', 'forall', 'function', 'if', 'import',
+ 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
+ 'type', 'val', 'with', 'xml_parser',
+ )
+
+ # matches both stuff and `stuff`
+ ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+ op_re = r'[.=\-<>,@~%/+?*&^!]'
+ punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere
+ # because they are also used for inserts
+
+ tokens = {
+ # copied from the caml lexer, should be adapted
+ 'escape-sequence': [
+ (r'\\[\\\"\'ntr}]', String.Escape),
+ (r'\\[0-9]{3}', String.Escape),
+ (r'\\x[0-9a-fA-F]{2}', String.Escape),
+ ],
+
+ # factorizing these rules, because they are inserted many times
+ 'comments': [
+ (r'/\*', Comment, 'nested-comment'),
+ (r'//.*?$', Comment),
+ ],
+ 'comments-and-spaces': [
+ include('comments'),
+ (r'\s+', Text),
+ ],
+
+ 'root': [
+ include('comments-and-spaces'),
+ # keywords
+ (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+ # directives
+ # we could parse the actual set of directives instead of anything
+ # starting with @, but that is troublesome
+ # because the set needs to be adjusted all the time;
+ # and assuming we only parse sources that compile, it is unnecessary
+ (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
+
+ # number literals
+ (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
+ (r'-?\d+[eE][+\-]?\d+', Number.Float),
+ (r'0[xX][\da-fA-F]+', Number.Hex),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'\d+', Number.Integer),
+ # color literals
+ (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+ # string literals
+ (r'"', String.Double, 'string'),
+ # char literal, should be checked because this is the regexp from
+ # the caml lexer
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+ String.Char),
+
+ # this is meant to deal with embedded exprs in strings
+ # every time we find a '}' we pop a state so that if we were
+ # inside a string, we are back in the string state
+ # as a consequence, we must also push a state every time we find a
+ # '{' or else we will have errors when parsing {} for instance
+ (r'{', Operator, '#push'),
+ (r'}', Operator, '#pop'),
+
+ # html literals
+ # this is much stricter than the actual parser,
+ # since a<b would not be parsed as html;
+ # but then again, the real parser is far more lax, and we can't hope
+ # to be as tolerant here
+ (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+ # db path
+ # matching the '[_]' in '/a[_]' because it is part
+ # of the syntax of the db path definition;
+ # unfortunately, I don't know how to match the ']' in
+ # /a[1], so this is somewhat inconsistent
+ (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable),
+ # putting the same color on <- as on db path, since
+ # it can be used only to mean Db.write
+ (r'<-(?!'+op_re+r')', Name.Variable),
+
+ # 'modules'
+ # although modules are not distinguished by their names as in caml,
+ # the standard library seems to follow the convention that only
+ # modules are capitalized
+ (r'\b([A-Z]\w*)(?=\.)', Name.Namespace),
+
+ # operators
+ # = has a special role because this is the only
+ # way to syntactically distinguish binding constructions
+ # unfortunately, this colors the equal in {x=2} too
+ (r'=(?!'+op_re+r')', Keyword),
+ (r'(%s)+' % op_re, Operator),
+ (r'(%s)+' % punc_re, Operator),
+
+ # coercions
+ (r':', Operator, 'type'),
+ # type variables
+ # we need this rule because we don't specially parse type
+ # definitions, so in "type t('a) = ...", "'a" is parsed by 'root'
+ ("'"+ident_re, Keyword.Type),
+
+ # id literal, #something, or #{expr}
+ (r'#'+ident_re, String.Single),
+ (r'#(?={)', String.Single),
+
+ # identifiers
+ # this avoids coloring the '2' in 'a2' as an integer
+ (ident_re, Text),
+
+ # default, not sure if that is needed or not
+ # (r'.', Text),
+ ],
+
+ # it is quite painful to have to parse types to know where they end
+ # this is the general rule for a type
+ # a type is either:
+ # * -> ty
+ # * type-with-slash
+ # * type-with-slash -> ty
+ # * type-with-slash (, type-with-slash)+ -> ty
+ #
+ # the code is pretty funky in here, but this code would roughly
+ # translate in caml to:
+ # let rec type stream =
+ # match stream with
+ # | [< "->"; stream >] -> type stream
+ # | [< ""; stream >] ->
+ # type_with_slash stream
+ # type_lhs_1 stream;
+ # and type_1 stream = ...
+ 'type': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type),
+ default(('#pop', 'type-lhs-1', 'type-with-slash')),
+ ],
+
+ # parses all the atomic or closed constructions in the syntax of type
+ # expressions: record types, tuple types, type constructors, basic type
+ # and type variables
+ 'type-1': [
+ include('comments-and-spaces'),
+ (r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (r'~?{', Keyword.Type, ('#pop', 'type-record')),
+ (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')),
+ (ident_re, Keyword.Type, '#pop'),
+ ("'"+ident_re, Keyword.Type),
+ # this case is not in the syntax but sometimes
+ # we think we are parsing types when in fact we are parsing
+ # some css, so we just pop the states until we get back into
+ # the root state
+ default('#pop'),
+ ],
+
+ # type-with-slash is either:
+ # * type-1
+ # * type-1 (/ type-1)+
+ 'type-with-slash': [
+ include('comments-and-spaces'),
+ default(('#pop', 'slash-type-1', 'type-1')),
+ ],
+ 'slash-type-1': [
+ include('comments-and-spaces'),
+ ('/', Keyword.Type, ('#pop', 'type-1')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # we go in this state after having parsed a type-with-slash
+ # while trying to parse a type
+ # and at this point we must determine if we are parsing an arrow
+ # type (in which case we must continue parsing) or not (in which
+ # case we stop)
+ 'type-lhs-1': [
+ include('comments-and-spaces'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')),
+ default('#pop'),
+ ],
+ 'type-arrow': [
+ include('comments-and-spaces'),
+ # the lookahead here allows f(x : int, y : float -> truc)
+ # to be parsed correctly
+ (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'),
+ (r'->', Keyword.Type, ('#pop', 'type')),
+ # same remark as above
+ default('#pop'),
+ ],
+
+ # no need to do precise parsing for tuples and records
+ # because they are closed constructions, so we can simply
+ # find the closing delimiter
+ # note that this would not work if the source
+ # contained identifiers like `{)` (although it could be patched
+ # to support them)
+ 'type-tuple': [
+ include('comments-and-spaces'),
+ (r'[^\(\)/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'\(', Keyword.Type, '#push'),
+ (r'\)', Keyword.Type, '#pop'),
+ ],
+ 'type-record': [
+ include('comments-and-spaces'),
+ (r'[^{}/*]+', Keyword.Type),
+ (r'[/*]', Keyword.Type),
+ (r'{', Keyword.Type, '#push'),
+ (r'}', Keyword.Type, '#pop'),
+ ],
+
+ # 'type-tuple': [
+ # include('comments-and-spaces'),
+ # (r'\)', Keyword.Type, '#pop'),
+ # default(('#pop', 'type-tuple-1', 'type-1')),
+ # ],
+ # 'type-tuple-1': [
+ # include('comments-and-spaces'),
+ # (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,)
+ # (r',', Keyword.Type, 'type-1'),
+ # ],
+ # 'type-record':[
+ # include('comments-and-spaces'),
+ # (r'}', Keyword.Type, '#pop'),
+ # (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+ # ],
+ # 'type-record-field-expr': [
+ #
+ # ],
+
+ 'nested-comment': [
+ (r'[^/*]+', Comment),
+ (r'/\*', Comment, '#push'),
+ (r'\*/', Comment, '#pop'),
+ (r'[/*]', Comment),
+ ],
+
+ # the copy-pasting between 'string' and 'single-string'
+ # is unfortunate; is there a way to avoid it?
+ 'string': [
+ (r'[^\\"{]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ (r'{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+ 'single-string': [
+ (r'[^\\\'{]+', String.Double),
+ (r'\'', String.Double, '#pop'),
+ (r'{', Operator, 'root'),
+ include('escape-sequence'),
+ ],
+
+ # all the html stuff
+ # we can't really reuse an existing html parser
+ # because we must be able to parse embedded expressions
+
+ # we are in this state after someone parsed the '<' that
+ # started the html literal
+ 'html-open-tag': [
+ (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ # we are in this state after someone parsed the '</' that
+ # started the end of the closing tag
+ 'html-end-tag': [
+ # this is a star, because </> is allowed
+ (r'[\w\-:]*>', String.Single, '#pop'),
+ ],
+
+ # we are in this state after having parsed '<ident(:ident)?'
+ # we thus parse a possibly empty list of attributes
+ 'html-attr': [
+ (r'\s+', Text),
+ (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+ (r'/>', String.Single, '#pop'),
+ (r'>', String.Single, ('#pop', 'html-content')),
+ ],
+
+ 'html-attr-value': [
+ (r"'", String.Single, ('#pop', 'single-string')),
+ (r'"', String.Single, ('#pop', 'string')),
+ (r'#'+ident_re, String.Single, '#pop'),
+ (r'#(?={)', String.Single, ('#pop', 'root')),
+ (r'[^"\'{`=<>]+', String.Single, '#pop'),
+ (r'{', Operator, ('#pop', 'root')), # this is a tail call!
+ ],
+
+ # we should probably deal with '\' escapes here
+ 'html-content': [
+ (r'<!--', Comment, 'html-comment'),
+ (r'</', String.Single, ('#pop', 'html-end-tag')),
+ (r'<', String.Single, 'html-open-tag'),
+ (r'{', Operator, 'root'),
+ (r'[^<{]+', String.Single),
+ ],
+
+ 'html-comment': [
+ (r'-->', Comment, '#pop'),
+ (r'[^\-]+|-', Comment),
+ ],
+ }
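Editorial note: the SML lexer above classifies identifiers through callbacks rather than one large regex: id_callback checks each match against the reserved-word sets, and long_id_callback walks dotted paths such as List.map module by module. A rough illustration of the resulting token stream (a sketch only; whitespace tokens are filtered out):

from pygments.lexers import get_lexer_by_name

sml = get_lexer_by_name('sml')

# 'List' should come out as Name.Namespace, 'map' as a plain Name,
# and 'val'/'fn' as Keyword.Reserved via the callback logic above.
code = 'val doubled = List.map (fn x => 2 * x) [1, 2, 3]'
for tok, value in sml.get_tokens(code):
    if value.strip():
        print(tok, repr(value))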
diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py
new file mode 100644
index 00000000..b6bf62f9
--- /dev/null
+++ b/pygments/lexers/modeling.py
@@ -0,0 +1,375 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.modeling
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for modeling languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers import _stan_builtins
+
+__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
+
+
+class ModelicaLexer(RegexLexer):
+ """
+ For `Modelica <http://www.modelica.org/>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Modelica'
+ aliases = ['modelica']
+ filenames = ['*.mo']
+ mimetypes = ['text/x-modelica']
+
+ flags = re.IGNORECASE | re.DOTALL
+
+ tokens = {
+ 'whitespace': [
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
+ ],
+ 'statements': [
+ (r'"', String, 'string'),
+ (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'\d+[Ll]?', Number.Integer),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin),
+ (r'([a-z_][\w]*|\'[^\']+\')'
+ r'([\[\d,:\]]*)'
+ r'(\.([a-z_][\w]*|\'[^\']+\'))+'
+ r'([\[\d,:\]]*)', Name.Class),
+ (r'(\'[\w\+\-\*\/\^]+\'|\w+)', Name),
+ (r'[()\[\]{},.;]', Punctuation),
+ (r'\'', Name, 'quoted_ident'),
+ ],
+ 'root': [
+ include('whitespace'),
+ include('classes'),
+ include('functions'),
+ include('keywords'),
+ include('operators'),
+ (r'("<html>|<html>)', Name.Tag, 'html-content'),
+ include('statements'),
+ ],
+ 'keywords': [
+ (r'(algorithm|annotation|break|connect|constant|constrainedby|'
+ r'discrete|each|end|else|elseif|elsewhen|encapsulated|enumeration|'
+ r'equation|exit|expandable|extends|'
+ r'external|false|final|flow|for|if|import|impure|in|initial\sequation|'
+ r'inner|input|loop|nondiscrete|outer|output|parameter|partial|'
+ r'protected|public|pure|redeclare|replaceable|stream|time|then|true|'
+ r'when|while|within)\b', Keyword),
+ ],
+ 'functions': [
+ (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|'
+ r'cross|diagonal|div|exp|fill|floor|getInstanceName|identity|'
+ r'linspace|log|log10|matrix|mod|max|min|ndims|ones|outerProduct|'
+ r'product|rem|scalar|semiLinear|skew|sign|sin|sinh|size|'
+ r'spatialDistribution|sum|sqrt|symmetric|tan|tanh|transpose|'
+ r'vector|zeros)\b', Name.Function),
+ ],
+ 'operators': [
+ (r'(actualStream|and|assert|backSample|cardinality|change|Clock|'
+ r'delay|der|edge|hold|homotopy|initial|inStream|noClock|noEvent|'
+ r'not|or|pre|previous|reinit|return|sample|smooth|'
+ r'spatialDistribution|shiftSample|subSample|superSample|terminal|'
+ r'terminate)\b', Name.Builtin),
+ ],
+ 'classes': [
+ (r'(operator)?(\s+)?(block|class|connector|end|function|model|'
+ r'operator|package|record|type)(\s+)'
+ r'((?!if|for|when|while)[a-z_]\w*|\'[^\']+\')([;]?)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Class, Text))
+ ],
+ 'quoted_ident': [
+ (r'\'', Name, '#pop'),
+ (r'[^\']+', Name), # all other characters
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})',
+ String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'html-content': [
+ (r'<\s*/\s*html\s*>"', Name.Tag, '#pop'),
+ (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)),
+ ]
+ }
+
+
+class BugsLexer(RegexLexer):
+ """
+ Pygments Lexer for `OpenBugs <http://www.openbugs.net/>`_ and WinBugs
+ models.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'BUGS'
+ aliases = ['bugs', 'winbugs', 'openbugs']
+ filenames = ['*.bug']
+
+ _FUNCTIONS = (
+ # Scalar functions
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
+ 'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
+ 'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
+ 'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
+ 'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
+ 'trunc',
+ # Vector functions
+ 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
+ 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
+ 'sd', 'sort', 'sum',
+ # Special
+ 'D', 'I', 'F', 'T', 'C')
+ """ OpenBUGS built-in functions
+
+ From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
+
+ This also includes
+
+ - T, C, I : Truncation and censoring.
+ ``T`` and ``C`` are in OpenBUGS; ``I`` is in WinBUGS.
+ - D : ODE
+ - F : Functional http://www.openbugs.info/Examples/Functionals.html
+
+ """
+
+ _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
+ 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
+ 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
+ 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
+ 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
+ 'dmt', 'dwish')
+ """ OpenBUGS built-in distributions
+
+ Functions from
+ http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
+ """
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model)(\s+)({)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
+ # Built-in Functions
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
+ Name.Builtin),
+ # Regular variable names
+ (r'[A-Za-z][\w.]*', Name),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ # Punctuation
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix and prefix operators
+ (r'\+|-|\*|/', Operator),
+ # Block
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r"^\s*model\s*{", text, re.M):
+ return 0.7
+ else:
+ return 0.0
+
+
+class JagsLexer(RegexLexer):
+ """
+ Pygments Lexer for JAGS.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'JAGS'
+ aliases = ['jags']
+ filenames = ['*.jag', '*.bug']
+
+ # JAGS
+ _FUNCTIONS = (
+ 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
+ 'cos', 'cosh', 'cloglog',
+ 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
+ 'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
+ 'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
+ 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
+ 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
+ # Truncation/Censoring (should these be included?)
+ 'T', 'I')
+ # Distributions with density, probability and quartile functions
+ _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
+ ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
+ 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
+ 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
+ # Other distributions without density and probability
+ _OTHER_DISTRIBUTIONS = (
+ 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
+ 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
+ 'dnbinom', 'dweibull', 'ddirich')
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'names': [
+ # Regular variable names
+ (r'[a-zA-Z][\w.]*\b', Name),
+ ],
+ 'comments': [
+ # do not use stateful comments
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'#.*$', Comment.Single),
+ ],
+ 'root': [
+ # Comments
+ include('comments'),
+ include('whitespace'),
+ # Block start
+ (r'(model|data)(\s+)({)',
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
+ # Reserved Words
+ (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
+ # Builtins
+ # Need to use lookahead because . is a valid char
+ (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
+ + _DISTRIBUTIONS
+ + _OTHER_DISTRIBUTIONS),
+ Name.Builtin),
+ # Names
+ include('names'),
+ # Number Literals
+ (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
+ (r'\[|\]|\(|\)|:|,|;', Punctuation),
+ # Assignment operators
+ (r'<-|~', Operator),
+ # JAGS includes many more operators than OpenBUGS
+ (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*model\s*\{', text, re.M):
+ if re.search(r'^\s*data\s*\{', text, re.M):
+ return 0.9
+ elif re.search(r'^\s*var', text, re.M):
+ return 0.9
+ else:
+ return 0.3
+ else:
+ return 0
+
+
+class StanLexer(RegexLexer):
+ """Pygments Lexer for Stan models.
+
+ The Stan modeling language is specified in the *Stan Modeling Language
+ User's Guide and Reference Manual, v2.4.0*,
+ `pdf <https://github.com/stan-dev/stan/releases/download/v2.4.0/stan-reference-2.4.0.pdf>`__.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Stan'
+ aliases = ['stan']
+ filenames = ['*.stan']
+
+ tokens = {
+ 'whitespace': [
+ (r"\s+", Text),
+ ],
+ 'comments': [
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ # Comments
+ (r'(//|#).*$', Comment.Single),
+ ],
+ 'root': [
+ # Stan is more restrictive on strings than this regex
+ (r'"[^"]*"', String),
+ # Comments
+ include('comments'),
+ # block start
+ include('whitespace'),
+ # Block start
+ (r'(%s)(\s*)({)' %
+ r'|'.join(('functions', 'data', r'transformed\s+?data',
+ 'parameters', r'transformed\s+parameters',
+ 'model', r'generated\s+quantities')),
+ bygroups(Keyword.Namespace, Text, Punctuation)),
+ # Reserved Words
+ (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
+ # Truncation
+ (r'T(?=\s*\[)', Keyword),
+ # Data types
+ (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
+ # Punctuation
+ (r"[;:,\[\]()]", Punctuation),
+ # Builtin
+ (r'(%s)(?=\s*\()'
+ % r'|'.join(_stan_builtins.FUNCTIONS
+ + _stan_builtins.DISTRIBUTIONS),
+ Name.Builtin),
+ # Special names ending in __, like lp__
+ (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
+ (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
+ # Regular variable names
+ (r'[A-Za-z]\w*\b', Name),
+ # Real Literals
+ (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
+ (r'-?[0-9]*\.[0-9]*', Number.Float),
+ # Integer Literals
+ (r'-?[0-9]+', Number.Integer),
+ # Assignment operators
+ # SLexer makes these tokens Operators.
+ (r'<-|~', Operator),
+ # Infix, prefix and postfix operators (and = )
+ (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator),
+ # Block delimiters
+ (r'[{}]', Punctuation),
+ ]
+ }
+
+ def analyse_text(text):
+ if re.search(r'^\s*parameters\s*\{', text, re.M):
+ return 1.0
+ else:
+ return 0.0
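Editorial note: the analyse_text hooks above are what lets pygments.lexers.guess_lexer tell these otherwise similar model languages apart: a lone model { block scores 0.7 for BUGS, an additional data { block or a var declaration pushes JAGS to 0.9, and a parameters { block is treated as a definitive Stan marker. A quick check of that routing (a sketch, not part of the patch; the model text is illustrative only):

from pygments.lexers import guess_lexer

stan_model = """
data { int<lower=0> N; }
parameters { real mu; }
model { mu ~ normal(0, 1); }
"""

# The 'parameters {' block returns 1.0 from StanLexer.analyse_text,
# so guess_lexer should pick the Stan lexer over BUGS/JAGS here.
print(guess_lexer(stan_model).name)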
diff --git a/pygments/lexers/nimrod.py b/pygments/lexers/nimrod.py
new file mode 100644
index 00000000..9b056fc4
--- /dev/null
+++ b/pygments/lexers/nimrod.py
@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nimrod
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nimrod language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['NimrodLexer']
+
+
+class NimrodLexer(RegexLexer):
+ """
+ For `Nimrod <http://nimrod-code.org/>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Nimrod'
+ aliases = ['nimrod', 'nim']
+ filenames = ['*.nim', '*.nimrod']
+ mimetypes = ['text/x-nimrod']
+
+ flags = re.MULTILINE | re.IGNORECASE | re.UNICODE
+
+ def underscorize(words):
+ newWords = []
+ new = ""
+ for word in words:
+ for ch in word:
+ new += (ch + "_?")
+ newWords.append(new)
+ new = ""
+ return "|".join(newWords)
+
+ keywords = [
+ 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break',
+ 'case', 'cast', 'const', 'continue', 'converter', 'discard',
+ 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally',
+ 'for', 'generic', 'if', 'implies', 'in', 'yield',
+ 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method',
+ 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc',
+ 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try',
+ 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor'
+ ]
+
+ keywordsPseudo = [
+ 'nil', 'true', 'false'
+ ]
+
+ opWords = [
+ 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
+ 'notin', 'is', 'isnot'
+ ]
+
+ types = [
+ 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
+ 'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
+ ]
+
+ tokens = {
+ 'root': [
+ (r'##.*$', String.Doc),
+ (r'#.*$', Comment),
+ (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator),
+ (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;',
+ Punctuation),
+
+ # Strings
+ (r'(?:[\w]+)"', String, 'rdqs'),
+ (r'"""', String, 'tdqs'),
+ ('"', String, 'dqs'),
+
+ # Char
+ ("'", String.Char, 'chars'),
+
+ # Keywords
+ (r'(%s)\b' % underscorize(opWords), Operator.Word),
+ (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'),
+ (r'(%s)\b' % underscorize(keywords), Keyword),
+ (r'(%s)\b' % underscorize(['from', 'import', 'include']),
+ Keyword.Namespace),
+ (r'(v_?a_?r)\b', Keyword.Declaration),
+ (r'(%s)\b' % underscorize(types), Keyword.Type),
+ (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
+ # Identifiers
+ (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
+ # Numbers
+ (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))',
+ Number.Float, ('float-suffix', 'float-number')),
+ (r'0[xX][a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
+ (r'0[bB][01][01_]*', Number.Bin, 'int-suffix'),
+ (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
+ (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
+ # Whitespace
+ (r'\s+', Text),
+ (r'.+$', Error),
+ ],
+ 'chars': [
+ (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
+ (r"'", String.Char, '#pop'),
+ (r".", String.Char)
+ ],
+ 'strings': [
+ (r'(?<!\$)\$(\d+|#|\w+)+', String.Interpol),
+ (r'[^\\\'"\$\n]+', String),
+ # quotes, dollars and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'\$', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'dqs': [
+ (r'\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})',
+ String.Escape),
+ (r'"', String, '#pop'),
+ include('strings')
+ ],
+ 'rdqs': [
+ (r'"(?!")', String, '#pop'),
+ (r'""', String.Escape),
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""(?!")', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'funcname': [
+ (r'((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*', Name.Function, '#pop'),
+ (r'`.+`', Name.Function, '#pop')
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'float-number': [
+ (r'\.(?!\.)[0-9_]*', Number.Float),
+ (r'[eE][+-]?[0-9][0-9_]*', Number.Float),
+ default('#pop')
+ ],
+ 'float-suffix': [
+ (r'\'[fF](32|64)', Number.Float),
+ default('#pop')
+ ],
+ 'int-suffix': [
+ (r'\'[iI](32|64)', Number.Integer.Long),
+ (r'\'[iI](8|16)', Number.Integer),
+ default('#pop')
+ ],
+ }
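Editorial note: Nimrod compares identifiers loosely, ignoring underscores and (mostly) case, which is why the lexer builds its keyword regexes through underscorize: every character is followed by an optional '_', and the lexer's re.IGNORECASE flag handles the casing. Reproducing the helper standalone shows the effect (a sketch under that reading of the code above):

import re

def underscorize(words):
    # same transformation as in NimrodLexer: allow an optional '_'
    # after every character of every keyword
    return "|".join("".join(ch + "_?" for ch in word) for word in words)

pattern = re.compile(r'(%s)\b' % underscorize(['proc', 'iterator']),
                     re.IGNORECASE)

# all of these spellings denote the same keyword to the compiler
for ident in ('proc', 'p_roc', 'PROC', 'iter_ator'):
    print(ident, bool(pattern.match(ident)))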
diff --git a/pygments/lexers/nit.py b/pygments/lexers/nit.py
new file mode 100644
index 00000000..84690cf2
--- /dev/null
+++ b/pygments/lexers/nit.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nit
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexer for the Nit language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['NitLexer']
+
+
+class NitLexer(RegexLexer):
+ """
+ For `nit <http://nitlanguage.org>`_ source.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nit'
+ aliases = ['nit']
+ filenames = ['*.nit']
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (words((
+ 'package', 'module', 'import', 'class', 'abstract', 'interface',
+ 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
+ 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
+ 'public', 'protected', 'private', 'intrude', 'if', 'then',
+ 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
+ 'implies', 'return', 'continue', 'break', 'abort', 'assert',
+ 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
+ 'null', 'as', 'isset', 'label', '__debug__'), suffix=r'(?=[\r\n\t\( ])'),
+ Keyword),
+ (r'[A-Z][A-Za-z0-9_]*', Name.Class),
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), # Simple long string
+ (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
+ r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
+ (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), # Start long string
+ (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), # Mid long string
+ (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), # End long string
+ (r'"(\\.|([^"}{\\]))*"', String), # Simple String
+ (r'"(\\.|([^"}{\\]))*{', String), # Start string
+ (r'}(\\.|([^"}{\\]))*{', String), # Mid String
+ (r'}(\\.|([^"}{\\]))*"', String), # End String
+ (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
+ (r'[0-9]+', Number.Integer),
+ (r'[0-9]*.[0-9]+', Number.Float),
+ (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
+ (r'[a-z][A-Za-z0-9_]*', Name),
+ (r'_[A-Za-z0-9_]+', Name.Variable.Instance),
+ (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+ (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+ (r'`{[^`]*`}', Text), # extern blocks won't be lexed by Nit
+ (r'[\r\n\t ]+', Text),
+ ],
+ }
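Editorial note: unlike most lexers in this patch, the Nit rules recognise keywords only when they are followed by whitespace or an opening parenthesis (the (?=[\r\n\t\( ]) lookahead), so an identifier such as 'ifx' is never split into a keyword plus a trailing name. A tiny check of that behaviour (a sketch; it assumes the 'nit' alias registered above):

from pygments.lexers import get_lexer_by_name

nit = get_lexer_by_name('nit')

# 'if' and 'then' are followed by whitespace, so they are Keywords;
# 'ifx' fails the lookahead and falls through to the Name rule.
for tok, value in nit.get_tokens('if ifx then\n'):
    if value.strip():
        print(tok, repr(value))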
diff --git a/pygments/lexers/nix.py b/pygments/lexers/nix.py
new file mode 100644
index 00000000..1ac78cde
--- /dev/null
+++ b/pygments/lexers/nix.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.nix
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the NixOS Nix language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+ """
+ For the `Nix language <http://nixos.org/nix/>`_.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Nix'
+ aliases = ['nixos', 'nix']
+ filenames = ['*.nix']
+ mimetypes = ['text/x-nix']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
+ 'else', 'then', '...']
+ builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
+ 'map', 'removeAttrs', 'throw', 'toString', 'derivation']
+ operators = ['++', '+', '?', '.', '!', '//', '==',
+ '!=', '&&', '||', '->', '=']
+
+ punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
+
+ tokens = {
+ 'root': [
+ # comments starting with #
+ (r'#.*$', Comment.Single),
+
+ # multiline comments
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # whitespace
+ (r'\s+', Text),
+
+ # keywords
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
+
+ # highlight the builtins
+ ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
+ Name.Builtin),
+
+ (r'\b(true|false|null)\b', Name.Constant),
+
+ # operators
+ ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
+ Operator),
+
+ # word operators
+ (r'\b(or|and)\b', Operator.Word),
+
+ # punctuations
+ ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
+
+ # integers
+ (r'[0-9]+', Number.Integer),
+
+ # strings
+ (r'"', String.Double, 'doublequote'),
+ (r"''", String.Single, 'singlequote'),
+
+ # paths
+ (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
+ (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
+
+ # urls
+ (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
+
+ # names of variables
+ (r'[\w-]+\s*=', String.Symbol),
+ (r'[a-zA-Z_][\w\'-]*', Text),
+
+ ],
+ 'comment': [
+ (r'[^/\*]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[\*/]', Comment.Multiline),
+ ],
+ 'singlequote': [
+ (r"'''", String.Escape),
+ (r"''\$\{", String.Escape),
+ (r"''\n", String.Escape),
+ (r"''\r", String.Escape),
+ (r"''\t", String.Escape),
+ (r"''", String.Single, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r"[^']", String.Single),
+ ],
+ 'doublequote': [
+ (r'\\', String.Escape),
+ (r'\\"', String.Escape),
+ (r'\\${', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\$\{', String.Interpol, 'antiquote'),
+ (r'[^"]', String.Double),
+ ],
+ 'antiquote': [
+ (r"}", String.Interpol, '#pop'),
+ # TODO: we should probably also escape ''${ and \${ here
+ (r"\$\{", String.Interpol, '#push'),
+ include('root'),
+ ],
+ }
+
+ def analyse_text(text):
+ rv = 0.0
+ # TODO: let/in
+ if re.search(r'import.+?<[^>]+>', text):
+ rv += 0.4
+ if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
+ rv += 0.4
+ if re.search(r'with\s+[a-zA-Z\.]+;', text):
+ rv += 0.2
+ if re.search(r'inherit\s+[a-zA-Z()\.];', text):
+ rv += 0.2
+ if re.search(r'=\s+mkIf\s+', text):
+ rv += 0.4
+ if re.search(r'\{[a-zA-Z,\s]+\}:', text):
+ rv += 0.1
+ return rv
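Editorial note: NixLexer.analyse_text scores a file by adding up independent hints (an import of a <path>, an mkDerivation call, a "with pkgs;" clause, and so on), each contributing between 0.1 and 0.4, so a typical derivation accumulates a usable score even though no single pattern is decisive. A sketch of exercising the heuristic directly (the snippet is illustrative only):

from pygments.lexers import get_lexer_by_name

nix_snippet = '''
{ stdenv, fetchurl }:

stdenv.mkDerivation rec {
  name = "hello-2.9";
  src = fetchurl { url = "mirror://gnu/hello/${name}.tar.gz"; };
}
'''

lexer = get_lexer_by_name('nix')
# analyse_text is a static heuristic; here the mkDerivation call and
# the "{ ... }:" argument pattern each add to the score.
print(lexer.analyse_text(nix_snippet))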
diff --git a/pygments/lexers/objective.py b/pygments/lexers/objective.py
new file mode 100644
index 00000000..df564cf1
--- /dev/null
+++ b/pygments/lexers/objective.py
@@ -0,0 +1,322 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.objective
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Objective-C family languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import include, bygroups, using, this, words, inherit
+from pygments.token import Text, Keyword, Name, String, Operator, \
+ Number, Punctuation, Literal
+
+from pygments.lexers.c_cpp import CLexer, CppLexer
+
+__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
+
+
+def objective(baselexer):
+ """
+ Generate a subclass of baselexer that accepts the Objective-C syntax
+ extensions.
+ """
+
+ # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
+ # since that's quite common in ordinary C/C++ files. It's OK to match
+ # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
+ #
+ # The upshot of this is that we CANNOT match @class or @interface
+ _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
+
+ # Matches [ <ws>? identifier <ws> ( identifier <ws>? ] | identifier? : )
+ # (note the identifier is *optional* when there is a ':'!)
+ _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
+ r'(?:[a-zA-Z_]\w*\s*\]|'
+ r'(?:[a-zA-Z_]\w*)?:)')
+
+ class GeneratedObjectiveCVariant(baselexer):
+ """
+ Implements Objective-C syntax on top of an existing C family lexer.
+ """
+
+ tokens = {
+ 'statements': [
+ (r'@"', String, 'string'),
+ (r'@(YES|NO)', Number),
+ (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
+ (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
+ (r'@0[0-7]+[Ll]?', Number.Oct),
+ (r'@\d+[Ll]?', Number.Integer),
+ (r'@\(', Literal, 'literal_number'),
+ (r'@\[', Literal, 'literal_array'),
+ (r'@\{', Literal, 'literal_dictionary'),
+ (words((
+ '@selector', '@private', '@protected', '@public', '@encode',
+ '@synchronized', '@try', '@throw', '@catch', '@finally',
+ '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
+ '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
+ 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
+ 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
+ 'out', 'inout', 'release', 'class', '@dynamic', '@optional',
+ '@required', '@autoreleasepool'), suffix=r'\b'),
+ Keyword),
+ (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
+ 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
+ Keyword.Type),
+ (r'@(true|false|YES|NO)\n', Name.Builtin),
+ (r'(YES|NO|nil|self|super)\b', Name.Builtin),
+ # Carbon types
+ (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
+ # Carbon built-ins
+ (r'(TRUE|FALSE)\b', Name.Builtin),
+ (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_classname')),
+ (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'oc_forward_classname')),
+ # @ can also prefix other expressions like @{...} or @(...)
+ (r'@', Punctuation),
+ inherit,
+ ],
+ 'oc_classname': [
+ # interface definition that inherits
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)({)',
+ bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ # interface definition for a category
+ ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)({)',
+ bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
+ ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
+ bygroups(Name.Class, Text, Name.Label), '#pop'),
+ # simple interface / implementation
+ ('([a-zA-Z$_][\w$]*)(\s*)({)',
+ bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
+ ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ ],
+ 'oc_forward_classname': [
+ ('([a-zA-Z$_][\w$]*)(\s*,\s*)',
+ bygroups(Name.Class, Text), 'oc_forward_classname'),
+ ('([a-zA-Z$_][\w$]*)(\s*;?)',
+ bygroups(Name.Class, Text), '#pop')
+ ],
+ 'oc_ivars': [
+ include('whitespace'),
+ include('statements'),
+ (';', Punctuation),
+ ('{', Punctuation, '#push'),
+ ('}', Punctuation, '#pop'),
+ ],
+ 'root': [
+ # methods
+ (r'^([-+])(\s*)' # method marker
+ r'(\(.*?\))?(\s*)' # return type
+ r'([a-zA-Z$_][\w$]*:?)', # begin of method name
+ bygroups(Punctuation, Text, using(this),
+ Text, Name.Function),
+ 'method'),
+ inherit,
+ ],
+ 'method': [
+ include('whitespace'),
+ # TODO unsure if ellipses are allowed elsewhere, see
+ # discussion in Issue 789
+ (r',', Punctuation),
+ (r'\.\.\.', Punctuation),
+ (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
+ bygroups(using(this), Text, Name.Variable)),
+ (r'[a-zA-Z$_][\w$]*:', Name.Function),
+ (';', Punctuation, '#pop'),
+ ('{', Punctuation, 'function'),
+ ('', Text, '#pop'),
+ ],
+ 'literal_number': [
+ (r'\(', Punctuation, 'literal_number_inner'),
+ (r'\)', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_number_inner': [
+ (r'\(', Punctuation, '#push'),
+ (r'\)', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array': [
+ (r'\[', Punctuation, 'literal_array_inner'),
+ (r'\]', Literal, '#pop'),
+ include('statement'),
+ ],
+ 'literal_array_inner': [
+ (r'\[', Punctuation, '#push'),
+ (r'\]', Punctuation, '#pop'),
+ include('statement'),
+ ],
+ 'literal_dictionary': [
+ (r'\}', Literal, '#pop'),
+ include('statement'),
+ ],
+ }
+
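+ # Rough heuristic: a definitive Objective-C keyword decides outright;
+ # otherwise @"..." string literals, @<number> literals and message
+ # sends each raise the confidence.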
+ def analyse_text(text):
+ if _oc_keywords.search(text):
+ return 1.0
+ elif '@"' in text: # strings
+ return 0.8
+ elif re.search('@[0-9]+', text):
+ return 0.7
+ elif _oc_message.search(text):
+ return 0.8
+ return 0
+
+ def get_tokens_unprocessed(self, text):
+ from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
+ COCOA_PROTOCOLS, COCOA_PRIMITIVES
+
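+ # Re-tag plain names that are known Cocoa interfaces, protocols or
+ # primitive typedefs as builtins so they are highlighted distinctly.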
+ for index, token, value in \
+ baselexer.get_tokens_unprocessed(self, text):
+ if token is Name or token is Name.Class:
+ if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
+ or value in COCOA_PRIMITIVES:
+ token = Name.Builtin.Pseudo
+
+ yield index, token, value
+
+ return GeneratedObjectiveCVariant
+
+
+class ObjectiveCLexer(objective(CLexer)):
+ """
+ For Objective-C source code with preprocessor directives.
+ """
+
+ name = 'Objective-C'
+ aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
+ filenames = ['*.m', '*.h']
+ mimetypes = ['text/x-objective-c']
+ priority = 0.05 # Lower than C
+
+
+class ObjectiveCppLexer(objective(CppLexer)):
+ """
+ For Objective-C++ source code with preprocessor directives.
+ """
+
+ name = 'Objective-C++'
+ aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
+ filenames = ['*.mm', '*.hh']
+ mimetypes = ['text/x-objective-c++']
+ priority = 0.05 # Lower than C++
+
+
+class LogosLexer(ObjectiveCppLexer):
+ """
+ For Logos + Objective-C source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+
+ name = 'Logos'
+ aliases = ['logos']
+ filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
+ mimetypes = ['text/x-logos']
+ priority = 0.25
+
+ tokens = {
+ 'statements': [
+ (r'(%orig|%log)\b', Keyword),
+ (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
+ bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
+ (r'(%init)\b(\()',
+ bygroups(Keyword, Punctuation), 'logos_init_directive'),
+ (r'(%init)(?=\s*;)', bygroups(Keyword)),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class), '#pop'),
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ ('#pop', 'logos_classname')),
+ inherit,
+ ],
+ 'logos_init_directive': [
+ ('\s+', Text),
+ (',', Punctuation, ('logos_init_directive', '#pop')),
+ ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
+ bygroups(Name.Class, Text, Punctuation, Text, Text)),
+ ('([a-zA-Z$_][\w$]*)', Name.Class),
+ ('\)', Punctuation, '#pop'),
+ ],
+ 'logos_classname': [
+ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
+ bygroups(Name.Class, Text, Name.Class), '#pop'),
+ ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
+ ],
+ 'root': [
+ (r'(%subclass)(\s+)', bygroups(Keyword, Text),
+ 'logos_classname'),
+ (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
+ bygroups(Keyword, Text, Name.Class)),
+ (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)',
+ bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
+ (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation),
+ 'function'),
+ (r'(%new)(\s*)(\()(\s*.*?\s*)(\))',
+ bygroups(Keyword, Text, Keyword, String, Keyword)),
+ (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
+ inherit,
+ ],
+ }
+
+ _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
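+ # (e.g. a hypothetical '%hook SpringBoard' or a '%ctor {' block would
+ # match; the class name is illustrative only.)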
+
+ def analyse_text(text):
+ if LogosLexer._logos_keywords.search(text):
+ return 1.0
+ return 0
+
+
+class SwiftLexer(ObjectiveCLexer):
+ """
+ For `Swift <https://developer.apple.com/swift/>`_ source.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Swift'
+ filenames = ['*.swift']
+ aliases = ['swift']
+ mimetypes = ['text/x-swift']
+
+ keywords_decl = set(('class', 'deinit', 'enum', 'extension', 'func', 'import',
+ 'init', 'let', 'protocol', 'static', 'struct', 'subscript',
+ 'typealias', 'var'))
+ keywords_stmt = set(('break', 'case', 'continue', 'default', 'do', 'else',
+ 'fallthrough', 'if', 'in', 'for', 'return', 'switch',
+ 'where', 'while'))
+ keywords_type = set(('as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self',
+ 'Type', '__COLUMN__', '__FILE__', '__FUNCTION__',
+ '__LINE__'))
+ keywords_resrv = set(('associativity', 'didSet', 'get', 'infix', 'inout', 'left',
+ 'mutating', 'none', 'nonmutating', 'operator', 'override',
+ 'postfix', 'precedence', 'prefix', 'right', 'set',
+ 'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak',
+ 'willSet'))
+ operators = set(('->',))
+
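+ # Reuse the Objective-C token stream and only re-label bare Name tokens
+ # that fall into one of the keyword or operator sets defined above.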
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in ObjectiveCLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self.keywords_decl:
+ token = Keyword
+ elif value in self.keywords_stmt:
+ token = Keyword
+ elif value in self.keywords_type:
+ token = Keyword.Type
+ elif value in self.keywords_resrv:
+ token = Keyword.Reserved
+ elif value in self.operators:
+ token = Operator
+ yield index, token, value
diff --git a/pygments/lexers/ooc.py b/pygments/lexers/ooc.py
new file mode 100644
index 00000000..7d55b2a6
--- /dev/null
+++ b/pygments/lexers/ooc.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ooc
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Ooc language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['OocLexer']
+
+
+class OocLexer(RegexLexer):
+ """
+ For `Ooc <http://ooc-lang.org/>`_ source code.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Ooc'
+ aliases = ['ooc']
+ filenames = ['*.ooc']
+ mimetypes = ['text/x-ooc']
+
+ tokens = {
+ 'root': [
+ (words((
+ 'class', 'interface', 'implement', 'abstract', 'extends', 'from',
+ 'this', 'super', 'new', 'const', 'final', 'static', 'import',
+ 'use', 'extern', 'inline', 'proto', 'break', 'continue',
+ 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
+ 'switch', 'case', 'as', 'in', 'version', 'return', 'true',
+ 'false', 'null'), prefix=r'\b', suffix=r'\b'),
+ Keyword),
+ (r'include\b', Keyword, 'include'),
+ (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Class)),
+ (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
+ bygroups(Keyword, Text, Name.Function)),
+ (r'\bfunc\b', Keyword),
+ # Note: %= and ^= not listed on http://ooc-lang.org/syntax
+ (r'//.*', Comment),
+ (r'(?s)/\*.*?\*/', Comment.Multiline),
+ (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
+ r'&&?|\|\|?|\^=?)', Operator),
+ (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
+ Name.Function)),
+ (r'[A-Z][A-Z0-9_]+', Name.Constant),
+ (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
+
+ (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
+ bygroups(Name.Function, Text)),
+ (r'[a-z]\w*', Name.Variable),
+
+ # : introduces types
+ (r'[:(){}\[\];,]', Punctuation),
+
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'0c[0-9]+', Number.Oct),
+ (r'0b[01]+', Number.Bin),
+ (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
+ (r'[0-9_]+', Number.Decimal),
+
+ (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"',
+ String.Double),
+ (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
+ String.Char),
+ (r'@', Punctuation), # pointer dereference
+ (r'\.', Punctuation), # imports or chain operator
+
+ (r'\\[ \t\n]', Text),
+ (r'[ \t]+', Text),
+ ],
+ 'include': [
+ (r'[\w/]+', Name),
+ (r',', Punctuation),
+ (r'[ \t]', Text),
+ (r'[;\n]', Text, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py
index 608e499d..cde764a2 100644
--- a/pygments/lexers/other.py
+++ b/pygments/lexers/other.py
@@ -3,4490 +3,38 @@
pygments.lexers.other
~~~~~~~~~~~~~~~~~~~~~
- Lexers for other languages.
+ Just export lexer classes previously contained in this module.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, \
- this, combined, ExtendedRegexLexer, default
-from pygments.token import Error, Punctuation, Literal, Token, \
- Text, Comment, Operator, Keyword, Name, String, Number, Generic, \
- Whitespace
-from pygments.util import get_bool_opt
-from pygments.lexers.web import HtmlLexer
-
-from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS
-from pygments.lexers._robotframeworklexer import RobotFrameworkLexer
-
-# backwards compatibility
from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
- TcshLexer
-
-__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer',
- 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer',
- 'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer',
- 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer',
- 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer',
- 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'SnobolLexer',
- 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer',
- 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer',
- 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer',
- 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'APLLexer',
- 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer',
- 'RslLexer', 'PanLexer', 'RedLexer', 'AlloyLexer']
-
-
-class LSLLexer(RegexLexer):
- """
- For Second Life's Linden Scripting Language source code.
- """
-
- name = 'LSL'
- aliases = ['lsl']
- filenames = ['*.lsl']
- mimetypes = ['text/x-lsl']
-
- flags = re.MULTILINE
-
- lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
- lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
- lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
- lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
- lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)L
ist|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
- lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
- lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBR
IGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[ABCD]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
- lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
- lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
- lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
- lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
- lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
- lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
- lsl_invalid_illegal = r'\b(?:event)\b'
- lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
- lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
- lsl_reserved_log = r'\b(?:print)\b'
- lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-\/]=?'
-
- tokens = {
- 'root':
- [
- (r'//.*?\n', Comment.Single),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String.Double, 'string'),
- (lsl_keywords, Keyword),
- (lsl_types, Keyword.Type),
- (lsl_states, Name.Class),
- (lsl_events, Name.Builtin),
- (lsl_functions_builtin, Name.Function),
- (lsl_constants_float, Keyword.Constant),
- (lsl_constants_integer, Keyword.Constant),
- (lsl_constants_integer_boolean, Keyword.Constant),
- (lsl_constants_rotation, Keyword.Constant),
- (lsl_constants_string, Keyword.Constant),
- (lsl_constants_vector, Keyword.Constant),
- (lsl_invalid_broken, Error),
- (lsl_invalid_deprecated, Error),
- (lsl_invalid_illegal, Error),
- (lsl_invalid_unimplemented, Error),
- (lsl_reserved_godmode, Keyword.Reserved),
- (lsl_reserved_log, Keyword.Reserved),
- (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- (r'\d+', Number.Integer),
- (lsl_operators, Operator),
- (r':=?', Error),
- (r'[,;{}\(\)\[\]]', Punctuation),
- (r'\n+', Whitespace),
- (r'\s+', Whitespace)
- ],
- 'comment':
- [
- (r'[^*/]+', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'string':
- [
- (r'\\([nt"\\])', String.Escape),
- (r'"', String.Double, '#pop'),
- (r'\\.', Error),
- (r'[^"\\]+', String.Double),
- ]
- }
-
-
-class ECLLexer(RegexLexer):
- """
- Lexer for the declarative big-data `ECL
- <http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
- language.
-
- .. versionadded:: 1.5
- """
-
- name = 'ECL'
- aliases = ['ecl']
- filenames = ['*.ecl']
- mimetypes = ['application/x-ecl']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('statements'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- (r'\/\/.*', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- ],
- 'statements': [
- include('types'),
- include('keywords'),
- include('functions'),
- include('hash'),
- (r'"', String, 'string'),
- (r'\'', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-f]+[lu]*', Number.Hex),
- (r'0[0-7]+[lu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]+', Operator),
- (r'[{}()\[\],.;]', Punctuation),
- (r'[a-z_]\w*', Name),
- ],
- 'hash': [
- (r'^#.*$', Comment.Preproc),
- ],
- 'types': [
- (r'(RECORD|END)\D', Keyword.Declaration),
- (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
- r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
- r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
- bygroups(Keyword.Type, Text)),
- ],
- 'keywords': [
- (r'(APPLY|ASSERT|BUILD|BUILDINDEX|EVALUATE|FAIL|KEYDIFF|KEYPATCH|'
- r'LOADXML|NOTHOR|NOTIFY|OUTPUT|PARALLEL|SEQUENTIAL|SOAPCALL|WAIT'
- r'CHECKPOINT|DEPRECATED|FAILCODE|FAILMESSAGE|FAILURE|GLOBAL|'
- r'INDEPENDENT|ONWARNING|PERSIST|PRIORITY|RECOVERY|STORED|SUCCESS|'
- r'WAIT|WHEN)\b', Keyword.Reserved),
- # These are classed differently, check later
- (r'(ALL|AND|ANY|AS|ATMOST|BEFORE|BEGINC\+\+|BEST|BETWEEN|CASE|CONST|'
- r'COUNTER|CSV|DESCEND|ENCRYPT|ENDC\+\+|ENDMACRO|EXCEPT|EXCLUSIVE|'
- r'EXPIRE|EXPORT|EXTEND|FALSE|FEW|FIRST|FLAT|FULL|FUNCTION|GROUP|'
- r'HEADER|HEADING|HOLE|IFBLOCK|IMPORT|IN|JOINED|KEEP|KEYED|LAST|'
- r'LEFT|LIMIT|LOAD|LOCAL|LOCALE|LOOKUP|MACRO|MANY|MAXCOUNT|'
- r'MAXLENGTH|MIN SKEW|MODULE|INTERFACE|NAMED|NOCASE|NOROOT|NOSCAN|'
- r'NOSORT|NOT|OF|ONLY|OPT|OR|OUTER|OVERWRITE|PACKED|PARTITION|'
- r'PENALTY|PHYSICALLENGTH|PIPE|QUOTE|RELATIONSHIP|REPEAT|RETURN|'
- r'RIGHT|SCAN|SELF|SEPARATOR|SERVICE|SHARED|SKEW|SKIP|SQL|STORE|'
- r'TERMINATOR|THOR|THRESHOLD|TOKEN|TRANSFORM|TRIM|TRUE|TYPE|'
- r'UNICODEORDER|UNSORTED|VALIDATE|VIRTUAL|WHOLE|WILD|WITHIN|XML|'
- r'XPATH|__COMPRESSED__)\b', Keyword.Reserved),
- ],
- 'functions': [
- (r'(ABS|ACOS|ALLNODES|ASCII|ASIN|ASSTRING|ATAN|ATAN2|AVE|CASE|'
- r'CHOOSE|CHOOSEN|CHOOSESETS|CLUSTERSIZE|COMBINE|CORRELATION|COS|'
- r'COSH|COUNT|COVARIANCE|CRON|DATASET|DEDUP|DEFINE|DENORMALIZE|'
- r'DISTRIBUTE|DISTRIBUTED|DISTRIBUTION|EBCDIC|ENTH|ERROR|EVALUATE|'
- r'EVENT|EVENTEXTRA|EVENTNAME|EXISTS|EXP|FAILCODE|FAILMESSAGE|'
- r'FETCH|FROMUNICODE|GETISVALID|GLOBAL|GRAPH|GROUP|HASH|HASH32|'
- r'HASH64|HASHCRC|HASHMD5|HAVING|IF|INDEX|INTFORMAT|ISVALID|'
- r'ITERATE|JOIN|KEYUNICODE|LENGTH|LIBRARY|LIMIT|LN|LOCAL|LOG|LOOP|'
- r'MAP|MATCHED|MATCHLENGTH|MATCHPOSITION|MATCHTEXT|MATCHUNICODE|'
- r'MAX|MERGE|MERGEJOIN|MIN|NOLOCAL|NONEMPTY|NORMALIZE|PARSE|PIPE|'
- r'POWER|PRELOAD|PROCESS|PROJECT|PULL|RANDOM|RANGE|RANK|RANKED|'
- r'REALFORMAT|RECORDOF|REGEXFIND|REGEXREPLACE|REGROUP|REJECTED|'
- r'ROLLUP|ROUND|ROUNDUP|ROW|ROWDIFF|SAMPLE|SET|SIN|SINH|SIZEOF|'
- r'SOAPCALL|SORT|SORTED|SQRT|STEPPED|STORED|SUM|TABLE|TAN|TANH|'
- r'THISNODE|TOPN|TOUNICODE|TRANSFER|TRIM|TRUNCATE|TYPEOF|UNGROUP|'
- r'UNICODEORDER|VARIANCE|WHICH|WORKUNIT|XMLDECODE|XMLENCODE|'
- r'XMLTEXT|XMLUNICODE)\b', Name.Function),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\'', String, '#pop'),
- (r'[^"\']+', String),
- ],
- }
-
-
-class BrainfuckLexer(RegexLexer):
- """
- Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
- language.
- """
-
- name = 'Brainfuck'
- aliases = ['brainfuck', 'bf']
- filenames = ['*.bf', '*.b']
- mimetypes = ['application/x-brainfuck']
-
- tokens = {
- 'common': [
- # use different colors for different instruction types
- (r'[.,]+', Name.Tag),
- (r'[+-]+', Name.Builtin),
- (r'[<>]+', Name.Variable),
- (r'[^.,+\-<>\[\]]+', Comment),
- ],
- 'root': [
- (r'\[', Keyword, 'loop'),
- (r'\]', Error),
- include('common'),
- ],
- 'loop': [
- (r'\[', Keyword, '#push'),
- (r'\]', Keyword, '#pop'),
- include('common'),
- ]
- }
-
-
-class BefungeLexer(RegexLexer):
- """
- Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
- language.
-
- .. versionadded:: 0.7
- """
- name = 'Befunge'
- aliases = ['befunge']
- filenames = ['*.befunge']
- mimetypes = ['application/x-befunge']
-
- tokens = {
- 'root': [
- (r'[0-9a-f]', Number),
- (r'[\+\*/%!`-]', Operator), # Traditional math
- (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives
- (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives
- (r'[|_mw]', Keyword),
- (r'[{}]', Name.Tag), # Befunge-98 stack ops
- (r'".*?"', String.Double), # Strings don't appear to allow escapes
- (r'\'.', String.Single), # Single character
- (r'[#;]', Comment), # Trampoline... depends on direction hit
- (r'[pg&~=@iotsy]', Keyword), # Misc
- (r'[()A-Z]', Comment), # Fingerprints
- (r'\s+', Text), # Whitespace doesn't matter
- ],
- }
-
-
-class RedcodeLexer(RegexLexer):
- """
- A simple Redcode lexer based on ICWS'94.
- Contributed by Adam Blinkinsop <blinks@acm.org>.
-
- .. versionadded:: 0.8
- """
- name = 'Redcode'
- aliases = ['redcode']
- filenames = ['*.cw']
-
- opcodes = ['DAT','MOV','ADD','SUB','MUL','DIV','MOD',
- 'JMP','JMZ','JMN','DJN','CMP','SLT','SPL',
- 'ORG','EQU','END']
- modifiers = ['A','B','AB','BA','F','X','I']
-
- tokens = {
- 'root': [
- # Whitespace:
- (r'\s+', Text),
- (r';.*$', Comment.Single),
- # Lexemes:
- # Identifiers
- (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
- (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
- (r'[A-Za-z_][A-Za-z_0-9]+', Name),
- # Operators
- (r'[-+*/%]', Operator),
- (r'[#$@<>]', Operator), # mode
- (r'[.,]', Punctuation), # mode
- # Numbers
- (r'[-+]?\d+', Number.Integer),
- ],
- }
-
-
-class MOOCodeLexer(RegexLexer):
- """
- For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
- language).
-
- .. versionadded:: 0.9
- """
- name = 'MOOCode'
- filenames = ['*.moo']
- aliases = ['moocode', 'moo']
- mimetypes = ['text/x-moocode']
-
- tokens = {
- 'root' : [
- # Numbers
- (r'(0|[1-9][0-9_]*)', Number.Integer),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # exceptions
- (r'(E_PERM|E_DIV)', Name.Exception),
- # db-refs
- (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity),
- # Keywords
- (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
- r'|endwhile|break|continue|return|try'
- r'|except|endtry|finally|in)\b', Keyword),
- # builtins
- (r'(random|length)', Name.Builtin),
- # special variables
- (r'(player|caller|this|args)', Name.Variable.Instance),
- # skip whitespace
- (r'\s+', Text),
- (r'\n', Text),
- # other operators
- (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator),
- # function call
- (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)),
- # variables
- (r'([a-zA-Z_0-9]+)', Text),
- ]
- }
-
-
-class SmalltalkLexer(RegexLexer):
- """
- For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
- Contributed by Stefan Matthias Aust.
- Rewritten by Nils Winter.
-
- .. versionadded:: 0.10
- """
- name = 'Smalltalk'
- filenames = ['*.st']
- aliases = ['smalltalk', 'squeak', 'st']
- mimetypes = ['text/x-smalltalk']
-
- tokens = {
- 'root' : [
- (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
- include('squeak fileout'),
- include('whitespaces'),
- include('method definition'),
- (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
- include('objects'),
- (r'\^|\:=|\_', Operator),
- # temporaries
- (r'[\]({}.;!]', Text),
- ],
- 'method definition' : [
- # Not perfect can't allow whitespaces at the beginning and the
- # without breaking everything
- (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
- bygroups(Name.Function, Text, Name.Variable)),
- (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
- (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
- bygroups(Name.Function, Text, Name.Variable, Text)),
- ],
- 'blockvariables' : [
- include('whitespaces'),
- (r'(:)(\s*)(\w+)',
- bygroups(Operator, Text, Name.Variable)),
- (r'\|', Operator, '#pop'),
- default('#pop'), # else pop
- ],
- 'literals' : [
- (r"'(''|[^'])*'", String, 'afterobject'),
- (r'\$.', String.Char, 'afterobject'),
- (r'#\(', String.Symbol, 'parenth'),
- (r'\)', Text, 'afterobject'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
- ],
- '_parenth_helper' : [
- include('whitespaces'),
- (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
- (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
- # literals
- (r"'(''|[^'])*'", String),
- (r'\$.', String.Char),
- (r'#*\(', String.Symbol, 'inner_parenth'),
- ],
- 'parenth' : [
- # This state is a bit tricky since
- # we can't just pop this state
- (r'\)', String.Symbol, ('root', 'afterobject')),
- include('_parenth_helper'),
- ],
- 'inner_parenth': [
- (r'\)', String.Symbol, '#pop'),
- include('_parenth_helper'),
- ],
- 'whitespaces' : [
- # skip whitespace and comments
- (r'\s+', Text),
- (r'"(""|[^"])*"', Comment),
- ],
- 'objects' : [
- (r'\[', Text, 'blockvariables'),
- (r'\]', Text, 'afterobject'),
- (r'\b(self|super|true|false|nil|thisContext)\b',
- Name.Builtin.Pseudo, 'afterobject'),
- (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
- (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
- (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
- String.Symbol, 'afterobject'),
- include('literals'),
- ],
- 'afterobject' : [
- (r'! !$', Keyword , '#pop'), # squeak chunk delimiter
- include('whitespaces'),
- (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
- Name.Builtin, '#pop'),
- (r'\b(new\b(?!:))', Name.Builtin),
- (r'\:=|\_', Operator, '#pop'),
- (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
- (r'\b[a-zA-Z]+\w*', Name.Function),
- (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
- (r'\.', Punctuation, '#pop'),
- (r';', Punctuation),
- (r'[\])}]', Text),
- (r'[\[({]', Text, '#pop'),
- ],
- 'squeak fileout' : [
- # Squeak fileout format (optional)
- (r'^"(""|[^"])*"!', Keyword),
- (r"^'(''|[^'])*'!", Keyword),
- (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
- bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
- (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
- bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
- (r'^(\w+)( subclass: )(#\w+)'
- r'(\s+instanceVariableNames: )(.*?)'
- r'(\s+classVariableNames: )(.*?)'
- r'(\s+poolDictionaries: )(.*?)'
- r'(\s+category: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
- String, Keyword, String, Keyword, String, Keyword)),
- (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
- bygroups(Name.Class, Keyword, String, Keyword)),
- (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
- (r'! !$', Keyword),
- ],
- }
-
-
-class LogtalkLexer(RegexLexer):
- """
- For `Logtalk <http://logtalk.org/>`_ source code.
-
- .. versionadded:: 0.10
- """
-
- name = 'Logtalk'
- aliases = ['logtalk']
- filenames = ['*.lgt']
- mimetypes = ['text/x-logtalk']
-
- tokens = {
- 'root': [
- # Directives
- (r'^\s*:-\s',Punctuation,'directive'),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/',Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- # Numbers
- (r"0'.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number.Float),
- # Variables
- (r'([A-Z_]\w*)', Name.Variable),
- # Event handlers
- (r'(after|before)(?=[(])', Keyword),
- # Execution-context methods
- (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
- # Reflection
- (r'(current_predicate|predicate_property)(?=[(])', Keyword),
- # DCGs and term expansion
- (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])',
- Keyword),
- # Entity
- (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])',
- Keyword),
- (r'(object|protocol|category)_property(?=[(])', Keyword),
- # Entity relations
- (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
- (r'extends_(object|protocol|category)(?=[(])', Keyword),
- (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
- (r'(instantiat|specializ)es_class(?=[(])', Keyword),
- # Events
- (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
- # Flags
- (r'(current|set)_logtalk_flag(?=[(])', Keyword),
- # Compiling, loading, and library paths
- (r'logtalk_(compile|l(ibrary_path|oad_context|oad))(?=[(])',
- Keyword),
- # Database
- (r'(clause|retract(all)?)(?=[(])', Keyword),
- (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
- # Control constructs
- (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
- (r'(fail|true)\b', Keyword),
- # All solutions
- (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
- # Multi-threading meta-predicates
- (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])',
- Keyword),
- # Term unification
- (r'unify_with_occurs_check(?=[(])', Keyword),
- # Term creation and decomposition
- (r'(functor|arg|copy_term|numbervars)(?=[(])', Keyword),
- # Evaluable functors
- (r'(rem|mod|abs|sign)(?=[(])', Keyword),
- (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
- (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
- # Other arithmetic functors
- (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword),
- # Term testing
- (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
- r'ground)(?=[(])', Keyword),
- # Term comparison
- (r'compare(?=[(])', Keyword),
- # Stream selection and control
- (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
- (r'(open|close)(?=[(])', Keyword),
- (r'flush_output(?=[(])', Keyword),
- (r'(at_end_of_stream|flush_output)\b', Keyword),
- (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])',
- Keyword),
- # Character and byte input/output
- (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
- (r'\bnl\b', Keyword),
- # Term input/output
- (r'read(_term)?(?=[(])', Keyword),
- (r'write(q|_(canonical|term))?(?=[(])', Keyword),
- (r'(current_)?op(?=[(])', Keyword),
- (r'(current_)?char_conversion(?=[(])', Keyword),
- # Atomic term processing
- (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
- (r'(char_code|sub_atom)(?=[(])', Keyword),
- (r'number_c(har|ode)s(?=[(])', Keyword),
- # Implementation defined hooks functions
- (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
- (r'\bhalt\b', Keyword),
- (r'halt(?=[(])', Keyword),
- # Message sending operators
- (r'(::|:|\^\^)', Operator),
- # External call
- (r'[{}]', Keyword),
- # Logic and control
- (r'\b(ignore|once)(?=[(])', Keyword),
- (r'\brepeat\b', Keyword),
- # Sorting
- (r'(key)?sort(?=[(])', Keyword),
- # Bitwise functors
- (r'(>>|<<|/\\|\\\\|\\)', Operator),
- # Arithemtic evaluation
- (r'\bis\b', Keyword),
- # Arithemtic comparison
- (r'(=:=|=\\=|<|=<|>=|>)', Operator),
- # Term creation and decomposition
- (r'=\.\.', Operator),
- # Term unification
- (r'(=|\\=)', Operator),
- # Term comparison
- (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
- # Evaluable functors
- (r'(//|[-+*/])', Operator),
- (r'\b(e|pi|mod|rem)\b', Operator),
- # Other arithemtic functors
- (r'\b\*\*\b', Operator),
- # DCG rules
- (r'-->', Operator),
- # Control constructs
- (r'([!;]|->)', Operator),
- # Logic and control
- (r'\\+', Operator),
- # Mode operators
- (r'[?@]', Operator),
- # Existential quantifier
- (r'\^', Operator),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # Ponctuation
- (r'[()\[\],.|]', Text),
- # Atoms
- (r"[a-z]\w*", Text),
- (r"'", String, 'quoted_atom'),
- ],
-
- 'quoted_atom': [
- (r"''", String),
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
- (r"[^\\'\n]+", String),
- (r'\\', String),
- ],
-
- 'directive': [
- # Conditional compilation directives
- (r'(el)?if(?=[(])', Keyword, 'root'),
- (r'(e(lse|ndif))[.]', Keyword, 'root'),
- # Entity directives
- (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
- (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
- # Predicate scope directives
- (r'(public|protected|private)(?=[(])', Keyword, 'root'),
- # Other directives
- (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
- (r'in(fo|itialization)(?=[(])', Keyword, 'root'),
- (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'),
- (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|'
- r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])',
- Keyword, 'root'),
- (r'op(?=[(])', Keyword, 'root'),
- (r'(c(alls|oinductive)|reexport|use(s|_module))(?=[(])',
- Keyword, 'root'),
- (r'[a-z]\w*(?=[(])', Text, 'root'),
- (r'[a-z]\w*[.]', Text, 'root'),
- ],
-
- 'entityrelations': [
- (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)'
- r'(?=[(])', Keyword),
- # Numbers
- (r"0'.", Number),
- (r'0b[01]+', Number.Bin),
- (r'0o[0-7]+', Number.Oct),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number.Float),
- # Variables
- (r'([A-Z_]\w*)', Name.Variable),
- # Atoms
- (r"[a-z]\w*", Text),
- (r"'", String, 'quoted_atom'),
- # Strings
- (r'"(\\\\|\\"|[^"])*"', String),
- # End of entity-opening directive
- (r'([)]\.)', Text, 'root'),
- # Scope operator
- (r'(::)', Operator),
- # Ponctuation
- (r'[()\[\],.|]', Text),
- # Comments
- (r'%.*?\n', Comment),
- (r'/\*(.|\n)*?\*/',Comment),
- # Whitespace
- (r'\n', Text),
- (r'\s+', Text),
- ]
- }
-
- def analyse_text(text):
- if ':- object(' in text:
- return True
- if ':- protocol(' in text:
- return True
- if ':- category(' in text:
- return True
- return False
-
-
-def _shortened(word):
- dpos = word.find('$')
- return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b'
- for i in range(len(word), dpos, -1)])
-def _shortened_many(*words):
- return '|'.join(map(_shortened, words))
-
-class GnuplotLexer(RegexLexer):
- """
- For `Gnuplot <http://gnuplot.info/>`_ plotting scripts.
-
- .. versionadded:: 0.11
- """
-
- name = 'Gnuplot'
- aliases = ['gnuplot']
- filenames = ['*.plot', '*.plt']
- mimetypes = ['text/x-gnuplot']
-
- tokens = {
- 'root': [
- include('whitespace'),
- (_shortened('bi$nd'), Keyword, 'bind'),
- (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'),
- (_shortened('f$it'), Keyword, 'fit'),
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'),
- (r'else\b', Keyword),
- (_shortened('pa$use'), Keyword, 'pause'),
- (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'),
- (_shortened('sa$ve'), Keyword, 'save'),
- (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')),
- (_shortened_many('sh$ow', 'uns$et'),
- Keyword, ('noargs', 'optionarg')),
- (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear',
- 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int',
- 'pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'sy$stem', 'up$date'),
- Keyword, 'genericargs'),
- (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump',
- 'she$ll', 'test$'),
- Keyword, 'noargs'),
- ('([a-zA-Z_]\w*)(\s*)(=)',
- bygroups(Name.Variable, Text, Operator), 'genericargs'),
- ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)',
- bygroups(Name.Function, Text, Operator), 'genericargs'),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r';', Keyword),
- ],
- 'comment': [
- (r'[^\\\n]', Comment),
- (r'\\\n', Comment),
- (r'\\', Comment),
- # don't add the newline to the Comment token
- ('', Comment, '#pop'),
- ],
- 'whitespace': [
- ('#', Comment, 'comment'),
- (r'[ \t\v\f]+', Text),
- ],
- 'noargs': [
- include('whitespace'),
- # semicolon and newline end the argument list
- (r';', Punctuation, '#pop'),
- (r'\n', Text, '#pop'),
- ],
- 'dqstring': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'sqstring': [
- (r"''", String), # escaped single quote
- (r"'", String, '#pop'),
- (r"[^\\'\n]+", String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # normal backslash
- (r'\n', String, '#pop'), # newline ends the string too
- ],
- 'genericargs': [
- include('noargs'),
- (r'"', String, 'dqstring'),
- (r"'", String, 'sqstring'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'-?\d+', Number.Integer),
- ('[,.~!%^&*+=|?:<>/-]', Operator),
- ('[{}()\[\]]', Punctuation),
- (r'(eq|ne)\b', Operator.Word),
- (r'([a-zA-Z_]\w*)(\s*)(\()',
- bygroups(Name.Function, Text, Punctuation)),
- (r'[a-zA-Z_]\w*', Name),
- (r'@[a-zA-Z_]\w*', Name.Constant), # macros
- (r'\\\n', Text),
- ],
- 'optionarg': [
- include('whitespace'),
- (_shortened_many(
- "a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der",
- "box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta",
- "data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign",
- "fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid",
- "hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle",
- "la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale",
- "mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin",
- "rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot",
- "mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics",
- "nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics",
- "mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput",
- "pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot",
- "poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze",
- "st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs",
- "ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le",
- "v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta",
- "yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel",
- "yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs",
- "x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs",
- "zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs",
- "x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs",
- "noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs",
- "xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs",
- "noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs",
- "cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange",
- "y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange",
- "vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis",
- "zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'),
- ],
- 'bind': [
- ('!', Keyword, '#pop'),
- (_shortened('all$windows'), Name.Builtin),
- include('genericargs'),
- ],
- 'quit': [
- (r'gnuplot\b', Keyword),
- include('noargs'),
- ],
- 'fit': [
- (r'via\b', Name.Builtin),
- include('plot'),
- ],
- 'if': [
- (r'\)', Punctuation, '#pop'),
- include('genericargs'),
- ],
- 'pause': [
- (r'(mouse|any|button1|button2|button3)\b', Name.Builtin),
- (_shortened('key$press'), Name.Builtin),
- include('genericargs'),
- ],
- 'plot': [
- (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex',
- 'mat$rix', 's$mooth', 'thru$', 't$itle',
- 'not$itle', 'u$sing', 'w$ith'),
- Name.Builtin),
- include('genericargs'),
- ],
- 'save': [
- (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'),
- Name.Builtin),
- include('genericargs'),
- ],
- }
-
-
-class PovrayLexer(RegexLexer):
- """
- For `Persistence of Vision Raytracer <http://www.povray.org/>`_ files.
-
- .. versionadded:: 0.11
- """
- name = 'POVRay'
- aliases = ['pov']
- filenames = ['*.pov', '*.inc']
- mimetypes = ['text/x-povray']
-
- tokens = {
- 'root': [
- (r'/\*[\w\W]*?\*/', Comment.Multiline),
- (r'//.*\n', Comment.Single),
- (r'(?s)"(?:\\.|[^"\\])+"', String.Double),
- (r'#(debug|default|else|end|error|fclose|fopen|ifdef|ifndef|'
- r'include|range|read|render|statistics|switch|undef|version|'
- r'warning|while|write|define|macro|local|declare)\b',
- Comment.Preproc),
- (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|'
- r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|'
- r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|'
- r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|'
- r'attenuating|average|background|black_hole|blue|blur_samples|'
- r'bounded_by|box_mapping|bozo|break|brick|brick_size|'
- r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|'
- r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|'
- r'color|color_map|colour|colour_map|component|composite|concat|'
- r'confidence|conic_sweep|constant|control0|control1|cos|cosh|'
- r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|'
- r'debug|declare|default|degrees|dents|diffuse|direction|'
- r'distance|distance_maximum|div|dust|dust_type|eccentricity|'
- r'else|emitting|end|error|error_bound|exp|exponent|'
- r'fade_distance|fade_power|falloff|falloff_angle|false|'
- r'file_exists|filter|finish|fisheye|flatness|flip|floor|'
- r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|'
- r'global_settings|glowing|gradient|granite|gray_threshold|'
- r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|'
- r'if|ifdef|iff|image_map|incidence|include|int|interpolate|'
- r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|'
- r'linear|linear_spline|linear_sweep|location|log|looks_like|'
- r'look_at|low_error_factor|mandel|map_type|marble|material_map|'
- r'matrix|max|max_intersections|max_iteration|max_trace_level|'
- r'max_value|metallic|min|minimum_reuse|mod|mortar|'
- r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|'
- r'octaves|off|offset|omega|omnimax|on|once|onion|open|'
- r'orthographic|panoramic|pattern1|pattern2|pattern3|'
- r'perspective|pgm|phase|phong|phong_size|pi|pigment|'
- r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|'
- r'precision|pwr|quadratic_spline|quaternion|quick_color|'
- r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|'
- r'ramp_wave|rand|range|reciprocal|recursion_limit|red|'
- r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|'
- r'right|ripples|rotate|roughness|samples|scale|scallop_wave|'
- r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|'
- r'slice|slope_map|smooth|specular|spherical_mapping|spiral|'
- r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|'
- r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|'
- r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|'
- r'test_camera_4|texture|texture_map|tga|thickness|threshold|'
- r'tightness|tile2|tiles|track|transform|translate|transmit|'
- r'triangle_wave|true|ttf|turbulence|turb_depth|type|'
- r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|'
- r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|'
- r'vnormalize|volume_object|volume_rendered|vol_with_light|'
- r'vrotate|v_steps|warning|warp|water_level|waves|while|width|'
- r'wood|wrinkles|yes)\b', Keyword),
- (r'(bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|'
- r'disc|height_field|intersection|julia_fractal|lathe|'
- r'light_source|merge|mesh|object|plane|poly|polygon|prism|'
- r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|'
- r'text|torus|triangle|union)\b', Name.Builtin),
- # TODO: <=, etc
- (r'[\[\](){}<>;,]', Punctuation),
- (r'[-+*/=]', Operator),
- (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
- (r'[0-9]+\.[0-9]*', Number.Float),
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Text),
- ]
- }
-
-
-class AppleScriptLexer(RegexLexer):
- """
- For `AppleScript source code
- <http://developer.apple.com/documentation/AppleScript/
- Conceptual/AppleScriptLangGuide>`_,
- including `AppleScript Studio
- <http://developer.apple.com/documentation/AppleScript/
- Reference/StudioReference>`_.
- Contributed by Andreas Amann <aamann@mac.com>.
- """
-
- name = 'AppleScript'
- aliases = ['applescript']
- filenames = ['*.applescript']
-
- flags = re.MULTILINE | re.DOTALL
-
- Identifiers = r'[a-zA-Z]\w*'
- Literals = ['AppleScript', 'current application', 'false', 'linefeed',
- 'missing value', 'pi','quote', 'result', 'return', 'space',
- 'tab', 'text item delimiters', 'true', 'version']
- Classes = ['alias ', 'application ', 'boolean ', 'class ', 'constant ',
- 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
- 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
- 'text ', 'unit types', '(?:Unicode )?text', 'string']
- BuiltIn = ['attachment', 'attribute run', 'character', 'day', 'month',
- 'paragraph', 'word', 'year']
- HandlerParams = ['about', 'above', 'against', 'apart from', 'around',
- 'aside from', 'at', 'below', 'beneath', 'beside',
- 'between', 'for', 'given', 'instead of', 'on', 'onto',
- 'out of', 'over', 'since']
- Commands = ['ASCII (character|number)', 'activate', 'beep', 'choose URL',
- 'choose application', 'choose color', 'choose file( name)?',
- 'choose folder', 'choose from list',
- 'choose remote application', 'clipboard info',
- 'close( access)?', 'copy', 'count', 'current date', 'delay',
- 'delete', 'display (alert|dialog)', 'do shell script',
- 'duplicate', 'exists', 'get eof', 'get volume settings',
- 'info for', 'launch', 'list (disks|folder)', 'load script',
- 'log', 'make', 'mount volume', 'new', 'offset',
- 'open( (for access|location))?', 'path to', 'print', 'quit',
- 'random number', 'read', 'round', 'run( script)?',
- 'say', 'scripting components',
- 'set (eof|the clipboard to|volume)', 'store script',
- 'summarize', 'system attribute', 'system info',
- 'the clipboard', 'time to GMT', 'write', 'quoted form']
- References = ['(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
- 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
- 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
- 'before', 'behind', 'every', 'front', 'index', 'last',
- 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose']
- Operators = ["and", "or", "is equal", "equals", "(is )?equal to", "is not",
- "isn't", "isn't equal( to)?", "is not equal( to)?",
- "doesn't equal", "does not equal", "(is )?greater than",
- "comes after", "is not less than or equal( to)?",
- "isn't less than or equal( to)?", "(is )?less than",
- "comes before", "is not greater than or equal( to)?",
- "isn't greater than or equal( to)?",
- "(is )?greater than or equal( to)?", "is not less than",
- "isn't less than", "does not come before",
- "doesn't come before", "(is )?less than or equal( to)?",
- "is not greater than", "isn't greater than",
- "does not come after", "doesn't come after", "starts? with",
- "begins? with", "ends? with", "contains?", "does not contain",
- "doesn't contain", "is in", "is contained by", "is not in",
- "is not contained by", "isn't contained by", "div", "mod",
- "not", "(a )?(ref( to)?|reference to)", "is", "does"]
- Control = ['considering', 'else', 'error', 'exit', 'from', 'if',
- 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
- 'try', 'until', 'using terms from', 'while', 'whith',
- 'with timeout( of)?', 'with transaction', 'by', 'continue',
- 'end', 'its?', 'me', 'my', 'return', 'of' , 'as']
- Declarations = ['global', 'local', 'prop(erty)?', 'set', 'get']
- Reserved = ['but', 'put', 'returning', 'the']
- StudioClasses = ['action cell', 'alert reply', 'application', 'box',
- 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
- 'clip view', 'color well', 'color-panel',
- 'combo box( item)?', 'control',
- 'data( (cell|column|item|row|source))?', 'default entry',
- 'dialog reply', 'document', 'drag info', 'drawer',
- 'event', 'font(-panel)?', 'formatter',
- 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
- 'movie( view)?', 'open-panel', 'outline view', 'panel',
- 'pasteboard', 'plugin', 'popup button',
- 'progress indicator', 'responder', 'save-panel',
- 'scroll view', 'secure text field( cell)?', 'slider',
- 'sound', 'split view', 'stepper', 'tab view( item)?',
- 'table( (column|header cell|header view|view))',
- 'text( (field( cell)?|view))?', 'toolbar( item)?',
- 'user-defaults', 'view', 'window']
- StudioEvents = ['accept outline drop', 'accept table drop', 'action',
- 'activated', 'alert ended', 'awake from nib', 'became key',
- 'became main', 'begin editing', 'bounds changed',
- 'cell value', 'cell value changed', 'change cell value',
- 'change item value', 'changed', 'child of item',
- 'choose menu item', 'clicked', 'clicked toolbar item',
- 'closed', 'column clicked', 'column moved',
- 'column resized', 'conclude drop', 'data representation',
- 'deminiaturized', 'dialog ended', 'document nib name',
- 'double clicked', 'drag( (entered|exited|updated))?',
- 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
- 'item value', 'item value changed', 'items changed',
- 'keyboard down', 'keyboard up', 'launched',
- 'load data representation', 'miniaturized', 'mouse down',
- 'mouse dragged', 'mouse entered', 'mouse exited',
- 'mouse moved', 'mouse up', 'moved',
- 'number of browser rows', 'number of items',
- 'number of rows', 'open untitled', 'opened', 'panel ended',
- 'parameters updated', 'plugin loaded', 'prepare drop',
- 'prepare outline drag', 'prepare outline drop',
- 'prepare table drag', 'prepare table drop',
- 'read from file', 'resigned active', 'resigned key',
- 'resigned main', 'resized( sub views)?',
- 'right mouse down', 'right mouse dragged',
- 'right mouse up', 'rows changed', 'scroll wheel',
- 'selected tab view item', 'selection changed',
- 'selection changing', 'should begin editing',
- 'should close', 'should collapse item',
- 'should end editing', 'should expand item',
- 'should open( untitled)?',
- 'should quit( after last window closed)?',
- 'should select column', 'should select item',
- 'should select row', 'should select tab view item',
- 'should selection change', 'should zoom', 'shown',
- 'update menu item', 'update parameters',
- 'update toolbar item', 'was hidden', 'was miniaturized',
- 'will become active', 'will close', 'will dismiss',
- 'will display browser cell', 'will display cell',
- 'will display item cell', 'will display outline cell',
- 'will finish launching', 'will hide', 'will miniaturize',
- 'will move', 'will open', 'will pop up', 'will quit',
- 'will resign active', 'will resize( sub views)?',
- 'will select tab view item', 'will show', 'will zoom',
- 'write to file', 'zoomed']
- StudioCommands = ['animate', 'append', 'call method', 'center',
- 'close drawer', 'close panel', 'display',
- 'display alert', 'display dialog', 'display panel', 'go',
- 'hide', 'highlight', 'increment', 'item for',
- 'load image', 'load movie', 'load nib', 'load panel',
- 'load sound', 'localized string', 'lock focus', 'log',
- 'open drawer', 'path for', 'pause', 'perform action',
- 'play', 'register', 'resume', 'scroll', 'select( all)?',
- 'show', 'size to fit', 'start', 'step back',
- 'step forward', 'stop', 'synchronize', 'unlock focus',
- 'update']
- StudioProperties = ['accepts arrow key', 'action method', 'active',
- 'alignment', 'allowed identifiers',
- 'allows branch selection', 'allows column reordering',
- 'allows column resizing', 'allows column selection',
- 'allows customization',
- 'allows editing text attributes',
- 'allows empty selection', 'allows mixed state',
- 'allows multiple selection', 'allows reordering',
- 'allows undo', 'alpha( value)?', 'alternate image',
- 'alternate increment value', 'alternate title',
- 'animation delay', 'associated file name',
- 'associated object', 'auto completes', 'auto display',
- 'auto enables items', 'auto repeat',
- 'auto resizes( outline column)?',
- 'auto save expanded items', 'auto save name',
- 'auto save table columns', 'auto saves configuration',
- 'auto scroll', 'auto sizes all columns to fit',
- 'auto sizes cells', 'background color', 'bezel state',
- 'bezel style', 'bezeled', 'border rect', 'border type',
- 'bordered', 'bounds( rotation)?', 'box type',
- 'button returned', 'button type',
- 'can choose directories', 'can choose files',
- 'can draw', 'can hide',
- 'cell( (background color|size|type))?', 'characters',
- 'class', 'click count', 'clicked( data)? column',
- 'clicked data item', 'clicked( data)? row',
- 'closeable', 'collating', 'color( (mode|panel))',
- 'command key down', 'configuration',
- 'content(s| (size|view( margins)?))?', 'context',
- 'continuous', 'control key down', 'control size',
- 'control tint', 'control view',
- 'controller visible', 'coordinate system',
- 'copies( on scroll)?', 'corner view', 'current cell',
- 'current column', 'current( field)? editor',
- 'current( menu)? item', 'current row',
- 'current tab view item', 'data source',
- 'default identifiers', 'delta (x|y|z)',
- 'destination window', 'directory', 'display mode',
- 'displayed cell', 'document( (edited|rect|view))?',
- 'double value', 'dragged column', 'dragged distance',
- 'dragged items', 'draws( cell)? background',
- 'draws grid', 'dynamically scrolls', 'echos bullets',
- 'edge', 'editable', 'edited( data)? column',
- 'edited data item', 'edited( data)? row', 'enabled',
- 'enclosing scroll view', 'ending page',
- 'error handling', 'event number', 'event type',
- 'excluded from windows menu', 'executable path',
- 'expanded', 'fax number', 'field editor', 'file kind',
- 'file name', 'file type', 'first responder',
- 'first visible column', 'flipped', 'floating',
- 'font( panel)?', 'formatter', 'frameworks path',
- 'frontmost', 'gave up', 'grid color', 'has data items',
- 'has horizontal ruler', 'has horizontal scroller',
- 'has parent data item', 'has resize indicator',
- 'has shadow', 'has sub menu', 'has vertical ruler',
- 'has vertical scroller', 'header cell', 'header view',
- 'hidden', 'hides when deactivated', 'highlights by',
- 'horizontal line scroll', 'horizontal page scroll',
- 'horizontal ruler view', 'horizontally resizable',
- 'icon image', 'id', 'identifier',
- 'ignores multiple clicks',
- 'image( (alignment|dims when disabled|frame style|'
- 'scaling))?',
- 'imports graphics', 'increment value',
- 'indentation per level', 'indeterminate', 'index',
- 'integer value', 'intercell spacing', 'item height',
- 'key( (code|equivalent( modifier)?|window))?',
- 'knob thickness', 'label', 'last( visible)? column',
- 'leading offset', 'leaf', 'level', 'line scroll',
- 'loaded', 'localized sort', 'location', 'loop mode',
-                        'main( (bundle|menu|window))?', 'marker follows cell',
- 'matrix mode', 'maximum( content)? size',
- 'maximum visible columns',
- 'menu( form representation)?', 'miniaturizable',
- 'miniaturized', 'minimized image', 'minimized title',
- 'minimum column width', 'minimum( content)? size',
- 'modal', 'modified', 'mouse down state',
- 'movie( (controller|file|rect))?', 'muted', 'name',
- 'needs display', 'next state', 'next text',
- 'number of tick marks', 'only tick mark values',
- 'opaque', 'open panel', 'option key down',
- 'outline table column', 'page scroll', 'pages across',
- 'pages down', 'palette label', 'pane splitter',
- 'parent data item', 'parent window', 'pasteboard',
- 'path( (names|separator))?', 'playing',
- 'plays every frame', 'plays selection only', 'position',
- 'preferred edge', 'preferred type', 'pressure',
- 'previous text', 'prompt', 'properties',
- 'prototype cell', 'pulls down', 'rate',
- 'released when closed', 'repeated',
- 'requested print time', 'required file type',
- 'resizable', 'resized column', 'resource path',
- 'returns records', 'reuses columns', 'rich text',
- 'roll over', 'row height', 'rulers visible',
- 'save panel', 'scripts path', 'scrollable',
- 'selectable( identifiers)?', 'selected cell',
- 'selected( data)? columns?', 'selected data items?',
- 'selected( data)? rows?', 'selected item identifier',
- 'selection by rect', 'send action on arrow key',
- 'sends action when done editing', 'separates columns',
- 'separator item', 'sequence number', 'services menu',
- 'shared frameworks path', 'shared support path',
- 'sheet', 'shift key down', 'shows alpha',
- 'shows state by', 'size( mode)?',
- 'smart insert delete enabled', 'sort case sensitivity',
- 'sort column', 'sort order', 'sort type',
- 'sorted( data rows)?', 'sound', 'source( mask)?',
- 'spell checking enabled', 'starting page', 'state',
- 'string value', 'sub menu', 'super menu', 'super view',
- 'tab key traverses cells', 'tab state', 'tab type',
- 'tab view', 'table view', 'tag', 'target( printer)?',
- 'text color', 'text container insert',
- 'text container origin', 'text returned',
- 'tick mark position', 'time stamp',
- 'title(d| (cell|font|height|position|rect))?',
- 'tool tip', 'toolbar', 'trailing offset', 'transparent',
- 'treat packages as directories', 'truncated labels',
- 'types', 'unmodified characters', 'update views',
- 'use sort indicator', 'user defaults',
- 'uses data source', 'uses ruler',
- 'uses threaded animation',
- 'uses title from previous column', 'value wraps',
- 'version',
- 'vertical( (line scroll|page scroll|ruler view))?',
- 'vertically resizable', 'view',
- 'visible( document rect)?', 'volume', 'width', 'window',
- 'windows menu', 'wraps', 'zoomable', 'zoomed']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (u'¬\\n', String.Escape),
- (r"'s\s+", Text), # This is a possessive, consider moving
- (r'(--|#).*?$', Comment),
- (r'\(\*', Comment.Multiline, 'comment'),
- (r'[\(\){}!,.:]', Punctuation),
- (u'(«)([^»]+)(»)',
- bygroups(Text, Name.Builtin, Text)),
- (r'\b((?:considering|ignoring)\s*)'
- r'(application responses|case|diacriticals|hyphens|'
- r'numeric strings|punctuation|white space)',
- bygroups(Keyword, Name.Builtin)),
- (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator),
- (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
- (r'^(\s*(?:on|end)\s+)'
- r'(%s)' % '|'.join(StudioEvents[::-1]),
- bygroups(Keyword, Name.Function)),
- (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
- (r'\b(as )(%s)\b' % '|'.join(Classes),
- bygroups(Keyword, Name.Class)),
- (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
- (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(Control), Keyword),
- (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
- (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
- (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
- (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
- (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r'\b(%s)\b' % Identifiers, Name.Variable),
- (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
- (r'[-+]?\d+', Number.Integer),
- ],
- 'comment': [
- ('\(\*', Comment.Multiline, '#push'),
- ('\*\)', Comment.Multiline, '#pop'),
- ('[^*(]+', Comment.Multiline),
- ('[*(]', Comment.Multiline),
- ],
- }
-
-
-class ModelicaLexer(RegexLexer):
- """
- For `Modelica <http://www.modelica.org/>`_ source code.
-
- .. versionadded:: 1.1
- """
- name = 'Modelica'
- aliases = ['modelica']
- filenames = ['*.mo']
- mimetypes = ['text/x-modelica']
-
- flags = re.IGNORECASE | re.DOTALL
-
- tokens = {
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- (r'"', String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+)', Number.Float),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin),
- (r'([a-z_][\w]*|\'[^\']+\')'
- r'([\[\d,:\]]*)'
- r'(\.([a-z_][\w]*|\'[^\']+\'))+'
- r'([\[\d,:\]]*)', Name.Class),
- (r'(\'[\w\+\-\*\/\^]+\'|\w+)', Name),
- (r'[()\[\]{},.;]', Punctuation),
- (r'\'', Name, 'quoted_ident'),
- ],
- 'root': [
- include('whitespace'),
- include('classes'),
- include('functions'),
- include('keywords'),
- include('operators'),
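-            # Modelica documentation strings may embed HTML; everything up to
-            # the closing </html> tag is handed to HtmlLexer via the
-            # 'html-content' state below.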
- (r'("<html>|<html>)', Name.Tag, 'html-content'),
- include('statements'),
- ],
- 'keywords': [
- (r'(algorithm|annotation|break|connect|constant|constrainedby|'
- r'discrete|each|end|else|elseif|elsewhen|encapsulated|enumeration|'
- r'equation|exit|expandable|extends|'
- r'external|false|final|flow|for|if|import|impure|in|initial\sequation|'
- r'inner|input|loop|nondiscrete|outer|output|parameter|partial|'
- r'protected|public|pure|redeclare|replaceable|stream|time|then|true|'
- r'when|while|within)\b', Keyword),
- ],
- 'functions': [
- (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|'
- r'cross|diagonal|div|exp|fill|floor|getInstanceName|identity|'
- r'linspace|log|log10|matrix|mod|max|min|ndims|ones|outerProduct|'
- r'product|rem|scalar|semiLinear|skew|sign|sin|sinh|size|'
- r'spatialDistribution|sum|sqrt|symmetric|tan|tanh|transpose|'
- r'vector|zeros)\b', Name.Function),
- ],
- 'operators': [
- (r'(actualStream|and|assert|backSample|cardinality|change|Clock|'
- r'delay|der|edge|hold|homotopy|initial|inStream|noClock|noEvent|'
- r'not|or|pre|previous|reinit|return|sample|smooth|'
- r'spatialDistribution|shiftSample|subSample|superSample|terminal|'
- r'terminate)\b', Name.Builtin),
- ],
- 'classes': [
- (r'(operator)?(\s+)?(block|class|connector|end|function|model|'
- r'operator|package|record|type)(\s+)'
- r'((?!if|for|when|while)[a-z_]\w*|\'[^\']+\')([;]?)',
- bygroups(Keyword, Text, Keyword, Text, Name.Class, Text))
- ],
- 'quoted_ident': [
- (r'\'', Name, '#pop'),
- (r'[^\']+', Name), # all other characters
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})',
- String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'html-content': [
- (r'<\s*/\s*html\s*>"', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)),
- ]
- }
-
-
-class RebolLexer(RegexLexer):
- """
- A `REBOL <http://www.rebol.com/>`_ lexer.
-
- .. versionadded:: 1.1
- """
- name = 'REBOL'
- aliases = ['rebol']
- filenames = ['*.r', '*.r3', '*.reb']
- mimetypes = ['text/x-rebol']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
- r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
- r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
- r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
- r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
- r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
- r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
- r'while|compress|decompress|secure|open|close|read|read-io|'
- r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
- r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
- r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
- r'browse|launch|stats|get-modes|set-modes|to-local-file|'
- r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
- r'hide|draw|show|size-text|textinfo|offset-to-caret|'
- r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
- r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
- r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
- r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
- r'rsa-encrypt)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
- r'minimum|maximum|negate|complement|absolute|random|head|tail|'
- r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
- r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
- r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
- r'copy)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(error|source|input|license|help|install|echo|Usage|with|func|'
- r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
- r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
- r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
- r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
- r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
- r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
- r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
- r'write-user|save-user|set-user-name|protect-system|parse-xml|'
- r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
- r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
- r'request-dir|center-face|do-events|net-error|decode-url|'
- r'parse-header|parse-header-date|parse-email-addrs|import-email|'
- r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
- r'find-key-face|do-face|viewtop|confine|find-window|'
- r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
- r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
- r'read-thru|load-thru|do-thru|launch-thru|load-image|'
- r'request-download|do-face-alt|set-font|set-para|get-style|'
- r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
- r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
- r'resize-face|load-stock|load-stock-block|notify|request|flash|'
- r'request-color|request-pass|request-text|request-list|'
- r'request-date|request-file|dbug|editor|link-relative-path|'
- r'emailer|parse-error)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
- r'return|exit|break)$', word):
- yield match.start(), Name.Exception, word
- elif re.match('REBOL$', word):
- yield match.start(), Generic.Heading, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
- elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
- word):
- yield match.start(), Operator, word
- elif re.match(".*\?$", word):
- yield match.start(), Keyword, word
- elif re.match(".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'REBOL\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#{[0-9a-f]*}', Number.Hex),
- (r'2#{', Number.Hex, 'bin2'),
- (r'64#{[0-9a-z+/=\s]*}', Number.Hex),
- (r'"', String, 'string'),
- (r'{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+[xX]\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]\(\)]', Generic.Strong),
- (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
- (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{\"\[]+', Comment),
- (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
- (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(\^{^")\s]+)', Text),
- ],
- 'string': [
- (r'[^(\^")]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(\^{^})]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'{', String, '#push'),
- (r'}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(\^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(\^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[\(|\)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([0-1]\s*){8}', Number.Hex),
- (r'}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(\^")]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(\^{^})]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'{', Comment, '#push'),
- (r'}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'{', Comment, "commentString2"),
- (r'[^(\[\]\"{)]+', Comment),
- ],
- }
-
-    def analyse_text(text):
-        """
-        Check if the code contains a REBOL header, in which case it is
-        probably not R code.
-        """
-        if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
-            # The code starts with a REBOL header
-            return 1.0
-        elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
-            # The code contains a REBOL header, but with some text before it
-            return 0.5
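-        # REBOL shares the *.r extension with R scripts, so
-        # guess_lexer_for_filename can use this score to tell them apart.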
-
-
-class ABAPLexer(RegexLexer):
- """
- Lexer for ABAP, SAP's integrated language.
-
- .. versionadded:: 1.1
- """
- name = 'ABAP'
- aliases = ['abap']
- filenames = ['*.abap']
- mimetypes = ['text/x-abap']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'common': [
- (r'\s+', Text),
- (r'^\*.*$', Comment.Single),
- (r'\".*?\n', Comment.Single),
- ],
- 'variable-names': [
- (r'<\S+>', Name.Variable),
- (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable),
- ],
- 'root': [
- include('common'),
-            # function calls
- (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|'
- r'TRANSACTION|TRANSFORMATION))\b',
- Keyword),
- (r'(FORM|PERFORM)(\s+)(\w+)',
- bygroups(Keyword, Text, Name.Function)),
- (r'(PERFORM)(\s+)(\()(\w+)(\))',
- bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation )),
- (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)',
- bygroups(Keyword, Text, Name.Function, Text, Keyword)),
-
- # method implementation
- (r'(METHOD)(\s+)([\w~]+)',
- bygroups(Keyword, Text, Name.Function)),
- # method calls
- (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)',
- bygroups(Text, Name.Variable, Operator, Name.Function)),
- # call methodnames returning style
- (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function),
-
- # keywords with dashes in them.
- # these need to be first, because for instance the -ID part
- # of MESSAGE-ID wouldn't get highlighted if MESSAGE was
- # first in the list of keywords.
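-            # For example, in "REPORT ztest MESSAGE-ID zx." the whole token
-            # MESSAGE-ID is matched by this rule; otherwise MESSAGE would be
-            # consumed by the single-word rule below and -ID would not be
-            # highlighted as part of the keyword.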
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|'
- r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|'
- r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|'
- r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|'
- r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|'
- r'INTERFACE-POOL|INVERTED-DATE|'
- r'LOAD-OF-PROGRAM|LOG-POINT|'
- r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|'
- r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|'
- r'OUTPUT-LENGTH|PRINT-CONTROL|'
- r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|'
- r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|'
- r'TYPE-POOL|TYPE-POOLS'
- r')\b', Keyword),
-
-            # keyword combinations
- (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|'
- r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|'
- r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|'
- r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|'
- r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|'
- r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|'
- r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|'
- r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|'
- r'RUN\s+TIME|TIME\s+(STAMP)?)?|'
- r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|'
- r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|'
- r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|'
- r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|'
-             r'TITLEBAR|UPDATE\s+TASK\s+LOCAL|USER-COMMAND)|'
- r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|'
- r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|'
- r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|'
- r'DATABASE|SHARED\s+(MEMORY|BUFFER))|'
- r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|'
- r'FREE\s(MEMORY|OBJECT)?|'
- r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|'
- r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|'
- r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|'
- r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|'
- r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|'
- r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|'
- r'SCREEN)|COMMENT|FUNCTION\s+KEY|'
- r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|'
- r'SKIP|ULINE)|'
- r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|'
-             r'TO LIST-PROCESSING|TO TRANSACTION)|'
- r'(ENDING|STARTING)\s+AT|'
- r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|'
- r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|'
- r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|'
- r'(BEGIN|END)\s+OF|'
- r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|'
- r'COMPARING(\s+ALL\s+FIELDS)?|'
- r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|'
- r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|'
- r'END-OF-(DEFINITION|PAGE|SELECTION)|'
- r'WITH\s+FRAME(\s+TITLE)|'
-
-            # simple combinations
- r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|'
- r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|'
- r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|'
- r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|'
- r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|'
- r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|'
- r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword),
-
- # single word keywords.
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|'
- r'ASSIGN(ING)?|AT(\s+FIRST)?|'
- r'BACK|BLOCK|BREAK-POINT|'
- r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|'
- r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|'
- r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|'
- r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|'
- r'DETAIL|DIRECTORY|DIVIDE|DO|'
- r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|'
- r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|'
- r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|'
- r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|'
- r'HIDE|'
- r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|'
- r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|'
- r'LENGTH|LINES|LOAD|LOCAL|'
- r'JOIN|'
- r'KEY|'
- r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|'
- r'NODES|'
- r'OBLIGATORY|OF|OFF|ON|OVERLAY|'
- r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|'
- r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|'
- r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|'
- r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|'
- r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|'
- r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|'
- r'ULINE|UNDER|UNPACK|UPDATE|USING|'
- r'VALUE|VALUES|VIA|'
- r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword),
-
- # builtins
- (r'(abs|acos|asin|atan|'
- r'boolc|boolx|bit_set|'
- r'char_off|charlen|ceil|cmax|cmin|condense|contains|'
- r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|'
- r'count|count_any_of|count_any_not_of|'
- r'dbmaxlen|distance|'
- r'escape|exp|'
- r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|'
- r'insert|'
- r'lines|log|log10|'
- r'match|matches|'
- r'nmax|nmin|numofchar|'
- r'repeat|replace|rescale|reverse|round|'
- r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|'
- r'substring|substring_after|substring_from|substring_before|substring_to|'
- r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|'
- r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)),
-
- (r'&[0-9]', Name),
- (r'[0-9]+', Number.Integer),
-
- # operators which look like variable names before
- # parsing variable names.
- (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|'
- r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|'
- r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator),
-
- include('variable-names'),
-
-            # standard operators after variable names,
- # because < and > are part of field symbols.
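-            # e.g. a field symbol such as <fs_line> has already been consumed
-            # by 'variable-names' above, so it is not split into < and >.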
- (r'[?*<>=\-+]', Operator),
- (r"'(''|[^'])*'", String.Single),
- (r"`([^`])*`", String.Single),
- (r'[/;:()\[\],\.]', Punctuation)
- ],
- }
-
-
-class NewspeakLexer(RegexLexer):
- """
-    For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
- """
- name = 'Newspeak'
- filenames = ['*.ns2']
- aliases = ['newspeak', ]
- mimetypes = ['text/x-newspeak']
-
- tokens = {
- 'root' : [
- (r'\b(Newsqueak2)\b',Keyword.Declaration),
- (r"'[^']*'",String),
- (r'\b(class)(\s+)(\w+)(\s*)',
- bygroups(Keyword.Declaration,Text,Name.Class,Text)),
- (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
- Keyword),
- (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
- bygroups(Name.Function,Text,Name.Variable)),
- (r'(\w+)(\s*)(=)',
- bygroups(Name.Attribute,Text,Operator)),
- (r'<\w+>', Comment.Special),
- include('expressionstat'),
- include('whitespace')
- ],
-
- 'expressionstat': [
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'\d+', Number.Integer),
- (r':\w+',Name.Variable),
- (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
- (r'\w+:', Name.Function),
- (r'\w+', Name.Variable),
- (r'\(|\)', Punctuation),
- (r'\[|\]', Punctuation),
- (r'\{|\}', Punctuation),
-
- (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
- (r'\.|;', Punctuation),
- include('whitespace'),
- include('literals'),
- ],
- 'literals': [
- (r'\$.', String),
- (r"'[^']*'", String),
- (r"#'[^']*'", String.Symbol),
- (r"#\w+:?", String.Symbol),
- (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
-
- ],
- 'whitespace' : [
- (r'\s+', Text),
- (r'"[^"]*"', Comment)
- ]
- }
-
-
-class GherkinLexer(RegexLexer):
- """
-    For `Gherkin <http://github.com/aslakhellesoy/gherkin/>`_ syntax.
-
- .. versionadded:: 1.2
- """
- name = 'Gherkin'
- aliases = ['cucumber', 'gherkin']
- filenames = ['*.feature']
- mimetypes = ['text/x-gherkin']
-
- feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
- feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
- examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
- step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
-
- tokens = {
- 'comments': [
- (r'^\s*#.*$', Comment),
- ],
- 'feature_elements' : [
- (step_keywords, Keyword, "step_content_stack"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'feature_elements_on_stack' : [
- (step_keywords, Keyword, "#pop:2"),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table': [
- (r"\s+\|", Keyword, 'examples_table_header'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'examples_table_header': [
- (r"\s+\|\s*$", Keyword, "#pop:2"),
- include('comments'),
- (r"\\\|", Name.Variable),
- (r"\s*\|", Keyword),
- (r"[^\|]", Name.Variable),
- ],
- 'scenario_sections_on_stack': [
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- "feature_elements_on_stack"),
- ],
- 'narrative': [
- include('scenario_sections_on_stack'),
- include('comments'),
- (r"(\s|.)", Name.Function),
- ],
- 'table_vars': [
- (r'(<[^>]+>)', Name.Variable),
- ],
- 'numbers': [
- (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
- ],
- 'string': [
- include('table_vars'),
- (r'(\s|.)', String),
- ],
- 'py_string': [
- (r'"""', Keyword, "#pop"),
- include('string'),
- ],
- 'step_content_root':[
- (r"$", Keyword, "#pop"),
- include('step_content'),
- ],
- 'step_content_stack':[
- (r"$", Keyword, "#pop:2"),
- include('step_content'),
- ],
- 'step_content':[
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- include('comments'),
- (r'(\s|.)', Name.Function),
- ],
- 'table_content': [
- (r"\s+\|\s*$", Keyword, "#pop"),
- include('comments'),
- (r"\\\|", String),
- (r"\s*\|", Keyword),
- include('string'),
- ],
- 'double_string': [
- (r'"', Name.Function, "#pop"),
- include('string'),
- ],
- 'root': [
- (r'\n', Name.Function),
- include('comments'),
- (r'"""', Keyword, "py_string"),
- (r'\s+\|', Keyword, 'table_content'),
- (r'"', Name.Function, "double_string"),
- include('table_vars'),
- include('numbers'),
- (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
- (step_keywords, bygroups(Name.Function, Keyword),
- 'step_content_root'),
- (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
- 'narrative'),
- (feature_element_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'feature_elements'),
- (examples_keywords,
- bygroups(Name.Function, Keyword, Keyword, Name.Function),
- 'examples_table'),
- (r'(\s|.)', Name.Function),
- ]
- }
-
-
-class AsymptoteLexer(RegexLexer):
- """
- For `Asymptote <http://asymptote.sf.net/>`_ source code.
-
- .. versionadded:: 1.2
- """
- name = 'Asymptote'
- aliases = ['asy', 'asymptote']
- filenames = ['*.asy']
- mimetypes = ['text/x-asymptote']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'whitespace': [
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment),
- ],
- 'statements': [
- # simple string (TeX friendly)
- (r'"(\\\\|\\"|[^"])*"', String),
- # C style string (with character escapes)
- (r"'", String, 'string'),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.]', Punctuation),
- (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)),
- (r'(and|controls|tension|atleast|curl|if|else|while|for|do|'
- r'return|break|continue|struct|typedef|new|access|import|'
- r'unravel|from|include|quote|static|public|private|restricted|'
- r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword),
-            # Since an asy type name can also be an asy function name, the
-            # following rule only tags it as Keyword.Type when it is
-            # followed by " [a-zA-Z]" (a space and a letter).
-            # Of course this is not perfect.
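-            # For instance, in "path p = (0,0)--(1,0);" the word "path" is
-            # followed by " p" and is tagged Keyword.Type, whereas the same
-            # word directly followed by "(" does not match here and falls
-            # through to the generic name rules at the end of 'statements'.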
- (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|'
- r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|'
- r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|'
- r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|'
- r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|'
- r'path3|pen|picture|point|position|projection|real|revolution|'
- r'scaleT|scientific|segment|side|slice|splitface|string|surface|'
- r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|'
- r'transformation|tree|triangle|trilinear|triple|vector|'
- r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type),
-            # Now the asy type names that are not also asy function names
-            # (except user-defined ones). Perhaps useless.
- (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|'
- r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|'
- r'picture|position|real|revolution|slice|splitface|ticksgridT|'
- r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type),
- ('[a-zA-Z_]\w*:(?!:)', Name.Label),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'root': [
- include('whitespace'),
- # functions
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')({)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation),
- 'function'),
- # function declarations
- (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments
- r'([a-zA-Z_]\w*)' # method name
- r'(\s*\([^;]*?\))' # signature
- r'(' + _ws + r')(;)',
- bygroups(using(this), Name.Function, using(this), using(this),
- Punctuation)),
- ('', Text, 'statement'),
- ],
- 'statement' : [
- include('whitespace'),
- include('statements'),
- ('[{}]', Punctuation),
- (';', Punctuation, '#pop'),
- ],
- 'function': [
- include('whitespace'),
- include('statements'),
- (';', Punctuation),
- ('{', Punctuation, '#push'),
- ('}', Punctuation, '#pop'),
- ],
- 'string': [
- (r"'", String, '#pop'),
- (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'\n', String),
- (r"[^\\'\n]+", String), # all other characters
-            (r'\\\n', String), # line continuation
-            (r'\\n', String),
- (r'\\', String), # stray backslash
- ]
- }
-
- def get_tokens_unprocessed(self, text):
- from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name and value in ASYFUNCNAME:
- token = Name.Function
- elif token is Name and value in ASYVARNAME:
- token = Name.Variable
- yield index, token, value
-
-
-class PostScriptLexer(RegexLexer):
- """
- Lexer for PostScript files.
-
- The PostScript Language Reference published by Adobe at
- <http://partners.adobe.com/public/developer/en/ps/PLRM.pdf>
- is the authority for this.
-
- .. versionadded:: 1.4
- """
- name = 'PostScript'
- aliases = ['postscript', 'postscr']
- filenames = ['*.ps', '*.eps']
- mimetypes = ['application/postscript']
-
- delimiter = r'\(\)\<\>\[\]\{\}\/\%\s'
- delimiter_end = r'(?=[%s])' % delimiter
-
- valid_name_chars = r'[^%s]' % delimiter
- valid_name = r"%s+%s" % (valid_name_chars, delimiter_end)
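-    # With the delimiter set above this expands to
-    # r'[^\(\)\<\>\[\]\{\}\/\%\s]+(?=[\(\)\<\>\[\]\{\}\/\%\s])',
-    # i.e. a run of non-delimiter characters terminated by a delimiter.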
-
- tokens = {
- 'root': [
- # All comment types
- (r'^%!.+\n', Comment.Preproc),
- (r'%%.*\n', Comment.Special),
- (r'(^%.*\n){2,}', Comment.Multiline),
- (r'%.*\n', Comment.Single),
-
- # String literals are awkward; enter separate state.
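-            # (Parentheses nest inside them, e.g. "(a (b) c)" is a single
-            # literal; the 'stringliteral' state below pushes and pops
-            # on ( and ).)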
- (r'\(', String, 'stringliteral'),
-
- (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation),
-
- # Numbers
- (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex),
- # Slight abuse: use Oct to signify any explicit base system
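-            # (radix numbers such as 2#1010 or 8#1777)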
- (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)'
- r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct),
- (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?'
- + delimiter_end, Number.Float),
- (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer),
-
- # References
- (r'\/%s' % valid_name, Name.Variable),
-
- # Names
- (valid_name, Name.Function), # Anything else is executed
-
- # These keywords taken from
- # <http://www.math.ubc.ca/~cass/graphics/manual/pdf/a1.pdf>
- # Is there an authoritative list anywhere that doesn't involve
- # trawling documentation?
-
- (r'(false|true)' + delimiter_end, Keyword.Constant),
-
- # Conditionals / flow control
- (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)'
- + delimiter_end, Keyword.Reserved),
-
- ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|'
- 'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|'
- 'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|'
- 'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|'
- 'fill|findfont|floor|get|getinterval|grestore|gsave|gt|'
- 'identmatrix|idiv|idtransform|index|invertmatrix|itransform|'
- 'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|'
- 'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|'
- 'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|'
- 'save|scale|scalefont|setdash|setfont|setgray|setlinecap|'
- 'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|'
- 'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|'
- 'syntaxerror|transform|translate|truncate|typecheck|undefined|'
- 'undefinedfilename|undefinedresult)' + delimiter_end,
- Name.Builtin),
-
- (r'\s+', Text),
- ],
-
- 'stringliteral': [
- (r'[^\(\)\\]+', String),
- (r'\\', String.Escape, 'escape'),
- (r'\(', String, '#push'),
- (r'\)', String, '#pop'),
- ],
-
- 'escape': [
-            (r'([0-7]{1,3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'),
- ],
- }
-
-
-class AutohotkeyLexer(RegexLexer):
- """
- For `autohotkey <http://www.autohotkey.com/>`_ source code.
-
- .. versionadded:: 1.4
- """
- name = 'autohotkey'
- aliases = ['ahk', 'autohotkey']
- filenames = ['*.ahk', '*.ahkl']
- mimetypes = ['text/x-autohotkey']
-
- tokens = {
- 'root': [
- (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline),
- 'incomment'),
- (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'),
-            (r'\s+;.*?$', Comment.Single),
-            (r'^;.*?$', Comment.Single),
- (r'[]{}(),;[]', Punctuation),
- (r'(in|is|and|or|not)\b', Operator.Word),
- (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInVariables'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
- include('garbage'),
- ],
- 'incomment': [
- (r'^\s*\*/', Comment.Multiline, '#pop'),
- (r'[^*/]', Comment.Multiline),
- (r'[*/]', Comment.Multiline)
- ],
- 'incontinuation': [
- (r'^\s*\)', Generic, '#pop'),
- (r'[^)]', Generic),
- (r'[)]', Generic),
- ],
- 'commands': [
- (r'(?i)^(\s*)(global|local|static|'
- r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|'
- r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|'
- r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|'
- r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|'
- r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|'
- r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|'
- r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|'
- r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|'
- r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|'
- r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|'
- r'ControlSendRaw|ControlSetText|CoordMode|Critical|'
- r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|'
- r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|'
- r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|'
- r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|'
- r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|'
- r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|'
- r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|'
- r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|'
- r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|'
- r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|'
- r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|'
- r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|'
- r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|'
- r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|'
- r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|'
- r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|'
- r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|'
- r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|'
- r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|'
- r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|'
- r'SetBatchLines|SetCapslockState|SetControlDelay|'
- r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|'
- r'SetMouseDelay|SetNumlockState|SetScrollLockState|'
- r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|'
- r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|'
- r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|'
- r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|'
- r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|'
- r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|'
- r'StringReplace|StringRight|StringSplit|StringTrimLeft|'
- r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|'
- r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|'
- r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|'
- r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|'
- r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|'
- r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|'
- r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|'
- r'WinWait)\b', bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|'
- r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|'
- r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|'
- r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|'
- r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|'
- r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|'
- r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|'
- r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|'
- r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|'
- r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|'
- r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|'
- r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|'
- r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|'
- r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|'
- r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|'
- r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b',
- Name.Function),
- ],
- 'builtInVariables': [
- (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|'
- r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|'
- r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|'
- r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|'
- r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|'
- r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|'
- r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|'
- r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|'
- r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|'
- r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|'
- r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|'
- r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|'
- r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|'
- r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|'
- r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|'
- r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|'
- r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|'
- r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|'
- r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|'
- r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|'
- r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|'
- r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|'
- r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|'
- r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|'
- r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|'
- r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|'
- r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|'
- r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|'
- r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|'
- r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b',
- Name.Variable),
- ],
- 'labels': [
- # hotkeys and labels
- # technically, hotkey names are limited to named keys and buttons
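-            # e.g. "MyLabel:" and the hotkey "^!s::" are matched by the
-            # first rule below; hotstrings such as "::btw::" by the second.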
- (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)),
- (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- # (r'.', Text), # no cheating
- ],
- }
-
-
-class MaqlLexer(RegexLexer):
- """
- Lexer for `GoodData MAQL
- <https://secure.gooddata.com/docs/html/advanced.metric.tutorial.html>`_
- scripts.
-
- .. versionadded:: 1.4
- """
-
- name = 'MAQL'
- aliases = ['maql']
- filenames = ['*.maql']
- mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # IDENTITY
- (r'IDENTIFIER\b', Name.Builtin),
- # IDENTIFIER
- (r'\{[^}]+\}', Name.Variable),
- # NUMBER
- (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
- # STRING
- (r'"', Literal.String, 'string-literal'),
- # RELATION
- (r'\<\>|\!\=', Operator),
- (r'\=|\>\=|\>|\<\=|\<', Operator),
- # :=
- (r'\:\=', Operator),
- # OBJECT
- (r'\[[^]]+\]', Name.Variable.Class),
- # keywords
- (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|'
- r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|'
- r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|'
- r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|'
- r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|'
- r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|'
- r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|'
- r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|'
- r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|'
- r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword),
- # FUNCNAME
- (r'[a-z]\w*\b', Name.Function),
- # Comments
- (r'#.*', Comment.Single),
- # Punctuation
- (r'[,;\(\)]', Token.Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', Literal.String, '#pop'),
- (r'[^\\"]+', Literal.String)
- ],
- }
-
-
-class GoodDataCLLexer(RegexLexer):
- """
- Lexer for `GoodData-CL <http://github.com/gooddata/GoodData-CL/raw/master/cli/src/main/resources/com/gooddata/processor/COMMANDS.txt>`_
- script files.
-
- .. versionadded:: 1.4
- """
-
- name = 'GoodData-CL'
- aliases = ['gooddata-cl']
- filenames = ['*.gdc']
- mimetypes = ['text/x-gooddata-cl']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- # Comments
- (r'#.*', Comment.Single),
- # Function call
- (r'[a-z]\w*', Name.Function),
- # Argument list
- (r'\(', Token.Punctuation, 'args-list'),
- # Punctuation
- (r';', Token.Punctuation),
- # Space is not significant
- (r'\s+', Text)
- ],
- 'args-list': [
- (r'\)', Token.Punctuation, '#pop'),
- (r',', Token.Punctuation),
- (r'[a-z]\w*', Name.Variable),
- (r'=', Operator),
- (r'"', Literal.String, 'string-literal'),
- (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number),
- # Space is not significant
- (r'\s', Text)
- ],
- 'string-literal': [
- (r'\\[tnrfbae"\\]', String.Escape),
- (r'"', Literal.String, '#pop'),
- (r'[^\\"]+', Literal.String)
- ]
- }
-
-
-class ProtoBufLexer(RegexLexer):
- """
- Lexer for `Protocol Buffer <http://code.google.com/p/protobuf/>`_
- definition files.
-
- .. versionadded:: 1.4
- """
-
- name = 'Protocol Buffer'
- aliases = ['protobuf', 'proto']
- filenames = ['*.proto']
-
- tokens = {
- 'root': [
- (r'[ \t]+', Text),
- (r'[,;{}\[\]\(\)]', Punctuation),
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'\b(import|option|optional|required|repeated|default|packed|'
- r'ctype|extensions|to|max|rpc|returns)\b', Keyword),
- (r'(int32|int64|uint32|uint64|sint32|sint64|'
- r'fixed32|fixed64|sfixed32|sfixed64|'
- r'float|double|bool|string|bytes)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'),
- (r'(message|extend)(\s+)',
- bygroups(Keyword.Declaration, Text), 'message'),
- (r'(enum|group|service)(\s+)',
- bygroups(Keyword.Declaration, Text), 'type'),
- (r'\".*\"', String),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'(\-?(inf|nan))', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'[+-=]', Operator),
- (r'([a-zA-Z_][\w\.]*)([ \t]*)(=)',
- bygroups(Name.Attribute, Text, Operator)),
- ('[a-zA-Z_][\w\.]*', Name),
- ],
- 'package': [
- (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
- ],
- 'message': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'type': [
- (r'[a-zA-Z_]\w*', Name, '#pop')
- ],
- }
-
-
-class HybrisLexer(RegexLexer):
- """
- For `Hybris <http://www.hybris-lang.org>`_ source code.
-
- .. versionadded:: 1.4
- """
-
- name = 'Hybris'
- aliases = ['hybris', 'hy']
- filenames = ['*.hy', '*.hyb']
- mimetypes = ['text/x-hybris', 'application/x-hybris']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- # method names
- (r'^(\s*(?:function|method|operator\s+)+?)'
- r'([a-zA-Z_]\w*)'
- r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
- (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
- r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
- (r'(extends|private|protected|public|static|throws|function|method|'
- r'operator)\b', Keyword.Declaration),
- (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
- r'__INC_PATH__)\b', Keyword.Constant),
- (r'(class|struct)(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'(import|include)(\s+)',
- bygroups(Keyword.Namespace, Text), 'import'),
- (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|'
- r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|'
- r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|'
- r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|'
- r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|'
- r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|'
- r'user_functions|dyn_functions|methods|call|call_method|mknod|'
- r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|'
- r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|'
- r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|'
- r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|'
- r'smtp_send|http_get|http_post|http_download|socket|bind|listen|'
- r'accept|getsockname|getpeername|settimeout|connect|server|recv|'
- r'send|close|print|println|printf|input|readline|serial_open|'
- r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|'
- r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|'
- r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|'
- r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|'
- r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|'
- r'remove|contains|join)\b', Name.Builtin),
- (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|'
- r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|'
- r'Exception)\b', Keyword.Type),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
- (r'(\.)([a-zA-Z_]\w*)',
- bygroups(Operator, Name.Attribute)),
- (r'[a-zA-Z_]\w*:', Name.Label),
- (r'[a-zA-Z_\$]\w*', Name),
- (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+L?', Number.Integer),
- (r'\n', Text),
- ],
- 'class': [
- (r'[a-zA-Z_]\w*', Name.Class, '#pop')
- ],
- 'import': [
- (r'[\w.]+\*?', Name.Namespace, '#pop')
- ],
- }
-
-
-class AwkLexer(RegexLexer):
- """
- For Awk scripts.
-
- .. versionadded:: 1.5
- """
-
- name = 'Awk'
- aliases = ['awk', 'gawk', 'mawk', 'nawk']
- filenames = ['*.awk']
- mimetypes = ['application/x-awk']
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'#.*$', Comment.Single)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'\B', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
- r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(break|continue|do|while|exit|for|if|else|'
- r'return)\b', Keyword, 'slashstartsregex'),
- (r'function\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
- r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
- r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
- r'delete|system)\b', Keyword.Reserved),
- (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|'
- r'IGNORECASE|NF|NR|OFMT|OFS|ORS|RLENGTH|RS|RSTART|RT|'
- r'SUBSEP)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class Cfengine3Lexer(RegexLexer):
- """
- Lexer for `CFEngine3 <http://cfengine.org>`_ policy files.
-
- .. versionadded:: 1.5
- """
-
- name = 'CFEngine3'
- aliases = ['cfengine3', 'cf3']
- filenames = ['*.cf']
- mimetypes = []
-
- tokens = {
- 'root': [
- (r'#.*?\n', Comment),
- (r'(body)(\s+)(\S+)(\s+)(control)',
- bygroups(Keyword, Text, Keyword, Text, Keyword)),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
- bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation),
- 'arglist'),
- (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
- bygroups(Keyword, Text, Keyword, Text, Name.Function)),
- (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
- bygroups(Punctuation,Name.Variable,Punctuation,
- Text,Keyword.Type,Text,Operator,Text)),
- (r'(\S+)(\s*)(=>)(\s*)',
- bygroups(Keyword.Reserved,Text,Operator,Text)),
- (r'"', String, 'string'),
- (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
- (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)),
- (r'(\w+)(:)', bygroups(Keyword.Declaration,Punctuation)),
- (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable),
- (r'[(){},;]', Punctuation),
- (r'=>', Operator),
- (r'->', Operator),
- (r'\d+\.\d+', Number.Float),
- (r'\d+', Number.Integer),
- (r'\w+', Name.Function),
- (r'\s+', Text),
- ],
- 'string': [
- (r'\$[\{\(]', String.Interpol, 'interpol'),
- (r'\\.', String.Escape),
- (r'"', String, '#pop'),
- (r'\n', String),
- (r'.', String),
- ],
- 'interpol': [
- (r'\$[\{\(]', String.Interpol, '#push'),
- (r'[\}\)]', String.Interpol, '#pop'),
- (r'[^\$\{\(\)\}]+', String.Interpol),
- ],
- 'arglist': [
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation),
- (r'\w+', Name.Variable),
- (r'\s+', Text),
- ],
- }
-
-
-class SnobolLexer(RegexLexer):
- """
- Lexer for the SNOBOL4 programming language.
-
- Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
- Does not require spaces around binary operators.
-
- .. versionadded:: 1.5
- """
-
- name = "Snobol"
- aliases = ["snobol"]
- filenames = ['*.snobol']
- mimetypes = ['text/x-snobol']
-
- tokens = {
- # root state, start of line
- # comments, continuation lines, and directives start in column 1
- # as do labels
- 'root': [
- (r'\*.*\n', Comment),
- (r'[\+\.] ', Punctuation, 'statement'),
- (r'-.*\n', Comment),
- (r'END\s*\n', Name.Label, 'heredoc'),
- (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'),
- (r'\s+', Text, 'statement'),
- ],
- # statement state, line after continuation or label
- 'statement': [
- (r'\s*\n', Text, '#pop'),
- (r'\s+', Text),
- (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
- r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
- r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
- r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
- Name.Builtin),
- (r'[A-Za-z][\w\.]*', Name),
- # ASCII equivalents of original operators
- # | for the EBCDIC equivalent, ! likewise
- # \ for EBCDIC negation
- (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator),
- (r'"[^"]*"', String),
- (r"'[^']*'", String),
- # Accept SPITBOL syntax for real numbers
- # as well as Macro SNOBOL4
- (r'[0-9]+(?=[^\.EeDd])', Number.Integer),
- (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
- # Goto
- (r':', Punctuation, 'goto'),
- (r'[\(\)<>,;]', Punctuation),
- ],
- # Goto block
- 'goto': [
- (r'\s*\n', Text, "#pop:2"),
- (r'\s+', Text),
- (r'F|S', Keyword),
- (r'(\()([A-Za-z][\w.]*)(\))',
- bygroups(Punctuation, Name.Label, Punctuation))
- ],
- # everything after the END statement is basically one
- # big heredoc.
- 'heredoc': [
- (r'.*\n', String.Heredoc)
- ]
- }
-
-
-class UrbiscriptLexer(ExtendedRegexLexer):
- """
- For UrbiScript source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'UrbiScript'
- aliases = ['urbiscript']
- filenames = ['*.u']
- mimetypes = ['application/x-urbiscript']
-
- flags = re.DOTALL
-
- ## TODO
- # - handle Experimental and deprecated tags with specific tokens
- # - handle Angles and Durations with specific tokens
-
- def blob_callback(lexer, match, ctx):
- text_before_blob = match.group(1)
- blob_start = match.group(2)
- blob_size_str = match.group(3)
- blob_size = int(blob_size_str)
- yield match.start(), String, text_before_blob
- ctx.pos += len(text_before_blob)
-
- # if blob size doesn't match blob format (example : "\B(2)(aaa)")
- # yield blob as a string
- if ctx.text[match.end() + blob_size] != ")":
- result = "\\B(" + blob_size_str + ")("
- yield match.start(), String, result
- ctx.pos += len(result)
- return
-
- # if the blob is well formatted, yield it as a String.Escape token
- # (see the usage sketch after this class)
- blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
- yield match.start(), String.Escape, blob_text
- ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- # comments
- (r'//.*?\n', Comment),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'(?:every|for|loop|while)(?:;|&|\||,)',Keyword),
- (r'(?:assert|at|break|case|catch|closure|compl|continue|'
- r'default|else|enum|every|external|finally|for|freezeif|if|new|'
- r'onleave|return|stopif|switch|this|throw|timeout|try|'
- r'waituntil|whenever|while)\b', Keyword),
- (r'(?:asm|auto|bool|char|const_cast|delete|double|dynamic_cast|'
- r'explicit|export|extern|float|friend|goto|inline|int|'
- r'long|mutable|namespace|register|reinterpret_cast|short|'
- r'signed|sizeof|static_cast|struct|template|typedef|typeid|'
- r'typename|union|unsigned|using|virtual|volatile|'
- r'wchar_t)\b', Keyword.Reserved),
- # deprecated keywords, use a meaningful token when available
- (r'(?:emit|foreach|internal|loopn|static)\b', Keyword),
- # ignored keywords, use a meaningful token when available
- (r'(?:private|protected|public)\b', Keyword),
- (r'(?:var|do|const|function|class)\b', Keyword.Declaration),
- (r'(?:true|false|nil|void)\b', Keyword.Constant),
- (r'(?:Barrier|Binary|Boolean|CallMessage|Channel|Code|'
- r'Comparable|Container|Control|Date|Dictionary|Directory|'
- r'Duration|Enumeration|Event|Exception|Executable|File|Finalizable|'
- r'Float|FormatInfo|Formatter|Global|Group|Hash|InputStream|'
- r'IoService|Job|Kernel|Lazy|List|Loadable|Lobby|Location|Logger|Math|'
- r'Mutex|nil|Object|Orderable|OutputStream|Pair|Path|Pattern|Position|'
- r'Primitive|Process|Profile|PseudoLazy|PubSub|RangeIterable|Regexp|'
- r'Semaphore|Server|Singleton|Socket|StackFrame|Stream|String|System|'
- r'Tag|Timeout|Traceable|TrajectoryGenerator|Triplet|Tuple'
- r'|UObject|UValue|UVar)\b', Name.Builtin),
- (r'(?:this)\b', Name.Builtin.Pseudo),
- # don't match single | and &
- (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
- (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
- Operator.Word),
- (r'[{}\[\]()]+', Punctuation),
- (r'(?:;|\||,|&|\?|!)+', Punctuation),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- # Float, Integer, Angle and Duration
- (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
- r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
- # handle binary blob in strings
- (r'"', String.Double, "string.double"),
- (r"'", String.Single, "string.single"),
- ],
- 'string.double': [
- (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
- (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
- ],
- 'string.single': [
- (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
- (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
- ],
- # from http://pygments.org/docs/lexerdevelopment/#changing-states
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline),
- ]
- }
-
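- # A minimal sketch of how blob_callback above behaves (assuming pygments is
- # installed; the sample strings and variable names are invented for
- # illustration).  A declared size that matches the payload comes out as a
- # single String.Escape token, a mismatched size falls back to plain String.
- from pygments.token import String
- _urbi = UrbiscriptLexer()
- _good = 'var x = "head \\B(3)(abc) tail";'   # declared size matches "abc"
- _bad = 'var x = "head \\B(2)(abc) tail";'    # declared size is wrong
- print(String.Escape in [t for t, v in _urbi.get_tokens(_good)])   # True
- print(String.Escape in [t for t, v in _urbi.get_tokens(_bad)])    # False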
-
-class OpenEdgeLexer(RegexLexer):
- """
- Lexer for `OpenEdge ABL (formerly Progress)
- <http://web.progress.com/en/openedge/abl.html>`_ source code.
-
- .. versionadded:: 1.5
- """
- name = 'OpenEdge ABL'
- aliases = ['openedge', 'abl', 'progress']
- filenames = ['*.p', '*.cls']
- mimetypes = ['text/x-openedge', 'application/x-openedge']
-
- types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|'
- r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|'
- r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|'
- r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|'
- r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))')
-
- keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(' +
- r'|'.join(OPENEDGEKEYWORDS) +
- r')\s*($|(?=[^0-9a-z_\-]))')
- tokens = {
- 'root': [
- (r'/\*', Comment.Multiline, 'comment'),
- (r'\{', Comment.Preproc, 'preprocessor'),
- (r'\s*&.*', Comment.Preproc),
- (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration),
- (types, Keyword.Type),
- (keywords, Name.Builtin),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'[0-9]+', Number.Integer),
- (r'\s+', Text),
- (r'[+*/=-]', Operator),
- (r'[.:()]', Punctuation),
- (r'.', Name.Variable), # Lazy catch-all
- ],
- 'comment': [
- (r'[^*/]', Comment.Multiline),
- (r'/\*', Comment.Multiline, '#push'),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'[*/]', Comment.Multiline)
- ],
- 'preprocessor': [
- (r'[^{}]', Comment.Preproc),
- (r'{', Comment.Preproc, '#push'),
- (r'}', Comment.Preproc, '#pop'),
- ],
- }
-
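- # A minimal sketch of why the type/keyword patterns above roll their own
- # word boundaries instead of \b (assuming pygments is installed; the sample
- # code is invented): ABL names may contain "-", so a type name embedded in
- # a hyphenated identifier should not light up as Keyword.Type.
- from pygments.token import Keyword
- _abl = OpenEdgeLexer()
- print(any(t is Keyword.Type for t, v in _abl.get_tokens('DEFINE VARIABLE i AS INTEGER.')))  # True
- print(any(t is Keyword.Type for t, v in _abl.get_tokens('x-integer')))                      # False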
-
-class BroLexer(RegexLexer):
- """
- For `Bro <http://bro-ids.org/>`_ scripts.
-
- .. versionadded:: 1.5
- """
- name = 'Bro'
- aliases = ['bro']
- filenames = ['*.bro']
-
- _hex = r'[0-9a-fA-F_]+'
- _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?'
- _h = r'[A-Za-z0-9][-A-Za-z0-9]*'
-
- tokens = {
- 'root': [
- # Whitespace
- (r'^@.*?\n', Comment.Preproc),
- (r'#.*?\n', Comment.Single),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text),
- # Keywords
- (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event'
- r'|export|for|function|if|global|hook|local|module|next'
- r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword),
- (r'(addr|any|bool|count|counter|double|file|int|interval|net'
- r'|pattern|port|record|set|string|subnet|table|time|timer'
- r'|vector)\b', Keyword.Type),
- (r'(T|F)\b', Keyword.Constant),
- (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire'
- r'|default|disable_print_hook|raw_output|encrypt|group|log'
- r'|mergeable|optional|persistent|priority|redef'
- r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation,
- Keyword)),
- (r'\s+module\b', Keyword.Namespace),
- # Addresses, ports and networks
- (r'\d+/(tcp|udp|icmp|unknown)\b', Number),
- (r'(\d+\.){3}\d+', Number),
- (r'(' + _hex + r'){7}' + _hex, Number),
- (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number),
- (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number),
- (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number),
- # Hostnames
- (_h + r'(\.' + _h + r')+', String),
- # Numeric
- (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date),
- (r'0[xX]' + _hex, Number.Hex),
- (_float, Number.Float),
- (r'\d+', Number.Integer),
- (r'/', String.Regex, 'regex'),
- (r'"', String, 'string'),
- # Operators
- (r'[!%*/+:<=>?~|-]', Operator),
- (r'([-+=&|]{2}|[+=!><-]=)', Operator),
- (r'(in|match)\b', Operator.Word),
- (r'[{}()\[\]$.,;]', Punctuation),
- # Identifier
- (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)),
- (r'[a-zA-Z_][a-zA-Z_0-9]*', Name)
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String),
- (r'\\\n', String),
- (r'\\', String)
- ],
- 'regex': [
- (r'/', String.Regex, '#pop'),
- (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here.
- (r'[^\\/\n]+', String.Regex),
- (r'\\\n', String.Regex),
- (r'\\', String.Regex)
- ]
- }
-
-
-class CbmBasicV2Lexer(RegexLexer):
- """
- For CBM BASIC V2 sources.
-
- .. versionadded:: 1.6
- """
- name = 'CBM BASIC V2'
- aliases = ['cbmbas']
- filenames = ['*.bas']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'rem.*\n', Comment.Single),
- (r'\s+', Text),
- (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont'
- r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?'
- r'|list|clr|cmd|open|close|get#?', Keyword.Reserved),
- (r'data|restore|dim|let|def|fn', Keyword.Declaration),
- (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn'
- r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin),
- (r'[-+*/^<>=]', Operator),
- (r'not|and|or', Operator.Word),
- (r'"[^"\n]*.', String),
- (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float),
- (r'[\(\),:;]', Punctuation),
- (r'\w+[$%]?', Name),
- ]
- }
-
- def analyse_text(text):
- # if it starts with a line number, it shouldn't be a "modern" Basic
- # like VB.net
- if re.match(r'\d+', text):
- return True
-
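- # A minimal sketch of the heuristic above (assuming pygments is installed;
- # the snippets are invented): only input that opens with a BASIC-style line
- # number is claimed.
- print(CbmBasicV2Lexer.analyse_text('10 PRINT "HI"\n'))     # truthy score
- print(CbmBasicV2Lexer.analyse_text('Dim x As Integer\n'))  # falsy score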
-
-class MscgenLexer(RegexLexer):
- """
- For `Mscgen <http://www.mcternan.me.uk/mscgen/>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Mscgen'
- aliases = ['mscgen', 'msc']
- filenames = ['*.msc']
-
- _var = r'(\w+|"(?:\\"|[^"])*")'
-
- tokens = {
- 'root': [
- (r'msc\b', Keyword.Type),
- # Options
- (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS'
- r'|arcgradient|ARCGRADIENT)\b', Name.Property),
- # Operators
- (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word),
- (r'(\.|-|\|){3}', Keyword),
- (r'(?:-|=|\.|:){2}'
- r'|<<=>>|<->|<=>|<<>>|<:>'
- r'|->|=>>|>>|=>|:>|-x|-X'
- r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator),
- # Names
- (r'\*', Name.Builtin),
- (_var, Name.Variable),
- # Other
- (r'\[', Punctuation, 'attrs'),
- (r'\{|\}|,|;', Punctuation),
- include('comments')
- ],
- 'attrs': [
- (r'\]', Punctuation, '#pop'),
- (_var + r'(\s*)(=)(\s*)' + _var,
- bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace,
- String)),
- (r',', Punctuation),
- include('comments')
- ],
- 'comments': [
- (r'(?://|#).*?\n', Comment.Single),
- (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
- (r'[ \t\r\n]+', Text.Whitespace)
- ]
- }
-
-
-def _rx_indent(level):
- # Kconfig *always* interprets a tab as 8 spaces, so this is the default.
- # Edit this if you are in an environment where KconfigLexer gets expanded
- # input (tabs expanded to spaces) and the expansion tab width is != 8,
- # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width).
- # Value range here is 2 <= {tab_width} <= 8.
- tab_width = 8
- # Regex matching a given indentation {level}, assuming that indentation is
- # a multiple of {tab_width}. In other cases there might be problems.
- return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level)
-
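- # A quick illustration of what _rx_indent() produces for the default tab
- # width of 8: level 2 requires two indentation chunks, each of them a tab,
- # spaces followed by a tab, or eight plain spaces.
- print(_rx_indent(2))   # (?:\t| {1,7}\t| {8}){2}.*\n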
-
-class KconfigLexer(RegexLexer):
- """
- For Linux-style Kconfig files.
-
- .. versionadded:: 1.6
- """
-
- name = 'Kconfig'
- aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config']
- # Adjust this if new kconfig file names appear in your environment
- filenames = ['Kconfig', '*Config.in*', 'external.in*',
- 'standard-modules.in']
- mimetypes = ['text/x-kconfig']
- # No re.MULTILINE, indentation-aware help text needs line-by-line handling
- flags = 0
-
- def call_indent(level):
- # If indentation >= {level} is detected, enter state 'indent{level}'
- return (_rx_indent(level), String.Doc, 'indent%s' % level)
-
- def do_indent(level):
- # Print paragraphs of indentation level >= {level} as String.Doc,
- # ignoring blank lines. Then return to 'root' state.
- return [
- (_rx_indent(level), String.Doc),
- (r'\s*\n', Text),
- default('#pop:2')
- ]
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'(mainmenu|config|menuconfig|choice|endchoice|comment|menu|'
- r'endmenu|visible if|if|endif|source|prompt|select|depends on|'
- r'default|range|option)\b', Keyword),
- (r'(---help---|help)[\t ]*\n', Keyword, 'help'),
- (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b',
- Name.Builtin),
- (r'[!=&|]', Operator),
- (r'[()]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r'\S+', Text),
- ],
- # Help text is indented, multi-line and ends when a lower indentation
- # level is detected.
- 'help': [
- # Skip blank lines after help token, if any
- (r'\s*\n', Text),
- # Determine the first help line's indentation level heuristically(!).
- # Attention: this is not perfect, but works for 99% of "normal"
- # indentation schemes up to a max. indentation level of 7.
- call_indent(7),
- call_indent(6),
- call_indent(5),
- call_indent(4),
- call_indent(3),
- call_indent(2),
- call_indent(1),
- ('', Text, '#pop'), # for incomplete help sections without text
- ],
- # Handle text for indentation levels 7 to 1
- 'indent7': do_indent(7),
- 'indent6': do_indent(6),
- 'indent5': do_indent(5),
- 'indent4': do_indent(4),
- 'indent3': do_indent(3),
- 'indent2': do_indent(2),
- 'indent1': do_indent(1),
- }
-
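- # A minimal sketch of the help-text handling above (assuming pygments is
- # installed; the Kconfig fragment is invented): the 'help' state picks an
- # 'indentN' state from the first indented line and then emits every line at
- # that indentation or deeper as String.Doc.
- from pygments.token import String
- _kconfig_src = 'config DEMO\n\tbool "demo option"\n\thelp\n\t  An indented help paragraph.\n'
- print([v for t, v in KconfigLexer().get_tokens(_kconfig_src) if t is String.Doc])
- # expected output: ['\t  An indented help paragraph.\n']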
-
-class VGLLexer(RegexLexer):
- """
- For `SampleManager VGL <http://www.thermoscientific.com/samplemanager>`_
- source code.
-
- .. versionadded:: 1.6
- """
- name = 'VGL'
- aliases = ['vgl']
- filenames = ['*.rpf']
-
- flags = re.MULTILINE | re.DOTALL | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\{[^\}]*\}', Comment.Multiline),
- (r'declare', Keyword.Constant),
- (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object'
- r'|create|on|line|with|global|routine|value|endroutine|constant'
- r'|global|set|join|library|compile_option|file|exists|create|copy'
- r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])',
- Keyword),
- (r'(true|false|null|empty|error|locked)', Keyword.Constant),
- (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator),
- (r'"[^"]*"', String),
- (r'(\.)([a-z_\$][\w\$]*)', bygroups(Operator, Name.Attribute)),
- (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number),
- (r'[a-z_\$][\w\$]*', Name),
- (r'[\r\n]+', Text),
- (r'\s+', Text)
- ]
- }
-
-
-class SourcePawnLexer(RegexLexer):
- """
- For SourcePawn source code with preprocessor directives.
-
- .. versionadded:: 1.6
- """
- name = 'SourcePawn'
- aliases = ['sp']
- filenames = ['*.sp']
- mimetypes = ['text/x-sourcepawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(case|const|continue|native|'
- r'default|else|enum|for|if|new|operator|'
- r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- SM_TYPES = set(['Action', 'bool', 'Float', 'Plugin', 'String', 'any',
- 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
- 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
- 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
- 'ConVarBounds', 'QueryCookie', 'ReplySource',
- 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
- 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
- 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
- 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
- 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
- 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
- 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
- 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
- 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
- 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
- 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
- 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
- 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
- 'TopMenuPosition', 'TopMenuObject', 'UserMsg'])
-
- def __init__(self, **options):
- self.smhighlighting = get_bool_opt(options,
- 'sourcemod', True)
-
- self._functions = set()
- if self.smhighlighting:
- from pygments.lexers._sourcemodbuiltins import FUNCTIONS
- self._functions.update(FUNCTIONS)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name:
- if self.smhighlighting:
- if value in self.SM_TYPES:
- token = Keyword.Type
- elif value in self._functions:
- token = Name.Builtin
- yield index, token, value
-
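- # A minimal sketch of the 'sourcemod' option handled above (assuming
- # pygments is installed; the one-line sample is invented): with the default
- # setting the overridden get_tokens_unprocessed() re-tags known SourceMod
- # names, while sourcemod=False leaves them as plain Name tokens.
- print([t for t, v in SourcePawnLexer().get_tokens('new Handle:h;') if v == 'Handle'])
- print([t for t, v in SourcePawnLexer(sourcemod=False).get_tokens('new Handle:h;') if v == 'Handle'])
- # expected: [Token.Keyword.Type] for the first call, [Token.Name] for the second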
-
-class PuppetLexer(RegexLexer):
- """
- For `Puppet <http://puppetlabs.com/>`__ configuration DSL.
-
- .. versionadded:: 1.6
- """
- name = 'Puppet'
- aliases = ['puppet']
- filenames = ['*.pp']
-
- tokens = {
- 'root': [
- include('comments'),
- include('keywords'),
- include('names'),
- include('numbers'),
- include('operators'),
- include('strings'),
-
- (r'[]{}:(),;[]', Punctuation),
- (r'[^\S\n]+', Text),
- ],
-
- 'comments': [
- (r'\s*#.*$', Comment),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- ],
-
- 'operators': [
- (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator),
- (r'(in|and|or|not)\b', Operator.Word),
- ],
-
- 'names': [
- ('[a-zA-Z_]\w*', Name.Attribute),
- (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation,
- String, Punctuation)),
- (r'\$\S+', Name.Variable),
- ],
-
- 'numbers': [
- # Copypasta from the Python lexer
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
- (r'0[0-7]+j?', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+j?', Number.Integer)
- ],
-
- 'keywords': [
- # Left out 'group' and 'require' since they're often used as attributes
- (r'(?i)(absent|alert|alias|audit|augeas|before|case|check|class|'
- r'computer|configured|contained|create_resources|crit|cron|debug|'
- r'default|define|defined|directory|else|elsif|emerg|err|exec|'
- r'extlookup|fail|false|file|filebucket|fqdn_rand|generate|host|if|'
- r'import|include|info|inherits|inline_template|installed|'
- r'interface|k5login|latest|link|loglevel|macauthorization|'
- r'mailalias|maillist|mcx|md5|mount|mounted|nagios_command|'
- r'nagios_contact|nagios_contactgroup|nagios_host|'
- r'nagios_hostdependency|nagios_hostescalation|nagios_hostextinfo|'
- r'nagios_hostgroup|nagios_service|nagios_servicedependency|'
- r'nagios_serviceescalation|nagios_serviceextinfo|'
- r'nagios_servicegroup|nagios_timeperiod|node|noop|notice|notify|'
- r'package|present|purged|realize|regsubst|resources|role|router|'
- r'running|schedule|scheduled_task|search|selboolean|selmodule|'
- r'service|sha1|shellquote|split|sprintf|ssh_authorized_key|sshkey|'
- r'stage|stopped|subscribe|tag|tagged|template|tidy|true|undef|'
- r'unmounted|user|versioncmp|vlan|warning|yumrepo|zfs|zone|'
- r'zpool)\b', Keyword),
- ],
-
- 'strings': [
- (r'"([^"])*"', String),
- (r"'(\\'|[^'])*'", String),
- ],
-
- }
-
-
-class NSISLexer(RegexLexer):
- """
- For `NSIS <http://nsis.sourceforge.net/>`_ scripts.
-
- .. versionadded:: 1.6
- """
- name = 'NSIS'
- aliases = ['nsis', 'nsi', 'nsh']
- filenames = ['*.nsi', '*.nsh']
- mimetypes = ['text/x-nsis']
-
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'[;\#].*\n', Comment),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'str_double'),
- (r'`', String.Backtick, 'str_backtick'),
- include('macro'),
- include('interpol'),
- include('basic'),
- (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo),
- (r'/[a-z_]\w*', Name.Attribute),
- ('.', Text),
- ],
- 'basic': [
- (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b',
- bygroups(Text, Keyword, Text, Name.Function)),
- (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b',
- bygroups(Keyword.Namespace, Punctuation, Name.Function)),
- (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)),
- (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator),
- (r'[|+-]', Operator),
- (r'\\', Punctuation),
- (r'\b(Abort|Add(?:BrandingImage|Size)|'
- r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|'
- r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|'
- r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|'
- r'ComponentText|CopyFiles|CRCCheck|'
- r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|'
- r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|'
- r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|'
- r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|'
- r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|'
- r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|'
- r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|'
- r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|'
- r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|'
- r'InstDirError|LabelAddress|TempFileName)|'
- r'Goto|HideWindow|Icon|'
- r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|'
- r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|'
- r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|'
- r'IsWindow|LangString(?:UP)?|'
- r'License(?:BkColor|Data|ForceSelection|LangString|Text)|'
- r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|'
- r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|'
- r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|'
- r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|'
- r'Return|RMDir|SearchPath|Section(?:Divider|End|'
- r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|'
- r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|'
- r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|'
- r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|'
- r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|'
- r'Silent|StaticBkColor)|'
- r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|'
- r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|'
- r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|'
- r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|'
- r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|'
- r'XPStyle)\b', Keyword),
- (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?'
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|'
- r'HK(CC|CR|CU|DD|LM|PD|U)|'
- r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|'
- r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|'
- r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|'
- r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|'
- r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|'
- r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|'
- r'YESNO(?:CANCEL)?)|SET|SHCTX|'
- r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|'
- r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|'
- r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|'
- r'listonly|lzma|nevershow|none|normal|off|on|pop|push|'
- r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|'
- r'true|try|user|zlib)\b', Name.Constant),
- ],
- 'macro': [
- (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|'
- r'delfile|echo(?:message)?|else|endif|error|execute|'
- r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|'
- r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|'
- r'warning)\b', Comment.Preproc),
- ],
- 'interpol': [
- (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers
- (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|'
- r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|'
- r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|'
- r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|'
- r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|'
- r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})',
- Name.Builtin),
- (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global),
- (r'\$[a-z_]\w*', Name.Variable),
- ],
- 'str_double': [
- (r'"', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'str_backtick': [
- (r'`', String, '#pop'),
- (r'\$(\\[nrt"]|\$)', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- }
-
-
-class RPMSpecLexer(RegexLexer):
- """
- For RPM ``.spec`` files.
-
- .. versionadded:: 1.6
- """
-
- name = 'RPMSpec'
- aliases = ['spec']
- filenames = ['*.spec']
- mimetypes = ['text/x-rpm-spec']
-
- _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|'
- 'post[a-z]*|trigger[a-z]*|files)')
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment),
- include('basic'),
- ],
- 'description': [
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'changelog': [
- (r'\*.*\n', Generic.Subheading),
- (r'^(%' + _directives + ')(.*)$',
- bygroups(Name.Decorator, Text), '#pop'),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- include('interpol'),
- (r'.', String.Double),
- ],
- 'basic': [
- include('macro'),
- (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|'
- r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|'
- r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|'
- r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$',
- bygroups(Generic.Heading, Punctuation, using(this))),
- (r'^%description', Name.Decorator, 'description'),
- (r'^%changelog', Name.Decorator, 'changelog'),
- (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)),
- (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|'
- r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)',
- Keyword),
- include('interpol'),
- (r"'.*?'", String.Single),
- (r'"', String.Double, 'string'),
- (r'.', Text),
- ],
- 'macro': [
- (r'%define.*\n', Comment.Preproc),
- (r'%\{\!\?.*%define.*\}', Comment.Preproc),
- (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$',
- bygroups(Comment.Preproc, Text)),
- ],
- 'interpol': [
- (r'%\{?__[a-z_]+\}?', Name.Function),
- (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo),
- (r'%\{\?\w+\}', Name.Variable),
- (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global),
- (r'%\{[a-zA-Z]\w+\}', Keyword.Constant),
- ]
- }
-
-
-class AutoItLexer(RegexLexer):
- """
- For `AutoIt <http://www.autoitscript.com/site/autoit/>`_ files.
-
- AutoIt is a freeware BASIC-like scripting language
- designed for automating the Windows GUI and general scripting.
-
- .. versionadded:: 1.6
- """
- name = 'AutoIt'
- aliases = ['autoit']
- filenames = ['*.au3']
- mimetypes = ['text/x-autoit']
-
- # Keywords, functions, macros from au3.keywords.properties
- # which can be found in the AutoIt installation directory, e.g.
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties
-
- keywords = """\
- #include-once #include #endregion #forcedef #forceref #region
- and byref case continueloop dim do else elseif endfunc endif
- endselect exit exitloop for func global
- if local next not or return select step
- then to until wend while exit""".split()
-
- functions = """\
- abs acos adlibregister adlibunregister asc ascw asin assign atan
- autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen
- binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor
- blockinput break call cdtray ceiling chr chrw clipget clipput consoleread
- consolewrite consolewriteerror controlclick controlcommand controldisable
- controlenable controlfocus controlgetfocus controlgethandle controlgetpos
- controlgettext controlhide controllistview controlmove controlsend
- controlsettext controlshow controltreeview cos dec dircopy dircreate
- dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree
- dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate
- dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata
- drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype
- drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree
- drivespacetotal drivestatus envget envset envupdate eval execute exp
- filechangedir fileclose filecopy filecreatentfslink filecreateshortcut
- filedelete fileexists filefindfirstfile filefindnextfile fileflush
- filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut
- filegetshortname filegetsize filegettime filegetversion fileinstall filemove
- fileopen fileopendialog fileread filereadline filerecycle filerecycleempty
- filesavedialog fileselectfolder filesetattrib filesetpos filesettime
- filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi
- guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo
- guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy
- guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon
- guictrlcreateinput guictrlcreatelabel guictrlcreatelist
- guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu
- guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj
- guictrlcreatepic guictrlcreateprogress guictrlcreateradio
- guictrlcreateslider guictrlcreatetab guictrlcreatetabitem
- guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown
- guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg
- guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy
- guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata
- guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic
- guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos
- guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete
- guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators
- guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon
- guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset
- httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize
- inetread inidelete iniread inireadsection inireadsectionnames
- inirenamesection iniwrite iniwritesection inputbox int isadmin isarray
- isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword
- isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag
- mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox
- number objcreate objcreateinterface objevent objevent objget objname
- onautoitexitregister onautoitexitunregister opt ping pixelchecksum
- pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists
- processgetstats processlist processsetpriority processwait processwaitclose
- progressoff progresson progressset ptr random regdelete regenumkey
- regenumval regread regwrite round run runas runaswait runwait send
- sendkeepactive seterror setextended shellexecute shellexecutewait shutdown
- sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton
- sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread
- string stringaddcr stringcompare stringformat stringfromasciiarray
- stringinstr stringisalnum stringisalpha stringisascii stringisdigit
- stringisfloat stringisint stringislower stringisspace stringisupper
- stringisxdigit stringleft stringlen stringlower stringmid stringregexp
- stringregexpreplace stringreplace stringright stringsplit stringstripcr
- stringstripws stringtoasciiarray stringtobinary stringtrimleft
- stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect
- tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff
- timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete
- trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent
- trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent
- traysetpauseicon traysetstate traysettooltip traytip ubound udpbind
- udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype
- winactivate winactive winclose winexists winflash wingetcaretpos
- wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess
- wingetstate wingettext wingettitle winkill winlist winmenuselectitem
- winminimizeall winminimizeallundo winmove winsetontop winsetstate
- winsettitle winsettrans winwait winwaitactive winwaitclose
- winwaitnotactive""".split()
-
- macros = """\
- @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion
- @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec
- @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir
- @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error
- @exitcode @exitmethod @extended @favoritescommondir @favoritesdir
- @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid
- @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour
- @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf
- @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang
- @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype
- @osversion @programfilesdir @programscommondir @programsdir @scriptdir
- @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir
- @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide
- @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault
- @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna
- @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir
- @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday
- @windowsdir @workingdir @yday @year""".split()
-
- tokens = {
- 'root': [
- (r';.*\n', Comment.Single),
- (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline),
- (r'[\[\]{}(),;]', Punctuation),
- (r'(and|or|not)\b', Operator.Word),
- (r'[\$|@][a-zA-Z_]\w*', Name.Variable),
- (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator),
- include('commands'),
- include('labels'),
- include('builtInFunctions'),
- include('builtInMacros'),
- (r'"', String, combined('stringescape', 'dqs')),
- include('numbers'),
- (r'[a-zA-Z_#@$][\w#@$]*', Name),
- (r'\\|\'', Text),
- (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape),
- (r'_\n', Text), # Line continuation
- include('garbage'),
- ],
- 'commands': [
- (r'(?i)(\s*)(%s)\b' % '|'.join(keywords),
- bygroups(Text, Name.Builtin)),
- ],
- 'builtInFunctions': [
- (r'(?i)(%s)\b' % '|'.join(functions),
- Name.Function),
- ],
- 'builtInMacros': [
- (r'(?i)(%s)\b' % '|'.join(macros),
- Name.Variable.Global),
- ],
- 'labels': [
- # sendkeys
- (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)),
- ],
- 'numbers': [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0\d+', Number.Oct),
- (r'0[xX][a-fA-F0-9]+', Number.Hex),
- (r'\d+L', Number.Integer.Long),
- (r'\d+', Number.Integer)
- ],
- 'stringescape': [
- (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape),
- ],
- 'strings': [
- (r'[^"\n]+', String),
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- include('strings')
- ],
- 'garbage': [
- (r'[^\S\n]', Text),
- ],
- }
-
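- # A minimal sketch of how the word lists above are used (assuming pygments
- # is installed; the sample call is invented): they are joined into large
- # case-insensitive alternations, so built-in names match in any casing.
- from pygments.token import Name
- print([v for t, v in AutoItLexer().get_tokens('MsgBox(0, "Title", "Text")') if t is Name.Function])
- # expected: ['MsgBox']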
-
-class RexxLexer(RegexLexer):
- """
- `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
- a wide range of platforms, with its roots on mainframe systems. It is
- popular for I/O- and data-based tasks and can act as a glue language to
- bind different applications together.
-
- .. versionadded:: 2.0
- """
- name = 'Rexx'
- aliases = ['rexx', 'arexx']
- filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
- mimetypes = ['text/x-rexx']
- flags = re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s', Whitespace),
- (r'/\*', Comment.Multiline, 'comment'),
- (r'"', String, 'string_double'),
- (r"'", String, 'string_single'),
- (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
- (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
- bygroups(Name.Function, Whitespace, Operator, Whitespace,
- Keyword.Declaration)),
- (r'([a-z_]\w*)(\s*)(:)',
- bygroups(Name.Label, Whitespace, Operator)),
- include('function'),
- include('keyword'),
- include('operator'),
- (r'[a-z_]\w*', Text),
- ],
- 'function': [
- (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|'
- r'center|charin|charout|chars|compare|condition|copies|d2c|'
- r'd2x|datatype|date|delstr|delword|digits|errortext|form|'
- r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|'
- r'max|min|overlay|pos|queued|random|reverse|right|sign|'
- r'sourceline|space|stream|strip|substr|subword|symbol|time|'
- r'trace|translate|trunc|value|verify|word|wordindex|'
- r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()',
- bygroups(Name.Builtin, Whitespace, Operator)),
- ],
- 'keyword': [
- (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
- r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
- r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
- r'while)\b', Keyword.Reserved),
- ],
- 'operator': [
- (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
- r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
- r'¬>>|¬>|¬|\.|,)', Operator),
- ],
- 'string_double': [
- (r'[^"\n]+', String),
- (r'""', String),
- (r'"', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'string_single': [
- (r'[^\'\n]', String),
- (r'\'\'', String),
- (r'\'', String, '#pop'),
- (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
- ],
- 'comment': [
- (r'[^*]+', Comment.Multiline),
- (r'\*/', Comment.Multiline, '#pop'),
- (r'\*', Comment.Multiline),
- ]
- }
-
- _c = lambda s: re.compile(s, re.MULTILINE)
- _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
- _ADDRESS_PATTERN = _c(r'^\s*address\s+')
- _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
- _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
- _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
- _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
- _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
- PATTERNS_AND_WEIGHTS = (
- (_ADDRESS_COMMAND_PATTERN, 0.2),
- (_ADDRESS_PATTERN, 0.05),
- (_DO_WHILE_PATTERN, 0.1),
- (_ELSE_DO_PATTERN, 0.1),
- (_IF_THEN_DO_PATTERN, 0.1),
- (_PROCEDURE_PATTERN, 0.5),
- (_PARSE_ARG_PATTERN, 0.2),
- )
-
- def analyse_text(text):
- """
- Check for an initial comment and patterns that distinguish Rexx from other
- C-like languages.
- """
- if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
- # Header matches MVS Rexx requirements, this is certainly a Rexx
- # script.
- return 1.0
- elif text.startswith('/*'):
- # Header matches general Rexx requirements; the source code might
- # still be any language using C comments such as C++, C# or Java.
- lowerText = text.lower()
- result = sum(weight
- for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
- if pattern.search(lowerText)) + 0.01
- return min(result, 1.0)
-
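- # A minimal sketch of the weighted heuristic above (assuming pygments is
- # installed; the snippets are invented): an MVS-style "/* rexx */" header is
- # conclusive on its own, while a plain C-style comment only accumulates
- # weight from Rexx-looking statements such as "parse arg".
- print(RexxLexer.analyse_text('/* rexx */\nsay "hello"\n'))       # 1.0
- print(RexxLexer.analyse_text('/* comment */\nparse arg a b\n'))  # about 0.21 (0.2 + 0.01)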
-
-class APLLexer(RegexLexer):
- """
- A simple APL lexer.
-
- .. versionadded:: 2.0
- """
- name = 'APL'
- aliases = ['apl']
- filenames = ['*.apl']
-
- tokens = {
- 'root': [
- # Whitespace
- # ==========
- (r'\s+', Text),
- #
- # Comment
- # =======
- # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
- (u'[⍝#].*$', Comment.Single),
- #
- # Strings
- # =======
- (r'\'((\'\')|[^\'])*\'', String.Single),
- (r'"(("")|[^"])*"', String.Double), # supported by NGN APL
- #
- # Punctuation
- # ===========
- # This token type is used for diamond and parenthesis
- # but not for bracket and ; (see below)
- (u'[⋄◇()]', Punctuation),
- #
- # Array indexing
- # ==============
- # Since this token type is very important in APL, it is not included in
- # the punctuation token type but rather in the following one
- (r'[\[\];]', String.Regex),
- #
- # Distinguished names
- # ===================
- # following IBM APL2 standard
- (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
- #
- # Labels
- # ======
- # following IBM APL2 standard
- # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
- #
- # Variables
- # =========
- # following IBM APL2 standard
- (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
- #
- # Numbers
- # =======
- (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
- u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
- Number),
- #
- # Operators
- # ==========
- (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
- (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
- Operator),
- #
- # Constant
- # ========
- (u'⍬', Name.Constant),
- #
- # Quad symbol
- # ===========
- (u'[⎕⍞]', Name.Variable.Global),
- #
- # Arrows left/right
- # =================
- (u'[←→]', Keyword.Declaration),
- #
- # D-Fn
- # ====
- (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
- (r'[{}]', Keyword.Type),
- ],
- }
-
-class AmbientTalkLexer(RegexLexer):
- """
- Lexer for `AmbientTalk <https://code.google.com/p/ambienttalk>`_ source code.
-
- .. versionadded:: 2.0
- """
- name = 'AmbientTalk'
- filenames = ['*.at']
- aliases = ['at', 'ambienttalk', 'ambienttalk/2']
- mimetypes = ['text/x-ambienttalk']
-
- flags = re.MULTILINE | re.DOTALL
-
- builtin = ['if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:',
- 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:',
- 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:',
- 'mirroredBy:', 'is:']
- tokens = {
- 'root' : [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(def|deftype|import|alias|exclude)\b', Keyword),
- (r"(%s)" % "|".join(builtin), Name.Builtin),
- (r'(true|false|nil)\b', Keyword.Constant),
- (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\|', Punctuation, 'arglist'),
- (r'<:|[\^\*!%&<>+=,./?-]|:=', Operator),
- (r"`[a-zA-Z_]\w*", String.Symbol),
- (r"[a-zA-Z_]\w*:", Name.Function),
- (r"[\{\}()\[\];`]", Punctuation),
- (r'(self|super)\b', Name.Variable.Instance),
- (r"[a-zA-Z_]\w*", Name.Variable),
- (r"@[a-zA-Z_]\w*", Name.Class),
- (r"@\[", Name.Class, 'annotations'),
- include('numbers'),
- ],
- 'numbers' : [
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+', Number.Integer)
- ],
- 'namespace': [
- (r'[a-zA-Z_]\w*\.', Name.Namespace),
- (r'[a-zA-Z_]\w*:', Name.Function , '#pop'),
- (r'[a-zA-Z_]\w*(?!\.)', Name.Function , '#pop')
- ],
- 'annotations' : [
- (r"(.*?)\]", Name.Class, '#pop')
- ],
- 'arglist' : [
- (r'\|', Punctuation, '#pop'),
- (r'\s*(,)\s*', Punctuation),
- (r'[a-zA-Z_]\w*', Name.Variable),
- ],
- }
-
-
-class PawnLexer(RegexLexer):
- """
- For Pawn source code.
- """
-
- name = 'Pawn'
- aliases = ['pawn']
- filenames = ['*.p', '*.pwn', '*.inc']
- mimetypes = ['text/x-pawn']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
-
- tokens = {
- 'root': [
- # preprocessor directives: without whitespace
- ('^#if\s+0', Comment.Preproc, 'if0'),
- ('^#', Comment.Preproc, 'macro'),
- # or with whitespace
- ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'),
- ('^' + _ws + '#', Comment.Preproc, 'macro'),
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
- (r'[{}]', Punctuation),
- (r'L?"', String, 'string'),
- (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
- (r'0[0-7]+[LlUu]*', Number.Oct),
- (r'\d+[LlUu]*', Number.Integer),
- (r'\*/', Error),
- (r'[~!%^&*+=|?:<>/-]', Operator),
- (r'[()\[\],.;]', Punctuation),
- (r'(switch|case|default|const|new|static|char|continue|break|'
- r'if|else|for|while|do|operator|enum|'
- r'public|return|sizeof|tagof|state|goto)\b', Keyword),
- (r'(bool|Float)\b', Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- ('[a-zA-Z_]\w*', Name),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/\*(.|\n)*?\*/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
-
-class VCTreeStatusLexer(RegexLexer):
- """
- For colorizing output of version control status commands, like "hg
- status" or "svn status".
-
- .. versionadded:: 2.0
- """
- name = 'VCTreeStatus'
- aliases = ['vctreestatus']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root' : [
- (r'^A \+ C\s+', Generic.Error),
- (r'^A\s+\+?\s+', String),
- (r'^M\s+', Generic.Inserted),
- (r'^C\s+', Generic.Error),
- (r'^D\s+', Generic.Deleted),
- (r'^[\?!]\s+', Comment.Preproc),
- (r' >\s+.*\n', Comment.Preproc),
- (r'.*\n', Text)
- ]
- }
-
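- # A minimal sketch of the intended input (assuming pygments is installed;
- # the status lines are invented): added, modified and untracked entries map
- # onto String, Generic.Inserted and Comment.Preproc respectively.
- _status = 'A  new_file.py\nM  changed_file.py\n?  untracked_file.py\n'
- print([(t, v) for t, v in VCTreeStatusLexer().get_tokens(_status)])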
-
-class RslLexer(RegexLexer):
- """
- `RSL <http://en.wikipedia.org/wiki/RAISE>`_ is the formal specification
- language used in the RAISE (Rigorous Approach to Industrial Software Engineering)
- method.
-
- .. versionadded:: 2.0
- """
- name = 'RSL'
- aliases = ['rsl']
- filenames = ['*.rsl']
- mimetypes = ['text/rsl']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root':[
- (r'\b(Bool|Char|Int|Nat|Real|Text|Unit|abs|all|always|any|as|'
- r'axiom|card|case|channel|chaos|class|devt_relation|dom|elems|'
- r'else|elif|end|exists|extend|false|for|hd|hide|if|in|is|inds|'
- r'initialise|int|inter|isin|len|let|local|ltl_assertion|object|'
- r'of|out|post|pre|read|real|rng|scheme|skip|stop|swap|then|'
- r'thoery|test_case|tl|transition_system|true|type|union|until|'
- r'use|value|variable|while|with|write|~isin|-inflist|-infset|'
- r'-list|-set)\b', Keyword),
- (r'(variable|value)\b', Keyword.Declaration),
- (r'--.*?\n', Comment),
- (r'<:.*?:>', Comment),
- (r'\{!.*?!\}', Comment),
- (r'/\*.*?\*/', Comment),
- (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function),
- (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)',
- bygroups(Text, Name.Function, Text, Keyword)),
- (r'\b[A-Z]\w*\b',Keyword.Type),
- (r'(true|false)\b', Keyword.Constant),
- (r'".*"',String),
- (r'\'.\'',String.Char),
- (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|'
- r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)',
- Operator),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'.', Text),
- ],
- }
-
- def analyse_text(text):
- """
- Check for the most common text at the beginning of an RSL file.
- """
- if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None:
- return 1.0
- else:
- return 0.01
-
-
-class PanLexer(RegexLexer):
- """
- Lexer for `pan <http://github.com/quattor/pan/>`_ source files.
-
- Based on the tcsh lexer.
-
- .. versionadded:: 2.0
- """
-
- name = 'Pan'
- aliases = ['pan']
- filenames = ['*.pan']
-
- tokens = {
- 'root': [
- include('basic'),
- (r'\(', Keyword, 'paren'),
- (r'{', Keyword, 'curly'),
- include('data'),
- ],
- 'basic': [
- (r'\b(if|for|with|else|type|bind|while|valid|final|prefix|unique|'
- r'object|foreach|include|template|function|variable|structure|'
- r'extensible|declaration)\s*\b',
- Keyword),
- (r'\b(file_contents|format|index|length|match|matches|replace|'
- r'splice|split|substr|to_lowercase|to_uppercase|debug|error|'
- r'traceback|deprecated|base64_decode|base64_encode|digest|escape|'
- r'unescape|append|create|first|nlist|key|length|list|merge|next|'
- r'prepend|splice|is_boolean|is_defined|is_double|is_list|is_long|'
- r'is_nlist|is_null|is_number|is_property|is_resource|is_string|'
- r'to_boolean|to_double|to_long|to_string|clone|delete|exists|'
- r'path_exists|if_exists|return|value)\s*\b',
- Name.Builtin),
- (r'#.*', Comment),
- (r'\\[\w\W]', String.Escape),
- (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
- (r'[\[\]{}()=]+', Operator),
- (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
- (r';', Punctuation),
- ],
- 'data': [
- (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
- (r'\s+', Text),
- (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
- (r'\d+(?= |\Z)', Number),
- ],
- 'curly': [
- (r'}', Keyword, '#pop'),
- (r':-', Keyword),
- (r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
- (r':', Punctuation),
- include('root'),
- ],
- 'paren': [
- (r'\)', Keyword, '#pop'),
- include('root'),
- ],
- }
-
-class RedLexer(RegexLexer):
- """
- A `Red-language <http://www.red-lang.org/>`_ lexer.
-
- .. versionadded:: 2.0
- """
- name = 'Red'
- aliases = ['red', 'red/system']
- filenames = ['*.red', '*.reds']
- mimetypes = ['text/x-red', 'text/x-red-system']
-
- flags = re.IGNORECASE | re.MULTILINE
-
- escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
- def word_callback(lexer, match):
- word = match.group()
-
- if re.match(".*:$", word):
- yield match.start(), Generic.Subheading, word
- elif re.match(
- r'(if|unless|either|any|all|while|until|loop|repeat|'
- r'foreach|forall|func|function|does|has|switch|'
- r'case|reduce|compose|get|set|print|prin|equal\?|'
- r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
- r'greater-or-equal\?|same\?|not|type\?|stats|'
- r'bind|union|replace|charset|routine)$', word):
- yield match.start(), Name.Builtin, word
- elif re.match(
- r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
- r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
- r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
- r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
- r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
- r'update|write)$', word):
- yield match.start(), Name.Function, word
- elif re.match(
- r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
- r'none|crlf|dot|null-byte)$', word):
- yield match.start(), Name.Builtin.Pseudo, word
- elif re.match(
- r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
- r'#switch|#default|#get-definition)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match(
- r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
- r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
- r'quote|forever)$', word):
- yield match.start(), Name.Exception, word
- elif re.match(
- r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
- r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
- r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
- r'any-struct\?|none\?|word\?|any-series\?)$', word):
- yield match.start(), Keyword, word
- elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
- yield match.start(), Keyword.Namespace, word
- elif re.match("to-.*", word):
- yield match.start(), Keyword, word
- elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|===|<>|<|>|<=|>=|<<|>>|<<<|>>>|%|-\*\*)$', word):
- yield match.start(), Operator, word
- elif re.match(".*\!$", word):
- yield match.start(), Keyword.Type, word
- elif re.match("'.*", word):
- yield match.start(), Name.Variable.Instance, word # lit-word
- elif re.match("#.*", word):
- yield match.start(), Name.Label, word # issue
- elif re.match("%.*", word):
- yield match.start(), Name.Decorator, word # file
- elif re.match(":.*", word):
- yield match.start(), Generic.Subheading, word # get-word
- else:
- yield match.start(), Name.Variable, word
-
- tokens = {
- 'root': [
- (r'[^R]+', Comment),
- (r'Red/System\s+\[', Generic.Strong, 'script'),
- (r'Red\s+\[', Generic.Strong, 'script'),
- (r'R', Comment)
- ],
- 'script': [
- (r'\s+', Text),
- (r'#"', String.Char, 'char'),
- (r'#{[0-9a-f\s]*}', Number.Hex),
- (r'2#{', Number.Hex, 'bin2'),
- (r'64#{[0-9a-z+/=\s]*}', Number.Hex),
- (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}""\(\)]))',
- bygroups(Number.Hex, Name.Variable, Whitespace)),
- (r'"', String, 'string'),
- (r'{', String, 'string2'),
- (r';#+.*\n', Comment.Special),
- (r';\*+.*\n', Comment.Preproc),
- (r';.*\n', Comment),
- (r'%"', Name.Decorator, 'stringFile'),
- (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
- (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
- (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
- (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
- r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
- (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
- (r'\d+[xX]\d+', Keyword.Constant), # pair
- (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
- (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
- (r'[+-]?\d+(\'\d+)?', Number),
- (r'[\[\]\(\)]', Generic.Strong),
- (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
- (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
- (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
- (r'comment\s"', Comment, 'commentString1'),
- (r'comment\s{', Comment, 'commentString2'),
- (r'comment\s\[', Comment, 'commentBlock'),
- (r'comment\s[^(\s{\"\[]+', Comment),
- (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
- (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
- (r'<[\w:.-]*>', Name.Tag),
- (r'<[^(<>\s")]+', Name.Tag, 'tag'),
- (r'([^(\^{^")\s]+)', Text),
- ],
- 'string': [
- (r'[^(\^")]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'"', String, '#pop'),
- ],
- 'string2': [
- (r'[^(\^{^})]+', String),
- (escape_re, String.Escape),
- (r'[\(|\)]+', String),
- (r'\^.', String.Escape),
- (r'{', String, '#push'),
- (r'}', String, '#pop'),
- ],
- 'stringFile': [
- (r'[^(\^")]+', Name.Decorator),
- (escape_re, Name.Decorator),
- (r'\^.', Name.Decorator),
- (r'"', Name.Decorator, '#pop'),
- ],
- 'char': [
- (escape_re + '"', String.Char, '#pop'),
- (r'\^."', String.Char, '#pop'),
- (r'."', String.Char, '#pop'),
- ],
- 'tag': [
- (escape_re, Name.Tag),
- (r'"', Name.Tag, 'tagString'),
- (r'[^(<>\r\n")]+', Name.Tag),
- (r'>', Name.Tag, '#pop'),
- ],
- 'tagString': [
- (r'[^(\^")]+', Name.Tag),
- (escape_re, Name.Tag),
- (r'[\(|\)]+', Name.Tag),
- (r'\^.', Name.Tag),
- (r'"', Name.Tag, '#pop'),
- ],
- 'tuple': [
- (r'(\d+\.)+', Keyword.Constant),
- (r'\d+', Keyword.Constant, '#pop'),
- ],
- 'bin2': [
- (r'\s+', Number.Hex),
- (r'([0-1]\s*){8}', Number.Hex),
- (r'}', Number.Hex, '#pop'),
- ],
- 'commentString1': [
- (r'[^(\^")]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'"', Comment, '#pop'),
- ],
- 'commentString2': [
- (r'[^(\^{^})]+', Comment),
- (escape_re, Comment),
- (r'[\(|\)]+', Comment),
- (r'\^.', Comment),
- (r'{', Comment, '#push'),
- (r'}', Comment, '#pop'),
- ],
- 'commentBlock': [
- (r'\[', Comment, '#push'),
- (r'\]', Comment, '#pop'),
- (r'"', Comment, "commentString1"),
- (r'{', Comment, "commentString2"),
- (r'[^(\[\]\"{)]+', Comment),
- ],
- }
-
-
-class AlloyLexer(RegexLexer):
- """
- For `Alloy <http://alloy.mit.edu>`_ source code.
- """
-
- name = 'Alloy'
- aliases = ['alloy']
- filenames = ['*.als']
- mimetypes = ['text/x-alloy']
-
- flags = re.MULTILINE | re.DOTALL
-
- iden_rex = r'[a-zA-Z_][a-zA-Z0-9_\']*'
- text_tuple = (r'[^\S\n]+', Text)
-
- tokens = {
- 'sig': [
- (r'(extends)\b', Keyword, '#pop'),
- (iden_rex, Name),
- text_tuple,
- (r',', Punctuation),
- (r'\{', Operator, '#pop'),
- ],
- 'module': [
- text_tuple,
- (iden_rex, Name, '#pop'),
- ],
- 'fun': [
- text_tuple,
- (r'\{', Operator, '#pop'),
- (iden_rex, Name, '#pop'),
- ],
- 'root': [
- (r'--.*?$', Comment.Single),
- (r'//.*?$', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- text_tuple,
- (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text),
- 'module'),
- (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'),
- (r'(iden|univ|none)\b', Keyword.Constant),
- (r'(int|Int)\b', Keyword.Type),
- (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword),
- (r'(all|some|no|sum|disj|when|else)\b', Keyword),
- (r'(run|check|for|but|exactly|expect|as)\b', Keyword),
- (r'(and|or|implies|iff|in)\b', Operator.Word),
- (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'),
- (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator),
- (r'[-+/*%=<>&!^|~\{\}\[\]\(\)\.]', Operator),
- (iden_rex, Name),
- (r'[:,]', Punctuation),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String),
- (r'\n', Text),
- ]
- }
+ TcshLexer
+from pygments.lexers.robotframework import RobotFrameworkLexer
+from pygments.lexers.testing import GherkinLexer
+from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
+from pygments.lexers.prolog import LogtalkLexer
+from pygments.lexers.snobol import SnobolLexer
+from pygments.lexers.rebol import RebolLexer
+from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
+from pygments.lexers.modeling import ModelicaLexer
+from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
+ HybrisLexer
+from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
+ AsymptoteLexer, PovrayLexer
+from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
+ GoodDataCLLexer, MaqlLexer
+from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
+from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
+ MscgenLexer, VGLLexer
+from pygments.lexers.basic import CbmBasicV2Lexer
+from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
+from pygments.lexers.ecl import ECLLexer
+from pygments.lexers.urbi import UrbiscriptLexer
+from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
+from pygments.lexers.installers import NSISLexer, RPMSpecLexer
+from pygments.lexers.textedit import AwkLexer
+
+__all__ = []
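The block of imports above appears to be a backwards-compatibility shim: lexer classes that moved into new modules are re-imported under their old home so existing import paths keep resolving. A minimal sketch of what that preserves for callers, assuming the standard lookup API in pygments.lexers; the aliases used are ones declared elsewhere in this diff:

from pygments.lexers import get_lexer_by_name

# Aliases resolve through the lexer mapping, so callers are unaffected
# by which module now defines a given lexer class.
for alias in ('ebnf', 'delphi', 'modula2', 'sp'):
    lexer = get_lexer_by_name(alias)
    print(alias, '->', type(lexer).__module__, lexer.name)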
diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py
index 4c23c760..ad6ad71b 100644
--- a/pygments/lexers/parsers.py
+++ b/pygments/lexers/parsers.py
@@ -14,22 +14,24 @@ import re
from pygments.lexer import RegexLexer, DelegatingLexer, \
include, bygroups, using
from pygments.token import Punctuation, Other, Text, Comment, Operator, \
- Keyword, Name, String, Number, Whitespace
-from pygments.lexers.compiled import JavaLexer, CLexer, CppLexer, \
- ObjectiveCLexer, DLexer
+ Keyword, Name, String, Number, Whitespace
+from pygments.lexers.jvm import JavaLexer
+from pygments.lexers.c_cpp import CLexer, CppLexer
+from pygments.lexers.objective import ObjectiveCLexer
+from pygments.lexers.d import DLexer
from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.agile import RubyLexer, PythonLexer, PerlLexer
-from pygments.lexers.web import ActionScriptLexer
-
+from pygments.lexers.ruby import RubyLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
- #'AntlrCLexer',
+ # 'AntlrCLexer',
'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
- 'AntlrJavaLexer', "AntlrActionScriptLexer",
- 'TreetopLexer']
+ 'AntlrJavaLexer', 'AntlrActionScriptLexer',
+ 'TreetopLexer', 'EbnfLexer']
class RagelLexer(RegexLexer):
@@ -63,29 +65,29 @@ class RagelLexer(RegexLexer):
(r'[+-]?[0-9]+', Number.Integer),
],
'literals': [
- (r'"(\\\\|\\"|[^"])*"', String), # double quote string
- (r"'(\\\\|\\'|[^'])*'", String), # single quote string
- (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
- (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
+ (r'"(\\\\|\\"|[^"])*"', String), # double quote string
+ (r"'(\\\\|\\'|[^'])*'", String), # single quote string
+ (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals
+ (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions
],
'identifiers': [
(r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable),
],
'operators': [
- (r',', Operator), # Join
- (r'\||&|--?', Operator), # Union, Intersection and Subtraction
- (r'\.|<:|:>>?', Operator), # Concatenation
- (r':', Operator), # Label
- (r'->', Operator), # Epsilon Transition
- (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
- (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
- (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
- (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
- (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
- (r'>|@|\$|%', Operator), # Transition Actions and Priorities
- (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition
- (r'!|\^', Operator), # Negation
- (r'\(|\)', Operator), # Grouping
+ (r',', Operator), # Join
+ (r'\||&|--?', Operator), # Union, Intersection and Subtraction
+ (r'\.|<:|:>>?', Operator), # Concatenation
+ (r':', Operator), # Label
+ (r'->', Operator), # Epsilon Transition
+ (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions
+ (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions
+ (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions
+ (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions
+ (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions
+ (r'>|@|\$|%', Operator), # Transition Actions and Priorities
+ (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition
+ (r'!|\^', Operator), # Negation
+ (r'\(|\)', Operator), # Grouping
],
'root': [
include('literals'),
@@ -100,16 +102,16 @@ class RagelLexer(RegexLexer):
(r';', Punctuation),
],
'host': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^{}\'"/#]+', # exclude unsafe characters
- r'[^\\]\\[{}]', # allow escaped { or }
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^{}\'"/#]+', # exclude unsafe characters
+ r'[^\\]\\[{}]', # allow escaped { or }
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'\#.*$\n?', # ruby comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'\#.*$\n?', # ruby comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -141,17 +143,17 @@ class RagelEmbeddedLexer(RegexLexer):
tokens = {
'root': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^%\'"/#]+', # exclude unsafe characters
- r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^%\'"/#]+', # exclude unsafe characters
+ r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
- r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
+ r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression
# / is safe now that we've handled regex and javadoc comments
r'/',
@@ -168,12 +170,12 @@ class RagelEmbeddedLexer(RegexLexer):
(r'(%%%%|%%){', Punctuation, 'multi-line-fsm'),
],
'multi-line-fsm': [
- (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
+ (r'(' + r'|'.join(( # keep ragel code in largest possible chunks.
r'(' + r'|'.join((
- r'[^}\'"\[/#]', # exclude unsafe characters
- r'}(?=[^%]|$)', # } is okay as long as it's not followed by %
- r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
- r'[^\\]\\[{}]', # ...and } is okay if it's escaped
+ r'[^}\'"\[/#]', # exclude unsafe characters
+ r'}(?=[^%]|$)', # } is okay as long as it's not followed by %
+ r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two...
+ r'[^\\]\\[{}]', # ...and } is okay if it's escaped
# allow / if it's preceded with one of these symbols
# (ragel EOF actions)
@@ -189,15 +191,15 @@ class RagelEmbeddedLexer(RegexLexer):
# We want to match as many of these as we can in one block.
# Not sure if we need the + sign here,
# does it help performance?
- )) + r')+',
+ )) + r')+',
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
- r'//.*$\n?', # single line comment
- r'\#.*$\n?', # ruby/ragel comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'//.*$\n?', # single line comment
+ r'\#.*$\n?', # ruby/ragel comment
)) + r')+', using(RagelLexer)),
(r'}%%', Punctuation, '#pop'),
@@ -221,7 +223,7 @@ class RagelRubyLexer(DelegatingLexer):
def __init__(self, **options):
super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer,
- **options)
+ **options)
def analyse_text(text):
return '@LANG: ruby' in text
@@ -336,9 +338,9 @@ class AntlrLexer(RegexLexer):
aliases = ['antlr']
filenames = []
- _id = r'[A-Za-z][A-Za-z_0-9]*'
- _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*'
- _RULE_REF = r'[a-z][A-Za-z_0-9]*'
+ _id = r'[A-Za-z][A-Za-z_0-9]*'
+ _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*'
+ _RULE_REF = r'[a-z][A-Za-z_0-9]*'
_STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
_INT = r'[0-9]+'
@@ -372,7 +374,7 @@ class AntlrLexer(RegexLexer):
bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
Name.Label, Whitespace, Punctuation), 'action'),
# rule
- (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', \
+ (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
bygroups(Keyword, Whitespace, Name.Label, Punctuation),
('rule-alts', 'rule-prelims')),
],
@@ -395,14 +397,14 @@ class AntlrLexer(RegexLexer):
(r'(throws)(\s+)(' + _id + ')',
bygroups(Keyword, Whitespace, Name.Label)),
(r'(,)(\s*)(' + _id + ')',
- bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
+ bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws
# optionsSpec
(r'options\b', Keyword, 'options'),
# ruleScopeSpec - scope followed by target language code or name of action
# TODO finish implementing other possibilities for scope
# L173 ANTLRv3.g from ANTLR book
(r'(scope)(\s+)({)', bygroups(Keyword, Whitespace, Punctuation),
- 'action'),
+ 'action'),
(r'(scope)(\s+)(' + _id + ')(\s*)(;)',
bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
# ruleAction
@@ -450,20 +452,20 @@ class AntlrLexer(RegexLexer):
include('comments'),
(r'{', Punctuation),
(r'(' + _id + r')(\s*)(=)(\s*)(' +
- '|'.join((_id, _STRING_LITERAL, _INT, '\*'))+ ')(\s*)(;)',
+ '|'.join((_id, _STRING_LITERAL, _INT, '\*')) + ')(\s*)(;)',
bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
Text, Whitespace, Punctuation)),
(r'}', Punctuation, '#pop'),
],
'action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks
- r'[^\${}\'"/\\]+', # exclude unsafe characters
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks
+ r'[^\${}\'"/\\]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -483,14 +485,14 @@ class AntlrLexer(RegexLexer):
(r'}', Punctuation, '#pop'),
],
'nested-arg-action': [
- (r'(' + r'|'.join(( # keep host code in largest possible chunks.
- r'[^\$\[\]\'"/]+', # exclude unsafe characters
+ (r'(' + r'|'.join(( # keep host code in largest possible chunks.
+ r'[^\$\[\]\'"/]+', # exclude unsafe characters
# strings and comments may safely contain unsafe characters
- r'"(\\\\|\\"|[^"])*"', # double quote string
- r"'(\\\\|\\'|[^'])*'", # single quote string
- r'//.*$\n?', # single line comment
- r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
+ r'"(\\\\|\\"|[^"])*"', # double quote string
+ r"'(\\\\|\\'|[^'])*'", # single quote string
+ r'//.*$\n?', # single line comment
+ r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment
# regular expression: There's no reason for it to start
# with a * and this stops confusion with comments.
@@ -520,7 +522,7 @@ class AntlrLexer(RegexLexer):
# so just assume they're C++. No idea how to make Objective C work in the
# future.
-#class AntlrCLexer(DelegatingLexer):
+# class AntlrCLexer(DelegatingLexer):
# """
# ANTLR with C Target
#
@@ -537,6 +539,7 @@ class AntlrLexer(RegexLexer):
# def analyse_text(text):
# return re.match(r'^\s*language\s*=\s*C\s*;', text)
+
class AntlrCppLexer(DelegatingLexer):
"""
`ANTLR`_ with CPP Target
@@ -553,7 +556,7 @@ class AntlrCppLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
class AntlrObjectiveCLexer(DelegatingLexer):
@@ -573,7 +576,7 @@ class AntlrObjectiveCLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
+ re.search(r'^\s*language\s*=\s*ObjC\s*;', text)
class AntlrCSharpLexer(DelegatingLexer):
@@ -593,7 +596,7 @@ class AntlrCSharpLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
class AntlrPythonLexer(DelegatingLexer):
@@ -613,7 +616,7 @@ class AntlrPythonLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M)
class AntlrJavaLexer(DelegatingLexer):
@@ -653,7 +656,7 @@ class AntlrRubyLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
class AntlrPerlLexer(DelegatingLexer):
@@ -673,7 +676,7 @@ class AntlrPerlLexer(DelegatingLexer):
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M)
class AntlrActionScriptLexer(DelegatingLexer):
@@ -688,12 +691,14 @@ class AntlrActionScriptLexer(DelegatingLexer):
filenames = ['*.G', '*.g']
def __init__(self, **options):
+ from pygments.lexers.actionscript import ActionScriptLexer
super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer,
AntlrLexer, **options)
def analyse_text(text):
return AntlrLexer.analyse_text(text) and \
- re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+ re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
+
class TreetopBaseLexer(RegexLexer):
"""
@@ -763,6 +768,7 @@ class TreetopBaseLexer(RegexLexer):
],
}
+
class TreetopLexer(DelegatingLexer):
"""
A lexer for `Treetop <http://treetop.rubyforge.org/>`_ grammars.
@@ -776,3 +782,54 @@ class TreetopLexer(DelegatingLexer):
def __init__(self, **options):
super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options)
+
+
+class EbnfLexer(RegexLexer):
+ """
+ Lexer for `ISO/IEC 14977 EBNF
+ <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
+ grammars.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'EBNF'
+ aliases = ['ebnf']
+ filenames = ['*.ebnf']
+ mimetypes = ['text/x-ebnf']
+
+ tokens = {
+ 'root': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'=', Operator, 'production'),
+ ],
+ 'production': [
+ include('whitespace'),
+ include('comment_start'),
+ include('identifier'),
+ (r'"[^"]*"', String.Double),
+ (r"'[^']*'", String.Single),
+ (r'(\?[^?]*\?)', Name.Entity),
+ (r'[\[\]{}(),|]', Punctuation),
+ (r'-', Operator),
+ (r';', Punctuation, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'comment_start': [
+ (r'\(\*', Comment.Multiline, 'comment'),
+ ],
+ 'comment': [
+ (r'[^*)]', Comment.Multiline),
+ include('comment_start'),
+ (r'\*\)', Comment.Multiline, '#pop'),
+ (r'[*)]', Comment.Multiline),
+ ],
+ 'identifier': [
+ (r'([a-zA-Z][a-zA-Z0-9_ \-]*)', Keyword),
+ ],
+ }
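The EbnfLexer added above plugs into the usual highlight() pipeline like any other lexer. A small usage sketch (the grammar snippet is made up; EbnfLexer comes from pygments.lexers.parsers as defined in this hunk, and TerminalFormatter is the stock terminal formatter):

from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers.parsers import EbnfLexer

# A tiny ISO/IEC 14977-style grammar to run through the new lexer.
grammar = '''(* a trivial grammar *)
digit  = "0" | "1" | "2" | "3" ;
number = digit, { digit } ;
'''

print(highlight(grammar, EbnfLexer(), TerminalFormatter()))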
diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py
new file mode 100644
index 00000000..0381f19f
--- /dev/null
+++ b/pygments/lexers/pascal.py
@@ -0,0 +1,833 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pascal
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Pascal family languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \
+ using, this, default
+from pygments.util import get_bool_opt, get_list_opt
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.scanner import Scanner
+
+__all__ = ['DelphiLexer', 'Modula2Lexer', 'AdaLexer']
+
+
+class DelphiLexer(Lexer):
+ """
+ For `Delphi <http://www.borland.com/delphi/>`_ (Borland Object Pascal),
+ Turbo Pascal and Free Pascal source code.
+
+ Additional options accepted:
+
+ `turbopascal`
+ Highlight Turbo Pascal specific keywords (default: ``True``).
+ `delphi`
+ Highlight Borland Delphi specific keywords (default: ``True``).
+ `freepascal`
+ Highlight Free Pascal specific keywords (default: ``True``).
+ `units`
+ A list of units that should be considered builtin, supported are
+ ``System``, ``SysUtils``, ``Classes`` and ``Math``.
+ Default is to consider all of them builtin.
+ """
+ name = 'Delphi'
+ aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
+ filenames = ['*.pas']
+ mimetypes = ['text/x-pascal']
+
+ TURBO_PASCAL_KEYWORDS = (
+ 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
+ 'const', 'constructor', 'continue', 'destructor', 'div', 'do',
+ 'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
+ 'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
+ 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
+ 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
+ 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
+ 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
+ )
+
+ DELPHI_KEYWORDS = (
+ 'as', 'class', 'except', 'exports', 'finalization', 'finally',
+ 'initialization', 'is', 'library', 'on', 'property', 'raise',
+ 'threadvar', 'try'
+ )
+
+ FREE_PASCAL_KEYWORDS = (
+ 'dispose', 'exit', 'false', 'new', 'true'
+ )
+
+ BLOCK_KEYWORDS = set((
+ 'begin', 'class', 'const', 'constructor', 'destructor', 'end',
+ 'finalization', 'function', 'implementation', 'initialization',
+ 'label', 'library', 'operator', 'procedure', 'program', 'property',
+ 'record', 'threadvar', 'type', 'unit', 'uses', 'var'
+ ))
+
+ FUNCTION_MODIFIERS = set((
+ 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
+ 'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
+ 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
+ 'override', 'assembler'
+ ))
+
+ # XXX: these aren't global, but currently we know of no way to define
+ # them just for the type context.
+ DIRECTIVES = set((
+ 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
+ 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
+ 'published', 'public'
+ ))
+
+ BUILTIN_TYPES = set((
+ 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
+ 'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
+ 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
+ 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
+ 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
+ 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
+ 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
+ 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
+ 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
+ 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
+ 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
+ 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
+ 'widechar', 'widestring', 'word', 'wordbool'
+ ))
+
+ BUILTIN_UNITS = {
+ 'System': (
+ 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
+ 'append', 'arctan', 'assert', 'assigned', 'assignfile',
+ 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
+ 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
+ 'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
+ 'dispose', 'doubletocomp', 'endthread', 'enummodules',
+ 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
+ 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
+ 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
+ 'findresourcehinstance', 'flush', 'frac', 'freemem',
+ 'get8087cw', 'getdir', 'getlasterror', 'getmem',
+ 'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
+ 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
+ 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
+ 'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
+ 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
+ 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
+ 'randomize', 'read', 'readln', 'reallocmem',
+ 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
+ 'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
+ 'set8087cw', 'setlength', 'setlinebreakstyle',
+ 'setmemorymanager', 'setstring', 'settextbuf',
+ 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
+ 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
+ 'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
+ 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
+ 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
+ 'utf8tounicode', 'val', 'vararrayredim', 'varclear',
+ 'widecharlentostring', 'widecharlentostrvar',
+ 'widechartostring', 'widechartostrvar',
+ 'widestringtoucs4string', 'write', 'writeln'
+ ),
+ 'SysUtils': (
+ 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
+ 'allocmem', 'ansicomparefilename', 'ansicomparestr',
+ 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
+ 'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
+ 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
+ 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
+ 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
+ 'ansistrscan', 'ansistrupper', 'ansiuppercase',
+ 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
+ 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
+ 'callterminateprocs', 'changefileext', 'charlength',
+ 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
+ 'comparetext', 'createdir', 'createguid', 'currentyear',
+ 'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
+ 'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
+ 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
+ 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
+ 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
+ 'exceptionerrormessage', 'excludetrailingbackslash',
+ 'excludetrailingpathdelimiter', 'expandfilename',
+ 'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
+ 'extractfiledrive', 'extractfileext', 'extractfilename',
+ 'extractfilepath', 'extractrelativepath', 'extractshortpathname',
+ 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
+ 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
+ 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
+ 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
+ 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
+ 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
+ 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
+ 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
+ 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
+ 'getenvironmentvariable', 'getfileversion', 'getformatsettings',
+ 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
+ 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
+ 'includetrailingbackslash', 'includetrailingpathdelimiter',
+ 'incmonth', 'initializepackage', 'interlockeddecrement',
+ 'interlockedexchange', 'interlockedexchangeadd',
+ 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
+ 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
+ 'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
+ 'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
+ 'outofmemoryerror', 'quotedstr', 'raiselastoserror',
+ 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
+ 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
+ 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
+ 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
+ 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
+ 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
+ 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
+ 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
+ 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
+ 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
+ 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
+ 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
+ 'strtotimedef', 'strupper', 'supports', 'syserrormessage',
+ 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
+ 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
+ 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
+ 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
+ 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
+ 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
+ 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
+ 'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
+ 'wraptext'
+ ),
+ 'Classes': (
+ 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
+ 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
+ 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
+ 'groupdescendantswith', 'hextobin', 'identtoint',
+ 'initinheritedcomponent', 'inttoident', 'invalidpoint',
+ 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
+ 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
+ 'pointsequal', 'readcomponentres', 'readcomponentresex',
+ 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
+ 'registerclasses', 'registercomponents', 'registerintegerconsts',
+ 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
+ 'teststreamformat', 'unregisterclass', 'unregisterclasses',
+ 'unregisterintegerconsts', 'unregistermoduleclasses',
+ 'writecomponentresfile'
+ ),
+ 'Math': (
+ 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
+ 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
+ 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
+ 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
+ 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
+ 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
+ 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
+ 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
+ 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
+ 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
+ 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
+ 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
+ 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
+ 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
+ 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
+ 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
+ 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
+ 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
+ 'tan', 'tanh', 'totalvariance', 'variance'
+ )
+ }
+
+ ASM_REGISTERS = set((
+ 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
+ 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
+ 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
+ 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
+ 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
+ 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
+ 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
+ 'xmm6', 'xmm7'
+ ))
+
+ ASM_INSTRUCTIONS = set((
+ 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
+ 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
+ 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
+ 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
+ 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
+ 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
+ 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
+ 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
+ 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
+ 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
+ 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
+ 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
+ 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
+ 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
+ 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
+ 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
+ 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
+ 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
+ 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
+ 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
+ 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
+ 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
+ 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
+ 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
+ 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
+ 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
+ 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
+ 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
+ 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
+ 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
+ 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
+ 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
+ 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
+ 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
+ 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
+ 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
+ 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
+ 'xlatb', 'xor'
+ ))
+
+ def __init__(self, **options):
+ Lexer.__init__(self, **options)
+ self.keywords = set()
+ if get_bool_opt(options, 'turbopascal', True):
+ self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
+ if get_bool_opt(options, 'delphi', True):
+ self.keywords.update(self.DELPHI_KEYWORDS)
+ if get_bool_opt(options, 'freepascal', True):
+ self.keywords.update(self.FREE_PASCAL_KEYWORDS)
+ self.builtins = set()
+ for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
+ self.builtins.update(self.BUILTIN_UNITS[unit])
+
+ def get_tokens_unprocessed(self, text):
+ scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
+ stack = ['initial']
+ in_function_block = False
+ in_property_block = False
+ was_dot = False
+ next_token_is_function = False
+ next_token_is_property = False
+ collect_labels = False
+ block_labels = set()
+ brace_balance = [0, 0]
+
+ while not scanner.eos:
+ token = Error
+
+ if stack[-1] == 'initial':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]'):
+ token = Operator
+ # stop label highlighting on next ";"
+ if collect_labels and scanner.match == ';':
+ collect_labels = False
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ # abort function naming ``foo = Function(...)``
+ next_token_is_function = False
+ # if we are in a function block we count the open
+ # braces because otherwise it's impossible to
+ # determine the end of the modifier context
+ if in_function_block or in_property_block:
+ if scanner.match == '(':
+ brace_balance[0] += 1
+ elif scanner.match == ')':
+ brace_balance[0] -= 1
+ elif scanner.match == '[':
+ brace_balance[1] += 1
+ elif scanner.match == ']':
+ brace_balance[1] -= 1
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name == 'result':
+ token = Name.Builtin.Pseudo
+ elif lowercase_name in self.keywords:
+ token = Keyword
+ # if we are in a special block and a
+ # block-ending keyword occurs (and the parentheses
+ # are balanced) we end the current block context
+ if (in_function_block or in_property_block) and \
+ lowercase_name in self.BLOCK_KEYWORDS and \
+ brace_balance[0] <= 0 and \
+ brace_balance[1] <= 0:
+ in_function_block = False
+ in_property_block = False
+ brace_balance = [0, 0]
+ block_labels = set()
+ if lowercase_name in ('label', 'goto'):
+ collect_labels = True
+ elif lowercase_name == 'asm':
+ stack.append('asm')
+ elif lowercase_name == 'property':
+ in_property_block = True
+ next_token_is_property = True
+ elif lowercase_name in ('procedure', 'operator',
+ 'function', 'constructor',
+ 'destructor'):
+ in_function_block = True
+ next_token_is_function = True
+ # we are in a function block and the current name
+ # is in the set of registered modifiers. highlight
+ # it as pseudo keyword
+ elif in_function_block and \
+ lowercase_name in self.FUNCTION_MODIFIERS:
+ token = Keyword.Pseudo
+ # if we are in a property highlight some more
+ # modifiers
+ elif in_property_block and \
+ lowercase_name in ('read', 'write'):
+ token = Keyword.Pseudo
+ next_token_is_function = True
+ # if the last iteration set next_token_is_function
+ # to true we now want this name highlighted as
+ # function. so do that and reset the state
+ elif next_token_is_function:
+ # Check whether the next token is a dot. If so, this is
+ # not a function but a class name, and the part
+ # after the dot is the function name
+ if scanner.test(r'\s*\.\s*'):
+ token = Name.Class
+ # it's not a dot, our job is done
+ else:
+ token = Name.Function
+ next_token_is_function = False
+ # same for properties
+ elif next_token_is_property:
+ token = Name.Property
+ next_token_is_property = False
+ # Highlight this token as label and add it
+ # to the list of known labels
+ elif collect_labels:
+ token = Name.Label
+ block_labels.add(scanner.match.lower())
+ # name is in list of known labels
+ elif lowercase_name in block_labels:
+ token = Name.Label
+ elif lowercase_name in self.BUILTIN_TYPES:
+ token = Keyword.Type
+ elif lowercase_name in self.DIRECTIVES:
+ token = Keyword.Pseudo
+ # builtins are just builtins if the token
+ # before isn't a dot
+ elif not was_dot and lowercase_name in self.builtins:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
+ token = String.Char
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ # if the stack is more than one level deep, pop
+ if len(stack) > 1:
+ stack.pop()
+ scanner.get_char()
+
+ elif stack[-1] == 'string':
+ if scanner.scan(r"''"):
+ token = String.Escape
+ elif scanner.scan(r"'"):
+ token = String
+ stack.pop()
+ elif scanner.scan(r"[^']*"):
+ token = String
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ elif stack[-1] == 'asm':
+ if scanner.scan(r'\s+'):
+ token = Text
+ elif scanner.scan(r'end'):
+ token = Keyword
+ stack.pop()
+ elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
+ if scanner.match.startswith('$'):
+ token = Comment.Preproc
+ else:
+ token = Comment.Multiline
+ elif scanner.scan(r'//.*?$'):
+ token = Comment.Single
+ elif scanner.scan(r"'"):
+ token = String
+ stack.append('string')
+ elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
+ token = Name.Label
+ elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
+ lowercase_name = scanner.match.lower()
+ if lowercase_name in self.ASM_INSTRUCTIONS:
+ token = Keyword
+ elif lowercase_name in self.ASM_REGISTERS:
+ token = Name.Builtin
+ else:
+ token = Name
+ elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
+ token = Operator
+ elif scanner.scan(r'[\(\)\[\]]+'):
+ token = Punctuation
+ elif scanner.scan(r'\$[0-9A-Fa-f]+'):
+ token = Number.Hex
+ elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
+ token = Number.Integer
+ elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
+ token = Number.Float
+ else:
+ scanner.get_char()
+ stack.pop()
+
+ # remember whether the last non-whitespace token was a dot
+ if scanner.match.strip():
+ was_dot = scanner.match == '.'
+ yield scanner.start_pos, token, scanner.match or ''
+
+
+class Modula2Lexer(RegexLexer):
+ """
+ For `Modula-2 <http://www.modula2.org/>`_ source code.
+
+ Additional options that determine which keywords are highlighted:
+
+ `pim`
+ Select PIM Modula-2 dialect (default: True).
+ `iso`
+ Select ISO Modula-2 dialect (default: False).
+ `objm2`
+ Select Objective Modula-2 dialect (default: False).
+ `gm2ext`
+ Also highlight GNU extensions (default: False).
+
+ .. versionadded:: 1.3
+ """
+ name = 'Modula-2'
+ aliases = ['modula2', 'm2']
+ filenames = ['*.def', '*.mod']
+ mimetypes = ['text/x-modula2']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'whitespace': [
+ (r'\n+', Text), # blank lines
+ (r'\s+', Text), # whitespace
+ ],
+ 'identifiers': [
+ (r'([a-zA-Z_\$][\w\$]*)', Name),
+ ],
+ 'numliterals': [
+ (r'[01]+B', Number.Bin), # binary number (ObjM2)
+ (r'[0-7]+B', Number.Oct), # octal number (PIM + ISO)
+ (r'[0-7]+C', Number.Oct), # char code (PIM + ISO)
+ (r'[0-9A-F]+C', Number.Hex), # char code (ObjM2)
+ (r'[0-9A-F]+H', Number.Hex), # hexadecimal number
+ (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number
+ (r'[0-9]+\.[0-9]+', Number.Float), # real number
+ (r'[0-9]+', Number.Integer), # decimal whole number
+ ],
+ 'strings': [
+ (r"'(\\\\|\\'|[^'])*'", String), # single quoted string
+ (r'"(\\\\|\\"|[^"])*"', String), # double quoted string
+ ],
+ 'operators': [
+ (r'[*/+=#~&<>\^-]', Operator),
+ (r':=', Operator), # assignment
+ (r'@', Operator), # pointer deref (ISO)
+ (r'\.\.', Operator), # ellipsis or range
+ (r'`', Operator), # Smalltalk message (ObjM2)
+ (r'::', Operator), # type conversion (ObjM2)
+ ],
+ 'punctuation': [
+ (r'[\(\)\[\]{},.:;|]', Punctuation),
+ ],
+ 'comments': [
+ (r'//.*?\n', Comment.Single), # ObjM2
+ (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2
+ (r'\(\*([^\$].*?)\*\)', Comment.Multiline),
+ # TO DO: nesting of (* ... *) comments
+ ],
+ 'pragmas': [
+ (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM
+ (r'<\*(.*?)\*>', Comment.Preproc), # ISO + ObjM2
+ ],
+ 'root': [
+ include('whitespace'),
+ include('comments'),
+ include('pragmas'),
+ include('identifiers'),
+ include('numliterals'),
+ include('strings'),
+ include('operators'),
+ include('punctuation'),
+ ]
+ }
+
+ pim_reserved_words = [
+ # 40 reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION',
+ 'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR',
+ 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD',
+ 'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED',
+ 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
+ 'UNTIL', 'VAR', 'WHILE', 'WITH',
+ ]
+
+ pim_pervasives = [
+ # 31 pervasives
+ 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC',
+ 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL',
+ 'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD',
+ 'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL',
+ ]
+
+ iso_reserved_words = [
+ # 46 reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+ 'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY',
+ 'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN',
+ 'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER',
+ 'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY',
+ 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
+ 'WITH',
+ ]
+
+ iso_pervasives = [
+ # 42 pervasives
+ 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX',
+ 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH',
+ 'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH',
+ 'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW',
+ 'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE',
+ 'TRUE', 'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
+ ]
+
+ objm2_reserved_words = [
+ # base language, 42 reserved words
+ 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
+ 'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF',
+ 'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD',
+ 'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE',
+ 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE',
+ 'UNTIL', 'VAR', 'VARIADIC', 'WHILE',
+ # OO extensions, 16 reserved words
+ 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
+ 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
+ 'SUPER', 'TRY',
+ ]
+
+ objm2_pervasives = [
+ # base language, 38 pervasives
+ 'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE',
+ 'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD',
+ 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL',
+ 'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX',
+ 'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF',
+ # OO extensions, 3 pervasives
+ 'OBJECT', 'NO', 'YES',
+ ]
+
+ gnu_reserved_words = [
+ # 10 additional reserved words
+ 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
+ '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
+ ]
+
+ gnu_pervasives = [
+ # 21 identifiers, actually from pseudo-module SYSTEM
+ # but we will highlight them as if they were pervasives
+ 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
+ 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
+ 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
+ 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
+ ]
+
+ def __init__(self, **options):
+ self.reserved_words = set()
+ self.pervasives = set()
+ # ISO Modula-2
+ if get_bool_opt(options, 'iso', False):
+ self.reserved_words.update(self.iso_reserved_words)
+ self.pervasives.update(self.iso_pervasives)
+ # Objective Modula-2
+ elif get_bool_opt(options, 'objm2', False):
+ self.reserved_words.update(self.objm2_reserved_words)
+ self.pervasives.update(self.objm2_pervasives)
+ # PIM Modula-2 (DEFAULT)
+ else:
+ self.reserved_words.update(self.pim_reserved_words)
+ self.pervasives.update(self.pim_pervasives)
+ # GNU extensions
+ if get_bool_opt(options, 'gm2ext', False):
+ self.reserved_words.update(self.gnu_reserved_words)
+ self.pervasives.update(self.gnu_pervasives)
+ # initialise
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
+ # check for reserved words and pervasives
+ if token is Name:
+ if value in self.reserved_words:
+ token = Keyword.Reserved
+ elif value in self.pervasives:
+ token = Keyword.Pervasive
+ # return result
+ yield index, token, value
+
+
+class AdaLexer(RegexLexer):
+ """
+ For Ada source code.
+
+ .. versionadded:: 1.3
+ """
+
+ name = 'Ada'
+ aliases = ['ada', 'ada95', 'ada2005']
+ filenames = ['*.adb', '*.ads', '*.ada']
+ mimetypes = ['text/x-ada']
+
+ flags = re.MULTILINE | re.I # Ignore case
+
+ tokens = {
+ 'root': [
+ (r'[^\S\n]+', Text),
+ (r'--.*?\n', Comment.Single),
+ (r'[^\S\n]+', Text),
+ (r'function|procedure|entry', Keyword.Declaration, 'subprogram'),
+ (r'(subtype|type)(\s+)([a-z0-9_]+)',
+ bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'),
+ (r'task|protected', Keyword.Declaration),
+ (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)),
+ (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'),
+ (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text,
+ Comment.Preproc)),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (words((
+ 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor',
+ 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float',
+ 'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive',
+ 'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float',
+ 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'),
+ Keyword.Type),
+ (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word),
+ (r'generic|private', Keyword.Declaration),
+ (r'package', Keyword.Declaration, 'package'),
+ (r'array\b', Keyword.Reserved, 'array_def'),
+ (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
+ (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)',
+ bygroups(Name.Constant, Text, Punctuation, Text,
+ Keyword.Reserved)),
+ (r'<<[a-z0-9_]+>>', Name.Label),
+ (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)',
+ bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)),
+ (words((
+ 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all',
+ 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare',
+ 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry',
+ 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited',
+ 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding',
+ 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue',
+ 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized',
+ 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when',
+ 'while', 'xor'), prefix=r'\b', suffix=r'\b'),
+ Keyword.Reserved),
+ (r'"[^"]*"', String),
+ include('attribute'),
+ include('numbers'),
+ (r"'[^']'", String.Character),
+ (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))),
+ (r"(<>|=>|:=|[()|:;,.'])", Punctuation),
+ (r'[*<>+=/&-]', Operator),
+ (r'\n+', Text),
+ ],
+ 'numbers': [
+ (r'[0-9_]+#[0-9a-f]+#', Number.Hex),
+ (r'[0-9_]+\.[0-9_]*', Number.Float),
+ (r'[0-9_]+', Number.Integer),
+ ],
+ 'attribute': [
+ (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)),
+ ],
+ 'subprogram': [
+ (r'\(', Punctuation, ('#pop', 'formal_part')),
+ (r';', Punctuation, '#pop'),
+ (r'is\b', Keyword.Reserved, '#pop'),
+ (r'"[^"]+"|[a-z0-9_]+', Name.Function),
+ include('root'),
+ ],
+ 'end': [
+ ('(if|case|record|loop|select)', Keyword.Reserved),
+ ('"[^"]+"|[\w.]+', Name.Function),
+ ('\s+', Text),
+ (';', Punctuation, '#pop'),
+ ],
+ 'type_def': [
+ (r';', Punctuation, '#pop'),
+ (r'\(', Punctuation, 'formal_part'),
+ (r'with|and|use', Keyword.Reserved),
+ (r'array\b', Keyword.Reserved, ('#pop', 'array_def')),
+ (r'record\b', Keyword.Reserved, ('record_def')),
+ (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'),
+ include('root'),
+ ],
+ 'array_def': [
+ (r';', Punctuation, '#pop'),
+ (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text,
+ Keyword.Reserved)),
+ include('root'),
+ ],
+ 'record_def': [
+ (r'end record', Keyword.Reserved, '#pop'),
+ include('root'),
+ ],
+ 'import': [
+ (r'[a-z0-9_.]+', Name.Namespace, '#pop'),
+ default('#pop'),
+ ],
+ 'formal_part': [
+ (r'\)', Punctuation, '#pop'),
+ (r'[a-z0-9_]+', Name.Variable),
+ (r',|:[^=]', Punctuation),
+ (r'(in|not|null|out|access)\b', Keyword.Reserved),
+ include('root'),
+ ],
+ 'package': [
+ ('body', Keyword.Declaration),
+ ('is\s+new|renames', Keyword.Reserved),
+ ('is', Keyword.Reserved, '#pop'),
+ (';', Punctuation, '#pop'),
+ ('\(', Punctuation, 'package_instantiation'),
+ ('([\w.]+)', Name.Class),
+ include('root'),
+ ],
+ 'package_instantiation': [
+ (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable,
+ Text, Punctuation)),
+ (r'[a-z0-9._\'"]', Text),
+ (r'\)', Punctuation, '#pop'),
+ include('root'),
+ ],
+ }
diff --git a/pygments/lexers/pawn.py b/pygments/lexers/pawn.py
new file mode 100644
index 00000000..d55e2cc6
--- /dev/null
+++ b/pygments/lexers/pawn.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.pawn
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Pawn languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+from pygments.util import get_bool_opt
+
+__all__ = ['SourcePawnLexer', 'PawnLexer']
+
+
+class SourcePawnLexer(RegexLexer):
+ """
+ For SourcePawn source code with preprocessor directives.
+
+ .. versionadded:: 1.6
+ """
+ name = 'SourcePawn'
+ aliases = ['sp']
+ filenames = ['*.sp']
+ mimetypes = ['text/x-sourcepawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(case|const|continue|native|'
+ r'default|else|enum|for|if|new|operator|'
+ r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
+
+ SM_TYPES = set(('Action', 'bool', 'Float', 'Plugin', 'String', 'any',
+ 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType',
+ 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart',
+ 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow',
+ 'ConVarBounds', 'QueryCookie', 'ReplySource',
+ 'ConVarQueryResult', 'ConVarQueryFinished', 'Function',
+ 'Action', 'Identity', 'PluginStatus', 'PluginInfo', 'DBResult',
+ 'DBBindType', 'DBPriority', 'PropType', 'PropFieldType',
+ 'MoveType', 'RenderMode', 'RenderFx', 'EventHookMode',
+ 'EventHook', 'FileType', 'FileTimeMode', 'PathType',
+ 'ParamType', 'ExecType', 'DialogType', 'Handle', 'KvDataTypes',
+ 'NominateResult', 'MapChange', 'MenuStyle', 'MenuAction',
+ 'MenuSource', 'RegexError', 'SDKCallType', 'SDKLibrary',
+ 'SDKFuncConfSource', 'SDKType', 'SDKPassMethod', 'RayType',
+ 'TraceEntityFilter', 'ListenOverride', 'SortOrder', 'SortType',
+ 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus',
+ 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond',
+ 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType',
+ 'TopMenuPosition', 'TopMenuObject', 'UserMsg'))
+
+ def __init__(self, **options):
+ self.smhighlighting = get_bool_opt(options,
+ 'sourcemod', True)
+
+ self._functions = set()
+ if self.smhighlighting:
+ from pygments.lexers._sourcemod_builtins import FUNCTIONS
+ self._functions.update(FUNCTIONS)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if self.smhighlighting:
+ if value in self.SM_TYPES:
+ token = Keyword.Type
+ elif value in self._functions:
+ token = Name.Builtin
+ yield index, token, value
+
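A short sketch of the 'sourcemod' option read in __init__ above (illustrative, assuming the pygments.lexers.pawn module path created by this patch): with the default of True the SM_TYPES and _sourcemod_builtins names are promoted; passing False keeps them as plain Name tokens.

    from pygments.lexers.pawn import SourcePawnLexer

    code = 'public OnPluginStart() { PrintToServer("hi"); }'
    # with sourcemod=False, PrintToServer stays a generic Name token
    for index, token, value in SourcePawnLexer(sourcemod=False).get_tokens_unprocessed(code):
        print(index, token, repr(value))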
+
+class PawnLexer(RegexLexer):
+ """
+ For Pawn source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Pawn'
+ aliases = ['pawn']
+ filenames = ['*.p', '*.pwn', '*.inc']
+ mimetypes = ['text/x-pawn']
+
+ #: optional Comment or Whitespace
+ _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+'
+ #: only one /* */ style comment
+ _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
+
+ tokens = {
+ 'root': [
+ # preprocessor directives: without whitespace
+ ('^#if\s+0', Comment.Preproc, 'if0'),
+ ('^#', Comment.Preproc, 'macro'),
+ # or with whitespace
+ ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
+ ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
+ (r'\n', Text),
+ (r'\s+', Text),
+ (r'\\\n', Text), # line continuation
+ (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
+ (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline),
+ (r'[{}]', Punctuation),
+ (r'L?"', String, 'string'),
+ (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
+ (r'0[0-7]+[LlUu]*', Number.Oct),
+ (r'\d+[LlUu]*', Number.Integer),
+ (r'\*/', Error),
+ (r'[~!%^&*+=|?:<>/-]', Operator),
+ (r'[()\[\],.;]', Punctuation),
+ (r'(switch|case|default|const|new|static|char|continue|break|'
+ r'if|else|for|while|do|operator|enum|'
+ r'public|return|sizeof|tagof|state|goto)\b', Keyword),
+ (r'(bool|Float)\b', Keyword.Type),
+ (r'(true|false)\b', Keyword.Constant),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
+ (r'[^\\"\n]+', String), # all other characters
+ (r'\\\n', String), # line continuation
+ (r'\\', String), # stray backslash
+ ],
+ 'macro': [
+ (r'[^/\n]+', Comment.Preproc),
+ (r'/\*(.|\n)*?\*/', Comment.Multiline),
+ (r'//.*?\n', Comment.Single, '#pop'),
+ (r'/', Comment.Preproc),
+ (r'(?<=\\)\n', Comment.Preproc),
+ (r'\n', Comment.Preproc, '#pop'),
+ ],
+ 'if0': [
+ (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
+ (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
+ (r'.*?\n', Comment),
+ ]
+ }
diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py
new file mode 100644
index 00000000..c279f56d
--- /dev/null
+++ b/pygments/lexers/perl.py
@@ -0,0 +1,614 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.perl
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Perl and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ using, this, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+from pygments.util import shebang_matches
+
+__all__ = ['PerlLexer', 'Perl6Lexer']
+
+
+class PerlLexer(RegexLexer):
+ """
+ For `Perl <http://www.perl.org>`_ source code.
+ """
+
+ name = 'Perl'
+ aliases = ['perl', 'pl']
+ filenames = ['*.pl', '*.pm', '*.t']
+ mimetypes = ['text/x-perl', 'application/x-perl']
+
+ flags = re.DOTALL | re.MULTILINE
+ # TODO: give this to a perl guy who knows how to parse perl...
+ tokens = {
+ 'balanced-regex': [
+ (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\#.*?$', Comment.Single),
+ (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
+ (words((
+ 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
+ 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
+ 'unless', 'until', 'while', 'use', 'print', 'new', 'BEGIN',
+ 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
+ Keyword),
+ (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
+ bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'),
+ (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
+ # common delimiters
+ (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
+ String.Regex),
+ (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
+ (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
+ (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
+ String.Regex),
+ (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
+ String.Regex),
+ # balanced delimiters
+ (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
+ 'balanced-regex'),
+ (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex,
+ 'balanced-regex'),
+
+ (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
+ (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
+ String.Regex),
+ (r'\s+', Text),
+ (words((
+ 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
+ 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
+ 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
+ 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
+ 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
+ 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
+ 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
+ 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
+ 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
+ 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
+ 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
+ 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
+ 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
+ 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'no', 'oct', 'open',
+ 'opendir', 'ord', 'our', 'pack', 'package', 'pipe', 'pop', 'pos', 'printf',
+ 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
+ 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename', 'require',
+ 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
+ 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
+ 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
+ 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
+ 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
+ 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
+ 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
+ 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
+ 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
+ Name.Builtin),
+ (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
+ (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
+ (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
+ (r'[$@%#]+', Name.Variable, 'varname'),
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
+ (r'<([^\s>]+)>', String.Regex),
+ (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
+ (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
+ (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
+ (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
+ (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other),
+ (r'package\s+', Keyword, 'modulename'),
+ (r'sub\s+', Keyword, 'funcname'),
+ (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&^|!\\~]=?', Operator),
+ (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage
+ # of punctuation in Perl!
+ (r'(?=\w)', Name, 'name'),
+ ],
+ 'format': [
+ (r'\.\n', String.Interpol, '#pop'),
+ (r'[^\n]*\n', String.Interpol),
+ ],
+ 'varname': [
+ (r'\s+', Text),
+ (r'\{', Punctuation, '#pop'), # hash syntax?
+ (r'\)|,', Punctuation, '#pop'), # argument specifier
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name.Variable, '#pop'),
+ ],
+ 'name': [
+ (r'\w+::', Name.Namespace),
+ (r'[\w:]+', Name, '#pop'),
+ (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
+ (r'(?=\W)', Text, '#pop'),
+ ],
+ 'modulename': [
+ (r'[a-zA-Z_]\w*', Name.Namespace, '#pop')
+ ],
+ 'funcname': [
+ (r'[a-zA-Z_]\w*[\!\?]?', Name.Function),
+ (r'\s+', Text),
+ # argument declaration
+ (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)),
+ (r'.*?{', Punctuation, '#pop'),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'cb-string': [
+ (r'\\[\{\}\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\{', String.Other, 'cb-string'),
+ (r'\}', String.Other, '#pop'),
+ (r'[^\{\}\\]+', String.Other)
+ ],
+ 'rb-string': [
+ (r'\\[\(\)\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\(', String.Other, 'rb-string'),
+ (r'\)', String.Other, '#pop'),
+ (r'[^\(\)]+', String.Other)
+ ],
+ 'sb-string': [
+ (r'\\[\[\]\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\[', String.Other, 'sb-string'),
+ (r'\]', String.Other, '#pop'),
+ (r'[^\[\]]+', String.Other)
+ ],
+ 'lt-string': [
+ (r'\\[\<\>\\]', String.Other),
+ (r'\\', String.Other),
+ (r'\<', String.Other, 'lt-string'),
+ (r'\>', String.Other, '#pop'),
+ (r'[^\<\>]+', String.Other)
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+
+ def analyse_text(text):
+ if shebang_matches(text, r'perl'):
+ return True
+ if re.search('(?:my|our)\s+[$@%(]', text):
+ return 0.9
+
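A sketch of how the heuristic above is consumed (illustrative; guess_lexer simply calls every lexer's analyse_text, so the exact winner can depend on the other lexers installed):

    from pygments.lexers import guess_lexer

    print(guess_lexer('#!/usr/bin/perl\nprint "hi";\n').name)  # shebang match, expected 'Perl'
    print(guess_lexer('my $x = 42;\nprint $x;\n').name)        # the my/our rule rates this 0.9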
+
+class Perl6Lexer(ExtendedRegexLexer):
+ """
+ For `Perl 6 <http://www.perl6.org>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Perl6'
+ aliases = ['perl6', 'pl6']
+ filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
+ '*.6pm', '*.p6m', '*.pm6', '*.t']
+ mimetypes = ['text/x-perl6', 'application/x-perl6']
+ flags = re.MULTILINE | re.DOTALL | re.UNICODE
+
+ PERL6_IDENTIFIER_RANGE = "['\w:-]"
+
+ PERL6_KEYWORDS = (
+ 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT',
+ 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP',
+ 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but',
+ 'cached', 'category', 'class', 'constant', 'contend', 'continue',
+ 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else',
+ 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for',
+ 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline',
+ 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro',
+ 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of',
+ 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec',
+ 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat',
+ 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw',
+ 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede',
+ 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary',
+ 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will',
+ )
+
+ PERL6_BUILTINS = (
+ 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH',
+ 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh',
+ 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh',
+ 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by',
+ 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat',
+ 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot',
+ 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes',
+ 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech',
+ 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag',
+ 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval',
+ 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists',
+ 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo',
+ 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw',
+ 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix',
+ 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator',
+ 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst',
+ 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map',
+ 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc',
+ 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not',
+ 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord',
+ 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi',
+ 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix',
+ 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi',
+ 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce',
+ 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round',
+ 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say',
+ 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature',
+ 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice',
+ 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ',
+ 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to',
+ 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc',
+ 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack',
+ 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec',
+ 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip',
+ )
+
+ PERL6_BUILTIN_CLASSES = (
+ 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit',
+ 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class',
+ 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception',
+ 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing',
+ 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet',
+ 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method',
+ 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair',
+ 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex',
+ 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen',
+ 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void',
+ 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32',
+ 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2',
+ 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2',
+ 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2',
+ 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8',
+ )
+
+ PERL6_OPERATORS = (
+ 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
+ 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
+ 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
+ '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
+ '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
+ 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
+ '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
+ '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
+ '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
+ 'not', '<==', '==>', '<<==', '==>>',
+ )
+
+ # Perl 6 has a *lot* of possible bracketing characters
+ # this list was lifted from STD.pm6 (https://github.com/perl6/std)
+ PERL6_BRACKETS = {
+ u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d',
+ u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b',
+ u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019',
+ u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d',
+ u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a',
+ u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e',
+ u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d',
+ u'\u2215': u'\u29f5', u'\u223c': u'\u223d', u'\u2243': u'\u22cd',
+ u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265',
+ u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b',
+ u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273',
+ u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279',
+ u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f',
+ u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285',
+ u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b',
+ u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8',
+ u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4',
+ u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1',
+ u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7',
+ u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1',
+ u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db',
+ u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1',
+ u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7',
+ u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed',
+ u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb',
+ u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe',
+ u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a',
+ u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b',
+ u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771',
+ u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4',
+ u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de',
+ u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7',
+ u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984',
+ u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a',
+ u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990',
+ u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996',
+ u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': u'\u29c5',
+ u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5',
+ u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9',
+ u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e',
+ u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65',
+ u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80',
+ u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c',
+ u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96',
+ u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c',
+ u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9',
+ u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0',
+ u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe',
+ u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4',
+ u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0',
+ u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6',
+ u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa',
+ u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a',
+ u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21',
+ u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d',
+ u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015',
+ u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b',
+ u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18',
+ u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a',
+ u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40',
+ u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48',
+ u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e',
+ u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d',
+ u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63',
+ }
+
+ def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
+ if boundary_regex_fragment is None:
+ return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
+ suffix + r')\b'
+ else:
+ return r'(?<!' + boundary_regex_fragment + r')' + prefix + r'(' + \
+ r'|'.join(re.escape(x) for x in words) + r')' + suffix + r'(?!' + \
+ boundary_regex_fragment + r')'
+
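To make the helper above concrete (illustrative): with a boundary fragment the alternation is wrapped in lookbehind/lookahead instead of \b, so characters that are legal inside Perl 6 identifiers (hyphen, colon, apostrophe) do not split a word.

    import re

    # roughly what _build_word_match(('for', 'if'), PERL6_IDENTIFIER_RANGE) returns
    pat = r"(?<!['\w:-])(for|if)(?!['\w:-])"
    print(bool(re.search(pat, 'for $x')))     # True: standalone keyword
    print(bool(re.search(pat, 'therefore')))  # False: inside an identifier
    print(bool(re.search(pat, 'for-each')))   # False: '-' continues a Perl 6 identifier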
+ def brackets_callback(token_class):
+ def callback(lexer, match, context):
+ groups = match.groupdict()
+ opening_chars = groups['delimiter']
+ n_chars = len(opening_chars)
+ adverbs = groups.get('adverbs')
+
+ closer = Perl6Lexer.PERL6_BRACKETS.get(opening_chars[0])
+ text = context.text
+
+ if closer is None: # it's not a mirrored character, which means we
+ # just need to look for the next occurrence
+
+ end_pos = text.find(opening_chars, match.start('delimiter') + n_chars)
+ else: # we need to look for the corresponding closing character,
+ # keep nesting in mind
+ closing_chars = closer * n_chars
+ nesting_level = 1
+
+ search_pos = match.start('delimiter')
+
+ while nesting_level > 0:
+ next_open_pos = text.find(opening_chars, search_pos + n_chars)
+ next_close_pos = text.find(closing_chars, search_pos + n_chars)
+
+ if next_close_pos == -1:
+ next_close_pos = len(text)
+ nesting_level = 0
+ elif next_open_pos != -1 and next_open_pos < next_close_pos:
+ nesting_level += 1
+ search_pos = next_open_pos
+ else: # next_close_pos < next_open_pos
+ nesting_level -= 1
+ search_pos = next_close_pos
+
+ end_pos = next_close_pos
+
+ if end_pos < 0: # if we didn't find a closer, just highlight the
+ # rest of the text in this class
+ end_pos = len(text)
+
+ if adverbs is not None and re.search(r':to\b', adverbs):
+ heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
+ end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
+ r'\s*$', text[end_pos:], re.MULTILINE)
+
+ if end_heredoc:
+ end_pos += end_heredoc.end()
+ else:
+ end_pos = len(text)
+
+ yield match.start(), token_class, text[match.start():end_pos + n_chars]
+ context.pos = end_pos + n_chars
+
+ return callback
+
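A small sketch of the nesting logic above (illustrative, assuming the pygments.lexers.perl module path created by this patch): a bracket quote may contain balanced copies of its own delimiter and still comes out as a single String token.

    from pygments.lexers.perl import Perl6Lexer
    from pygments.token import String

    code = 'my $s = q{ outer { inner } still outer };'
    strings = [v for _, t, v in Perl6Lexer().get_tokens_unprocessed(code) if t in String]
    print(strings)  # expected: ['q{ outer { inner } still outer }']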
+ def opening_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter an opening brace and we're one level
+ # below a token state, it means we need to increment
+ # the nesting level for braces so we know later when
+ # we should return to the token rules.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level += 1
+
+ def closing_brace_callback(lexer, match, context):
+ stack = context.stack
+
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+
+ # if we encounter a free closing brace and we're one level
+ # below a token state, it means we need to check the nesting
+ # level to see if we need to return to the token state.
+ if len(stack) > 2 and stack[-2] == 'token':
+ context.perl6_token_nesting_level -= 1
+ if context.perl6_token_nesting_level == 0:
+ stack.pop()
+
+ def embedded_perl6_callback(lexer, match, context):
+ context.perl6_token_nesting_level = 1
+ yield match.start(), Text, context.text[match.start():match.end()]
+ context.pos = match.end()
+ context.stack.append('root')
+
+ # If you're modifying these rules, be careful if you need to process '{' or '}'
+ # characters. We have special logic for processing these characters (due to the fact
+ # that you can nest Perl 6 code in regex blocks), so if you need to process one of
+ # them, make sure you also process the corresponding one!
+ tokens = {
+ 'common': [
+ (r'#[`|=](?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
+ brackets_callback(Comment.Multiline)),
+ (r'#[^\n]*$', Comment.Singleline),
+ (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
+ (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
+ (r'^=.*?\n\s*?\n', Comment.Multiline),
+ (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
+ bygroups(Keyword, Name), 'token-sym-brackets'),
+ (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
+ bygroups(Keyword, Name), 'pre-token'),
+ # deal with a special case in the Perl 6 grammar (role q { ... })
+ (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)),
+ (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
+ (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
+ Name.Builtin),
+ (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
+ # copied from PerlLexer
+ (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable),
+ (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
+ (r'::\?\w+', Name.Variable.Global),
+ (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*',
+ Name.Variable.Global),
+ (r'\$(?:<.*?>)+', Name.Variable),
+ (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z:\s])'
+ r'(?P=first_char)*)', brackets_callback(String)),
+ # copied from PerlLexer
+ (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
+ (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
+ (r'0b[01]+(_[01]+)*', Number.Bin),
+ (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
+ Number.Float),
+ (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
+ (r'\d+(_\d+)*', Number.Integer),
+ (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
+ (r'm\w+(?=\()', Name),
+ (r'(?:m|ms|rx)\s*(?P<adverbs>:[\w\s:]+)?\s*(?P<delimiter>(?P<first_char>[^0-9a-zA-Z_:\s])'
+ r'(?P=first_char)*)', brackets_callback(String.Regex)),
+ (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
+ String.Regex),
+ (r'<[^\s=].*?\S>', String),
+ (_build_word_match(PERL6_OPERATORS), Operator),
+ (r'[0-9a-zA-Z_]' + PERL6_IDENTIFIER_RANGE + '*', Name),
+ (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
+ (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
+ ],
+ 'root': [
+ include('common'),
+ (r'\{', opening_brace_callback),
+ (r'\}', closing_brace_callback),
+ (r'.+?', Text),
+ ],
+ 'pre-token': [
+ include('common'),
+ (r'\{', Text, ('#pop', 'token')),
+ (r'.+?', Text),
+ ],
+ 'token-sym-brackets': [
+ (r'(?P<delimiter>(?P<first_char>[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
+ brackets_callback(Name), ('#pop', 'pre-token')),
+ default(('#pop', 'pre-token')),
+ ],
+ 'token': [
+ (r'}', Text, '#pop'),
+ (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
+ # make sure that quotes in character classes aren't treated as strings
+ (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
+ # make sure that '#' characters in quotes aren't treated as comments
+ (r"(?<!\\)'(\\\\|\\[^\\]|[^'\\])*'", String.Regex),
+ (r'(?<!\\)"(\\\\|\\[^\\]|[^"\\])*"', String.Regex),
+ (r'#.*?$', Comment.Singleline),
+ (r'\{', embedded_perl6_callback),
+ ('.+?', String.Regex),
+ ],
+ }
+
+ def analyse_text(text):
+ def strip_pod(lines):
+ in_pod = False
+ stripped_lines = []
+
+ for line in lines:
+ if re.match(r'^=(?:end|cut)', line):
+ in_pod = False
+ elif re.match(r'^=\w+', line):
+ in_pod = True
+ elif not in_pod:
+ stripped_lines.append(line)
+
+ return stripped_lines
+
+ # XXX handle block comments
+ lines = text.splitlines()
+ lines = strip_pod(lines)
+ text = '\n'.join(lines)
+
+ if shebang_matches(text, r'perl6|rakudo|niecza|pugs'):
+ return True
+
+ saw_perl_decl = False
+ rating = False
+
+ # check for my/our/has declarations
+ if re.search("(?:my|our|has)\s+(?:" + Perl6Lexer.PERL6_IDENTIFIER_RANGE +
+ "+\s+)?[$@%&(]", text):
+ rating = 0.8
+ saw_perl_decl = True
+
+ for line in lines:
+ line = re.sub('#.*', '', line)
+ if re.match('^\s*$', line):
+ continue
+
+ # match v6; use v6; use v6.0; use v6.0.0;
+ if re.match('^\s*(?:use\s+)?v6(?:\.\d(?:\.\d)?)?;', line):
+ return True
+ # match class, module, role, enum, grammar declarations
+ class_decl = re.match('^\s*(?:(?P<scope>my|our)\s+)?(?:module|class|role|enum|grammar)', line)
+ if class_decl:
+ if saw_perl_decl or class_decl.group('scope') is not None:
+ return True
+ rating = 0.05
+ continue
+ break
+
+ return rating
+
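A concrete reading of the heuristic above (illustrative): a v6 pragma or a scoped class/module declaration is decisive, while a bare my/our/has declaration only raises the score.

    from pygments.lexers.perl import Perl6Lexer

    print(Perl6Lexer.analyse_text('use v6;\nsay "hi";\n'))          # 1.0
    print(Perl6Lexer.analyse_text('my class Point { has $.x }\n'))  # 1.0
    print(Perl6Lexer.analyse_text('my $x = 42;\n'))                 # 0.8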
+ def __init__(self, **options):
+ super(Perl6Lexer, self).__init__(**options)
+ self.encoding = options.get('encoding', 'utf-8')
diff --git a/pygments/lexers/php.py b/pygments/lexers/php.py
new file mode 100644
index 00000000..091cf93f
--- /dev/null
+++ b/pygments/lexers/php.py
@@ -0,0 +1,246 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.php
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for PHP and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Other
+from pygments.util import get_bool_opt, get_list_opt, iteritems
+
+__all__ = ['ZephirLexer', 'PhpLexer']
+
+
+class ZephirLexer(RegexLexer):
+ """
+ For `Zephir language <http://zephir-lang.com/>`_ source code.
+
+    Zephir is a compiled high-level language aimed
+    at the creation of C extensions for PHP.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Zephir'
+ aliases = ['zephir']
+ filenames = ['*.zep']
+
+ zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
+ zephir_type = ['bit', 'bits', 'string']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|->|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
+ r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
+ r'empty)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
+ r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
+ r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
+ r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_][\w\\]*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class PhpLexer(RegexLexer):
+ """
+ For `PHP <http://www.php.net/>`_ source code.
+ For PHP embedded in HTML, use the `HtmlPhpLexer`.
+
+ Additional options accepted:
+
+ `startinline`
+ If given and ``True`` the lexer starts highlighting with
+ php code (i.e.: no starting ``<?php`` required). The default
+ is ``False``.
+ `funcnamehighlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabledmodules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted
+ except the special ``'unknown'`` module that includes functions
+ that are known to php but are undocumented.
+
+ To get a list of allowed modules have a look into the
+ `_php_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._php_builtins import MODULES
+ >>> MODULES.keys()
+ ['PHP Options/Info', 'Zip', 'dba', ...]
+
+ In fact the names of those modules match the module names from
+ the php documentation.
+ """
+
+ name = 'PHP'
+ aliases = ['php', 'php3', 'php4', 'php5']
+ filenames = ['*.php', '*.php[345]', '*.inc']
+ mimetypes = ['text/x-php']
+
+    # Note that a backslash is included in the following two patterns;
+    # PHP uses a backslash as a namespace separator
+ _ident_char = r'[\\\w]|[^\x00-\x7f]'
+ _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
+ _ident_end = r'(?:' + _ident_char + ')*'
+ _ident_inner = _ident_begin + _ident_end
+
+ flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
+ tokens = {
+ 'root': [
+ (r'<\?(php)?', Comment.Preproc, 'php'),
+ (r'[^<]+', Other),
+ (r'<', Other)
+ ],
+ 'php': [
+ (r'\?>', Comment.Preproc, '#pop'),
+ (r'<<<([\'"]?)(' + _ident_inner + r')\1\n.*?\n\s*\2;?\n', String),
+ (r'\s+', Text),
+ (r'#.*?\n', Comment.Single),
+ (r'//.*?\n', Comment.Single),
+            # put the empty comment here; it is otherwise seen as
+            # the start of a docstring
+ (r'/\*\*/', Comment.Multiline),
+ (r'/\*\*.*?\*/', String.Doc),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'(->|::)(\s*)(' + _ident_inner + ')',
+ bygroups(Operator, Text, Name.Attribute)),
+ (r'[~!%^&*+=|:.<>/?@-]+', Operator),
+ (r'[\[\]{}();,]+', Punctuation),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
+ (r'(function)(\s+)(&?)(\s*)',
+ bygroups(Keyword, Text, Operator, Text), 'functionname'),
+ (r'(const)(\s+)(' + _ident_inner + ')',
+ bygroups(Keyword, Text, Name.Constant)),
+ (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
+ r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
+ r'FALSE|print|for|require|continue|foreach|require_once|'
+ r'declare|return|default|static|do|switch|die|stdClass|'
+ r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
+ r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
+ r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
+ r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
+ r'implements|public|private|protected|abstract|clone|try|'
+ r'catch|throw|this|use|namespace|trait|yield|'
+ r'finally)\b', Keyword),
+ (r'(true|false|null)\b', Keyword.Constant),
+ (r'\$\{\$+' + _ident_inner + '\}', Name.Variable),
+ (r'\$+' + _ident_inner, Name.Variable),
+ (_ident_inner, Name.Other),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+', Number.Float),
+ (r'0[0-7]+', Number.Oct),
+ (r'0[xX][a-f0-9]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'0b[01]+', Number.Bin),
+ (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
+ (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
+ (r'"', String.Double, 'string'),
+ ],
+ 'classname': [
+ (_ident_inner, Name.Class, '#pop')
+ ],
+ 'functionname': [
+ (_ident_inner, Name.Function, '#pop')
+ ],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'[^{$"\\]+', String.Double),
+ (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
+ (r'\$' + _ident_inner + '(\[\S+?\]|->' + _ident_inner + ')?',
+ String.Interpol),
+ (r'(\{\$\{)(.*?)(\}\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\{)(\$.*?)(\})',
+ bygroups(String.Interpol, using(this, _startinline=True),
+ String.Interpol)),
+ (r'(\$\{)(\S+)(\})',
+ bygroups(String.Interpol, Name.Variable, String.Interpol)),
+ (r'[${\\]+', String.Double)
+ ],
+ }
+
+ def __init__(self, **options):
+ self.funcnamehighlighting = get_bool_opt(
+ options, 'funcnamehighlighting', True)
+ self.disabledmodules = get_list_opt(
+ options, 'disabledmodules', ['unknown'])
+ self.startinline = get_bool_opt(options, 'startinline', False)
+
+ # private option argument for the lexer itself
+ if '_startinline' in options:
+ self.startinline = options.pop('_startinline')
+
+ # collect activated functions in a set
+ self._functions = set()
+ if self.funcnamehighlighting:
+ from pygments.lexers._php_builtins import MODULES
+ for key, value in iteritems(MODULES):
+ if key not in self.disabledmodules:
+ self._functions.update(value)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ stack = ['root']
+ if self.startinline:
+ stack.append('php')
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text, stack):
+ if token is Name.Other:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ yield index, token, value
+
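A usage sketch for the options wired up above (illustrative, assuming the pygments.lexers.php module path created by this patch): startinline starts lexing in the 'php' state so no leading <?php is needed, and disabledmodules trims the builtin set collected in __init__.

    from pygments import highlight
    from pygments.formatters import NullFormatter
    from pygments.lexers.php import PhpLexer

    snippet = '$greeting = strtoupper("hello") . " world";'
    lexer = PhpLexer(startinline=True, disabledmodules=['unknown', 'Zip'])
    print(highlight(snippet, lexer, NullFormatter()))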
+ def analyse_text(text):
+ rv = 0.0
+ if re.search(r'<\?(?!xml)', text):
+ rv += 0.3
+ if '?>' in text:
+ rv += 0.1
+ return rv
diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py
new file mode 100644
index 00000000..6914bd78
--- /dev/null
+++ b/pygments/lexers/prolog.py
@@ -0,0 +1,306 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.prolog
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Prolog and Prolog-like languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['PrologLexer', 'LogtalkLexer']
+
+
+class PrologLexer(RegexLexer):
+ """
+ Lexer for Prolog files.
+ """
+ name = 'Prolog'
+ aliases = ['prolog']
+ filenames = ['*.ecl', '*.prolog', '*.pro', '*.pl']
+ mimetypes = ['text/x-prolog']
+
+ flags = re.UNICODE
+
+ tokens = {
+ 'root': [
+ (r'^#.*', Comment.Single),
+ (r'/\*', Comment.Multiline, 'nested-comment'),
+ (r'%.*', Comment.Single),
+ # character literal
+ (r'0\'.', String.Char),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # literal with prepended base
+ (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer),
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'[\[\](){}|.,;!]', Punctuation),
+ (r':-|-->', Punctuation),
+ (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
+ r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double),
+ (r"'(?:''|[^'])*'", String.Atom), # quoted atom
+            # Must not be followed by an atom.
+ # (r'=(?=\s|[a-zA-Z\[])', Operator),
+ (r'is\b', Operator),
+ (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
+ Operator),
+ (r'(mod|div|not)\b', Operator),
+ (r'_', Keyword), # The don't-care variable
+ (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'(\\s*)(:-|-->)',
+ bygroups(Name.Function, Text, Operator)), # function defn
+ (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
+ u'(\\s*)(\\()',
+ bygroups(Name.Function, Text, Punctuation)),
+ (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
+ u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
+ String.Atom), # atom, characters
+ # This one includes !
+ (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+',
+ String.Atom), # atom, graphics
+ (r'[A-Z_]\w*', Name.Variable),
+ (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
+ ],
+ 'nested-comment': [
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'[^*/]+', Comment.Multiline),
+ (r'[*/]', Comment.Multiline),
+ ],
+ }
+
+ def analyse_text(text):
+ return ':-' in text
+
+
+class LogtalkLexer(RegexLexer):
+ """
+ For `Logtalk <http://logtalk.org/>`_ source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Logtalk'
+ aliases = ['logtalk']
+ filenames = ['*.lgt', '*.logtalk']
+ mimetypes = ['text/x-logtalk']
+
+ tokens = {
+ 'root': [
+ # Directives
+ (r'^\s*:-\s',Punctuation,'directive'),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/',Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Event handlers
+ (r'(after|before)(?=[(])', Keyword),
+ # Message forwarding handler
+ (r'forward(?=[(])', Keyword),
+ # Execution-context methods
+ (r'(parameter|this|se(lf|nder))(?=[(])', Keyword),
+ # Reflection
+ (r'(current_predicate|predicate_property)(?=[(])', Keyword),
+ # DCGs and term expansion
+ (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
+ # Entity
+ (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
+ (r'(object|protocol|category)_property(?=[(])', Keyword),
+ # Entity relations
+ (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
+ (r'extends_(object|protocol|category)(?=[(])', Keyword),
+ (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
+ (r'(instantiat|specializ)es_class(?=[(])', Keyword),
+ # Events
+ (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
+ # Flags
+ (r'(current|set)_logtalk_flag(?=[(])', Keyword),
+ # Compiling, loading, and library paths
+ (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword),
+ (r'\blogtalk_make\b', Keyword),
+ # Database
+ (r'(clause|retract(all)?)(?=[(])', Keyword),
+ (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
+ # Control constructs
+ (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
+ (r'(fa(il|lse)|true)\b', Keyword),
+ # All solutions
+ (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
+ # Multi-threading meta-predicates
+ (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
+ # Term unification
+ (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
+ # Term creation and decomposition
+ (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
+ # Evaluable functors
+ (r'(rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
+ (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
+ (r'(floor|truncate|round|ceiling)(?=[(])', Keyword),
+ # Other arithmetic functors
+ (r'(cos|a(cos|sin|tan)|exp|log|s(in|qrt))(?=[(])', Keyword),
+ # Term testing
+ (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|'
+ r'ground|acyclic_term)(?=[(])', Keyword),
+ # Term comparison
+ (r'compare(?=[(])', Keyword),
+ # Stream selection and control
+ (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
+ (r'(open|close)(?=[(])', Keyword),
+ (r'flush_output(?=[(])', Keyword),
+ (r'(at_end_of_stream|flush_output)\b', Keyword),
+ (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
+ # Character and byte input/output
+ (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
+ (r'\bnl\b', Keyword),
+ # Term input/output
+ (r'read(_term)?(?=[(])', Keyword),
+ (r'write(q|_(canonical|term))?(?=[(])', Keyword),
+ (r'(current_)?op(?=[(])', Keyword),
+ (r'(current_)?char_conversion(?=[(])', Keyword),
+ # Atomic term processing
+ (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
+ (r'(char_code|sub_atom)(?=[(])', Keyword),
+ (r'number_c(har|ode)s(?=[(])', Keyword),
+            # Implementation-defined hook functions
+ (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
+ (r'\bhalt\b', Keyword),
+ (r'halt(?=[(])', Keyword),
+ # Message sending operators
+ (r'(::|:|\^\^)', Operator),
+ # External call
+ (r'[{}]', Keyword),
+ # Logic and control
+ (r'(ignore|once)(?=[(])', Keyword),
+ (r'\brepeat\b', Keyword),
+ # Sorting
+ (r'(key)?sort(?=[(])', Keyword),
+ # Bitwise functors
+ (r'(>>|<<|/\\|\\\\|\\)', Operator),
+ # Predicate aliases
+ (r'\bas\b', Operator),
+            # Arithmetic evaluation
+ (r'\bis\b', Keyword),
+            # Arithmetic comparison
+ (r'(=:=|=\\=|<|=<|>=|>)', Operator),
+ # Term creation and decomposition
+ (r'=\.\.', Operator),
+ # Term unification
+ (r'(=|\\=)', Operator),
+ # Term comparison
+ (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
+ # Evaluable functors
+ (r'(//|[-+*/])', Operator),
+ (r'\b(e|pi|mod|rem)\b', Operator),
+            # Other arithmetic functors
+ (r'\b\*\*\b', Operator),
+ # DCG rules
+ (r'-->', Operator),
+ # Control constructs
+ (r'([!;]|->)', Operator),
+ # Logic and control
+ (r'\\+', Operator),
+ # Mode operators
+ (r'[?@]', Operator),
+ # Existential quantifier
+ (r'\^', Operator),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+            # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ ],
+
+ 'quoted_atom': [
+ (r"['][']", String),
+ (r"[']", String, '#pop'),
+ (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
+ (r"[^\\'\n]+", String),
+ (r'\\', String),
+ ],
+
+ 'directive': [
+ # Conditional compilation directives
+ (r'(el)?if(?=[(])', Keyword, 'root'),
+ (r'(e(lse|ndif))[.]', Keyword, 'root'),
+ # Entity directives
+ (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
+ (r'(end_(category|object|protocol))[.]',Keyword, 'root'),
+ # Predicate scope directives
+ (r'(public|protected|private)(?=[(])', Keyword, 'root'),
+ # Other directives
+ (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
+ (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
+ (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'),
+ (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|'
+ r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
+ (r'op(?=[(])', Keyword, 'root'),
+ (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
+ (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'),
+ ],
+
+ 'entityrelations': [
+ (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
+ # Numbers
+ (r"0'.", Number),
+ (r'0b[01]+', Number.Bin),
+ (r'0o[0-7]+', Number.Oct),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
+ # Variables
+ (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
+ # Atoms
+ (r"[a-z][a-zA-Z0-9_]*", Text),
+ (r"[']", String, 'quoted_atom'),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # End of entity-opening directive
+ (r'([)]\.)', Text, 'root'),
+ # Scope operator
+ (r'(::)', Operator),
+            # Punctuation
+ (r'[()\[\],.|]', Text),
+ # Comments
+ (r'%.*?\n', Comment),
+ (r'/\*(.|\n)*?\*/',Comment),
+ # Whitespace
+ (r'\n', Text),
+ (r'\s+', Text),
+ ]
+ }
+
+ def analyse_text(text):
+ if ':- object(' in text:
+ return 1.0
+ elif ':- protocol(' in text:
+ return 1.0
+ elif ':- category(' in text:
+ return 1.0
+ elif re.search('^:-\s[a-z]', text, re.M):
+ return 0.9
+ else:
+ return 0.0
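To make the scoring above concrete (illustrative): an entity-opening directive rates 1.0, any other leading ':- foo' directive 0.9, and anything else 0.0, which is what lets guess_lexer prefer Logtalk over plain Prolog for Logtalk sources.

    from pygments.lexers.prolog import LogtalkLexer

    print(LogtalkLexer.analyse_text(':- object(hello).\n'))       # 1.0
    print(LogtalkLexer.analyse_text(':- dynamic(counter/1).\n'))  # 0.9
    print(LogtalkLexer.analyse_text('hello :- write(hi).\n'))     # 0.0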
diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py
new file mode 100644
index 00000000..411f7bc7
--- /dev/null
+++ b/pygments/lexers/python.py
@@ -0,0 +1,833 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.python
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Python and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
+ default, words, combined, do_insertions
+from pygments.util import get_bool_opt, shebang_matches
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic, Other, Error
+from pygments import unistring as uni
+
+__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
+ 'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer',
+ 'DgLexer', 'NumPyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+class PythonLexer(RegexLexer):
+ """
+ For `Python <http://www.python.org>`_ source code.
+ """
+
+ name = 'Python'
+ aliases = ['python', 'py', 'sage']
+ filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage']
+ mimetypes = ['text/x-python', 'application/x-python']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
+ include('keywords'),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'fromimport'),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
+ 'print', 'raise', 'return', 'try', 'while', 'yield',
+ 'yield from', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
+ 'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
+ 'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
+ 'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
+ 'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
+ 'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
+ 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
+ 'unichr', 'unicode', 'vars', 'xrange', 'zip'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit',
+ 'IOError', 'ImportError', 'ImportWarning', 'IndentationError',
+ 'IndexError', 'KeyError', 'KeyboardInterrupt', 'LookupError',
+ 'MemoryError', 'NameError', 'NotImplemented', 'NotImplementedError',
+ 'OSError', 'OverflowError', 'OverflowWarning', 'PendingDeprecationWarning',
+ 'ReferenceError', 'RuntimeError', 'RuntimeWarning', 'StandardError',
+ 'StopIteration', 'SyntaxError', 'SyntaxWarning', 'SystemError',
+ 'SystemExit', 'TabError', 'TypeError', 'UnboundLocalError',
+ 'UnicodeDecodeError', 'UnicodeEncodeError', 'UnicodeError',
+ 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',
+ 'ValueError', 'VMSError', 'Warning', 'WindowsError',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float),
+ (r'\d+[eE][+-]?[0-9]+j?', Number.Float),
+ (r'0[0-7]+j?', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+j?', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@[\w.]+', Name.Decorator),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ ],
+ 'classname': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'as\b', Keyword.Namespace),
+ (r',', Operator),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(?:[ \t]|\\\n)+', Text),
+ (r'import\b', Keyword.Namespace, '#pop'),
+ # if None occurs here, it's "raise x from None", since None can
+ # never be a module name
+ (r'None\b', Name.Builtin.Pseudo, '#pop'),
+ # sadly, in "raise x from y" y will be highlighted as namespace too
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # anything else here also means "raise x from y" and is therefore
+ # not an error
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?(2(\.\d)?)?') or \
+ 'import ' in text[:1000]
+
+
+class Python3Lexer(RegexLexer):
+ """
+ For `Python <http://www.python.org>`_ source code (version 3.0).
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'Python 3'
+ aliases = ['python3', 'py3']
+ filenames = [] # Nothing until Python 3 gets widespread
+ mimetypes = ['text/x-python3', 'application/x-python3']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+
+ tokens = PythonLexer.tokens.copy()
+ tokens['keywords'] = [
+ (words((
+ 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
+ 'finally', 'for', 'global', 'if', 'lambda', 'pass', 'raise',
+ 'nonlocal', 'return', 'try', 'while', 'yield', 'yield from', 'as',
+ 'with', 'True', 'False', 'None'), suffix=r'\b'),
+ Keyword),
+ ]
+ tokens['builtins'] = [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', 'bytes',
+ 'chr', 'classmethod', 'cmp', 'compile', 'complex', 'delattr', 'dict',
+ 'dir', 'divmod', 'enumerate', 'eval', 'filter', 'float', 'format',
+ 'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
+ 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', 'list',
+ 'locals', 'map', 'max', 'memoryview', 'min', 'next', 'object', 'oct',
+ 'open', 'ord', 'pow', 'print', 'property', 'range', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', 'str',
+ 'sum', 'super', 'tuple', 'type', 'vars', 'zip'), prefix=r'(?<!\.)',
+ suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|Ellipsis|NotImplemented)\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'BufferError', 'BytesWarning', 'DeprecationWarning',
+ 'EOFError', 'EnvironmentError', 'Exception', 'FloatingPointError',
+ 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError',
+ 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplementedError', 'OSError', 'OverflowError',
+ 'PendingDeprecationWarning', 'ReferenceError',
+ 'RuntimeError', 'RuntimeWarning', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'VMSError', 'Warning',
+ 'WindowsError', 'ZeroDivisionError',
+ # new builtin exceptions from PEP 3151
+ 'BlockingIOError', 'ChildProcessError', 'ConnectionError',
+ 'BrokenPipeError', 'ConnectionAbortedError', 'ConnectionRefusedError',
+ 'ConnectionResetError', 'FileExistsError', 'FileNotFoundError',
+ 'InterruptedError', 'IsADirectoryError', 'NotADirectoryError',
+ 'PermissionError', 'ProcessLookupError', 'TimeoutError'),
+ prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ]
+ tokens['numbers'] = [
+ (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0[oO][0-7]+', Number.Oct),
+ (r'0[bB][01]+', Number.Bin),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+', Number.Integer)
+ ]
+ tokens['backtick'] = []
+ tokens['name'] = [
+ (r'@\w+', Name.Decorator),
+ (uni_name, Name),
+ ]
+ tokens['funcname'] = [
+ (uni_name, Name.Function, '#pop')
+ ]
+ tokens['classname'] = [
+ (uni_name, Name.Class, '#pop')
+ ]
+ tokens['import'] = [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ]
+ tokens['fromimport'] = [
+ (r'(\s+)(import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'\.', Name.Namespace),
+ (uni_name, Name.Namespace),
+ default('#pop'),
+ ]
+ # don't highlight "%s" substitutions
+ tokens['strings'] = [
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ]
+
+ def analyse_text(text):
+ return shebang_matches(text, r'pythonw?3(\.\d)?')
+
+
+class PythonConsoleLexer(Lexer):
+ """
+ For Python console output or doctests, such as:
+
+ .. sourcecode:: pycon
+
+ >>> a = 'foo'
+ >>> print a
+ foo
+ >>> 1 / 0
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ZeroDivisionError: integer division or modulo by zero
+
+ Additional options:
+
+ `python3`
+ Use Python 3 lexer for code. Default is ``False``.
+
+ .. versionadded:: 1.0
+ """
+ name = 'Python console session'
+ aliases = ['pycon']
+ mimetypes = ['text/x-python-doctest']
+
+ def __init__(self, **options):
+ self.python3 = get_bool_opt(options, 'python3', False)
+ Lexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ if self.python3:
+ pylexer = Python3Lexer(**self.options)
+ tblexer = Python3TracebackLexer(**self.options)
+ else:
+ pylexer = PythonLexer(**self.options)
+ tblexer = PythonTracebackLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ curtb = ''
+ tbindex = 0
+ tb = 0
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith(u'>>> ') or line.startswith(u'... '):
+ tb = 0
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:4])]))
+ curcode += line[4:]
+ elif line.rstrip() == u'...' and not tb:
+ # only a new >>> prompt can end an exception block;
+ # otherwise an ellipsis in place of the traceback frames
+ # will be mishandled
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, u'...')]))
+ curcode += line[3:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ if (line.startswith(u'Traceback (most recent call last):') or
+ re.match(u' File "[^"]+", line \\d+\\n$', line)):
+ tb = 1
+ curtb = line
+ tbindex = match.start()
+ elif line == 'KeyboardInterrupt\n':
+ yield match.start(), Name.Class, line
+ elif tb:
+ curtb += line
+ if not (line.startswith(' ') or line.strip() == u'...'):
+ tb = 0
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+ else:
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(insertions,
+ pylexer.get_tokens_unprocessed(curcode)):
+ yield item
+ if curtb:
+ for i, t, v in tblexer.get_tokens_unprocessed(curtb):
+ yield tbindex+i, t, v
+
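PythonConsoleLexer above weaves prompts and code together with do_insertions: the '>>> ' and '... ' prefixes are recorded as Generic.Prompt insertions, while the text after them is accumulated and re-lexed as ordinary Python (or handed to the traceback lexer). A small sketch of feeding it a doctest-style transcript (the printed token stream is indicative, not exact):

    from pygments.lexers.python import PythonConsoleLexer

    sample = '>>> 1 + 1\n2\n'
    for token, value in PythonConsoleLexer().get_tokens(sample):
        print(token, repr(value))   # Generic.Prompt '>>> ', the lexed code, then Generic.Output '2\n'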
+
+class PythonTracebackLexer(RegexLexer):
+ """
+ For Python tracebacks.
+
+ .. versionadded:: 0.7
+ """
+
+ name = 'Python Traceback'
+ aliases = ['pytb']
+ filenames = ['*.pytb']
+ mimetypes = ['text/x-python-traceback']
+
+ tokens = {
+ 'root': [
+ (r'^Traceback \(most recent call last\):\n',
+ Generic.Traceback, 'intb'),
+ # SyntaxError starts with this.
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ (r'^.*\n', Other),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(PythonLexer), Text)),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
+class Python3TracebackLexer(RegexLexer):
+ """
+ For Python 3.0 tracebacks, with support for chained exceptions.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'Python 3.0 Traceback'
+ aliases = ['py3tb']
+ filenames = ['*.py3tb']
+ mimetypes = ['text/x-python3-traceback']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
+ (r'^During handling of the above exception, another '
+ r'exception occurred:\n\n', Generic.Traceback),
+ (r'^The above exception was the direct cause of the '
+ r'following exception:\n\n', Generic.Traceback),
+ (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
+ ],
+ 'intb': [
+ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)),
+ (r'^( File )("[^"]+")(, line )(\d+)(\n)',
+ bygroups(Text, Name.Builtin, Text, Number, Text)),
+ (r'^( )(.+)(\n)',
+ bygroups(Text, using(Python3Lexer), Text)),
+ (r'^([ \t]*)(\.\.\.)(\n)',
+ bygroups(Text, Comment, Text)), # for doctests...
+ (r'^([^:]+)(: )(.+)(\n)',
+ bygroups(Generic.Error, Text, Name, Text), '#pop'),
+ (r'^([a-zA-Z_]\w*)(:?\n)',
+ bygroups(Generic.Error, Text), '#pop')
+ ],
+ }
+
+
+class CythonLexer(RegexLexer):
+ """
+ For Pyrex and `Cython <http://cython.org>`_ source code.
+
+ .. versionadded:: 1.1
+ """
+
+ name = 'Cython'
+ aliases = ['cython', 'pyx', 'pyrex']
+ filenames = ['*.pyx', '*.pxd', '*.pxi']
+ mimetypes = ['text/x-cython', 'application/x-cython']
+
+ tokens = {
+ 'root': [
+ (r'\n', Text),
+ (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
+ (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
+ (r'[^\S\n]+', Text),
+ (r'#.*$', Comment),
+ (r'[]{}:(),;[]', Punctuation),
+ (r'\\\n', Text),
+ (r'\\', Text),
+ (r'(in|is|and|or|not)\b', Operator.Word),
+ (r'(<)([a-zA-Z0-9.?]+)(>)',
+ bygroups(Punctuation, Keyword.Type, Punctuation)),
+ (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
+ (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
+ bygroups(Keyword, Number.Integer, Operator, Name, Operator,
+ Name, Punctuation)),
+ include('keywords'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
+ include('builtins'),
+ include('backtick'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
+ ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
+ ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
+ ('[uU]?"""', String, combined('stringescape', 'tdqs')),
+ ("[uU]?'''", String, combined('stringescape', 'tsqs')),
+ ('[uU]?"', String, combined('stringescape', 'dqs')),
+ ("[uU]?'", String, combined('stringescape', 'sqs')),
+ include('name'),
+ include('numbers'),
+ ],
+ 'keywords': [
+ (words((
+ 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
+ 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil',
+ 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
+ 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
+ Keyword),
+ (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
+ ],
+ 'builtins': [
+ (words((
+ '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
+ 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
+ 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
+ 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
+ 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
+ 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
+ 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
+ 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property',
+ 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
+ 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
+ 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode',
+ 'vars', 'xrange', 'zip'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL'
+ r')\b', Name.Builtin.Pseudo),
+ (words((
+ 'ArithmeticError', 'AssertionError', 'AttributeError',
+ 'BaseException', 'DeprecationWarning', 'EOFError', 'EnvironmentError',
+ 'Exception', 'FloatingPointError', 'FutureWarning', 'GeneratorExit', 'IOError',
+ 'ImportError', 'ImportWarning', 'IndentationError', 'IndexError', 'KeyError',
+ 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',
+ 'NotImplemented', 'NotImplementedError', 'OSError', 'OverflowError',
+ 'OverflowWarning', 'PendingDeprecationWarning', 'ReferenceError',
+ 'RuntimeError', 'RuntimeWarning', 'StandardError', 'StopIteration',
+ 'SyntaxError', 'SyntaxWarning', 'SystemError', 'SystemExit', 'TabError',
+ 'TypeError', 'UnboundLocalError', 'UnicodeDecodeError',
+ 'UnicodeEncodeError', 'UnicodeError', 'UnicodeTranslateError',
+ 'UnicodeWarning', 'UserWarning', 'ValueError', 'Warning',
+ 'ZeroDivisionError'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Exception),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
+ (r'0\d+', Number.Oct),
+ (r'0[xX][a-fA-F0-9]+', Number.Hex),
+ (r'\d+L', Number.Integer.Long),
+ (r'\d+', Number.Integer)
+ ],
+ 'backtick': [
+ ('`.*?`', String.Backtick),
+ ],
+ 'name': [
+ (r'@\w+', Name.Decorator),
+ ('[a-zA-Z_]\w*', Name),
+ ],
+ 'funcname': [
+ ('[a-zA-Z_]\w*', Name.Function, '#pop')
+ ],
+ 'cdef': [
+ (r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
+ (r'(struct|enum|union|class)\b', Keyword),
+ (r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
+ bygroups(Name.Function, Text), '#pop'),
+ (r'([a-zA-Z_]\w*)(\s*)(,)',
+ bygroups(Name.Function, Text, Punctuation)),
+ (r'from\b', Keyword, '#pop'),
+ (r'as\b', Keyword),
+ (r':', Punctuation, '#pop'),
+ (r'(?=["\'])', Text, '#pop'),
+ (r'[a-zA-Z_]\w*', Keyword.Type),
+ (r'.', Text),
+ ],
+ 'classname': [
+ ('[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'[a-zA-Z_][\w.]*', Name.Namespace),
+ (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ default('#pop') # all else: go back
+ ],
+ 'fromimport': [
+ (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'[a-zA-Z_.][\w.]*', Name.Namespace),
+ # ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
+ default('#pop'),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'strings': [
+ (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String)
+ # newlines are an error (use "nl" state)
+ ],
+ 'nl': [
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop'),
+ (r'\\\\|\\"|\\\n', String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop'),
+ (r"\\\\|\\'|\\\n", String.Escape), # included here again for raw strings
+ include('strings')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop'),
+ include('strings'),
+ include('nl')
+ ],
+ }
+
+
+class DgLexer(RegexLexer):
+ """
+ Lexer for `dg <http://pyos.github.com/dg>`_,
+ a functional and object-oriented programming language
+ running on the CPython 3 VM.
+
+ .. versionadded:: 1.6
+ """
+ name = 'dg'
+ aliases = ['dg']
+ filenames = ['*.dg']
+ mimetypes = ['text/x-dg']
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'#.*?$', Comment.Single),
+
+ (r'(?i)0b[01]+', Number.Bin),
+ (r'(?i)0o[0-7]+', Number.Oct),
+ (r'(?i)0x[0-9a-f]+', Number.Hex),
+ (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float),
+ (r'(?i)[+-]?[0-9]+j?', Number.Integer),
+
+ (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')),
+ (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')),
+ (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')),
+ (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')),
+
+ (r"`\w+'*`", Operator),
+ (r'\b(and|in|is|or|where)\b', Operator.Word),
+ (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator),
+
+ (words((
+ 'bool', 'bytearray', 'bytes', 'classmethod', 'complex', 'dict', 'dict\'',
+ 'float', 'frozenset', 'int', 'list', 'list\'', 'memoryview', 'object',
+ 'property', 'range', 'set', 'set\'', 'slice', 'staticmethod', 'str', 'super',
+ 'tuple', 'tuple\'', 'type'), prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
+ Name.Builtin),
+ (words((
+ '__import__', 'abs', 'all', 'any', 'bin', 'bind', 'chr', 'cmp', 'compile',
+ 'complex', 'delattr', 'dir', 'divmod', 'drop', 'dropwhile', 'enumerate',
+ 'eval', 'exhaust', 'filter', 'flip', 'foldl1?', 'format', 'fst', 'getattr',
+ 'globals', 'hasattr', 'hash', 'head', 'hex', 'id', 'init', 'input',
+ 'isinstance', 'issubclass', 'iter', 'iterate', 'last', 'len', 'locals',
+ 'map', 'max', 'min', 'next', 'oct', 'open', 'ord', 'pow', 'print', 'repr',
+ 'reversed', 'round', 'setattr', 'scanl1?', 'snd', 'sorted', 'sum', 'tail',
+ 'take', 'takewhile', 'vars', 'zip'), prefix=r'(?<!\.)', suffix=r'(?![\'\w])'),
+ Name.Builtin),
+ (r"(?<!\.)(self|Ellipsis|NotImplemented|None|True|False)(?!['\w])",
+ Name.Builtin.Pseudo),
+
+ (r"(?<!\.)[A-Z]\w*(Error|Exception|Warning)'*(?!['\w])",
+ Name.Exception),
+ (r"(?<!\.)(Exception|GeneratorExit|KeyboardInterrupt|StopIteration|"
+ r"SystemExit)(?!['\w])", Name.Exception),
+
+ (r"(?<![\.\w])(except|finally|for|if|import|not|otherwise|raise|"
+ r"subclass|while|with|yield)(?!['\w])", Keyword.Reserved),
+
+ (r"[A-Z_]+'*(?!['\w])", Name),
+ (r"[A-Z]\w+'*(?!['\w])", Keyword.Type),
+ (r"\w+'*", Name),
+
+ (r'[()]', Punctuation),
+ (r'.', Error),
+ ],
+ 'stringescape': [
+ (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
+ r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
+ ],
+ 'string': [
+ (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
+ '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
+ (r'[^\\\'"%\n]+', String),
+ # quotes, percents and backslashes must be parsed one at a time
+ (r'[\'"\\]', String),
+ # unhandled string formatting sign
+ (r'%', String),
+ (r'\n', String)
+ ],
+ 'dqs': [
+ (r'"', String, '#pop')
+ ],
+ 'sqs': [
+ (r"'", String, '#pop')
+ ],
+ 'tdqs': [
+ (r'"""', String, '#pop')
+ ],
+ 'tsqs': [
+ (r"'''", String, '#pop')
+ ],
+ }
+
+
+class NumPyLexer(PythonLexer):
+ """
+ A Python lexer recognizing Numerical Python builtins.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'NumPy'
+ aliases = ['numpy']
+
+ # override the mimetypes to not inherit them from python
+ mimetypes = []
+ filenames = []
+
+ EXTRA_KEYWORDS = set((
+ 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
+ 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
+ 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
+ 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
+ 'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
+ 'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
+ 'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
+ 'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
+ 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
+ 'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
+ 'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
+ 'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
+ 'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
+ 'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
+ 'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
+ 'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
+ 'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
+ 'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
+ 'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
+ 'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
+ 'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
+ 'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
+ 'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
+ 'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
+ 'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
+ 'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
+ 'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
+ 'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
+ 'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
+ 'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
+ 'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
+ 'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
+ 'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
+ 'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
+ 'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
+ 'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
+ 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
+ 'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
+ 'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
+ 'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
+ 'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
+ 'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
+ 'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
+ 'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
+ 'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
+ 'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
+ 'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
+ 'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
+ 'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
+ 'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
+ 'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
+ 'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
+ 'set_numeric_ops', 'set_printoptions', 'set_string_function',
+ 'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
+ 'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
+ 'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
+ 'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
+ 'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
+ 'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
+ 'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
+ 'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
+ 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
+ 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
+ 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
+ ))
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ PythonLexer.get_tokens_unprocessed(self, text):
+ if token is Name and value in self.EXTRA_KEYWORDS:
+ yield index, Keyword.Pseudo, value
+ else:
+ yield index, token, value
+
+ def analyse_text(text):
+ return (shebang_matches(text, r'pythonw?(2(\.\d)?)?') or
+ 'import ' in text[:1000]) \
+ and ('import numpy' in text or 'from numpy import' in text)
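NumPyLexer above shows the standard way to extend an existing lexer: subclass it, wrap get_tokens_unprocessed, and retag plain Name tokens that occur in a known set of identifiers. A minimal sketch of the same pattern for another library (PandasLexer and its keyword set are made-up illustration, not part of Pygments):

    from pygments.lexers.python import PythonLexer
    from pygments.token import Keyword, Name

    class PandasLexer(PythonLexer):
        """Hypothetical Python lexer that also highlights a few pandas names."""
        name = 'Pandas'
        aliases = ['pandas']
        EXTRA_KEYWORDS = {'DataFrame', 'Series', 'read_csv', 'concat'}

        def get_tokens_unprocessed(self, text):
            for index, token, value in PythonLexer.get_tokens_unprocessed(self, text):
                if token is Name and value in self.EXTRA_KEYWORDS:
                    yield index, Keyword.Pseudo, value
                else:
                    yield index, token, value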
diff --git a/pygments/lexers/qbasic.py b/pygments/lexers/qbasic.py
deleted file mode 100644
index 80b80f9f..00000000
--- a/pygments/lexers/qbasic.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- pygments.lexers.qbasic
- ~~~~~~~~~~~~~~~~~~~~~~
-
- Simple lexer for Microsoft QBasic source code.
-
- :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups
-from pygments.token import Text, Name, Comment, String, Keyword, Punctuation, \
- Number, Operator
-
-__all__ = ['QBasicLexer']
-
-
-class QBasicLexer(RegexLexer):
- """
- For
- `QBasic <http://en.wikipedia.org/wiki/QBasic>`_
- source code.
- """
-
- name = 'QBasic'
- aliases = ['qbasic', 'basic']
- filenames = ['*.BAS', '*.bas']
- mimetypes = ['text/basic']
-
- declarations = ['DATA', 'LET']
-
- functions = [
- 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG',
- 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI',
- 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV',
- 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE',
- 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT',
- 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF',
- 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$',
- 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY',
- 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD',
- 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC',
- 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN',
- 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR',
- 'VARPTR$', 'VARSEG'
- ]
-
- metacommands = ['$DYNAMIC', '$INCLUDE', '$STATIC']
-
- operators = ['AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR']
-
- statements = [
- 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE',
- 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR',
- 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA',
- 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT',
- 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP',
- 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD',
- 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO',
- 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY',
- 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE',
- 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME',
- 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY',
- 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM',
- 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN',
- 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING',
- 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM',
- 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN',
- 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP',
- 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM',
- 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK',
- 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE'
- ]
-
- keywords = [
- 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY',
- 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF',
- 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD',
- 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE',
- 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND'
- ]
-
- tokens = {
- 'root': [
- (r'\n+', Text),
- (r'\s+', Text.Whitespace),
- (r'^(\s*)(\d*)(\s*)(REM .*)$',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace,
- Comment.Single)),
- (r'^(\s*)(\d+)(\s*)',
- bygroups(Text.Whitespace, Name.Label, Text.Whitespace)),
- (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global),
- (r'(?=[^"]*)\'.*$', Comment.Single),
- (r'"[^\n\"]*"', String.Double),
- (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)',
- bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)),
- (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name)),
- (r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable,
- Text.Whitespace, Name.Variable.Global)),
- (r'(DIM)(\s+)([^\s\(]+)',
- bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)),
- (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)',
- bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace,
- Operator)),
- (r'(GOTO|GOSUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- (r'(SUB)(\s+)(\w+\:?)',
- bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)),
- include('declarations'),
- include('functions'),
- include('metacommands'),
- include('operators'),
- include('statements'),
- include('keywords'),
- (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global),
- (r'[a-zA-Z_]\w*\:', Name.Label),
- (r'\-?\d*\.\d+[@|#]?', Number.Float),
- (r'\-?\d+[@|#]', Number.Float),
- (r'\-?\d+#?', Number.Integer.Long),
- (r'\-?\d+#?', Number.Integer),
- (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator),
- (r'[\[\]{}(),;]', Punctuation),
- (r'[\w]+', Name.Variable.Global),
- ],
- # can't use regular \b because of X$()
- 'declarations': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)),
- Keyword.Declaration),
- ],
- 'functions': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)),
- Keyword.Reserved),
- ],
- 'metacommands': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)),
- Keyword.Constant),
- ],
- 'operators': [
- (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word),
- ],
- 'statements': [
- (r'\b(%s)\b' % '|'.join(map(re.escape, statements)),
- Keyword.Reserved),
- ],
- 'keywords': [
- (r'\b(%s)\b' % '|'.join(keywords), Keyword),
- ],
- }
-
- def analyse_text(text):
- return 0.2
diff --git a/pygments/lexers/r.py b/pygments/lexers/r.py
new file mode 100644
index 00000000..0bc6a9f5
--- /dev/null
+++ b/pygments/lexers/r.py
@@ -0,0 +1,453 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.r
+ ~~~~~~~~~~~~~~~~~
+
+ Lexers for the R/S languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
+
+
+line_re = re.compile('.*?\n')
+
+
+class RConsoleLexer(Lexer):
+ """
+ For R console transcripts or R CMD BATCH output files.
+ """
+
+ name = 'RConsole'
+ aliases = ['rconsole', 'rout']
+ filenames = ['*.Rout']
+
+ def get_tokens_unprocessed(self, text):
+ slexer = SLexer(**self.options)
+
+ current_code_block = ''
+ insertions = []
+
+ for match in line_re.finditer(text):
+ line = match.group()
+ if line.startswith('>') or line.startswith('+'):
+ # Colorize the prompt as such,
+ # then put rest of line into current_code_block
+ insertions.append((len(current_code_block),
+ [(0, Generic.Prompt, line[:2])]))
+ current_code_block += line[2:]
+ else:
+ # We have reached a non-prompt line!
+ # If we have stored prompt lines, we need to process them first.
+ if current_code_block:
+ # Weave together the prompts and highlight code.
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+ # Reset vars for next code block.
+ current_code_block = ''
+ insertions = []
+ # Now process the actual line itself; this is output from R.
+ yield match.start(), Generic.Output, line
+
+ # If we happen to end on a code block with nothing after it, need to
+ # process the last code block. This is neither elegant nor DRY so
+ # should be changed.
+ if current_code_block:
+ for item in do_insertions(
+ insertions, slexer.get_tokens_unprocessed(current_code_block)):
+ yield item
+
+
+class SLexer(RegexLexer):
+ """
+ For S, S-plus, and R source code.
+
+ .. versionadded:: 0.10
+ """
+
+ name = 'S'
+ aliases = ['splus', 's', 'r']
+ filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
+ mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
+ 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
+
+ builtins_base = (
+ 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
+ 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
+ 'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame',
+ 'Math.difftime', 'Math.factor', 'Mod', 'NA_character_',
+ 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULL', 'NA_integer_', 'NaN',
+ 'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame',
+ 'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered',
+ 'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string',
+ 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
+ 'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt',
+ 'Summary.data.frame', 'Summary.difftime', 'Summary.factor',
+ 'Summary.numeric_version', 'Summary.ordered', 'Sys.Date',
+ 'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid',
+ 'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink',
+ 'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep',
+ 'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv',
+ 'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
+ 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
+ 'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character',
+ 'all.equal.default', 'all.equal.factor', 'all.equal.formula',
+ 'all.equal.language', 'all.equal.list', 'all.equal.numeric',
+ 'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated',
+ 'anyDuplicated.array', 'anyDuplicated.data.frame',
+ 'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm',
+ 'aperm.default', 'aperm.table', 'append', 'apply', 'args',
+ 'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt',
+ 'as.Date.character', 'as.Date.date', 'as.Date.dates',
+ 'as.Date.default', 'as.Date.factor', 'as.Date.numeric',
+ 'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt',
+ 'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default',
+ 'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date',
+ 'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date',
+ 'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor',
+ 'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call',
+ 'as.character', 'as.character.Date', 'as.character.POSIXt',
+ 'as.character.condition', 'as.character.default',
+ 'as.character.error', 'as.character.factor', 'as.character.hexmode',
+ 'as.character.numeric_version', 'as.character.octmode',
+ 'as.character.srcref', 'as.complex', 'as.data.frame',
+ 'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct',
+ 'as.data.frame.POSIXlt', 'as.data.frame.array',
+ 'as.data.frame.character', 'as.data.frame.complex',
+ 'as.data.frame.data.frame', 'as.data.frame.default',
+ 'as.data.frame.difftime', 'as.data.frame.factor',
+ 'as.data.frame.integer', 'as.data.frame.list',
+ 'as.data.frame.logical', 'as.data.frame.matrix',
+ 'as.data.frame.model.matrix', 'as.data.frame.numeric',
+ 'as.data.frame.numeric_version', 'as.data.frame.ordered',
+ 'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts',
+ 'as.data.frame.vector', 'as.difftime', 'as.double',
+ 'as.double.POSIXlt', 'as.double.difftime', 'as.environment',
+ 'as.expression', 'as.expression.default', 'as.factor',
+ 'as.function', 'as.function.default', 'as.hexmode', 'as.integer',
+ 'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame',
+ 'as.list.default', 'as.list.environment', 'as.list.factor',
+ 'as.list.function', 'as.list.numeric_version', 'as.logical',
+ 'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt',
+ 'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote',
+ 'as.name', 'as.null', 'as.null.default', 'as.numeric',
+ 'as.numeric_version', 'as.octmode', 'as.ordered',
+ 'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single',
+ 'as.single.default', 'as.symbol', 'as.table', 'as.table.default',
+ 'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4',
+ 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
+ 'attachNamespace', 'attr', 'attr.all.equal', 'attributes',
+ 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
+ 'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
+ 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
+ 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
+ 'bquote', 'browser', 'browserCondition', 'browserSetDebug',
+ 'browserText', 'builtins', 'by', 'by.data.frame', 'by.default',
+ 'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote',
+ 'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
+ 'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling',
+ 'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
+ 'chol', 'chol.default', 'chol2inv', 'choose', 'class',
+ 'clearPushBack', 'close', 'close.connection', 'close.srcfile',
+ 'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
+ 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
+ 'conditionCall', 'conditionCall.condition', 'conditionMessage',
+ 'conditionMessage.condition', 'conflicts', 'contributors', 'cos',
+ 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
+ 'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class',
+ 'data.matrix', 'date', 'debug', 'debugonce',
+ 'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
+ 'determinant', 'determinant.matrix', 'dget', 'diag', 'diff',
+ 'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma',
+ 'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir',
+ 'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels',
+ 'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated',
+ 'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame',
+ 'duplicated.default', 'duplicated.matrix',
+ 'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply',
+ 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
+ 'encodeString', 'enquote', 'env.profile', 'environment',
+ 'environmentIsLocked', 'environmentName', 'eval', 'eval.parent',
+ 'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression',
+ 'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append',
+ 'file.choose', 'file.copy', 'file.create', 'file.exists',
+ 'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename',
+ 'file.show', 'file.symlink', 'find.package', 'findInterval',
+ 'findPackageEnv', 'findRestart', 'floor', 'flush',
+ 'flush.connection', 'force', 'formals', 'format',
+ 'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt',
+ 'format.data.frame', 'format.default', 'format.difftime',
+ 'format.factor', 'format.hexmode', 'format.info',
+ 'format.libraryIQR', 'format.numeric_version', 'format.octmode',
+ 'format.packageInfo', 'format.pval', 'format.summaryDefault',
+ 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time',
+ 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
+ 'getCallingDLL', 'getCallingDLLe', 'getConnection',
+ 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo',
+ 'getDLLRegisteredRoutines.character', 'getElement',
+ 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
+ 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
+ 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
+ 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
+ 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
+ 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
+ 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
+ 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
+ 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
+ 'intersect', 'inverse.rle', 'invisible', 'invokeRestart',
+ 'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic',
+ 'is.call', 'is.character', 'is.complex', 'is.data.frame',
+ 'is.double', 'is.element', 'is.environment', 'is.expression',
+ 'is.factor', 'is.finite', 'is.function', 'is.infinite',
+ 'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical',
+ 'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame',
+ 'is.na.numeric_version', 'is.name', 'is.nan', 'is.null',
+ 'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt',
+ 'is.numeric.difftime', 'is.numeric_version', 'is.object',
+ 'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive',
+ 'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol',
+ 'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace',
+ 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
+ 'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE',
+ 'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date',
+ 'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr',
+ 'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply',
+ 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
+ 'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default',
+ 'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload',
+ 'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load',
+ 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
+ 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
+ 'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique',
+ 'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec',
+ 'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col',
+ 'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default',
+ 'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress',
+ 'memory.profile', 'merge', 'merge.data.frame', 'merge.default',
+ 'message', 'mget', 'min', 'missing', 'mode', 'month.abb',
+ 'month.name', 'months', 'months.Date', 'months.POSIXt',
+ 'months.abb', 'months.nameletters', 'names', 'names.POSIXlt',
+ 'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
+ 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
+ 'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm',
+ 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
+ 'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile',
+ 'open.srcfilealias', 'open.srcfilecopy', 'options', 'order',
+ 'ordered', 'outer', 'packBits', 'packageEvent',
+ 'packageHasNamespace', 'packageStartupMessage', 'package_version',
+ 'pairlist', 'parent.env', 'parent.frame', 'parse',
+ 'parseNamespaceFile', 'paste', 'paste0', 'path.expand',
+ 'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin',
+ 'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default',
+ 'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo',
+ 'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date',
+ 'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt',
+ 'print.by', 'print.condition', 'print.connection',
+ 'print.data.frame', 'print.default', 'print.difftime',
+ 'print.factor', 'print.function', 'print.hexmode',
+ 'print.libraryIQR', 'print.listof', 'print.noquote',
+ 'print.numeric_version', 'print.octmode', 'print.packageInfo',
+ 'print.proc_time', 'print.restart', 'print.rle',
+ 'print.simple.list', 'print.srcfile', 'print.srcref',
+ 'print.summary.table', 'print.summaryDefault', 'print.table',
+ 'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table',
+ 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
+ 'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted',
+ 'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters',
+ 'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range',
+ 'range.default', 'rank', 'rapply', 'raw', 'rawConnection',
+ 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
+ 'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar',
+ 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer',
+ 'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
+ 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date',
+ 'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int',
+ 'rep.numeric_version', 'rep_len', 'replace', 'replicate',
+ 'requireNamespace', 'restartDescription', 'restartFormals',
+ 'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round',
+ 'round.Date', 'round.POSIXt', 'row', 'row.names',
+ 'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums',
+ 'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default',
+ 'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image',
+ 'saveRDS', 'scale', 'scale.default', 'scan', 'search',
+ 'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date',
+ 'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len',
+ 'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo',
+ 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
+ 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
+ 'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
+ 'simpleWarning', 'simplify2array', 'sin', 'single',
+ 'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection',
+ 'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort',
+ 'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split',
+ 'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default',
+ 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
+ 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
+ 'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit',
+ 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
+ 'subset.data.frame', 'subset.default', 'subset.matrix',
+ 'substitute', 'substr', 'substring', 'sum', 'summary',
+ 'summary.Date', 'summary.POSIXct', 'summary.POSIXlt',
+ 'summary.connection', 'summary.data.frame', 'summary.default',
+ 'summary.factor', 'summary.matrix', 'summary.proc_time',
+ 'summary.srcfile', 'summary.srcref', 'summary.table',
+ 'suppressMessages', 'suppressPackageStartupMessages',
+ 'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls',
+ 'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image',
+ 'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents',
+ 'sys.save.image', 'sys.source', 'sys.status', 'system',
+ 'system.file', 'system.time', 'system2', 't', 't.data.frame',
+ 't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
+ 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
+ 'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
+ 'toString', 'toString.default', 'tolower', 'topenv', 'toupper',
+ 'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
+ 'transform.data.frame', 'transform.default', 'trigamma', 'trunc',
+ 'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection',
+ 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
+ 'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame',
+ 'unique.default', 'unique.matrix', 'unique.numeric_version',
+ 'units', 'units.difftime', 'unix.time', 'unlink', 'unlist',
+ 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
+ 'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url',
+ 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
+ 'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max',
+ 'which.min', 'with', 'with.default', 'withCallingHandlers',
+ 'withRestarts', 'withVisible', 'within', 'within.data.frame',
+ 'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar',
+ 'writeLines', 'xor', 'xor.hexmode', 'xor.octmode',
+ 'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date',
+ 'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default',
+ 'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile',
+ 'zapsmall'
+ )
+
+ tokens = {
+ 'comments': [
+ (r'#.*$', Comment.Single),
+ ],
+ 'valid_name': [
+ (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
+ # can begin with ., but not if that is followed by a digit
+ (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
+ ],
+ 'punctuation': [
+ (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
+ ],
+ 'keywords': [
+ (words(builtins_base, suffix=r'(?![\w\. =])'),
+ Keyword.Pseudo),
+ (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
+ r'(?![\w\.])',
+ Keyword.Reserved),
+ (r'(array|category|character|complex|double|function|integer|list|'
+ r'logical|matrix|numeric|vector|data.frame|c)'
+ r'(?![\w\.])',
+ Keyword.Type),
+ (r'(library|require|attach|detach|source)'
+ r'(?![\w\.])',
+ Keyword.Namespace)
+ ],
+ 'operators': [
+ (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
+ (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
+ ],
+ 'builtin_symbols': [
+ (r'(NULL|NA(_(integer|real|complex|character)_)?|'
+ r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
+ r'(?![0-9a-zA-Z\._])',
+ Keyword.Constant),
+ (r'(T|F)\b', Name.Builtin.Pseudo),
+ ],
+ 'numbers': [
+ # hex number
+ (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
+ # decimal number
+ (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
+ Number),
+ ],
+ 'statements': [
+ include('comments'),
+ # whitespaces
+ (r'\s+', Text),
+ (r'`.*?`', String.Backtick),
+ (r'\'', String, 'string_squote'),
+ (r'\"', String, 'string_dquote'),
+ include('builtin_symbols'),
+ include('numbers'),
+ include('keywords'),
+ include('punctuation'),
+ include('operators'),
+ include('valid_name'),
+ ],
+ 'root': [
+ include('statements'),
+ # blocks:
+ (r'\{|\}', Punctuation),
+ # (r'\{', Punctuation, 'block'),
+ (r'.', Text),
+ ],
+ # 'block': [
+ # include('statements'),
+ # ('\{', Punctuation, '#push'),
+ # ('\}', Punctuation, '#pop')
+ # ],
+ 'string_squote': [
+ (r'([^\'\\]|\\.)*\'', String, '#pop'),
+ ],
+ 'string_dquote': [
+ (r'([^"\\]|\\.)*"', String, '#pop'),
+ ],
+ }
+
+ def analyse_text(text):
+ if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
+ return 0.11
+
+
+class RdLexer(RegexLexer):
+ """
+ Pygments Lexer for R documentation (Rd) files
+
+ This is a very minimal implementation, highlighting little more
+ than the macros. A description of Rd syntax is found in `Writing R
+ Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
+ and `Parsing Rd files <http://developer.r-project.org/parseRd.pdf>`_.
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rd'
+ aliases = ['rd']
+ filenames = ['*.Rd']
+ mimetypes = ['text/x-r-doc']
+
+ # Accounting for verbatim, LaTeX-like, and R-like areas
+ # would require full parsing.
+ tokens = {
+ 'root': [
+ # catch escaped brackets and percent sign
+ (r'\\[\\{}%]', String.Escape),
+ # comments
+ (r'%.*$', Comment),
+ # special macros with no arguments
+ (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
+ # macros
+ (r'\\[a-zA-Z]+\b', Keyword),
+ # special preprocessor macros
+ (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
+ # non-escaped brackets
+ (r'[{}]', Name.Builtin),
+ # everything else
+ (r'[^\\%\n{}]+', Text),
+ (r'.', Text),
+ ]
+ }
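SLexer above registers the 'r', 's', and 'splus' aliases and, failing a filename match, relies on the '<-' assignment-arrow heuristic in analyse_text. A minimal sketch of rendering an R snippet with it (the formatter choice is arbitrary):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    r_code = 'x <- c(1, 2, 3)\nmean(x)  # arithmetic mean\n'
    print(highlight(r_code, get_lexer_by_name('r'), HtmlFormatter()))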
diff --git a/pygments/lexers/rebol.py b/pygments/lexers/rebol.py
new file mode 100644
index 00000000..6f8dbd2d
--- /dev/null
+++ b/pygments/lexers/rebol.py
@@ -0,0 +1,433 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rebol
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the REBOL and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Whitespace
+
+__all__ = ['RebolLexer', 'RedLexer']
+
+
+class RebolLexer(RegexLexer):
+ """
+ A `REBOL <http://www.rebol.com/>`_ lexer.
+
+ .. versionadded:: 1.1
+ """
+ name = 'REBOL'
+ aliases = ['rebol']
+ filenames = ['*.r', '*.r3', '*.reb']
+ mimetypes = ['text/x-rebol']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(
+ r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
+ r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
+ r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
+ r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
+ r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
+ r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
+ r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
+ r'while|compress|decompress|secure|open|close|read|read-io|'
+ r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
+ r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
+ r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
+ r'browse|launch|stats|get-modes|set-modes|to-local-file|'
+ r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
+ r'hide|draw|show|size-text|textinfo|offset-to-caret|'
+ r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
+ r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
+ r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
+ r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
+ r'rsa-encrypt)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(
+ r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
+ r'minimum|maximum|negate|complement|absolute|random|head|tail|'
+ r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
+ r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
+ r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
+ r'copy)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(
+ r'(error|source|input|license|help|install|echo|Usage|with|func|'
+ r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
+ r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
+ r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
+ r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
+ r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
+ r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
+ r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
+ r'write-user|save-user|set-user-name|protect-system|parse-xml|'
+ r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
+ r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
+ r'request-dir|center-face|do-events|net-error|decode-url|'
+ r'parse-header|parse-header-date|parse-email-addrs|import-email|'
+ r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
+ r'find-key-face|do-face|viewtop|confine|find-window|'
+ r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
+ r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
+ r'read-thru|load-thru|do-thru|launch-thru|load-image|'
+ r'request-download|do-face-alt|set-font|set-para|get-style|'
+ r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
+ r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
+ r'resize-face|load-stock|load-stock-block|notify|request|flash|'
+ r'request-color|request-pass|request-text|request-list|'
+ r'request-date|request-file|dbug|editor|link-relative-path|'
+ r'emailer|parse-error)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(
+ r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
+ r'return|exit|break)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match('REBOL$', word):
+ yield match.start(), Generic.Heading, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+ elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
+ word):
+ yield match.start(), Operator, word
+ elif re.match(".*\?$", word):
+ yield match.start(), Keyword, word
+ elif re.match(".*\!$", word):
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'REBOL\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#{[0-9a-f]*}', Number.Hex),
+ (r'2#{', Number.Hex, 'bin2'),
+ (r'64#{[0-9a-z+/=\s]*}', Number.Hex),
+ (r'"', String, 'string'),
+ (r'{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+[xX]\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]\(\)]', Generic.Strong),
+ (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
+ (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{\"\[]+', Comment),
+ (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(\^{^")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(\^")]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(\^{^})]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'{', String, '#push'),
+ (r'}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(\^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(\^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[\(|\)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([0-1]\s*){8}', Number.Hex),
+ (r'}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(\^")]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(\^{^})]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'{', Comment, '#push'),
+ (r'}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'{', Comment, "commentString2"),
+ (r'[^(\[\]\"{)]+', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ """
+ Check if code contains a REBOL header, in which case it is probably not R code
+ """
+ if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code starts with REBOL header
+ return 1.0
+ elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
+ # The code contains REBOL header but also some text before it
+ return 0.5
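
For illustration, a hedged sketch of how this scoring feeds guess_lexer(), which is what ultimately separates *.r REBOL scripts from R sources (the snippets below are made up):

    from pygments.lexers import guess_lexer

    rebol_code = 'REBOL [Title: "Example"]\nprint "hello"\n'
    r_code = 'x <- c(1, 2, 3)\nmean(x)\n'
    print(guess_lexer(rebol_code).name)  # expected 'REBOL': the header scores 1.0
    print(guess_lexer(r_code).name)      # no REBOL header, so some other lexer wins
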
+
+
+class RedLexer(RegexLexer):
+ """
+ A `Red-language <http://www.red-lang.org/>`_ lexer.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Red'
+ aliases = ['red', 'red/system']
+ filenames = ['*.red', '*.reds']
+ mimetypes = ['text/x-red', 'text/x-red-system']
+
+ flags = re.IGNORECASE | re.MULTILINE
+
+ escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
+
+ def word_callback(lexer, match):
+ word = match.group()
+
+ if re.match(".*:$", word):
+ yield match.start(), Generic.Subheading, word
+ elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
+ r'foreach|forall|func|function|does|has|switch|'
+ r'case|reduce|compose|get|set|print|prin|equal\?|'
+ r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
+ r'greater-or-equal\?|same\?|not|type\?|stats|'
+ r'bind|union|replace|charset|routine)$', word):
+ yield match.start(), Name.Builtin, word
+ elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
+ r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
+ r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
+ r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
+ r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
+ r'update|write)$', word):
+ yield match.start(), Name.Function, word
+ elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
+ r'none|crlf|dot|null-byte)$', word):
+ yield match.start(), Name.Builtin.Pseudo, word
+ elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
+ r'#switch|#default|#get-definition)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
+ r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
+ r'quote|forever)$', word):
+ yield match.start(), Name.Exception, word
+ elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
+ r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
+ r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
+ r'any-struct\?|none\?|word\?|any-series\?)$', word):
+ yield match.start(), Keyword, word
+ elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
+ yield match.start(), Keyword.Namespace, word
+ elif re.match("to-.*", word):
+ yield match.start(), Keyword, word
+ elif re.match('(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
+ '<<<|>>>|<<|>>|<|>%)$', word):
+ yield match.start(), Operator, word
+ elif re.match(".*\!$", word):
+ yield match.start(), Keyword.Type, word
+ elif re.match("'.*", word):
+ yield match.start(), Name.Variable.Instance, word # lit-word
+ elif re.match("#.*", word):
+ yield match.start(), Name.Label, word # issue
+ elif re.match("%.*", word):
+ yield match.start(), Name.Decorator, word # file
+ elif re.match(":.*", word):
+ yield match.start(), Generic.Subheading, word # get-word
+ else:
+ yield match.start(), Name.Variable, word
+
+ tokens = {
+ 'root': [
+ (r'[^R]+', Comment),
+ (r'Red/System\s+\[', Generic.Strong, 'script'),
+ (r'Red\s+\[', Generic.Strong, 'script'),
+ (r'R', Comment)
+ ],
+ 'script': [
+ (r'\s+', Text),
+ (r'#"', String.Char, 'char'),
+ (r'#{[0-9a-f\s]*}', Number.Hex),
+ (r'2#{', Number.Hex, 'bin2'),
+ (r'64#{[0-9a-z+/=\s]*}', Number.Hex),
+ (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}""\(\)]))',
+ bygroups(Number.Hex, Name.Variable, Whitespace)),
+ (r'"', String, 'string'),
+ (r'{', String, 'string2'),
+ (r';#+.*\n', Comment.Special),
+ (r';\*+.*\n', Comment.Preproc),
+ (r';.*\n', Comment),
+ (r'%"', Name.Decorator, 'stringFile'),
+ (r'%[^(\^{^")\s\[\]]+', Name.Decorator),
+ (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money
+ (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time
+ (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?'
+ r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date
+ (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple
+ (r'\d+[xX]\d+', Keyword.Constant), # pair
+ (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float),
+ (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float),
+ (r'[+-]?\d+(\'\d+)?', Number),
+ (r'[\[\]\(\)]', Generic.Strong),
+ (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url
+ (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url
+ (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email
+ (r'comment\s"', Comment, 'commentString1'),
+ (r'comment\s{', Comment, 'commentString2'),
+ (r'comment\s\[', Comment, 'commentBlock'),
+ (r'comment\s[^(\s{\"\[]+', Comment),
+ (r'/[^(\^{^")\s/[\]]*', Name.Attribute),
+ (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
+ (r'<[\w:.-]*>', Name.Tag),
+ (r'<[^(<>\s")]+', Name.Tag, 'tag'),
+ (r'([^(\^{^")\s]+)', Text),
+ ],
+ 'string': [
+ (r'[^(\^")]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'"', String, '#pop'),
+ ],
+ 'string2': [
+ (r'[^(\^{^})]+', String),
+ (escape_re, String.Escape),
+ (r'[\(|\)]+', String),
+ (r'\^.', String.Escape),
+ (r'{', String, '#push'),
+ (r'}', String, '#pop'),
+ ],
+ 'stringFile': [
+ (r'[^(\^")]+', Name.Decorator),
+ (escape_re, Name.Decorator),
+ (r'\^.', Name.Decorator),
+ (r'"', Name.Decorator, '#pop'),
+ ],
+ 'char': [
+ (escape_re + '"', String.Char, '#pop'),
+ (r'\^."', String.Char, '#pop'),
+ (r'."', String.Char, '#pop'),
+ ],
+ 'tag': [
+ (escape_re, Name.Tag),
+ (r'"', Name.Tag, 'tagString'),
+ (r'[^(<>\r\n")]+', Name.Tag),
+ (r'>', Name.Tag, '#pop'),
+ ],
+ 'tagString': [
+ (r'[^(\^")]+', Name.Tag),
+ (escape_re, Name.Tag),
+ (r'[\(|\)]+', Name.Tag),
+ (r'\^.', Name.Tag),
+ (r'"', Name.Tag, '#pop'),
+ ],
+ 'tuple': [
+ (r'(\d+\.)+', Keyword.Constant),
+ (r'\d+', Keyword.Constant, '#pop'),
+ ],
+ 'bin2': [
+ (r'\s+', Number.Hex),
+ (r'([0-1]\s*){8}', Number.Hex),
+ (r'}', Number.Hex, '#pop'),
+ ],
+ 'commentString1': [
+ (r'[^(\^")]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'"', Comment, '#pop'),
+ ],
+ 'commentString2': [
+ (r'[^(\^{^})]+', Comment),
+ (escape_re, Comment),
+ (r'[\(|\)]+', Comment),
+ (r'\^.', Comment),
+ (r'{', Comment, '#push'),
+ (r'}', Comment, '#pop'),
+ ],
+ 'commentBlock': [
+ (r'\[', Comment, '#push'),
+ (r'\]', Comment, '#pop'),
+ (r'"', Comment, "commentString1"),
+ (r'{', Comment, "commentString2"),
+ (r'[^(\[\]\"{)]+', Comment),
+ ],
+ }
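
A quick sketch of driving the Red lexer by file name, assuming the *.red / *.reds patterns above are registered; output details are illustrative:

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_for_filename

    red_code = 'Red [Title: "Demo"]\nprint 1 + 2\n'
    # Both *.red and *.reds resolve to RedLexer; the Red [...] header is
    # matched in the 'root' state and opens the 'script' state.
    print(highlight(red_code, get_lexer_for_filename('demo.red'), TerminalFormatter()))
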
diff --git a/pygments/lexers/_robotframeworklexer.py b/pygments/lexers/robotframework.py
index 2889e1b8..7b6f5564 100644
--- a/pygments/lexers/_robotframeworklexer.py
+++ b/pygments/lexers/robotframework.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
- pygments.lexers._robotframeworklexer
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ pygments.lexers.robotframework
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Robot Framework.
@@ -29,6 +29,8 @@ from pygments.lexer import Lexer
from pygments.token import Token
from pygments.util import text_type
+__all__ = ['RobotFrameworkLexer']
+
HEADING = Token.Generic.Heading
SETTING = Token.Keyword.Namespace
diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py
new file mode 100644
index 00000000..096c6dc8
--- /dev/null
+++ b/pygments/lexers/ruby.py
@@ -0,0 +1,516 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.ruby
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Ruby and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
+ bygroups, default, LexerContext, do_insertions, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Generic
+from pygments.util import shebang_matches
+
+__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
+
+line_re = re.compile('.*?\n')
+
+
+RUBY_OPERATORS = (
+ '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
+ '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
+)
+
+class RubyLexer(ExtendedRegexLexer):
+ """
+ For `Ruby <http://www.ruby-lang.org>`_ source code.
+ """
+
+ name = 'Ruby'
+ aliases = ['rb', 'ruby', 'duby']
+ filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
+ '*.rbx', '*.duby']
+ mimetypes = ['text/x-ruby', 'application/x-ruby']
+
+ flags = re.DOTALL | re.MULTILINE
+
+ def heredoc_callback(self, match, ctx):
+ # okay, this is the hardest part of parsing Ruby...
+ # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
+
+ start = match.start(1)
+ yield start, Operator, match.group(1) # <<-?
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', `
+ yield match.start(3), Name.Constant, match.group(3) # heredoc name
+ yield match.start(4), String.Heredoc, match.group(4) # quote again
+
+ heredocstack = ctx.__dict__.setdefault('heredocstack', [])
+ outermost = not bool(heredocstack)
+ heredocstack.append((match.group(1) == '<<-', match.group(3)))
+
+ ctx.pos = match.start(5)
+ ctx.end = match.end(5)
+ # this may find other heredocs
+ for i, t, v in self.get_tokens_unprocessed(context=ctx):
+ yield i, t, v
+ ctx.pos = match.end()
+
+ if outermost:
+ # this is the outer heredoc again, now we can process them all
+ for tolerant, hdname in heredocstack:
+ lines = []
+ for match in line_re.finditer(ctx.text, ctx.pos):
+ if tolerant:
+ check = match.group().strip()
+ else:
+ check = match.group().rstrip()
+ if check == hdname:
+ for amatch in lines:
+ yield amatch.start(), String.Heredoc, amatch.group()
+ yield match.start(), Name.Constant, match.group()
+ ctx.pos = match.end()
+ break
+ else:
+ lines.append(match)
+ else:
+ # end of heredoc not found -- error!
+ for amatch in lines:
+ yield amatch.start(), Error, amatch.group()
+ ctx.end = len(ctx.text)
+ del heredocstack[:]
+
+ def gen_rubystrings_rules():
+ def intp_regex_callback(self, match, ctx):
+ yield match.start(1), String.Regex, match.group(1) # begin
+ nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Regex, match.group(4) # end[mixounse]*
+ ctx.pos = match.end()
+
+ def intp_string_callback(self, match, ctx):
+ yield match.start(1), String.Other, match.group(1)
+ nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
+ for i, t, v in self.get_tokens_unprocessed(context=nctx):
+ yield match.start(3)+i, t, v
+ yield match.start(4), String.Other, match.group(4) # end
+ ctx.pos = match.end()
+
+ states = {}
+ states['strings'] = [
+ # easy ones
+ (r'\:@{0,2}[a-zA-Z_]\w*[\!\?]?', String.Symbol),
+ (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
+ (r":'(\\\\|\\'|[^'])*'", String.Symbol),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ (r':"', String.Symbol, 'simple-sym'),
+ (r'([a-zA-Z_]\w*)(:)(?!:)',
+ bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9
+ (r'"', String.Double, 'simple-string'),
+ (r'(?<!\.)`', String.Backtick, 'simple-backtick'),
+ ]
+
+ # double-quoted string and symbol
+ for name, ttype, end in ('string', String.Double, '"'), \
+ ('sym', String.Symbol, '"'), \
+ ('backtick', String.Backtick, '`'):
+ states['simple-'+name] = [
+ include('string-intp-escaped'),
+ (r'[^\\%s#]+' % end, ttype),
+ (r'[\\#]', ttype),
+ (end, ttype, '#pop'),
+ ]
+
+ # braced quoted strings
+ for lbrace, rbrace, name in ('\\{', '\\}', 'cb'), \
+ ('\\[', '\\]', 'sb'), \
+ ('\\(', '\\)', 'pa'), \
+ ('<', '>', 'ab'):
+ states[name+'-intp-string'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ include('string-intp-escaped'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Other),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
+ name+'-intp-string'))
+ states[name+'-string'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Other),
+ (lbrace, String.Other, '#push'),
+ (rbrace, String.Other, '#pop'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Other),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Other),
+ ]
+ states['strings'].append((r'%[qsw]' + lbrace, String.Other,
+ name+'-string'))
+ states[name+'-regex'] = [
+ (r'\\[\\' + lbrace + rbrace + ']', String.Regex),
+ (lbrace, String.Regex, '#push'),
+ (rbrace + '[mixounse]*', String.Regex, '#pop'),
+ include('string-intp'),
+ (r'[\\#' + lbrace + rbrace + ']', String.Regex),
+ (r'[^\\#' + lbrace + rbrace + ']+', String.Regex),
+ ]
+ states['strings'].append((r'%r' + lbrace, String.Regex,
+ name+'-regex'))
+
+ # these must come after %<brace>!
+ states['strings'] += [
+ # %r regex
+ (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
+ intp_regex_callback),
+ # regular fancy strings with qsw
+ (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other),
+ (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ # special forms of fancy strings after operators or
+ # in method calls with braces
+ (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # and because of fixed width lookbehinds the whole thing a
+ # second time for line startings...
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
+ bygroups(Text, String.Other, None)),
+ # all regular fancy strings without qsw
+ (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
+ intp_string_callback),
+ ]
+
+ return states
+
+ tokens = {
+ 'root': [
+ (r'#.*?$', Comment.Single),
+ (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
+ # keywords
+ (words((
+ 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
+ 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
+ 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
+ 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
+ Keyword),
+ # start of function, class and module names
+ (r'(module)(\s+)([a-zA-Z_]\w*'
+ r'(?:::[a-zA-Z_]\w*)*)',
+ bygroups(Keyword, Text, Name.Namespace)),
+ (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'),
+ (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
+ (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
+ # special methods
+ (words((
+ 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
+ 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
+ 'module_function', 'public', 'protected', 'true', 'false', 'nil'),
+ suffix=r'\b'),
+ Keyword.Pseudo),
+ (r'(not|and|or)\b', Operator.Word),
+ (words((
+ 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
+ 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
+ 'private_method_defined', 'protected_method_defined',
+ 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
+ Name.Builtin),
+ (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
+ (words((
+ 'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
+ 'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
+ 'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
+ 'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
+ 'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
+ 'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
+ 'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
+ 'instance_method', 'instance_methods',
+ 'instance_variable_get', 'instance_variable_set', 'instance_variables',
+ 'lambda', 'load', 'local_variables', 'loop',
+ 'method', 'method_missing', 'methods', 'module_eval', 'name',
+ 'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
+ 'private_instance_methods',
+ 'private_methods', 'proc', 'protected_instance_methods',
+ 'protected_methods', 'public_class_method',
+ 'public_instance_methods', 'public_methods',
+ 'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
+ 'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
+ 'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
+ 'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
+ 'untrace_var', 'warn'), prefix=r'(?<!\.)', suffix=r'\b'),
+ Name.Builtin),
+ (r'__(FILE|LINE)__\b', Name.Builtin.Pseudo),
+ # normal heredocs
+ (r'(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)',
+ heredoc_callback),
+ # empty string heredocs
+ (r'(<<-?)("|\')()(\2)(.*?\n)', heredoc_callback),
+ (r'__END__', Comment.Preproc, 'end-part'),
+ # multiline regex (after keywords or assignments)
+ (r'(?:^|(?<=[=<>~!:])|'
+ r'(?<=(?:\s|;)when\s)|'
+ r'(?<=(?:\s|;)or\s)|'
+ r'(?<=(?:\s|;)and\s)|'
+ r'(?<=(?:\s|;|\.)index\s)|'
+ r'(?<=(?:\s|;|\.)scan\s)|'
+ r'(?<=(?:\s|;|\.)sub\s)|'
+ r'(?<=(?:\s|;|\.)sub!\s)|'
+ r'(?<=(?:\s|;|\.)gsub\s)|'
+ r'(?<=(?:\s|;|\.)gsub!\s)|'
+ r'(?<=(?:\s|;|\.)match\s)|'
+ r'(?<=(?:\s|;)if\s)|'
+ r'(?<=(?:\s|;)elsif\s)|'
+ r'(?<=^when\s)|'
+ r'(?<=^index\s)|'
+ r'(?<=^scan\s)|'
+ r'(?<=^sub\s)|'
+ r'(?<=^gsub\s)|'
+ r'(?<=^sub!\s)|'
+ r'(?<=^gsub!\s)|'
+ r'(?<=^match\s)|'
+ r'(?<=^if\s)|'
+ r'(?<=^elsif\s)'
+ r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
+ # multiline regex (in method calls or subscripts)
+ (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
+ # multiline regex (this time the funny no whitespace rule)
+ (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex),
+ 'multiline-regex'),
+ # lex numbers and ignore following regular expressions which
+ # are division operators in fact (grrrr. i hate that. any
+ # better ideas?)
+ # since pygments 0.7 we also eat a "?" operator after numbers
+ # so that the char operator does not work. Chars are not allowed
+ # there so that you can use the ternary operator.
+ # stupid example:
+ # x>=0?n[x]:""
+ (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ # Names
+ (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
+ (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
+ (r'\$\w+', Name.Variable.Global),
+ (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global),
+ (r'\$-[0adFiIlpvw]', Name.Variable.Global),
+ (r'::', Operator),
+ include('strings'),
+ # chars
+ (r'\?(\\[MC]-)*' # modifiers
+ r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
+ r'(?!\w)',
+ String.Char),
+ (r'[A-Z]\w+', Name.Constant),
+ # this is needed because ruby attributes can look
+ # like keywords (class) or like this: ` ?!?
+ (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
+ bygroups(Operator, Name.Operator)),
+ (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+\-/\[<>=])',
+ bygroups(Operator, Name)),
+ (r'[a-zA-Z_]\w*[\!\?]?', Name),
+ (r'(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|'
+ r'!~|&&?|\|\||\.{1,3})', Operator),
+ (r'[-+/*%=<>&!^|~]=?', Operator),
+ (r'[(){};,/?:\\]', Punctuation),
+ (r'\s+', Text)
+ ],
+ 'funcname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'(?:([a-zA-Z_]\w*)(\.))?'
+ r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|'
+ r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)',
+ bygroups(Name.Class, Operator, Name.Function), '#pop'),
+ default('#pop')
+ ],
+ 'classname': [
+ (r'\(', Punctuation, 'defexpr'),
+ (r'<<', Operator, '#pop'),
+ (r'[A-Z_]\w*', Name.Class, '#pop'),
+ default('#pop')
+ ],
+ 'defexpr': [
+ (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
+ (r'\(', Operator, '#push'),
+ include('root')
+ ],
+ 'in-intp': [
+ (r'{', String.Interpol, '#push'),
+ (r'}', String.Interpol, '#pop'),
+ include('root'),
+ ],
+ 'string-intp': [
+ (r'#{', String.Interpol, 'in-intp'),
+ (r'#@@?[a-zA-Z_]\w*', String.Interpol),
+ (r'#\$[a-zA-Z_]\w*', String.Interpol)
+ ],
+ 'string-intp-escaped': [
+ include('string-intp'),
+ (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
+ String.Escape)
+ ],
+ 'interpolated-regex': [
+ include('string-intp'),
+ (r'[\\#]', String.Regex),
+ (r'[^\\#]+', String.Regex),
+ ],
+ 'interpolated-string': [
+ include('string-intp'),
+ (r'[\\#]', String.Other),
+ (r'[^\\#]+', String.Other),
+ ],
+ 'multiline-regex': [
+ include('string-intp'),
+ (r'\\\\', String.Regex),
+ (r'\\/', String.Regex),
+ (r'[\\#]', String.Regex),
+ (r'[^\\/#]+', String.Regex),
+ (r'/[mixounse]*', String.Regex, '#pop'),
+ ],
+ 'end-part': [
+ (r'.+', Comment.Preproc, '#pop')
+ ]
+ }
+ tokens.update(gen_rubystrings_rules())
+
+ def analyse_text(text):
+ return shebang_matches(text, r'ruby(1\.\d)?')
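
A hedged usage sketch tying the pieces above together (heredoc handling and the shebang-based analyse_text); the sample program is made up:

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    ruby_code = (
        "#!/usr/bin/env ruby\n"
        "greeting = <<-EOS\n"
        "  Hello from a heredoc\n"
        "EOS\n"
        "puts greeting\n"
    )
    # heredoc_callback defers the body lines, then emits them as
    # String.Heredoc with the terminating EOS line as Name.Constant.
    for ttype, value in lex(ruby_code, get_lexer_by_name('rb')):
        print(ttype, repr(value))
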
+
+
+class RubyConsoleLexer(Lexer):
+ """
+ For Ruby interactive console (**irb**) output like:
+
+ .. sourcecode:: rbcon
+
+ irb(main):001:0> a = 1
+ => 1
+ irb(main):002:0> puts a
+ 1
+ => nil
+ """
+ name = 'Ruby irb session'
+ aliases = ['rbcon', 'irb']
+ mimetypes = ['text/x-ruby-shellsession']
+
+ _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
+ '|>> |\?> ')
+
+ def get_tokens_unprocessed(self, text):
+ rblexer = RubyLexer(**self.options)
+
+ curcode = ''
+ insertions = []
+ for match in line_re.finditer(text):
+ line = match.group()
+ m = self._prompt_re.match(line)
+ if m is not None:
+ end = m.end()
+ insertions.append((len(curcode),
+ [(0, Generic.Prompt, line[:end])]))
+ curcode += line[end:]
+ else:
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
+ curcode = ''
+ insertions = []
+ yield match.start(), Generic.Output, line
+ if curcode:
+ for item in do_insertions(
+ insertions, rblexer.get_tokens_unprocessed(curcode)):
+ yield item
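
For illustration, a sketch of highlighting an irb transcript with the console lexer: prompts become Generic.Prompt, echoed code is re-lexed with RubyLexer via do_insertions, and plain result lines stay Generic.Output.

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import get_lexer_by_name

    session = (
        "irb(main):001:0> a = 1\n"
        "=> 1\n"
        "irb(main):002:0> puts a\n"
        "1\n"
        "=> nil\n"
    )
    print(highlight(session, get_lexer_by_name('rbcon'), HtmlFormatter()))
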
+
+
+class FancyLexer(RegexLexer):
+ """
+ Pygments Lexer for `Fancy <http://www.fancy-lang.org/>`_.
+
+ Fancy is a self-hosted, pure object-oriented, dynamic,
+ class-based, concurrent general-purpose programming language
+ running on Rubinius, the Ruby VM.
+
+ .. versionadded:: 1.5
+ """
+ name = 'Fancy'
+ filenames = ['*.fy', '*.fancypack']
+ aliases = ['fancy', 'fy']
+ mimetypes = ['text/x-fancysrc']
+
+ tokens = {
+ # copied from PerlLexer:
+ 'balanced-regex': [
+ (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'),
+ (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'),
+ (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
+ (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'),
+ (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'),
+ (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'),
+ (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'),
+ (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'),
+ (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'),
+ (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'),
+ ],
+ 'root': [
+ (r'\s+', Text),
+
+ # balanced delimiters (copied from PerlLexer):
+ (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'),
+ (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'),
+ (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'),
+ (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'),
+ (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex),
+ (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'),
+
+ # Comments
+ (r'#(.*?)\n', Comment.Single),
+ # Symbols
+ (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol),
+ # Multi-line DoubleQuotedString
+ (r'"""(\\\\|\\"|[^"])*"""', String),
+ # DoubleQuotedString
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # keywords
+ (r'(def|class|try|catch|finally|retry|return|return_local|match|'
+ r'case|->|=>)\b', Keyword),
+ # constants
+ (r'(self|super|nil|false|true)\b', Name.Constant),
+ (r'[(){};,/?\|:\\]', Punctuation),
+ # names
+ (words((
+ 'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
+ 'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
+ 'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
+ 'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
+ Name.Builtin),
+ # functions
+ (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
+ # operators, must be below functions
+ (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator),
+ ('[A-Z]\w*', Name.Constant),
+ ('@[a-zA-Z_]\w*', Name.Variable.Instance),
+ ('@@[a-zA-Z_]\w*', Name.Variable.Class),
+ ('@@?', Operator),
+ ('[a-zA-Z_]\w*', Name),
+ # numbers - / checks are necessary to avoid mismarking regexes,
+ # see comment in RubyLexer
+ (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
+ bygroups(Number.Oct, Text, Operator)),
+ (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
+ bygroups(Number.Hex, Text, Operator)),
+ (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
+ bygroups(Number.Bin, Text, Operator)),
+ (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
+ bygroups(Number.Integer, Text, Operator)),
+ (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
+ (r'\d+', Number.Integer)
+ ]
+ }
diff --git a/pygments/lexers/rust.py b/pygments/lexers/rust.py
new file mode 100644
index 00000000..b4acbbd1
--- /dev/null
+++ b/pygments/lexers/rust.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.rust
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the Rust language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Whitespace
+
+__all__ = ['RustLexer']
+
+
+class RustLexer(RegexLexer):
+ """
+ Lexer for the Rust programming language (version 0.9).
+
+ .. versionadded:: 1.6
+ """
+ name = 'Rust'
+ filenames = ['*.rs']
+ aliases = ['rust']
+ mimetypes = ['text/x-rustsrc']
+
+ tokens = {
+ 'root': [
+ # Whitespace and Comments
+ (r'\n', Whitespace),
+ (r'\s+', Whitespace),
+ (r'//[/!](.*?)\n', Comment.Doc),
+ (r'//(.*?)\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+
+ # Keywords
+ (words((
+ 'as', 'box', 'break', 'continue', 'do', 'else', 'enum', 'extern',
+ 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv',
+ 'proc', 'pub', 'ref', 'return', 'static', '\'static', 'struct',
+ 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'),
+ Keyword),
+ (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof',
+ 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'),
+ Keyword.Reserved),
+ (r'(mod|use)\b', Keyword.Namespace),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'let\b', Keyword.Declaration),
+ (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'uint',
+ 'int', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'),
+ Keyword.Type),
+ (r'self\b', Name.Builtin.Pseudo),
+ # Prelude
+ (words((
+ 'Freeze', 'Pod', 'Send', 'Sized', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Neg', 'Not', 'BitAnd',
+ 'BitOr', 'BitXor', 'Drop', 'Shl', 'Shr', 'Index', 'Option', 'Some', 'None', 'Result',
+ 'Ok', 'Err', 'from_str', 'range', 'print', 'println', 'Any', 'AnyOwnExt', 'AnyRefExt',
+ 'AnyMutRefExt', 'Ascii', 'AsciiCast', 'OnwedAsciiCast', 'AsciiStr',
+ 'IntoBytes', 'Bool', 'ToCStr', 'Char', 'Clone', 'DeepClone', 'Eq', 'ApproxEq',
+ 'Ord', 'TotalEq', 'Ordering', 'Less', 'Equal', 'Greater', 'Equiv', 'Container',
+ 'Mutable', 'Map', 'MutableMap', 'Set', 'MutableSet', 'Default', 'FromStr',
+ 'Hash', 'FromIterator', 'Extendable', 'Iterator', 'DoubleEndedIterator',
+ 'RandomAccessIterator', 'CloneableIterator', 'OrdIterator',
+ 'MutableDoubleEndedIterator', 'ExactSize', 'Times', 'Algebraic',
+ 'Trigonometric', 'Exponential', 'Hyperbolic', 'Bitwise', 'BitCount',
+ 'Bounded', 'Integer', 'Fractional', 'Real', 'RealExt', 'Num', 'NumCast',
+ 'CheckedAdd', 'CheckedSub', 'CheckedMul', 'Orderable', 'Signed',
+ 'Unsigned', 'Round', 'Primitive', 'Int', 'Float', 'ToStrRadix',
+ 'ToPrimitive', 'FromPrimitive', 'GenericPath', 'Path', 'PosixPath',
+ 'WindowsPath', 'RawPtr', 'Buffer', 'Writer', 'Reader', 'Seek',
+ 'SendStr', 'SendStrOwned', 'SendStrStatic', 'IntoSendStr', 'Str',
+ 'StrVector', 'StrSlice', 'OwnedStr', 'IterBytes', 'ToStr', 'IntoStr',
+ 'CopyableTuple', 'ImmutableTuple', 'ImmutableEqVector', 'ImmutableTotalOrdVector',
+ 'ImmutableCopyableVector', 'OwnedVector', 'OwnedCopyableVector',
+ 'OwnedEqVector', 'MutableVector', 'MutableTotalOrdVector',
+ 'Vector', 'VectorVector', 'CopyableVector', 'ImmutableVector',
+ 'Port', 'Chan', 'SharedChan', 'spawn', 'drop'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(ImmutableTuple\d+|Tuple\d+)\b', Name.Builtin),
+ # Borrowed pointer
+ (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)),
+ # Labels
+ (r'\'[A-Za-z_]\w*:', Name.Label),
+ # Character Literal
+ (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""",
+ String.Char),
+ # Lifetime
+ (r"""'[a-zA-Z_]\w*""", Name.Label),
+ # Binary Literal
+ (r'0b[01_]+', Number.Bin, 'number_lit'),
+ # Octal Literal
+ (r'0o[0-7_]+', Number.Oct, 'number_lit'),
+ # Hexadecimal Literal
+ (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
+ # Decimal Literal
+ (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
+ r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'),
+ (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
+ # String Literal
+ (r'"', String, 'string'),
+ (r'r(#*)".*?"\1', String.Raw),
+
+ # Operators and Punctuation
+ (r'[{}()\[\],.;]', Punctuation),
+ (r'[+\-*/%&|<>^!~@=:?]', Operator),
+
+ # Identifier
+ (r'[a-zA-Z_]\w*', Name),
+
+ # Attributes
+ (r'#!?\[', Comment.Preproc, 'attribute['),
+ # Macros
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Whitespace, Punctuation), 'macro{'),
+ (r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
+ bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
+ Punctuation), 'macro('),
+ ],
+ 'comment': [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ],
+ 'number_lit': [
+ (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'),
+ ],
+ 'string': [
+ (r'"', String, '#pop'),
+ (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
+ r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
+ (r'[^\\"]+', String),
+ (r'\\', String),
+ ],
+ 'macro{': [
+ (r'\{', Operator, '#push'),
+ (r'\}', Operator, '#pop'),
+ ],
+ 'macro(': [
+ (r'\(', Operator, '#push'),
+ (r'\)', Operator, '#pop'),
+ ],
+ 'attribute_common': [
+ (r'"', String, 'string'),
+ (r'\[', Comment.Preproc, 'attribute['),
+ (r'\(', Comment.Preproc, 'attribute('),
+ ],
+ 'attribute[': [
+ include('attribute_common'),
+ (r'\];?', Comment.Preproc, '#pop'),
+ (r'[^"\]]+', Comment.Preproc),
+ ],
+ 'attribute(': [
+ include('attribute_common'),
+ (r'\);?', Comment.Preproc, '#pop'),
+ (r'[^"\)]+', Comment.Preproc),
+ ],
+ }
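
A small sketch against 0.9-era Rust source, assuming the 'rust' alias above is registered; the token names in the comment follow the rules defined in this file:

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    rust_code = 'fn main() {\n    let x: uint = 0b1010_1010u;\n}\n'
    # 'fn' and 'let' are Keyword / Keyword.Declaration, 'uint' is Keyword.Type,
    # and the binary literal enters the 'number_lit' state so its trailing
    # 'u' suffix is emitted as Keyword.
    for ttype, value in lex(rust_code, get_lexer_by_name('rust')):
        print(ttype, repr(value))
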
diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py
new file mode 100644
index 00000000..602877f4
--- /dev/null
+++ b/pygments/lexers/scripting.py
@@ -0,0 +1,922 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.scripting
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexer for scripting and embedded languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
+ words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error, Whitespace
+from pygments.util import get_bool_opt, get_list_opt, iteritems
+
+__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
+ 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer']
+
+
+class LuaLexer(RegexLexer):
+ """
+ For `Lua <http://www.lua.org>`_ source code.
+
+ Additional options accepted:
+
+ `func_name_highlighting`
+ If given and ``True``, highlight builtin function names
+ (default: ``True``).
+ `disabled_modules`
+ If given, must be a list of module names whose function names
+ should not be highlighted. By default all modules are highlighted.
+
+ To get a list of allowed modules have a look into the
+ `_lua_builtins` module:
+
+ .. sourcecode:: pycon
+
+ >>> from pygments.lexers._lua_builtins import MODULES
+ >>> MODULES.keys()
+ ['string', 'coroutine', 'modules', 'io', 'basic', ...]
+ """
+
+ name = 'Lua'
+ aliases = ['lua']
+ filenames = ['*.lua', '*.wlua']
+ mimetypes = ['text/x-lua', 'application/x-lua']
+
+ tokens = {
+ 'root': [
+ # lua allows a file to start with a shebang
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline),
+ ('--.*$', Comment.Single),
+
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ ('(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+
+ (r'\n', Text),
+ (r'[^\S\n]', Text),
+ # multiline strings
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+
+ (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator),
+ (r'[\[\]\{\}\(\)\.,:;]', Punctuation),
+ (r'(and|or|not)\b', Operator.Word),
+
+ ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
+ r'while)\b', Keyword),
+ (r'(local)\b', Keyword.Declaration),
+ (r'(true|false|nil)\b', Keyword.Constant),
+
+ (r'(function)\b', Keyword, 'funcname'),
+
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+
+ 'funcname': [
+ (r'\s+', Text),
+ ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)',
+ bygroups(Name.Class, Punctuation, Name.Function), '#pop'),
+ # inline function
+ ('\(', Punctuation, '#pop'),
+ ],
+
+ # if I understand correctly, every character is valid in a lua string,
+ # so this state is only for later corrections
+ 'string': [
+ ('.', String)
+ ],
+
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+
+ 'sqs': [
+ ("'", String, '#pop'),
+ include('string')
+ ],
+
+ 'dqs': [
+ ('"', String, '#pop'),
+ include('string')
+ ]
+ }
+
+ def __init__(self, **options):
+ self.func_name_highlighting = get_bool_opt(
+ options, 'func_name_highlighting', True)
+ self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
+
+ self._functions = set()
+ if self.func_name_highlighting:
+ from pygments.lexers._lua_builtins import MODULES
+ for mod, func in iteritems(MODULES):
+ if mod not in self.disabled_modules:
+ self._functions.update(func)
+ RegexLexer.__init__(self, **options)
+
+ def get_tokens_unprocessed(self, text):
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name:
+ if value in self._functions:
+ yield index, Name.Builtin, value
+ continue
+ elif '.' in value:
+ a, b = value.split('.')
+ yield index, Name, a
+ yield index + len(a), Punctuation, u'.'
+ yield index + len(a) + 1, Name, b
+ continue
+ yield index, token, value
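
The options documented in the class docstring are passed straight through to the lexer constructor, so they also work via get_lexer_by_name(); a hedged sketch (the choice of disabled module is arbitrary):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    lua_code = 'print(string.rep("ab", 3))\nio.write("done")\n'
    # With the io module disabled, io.write is no longer tagged Name.Builtin
    # and is split into Name / Punctuation / Name by get_tokens_unprocessed,
    # while print and string.rep remain builtins.
    lexer = get_lexer_by_name('lua', disabled_modules=['io'])
    for ttype, value in lex(lua_code, lexer):
        print(ttype, repr(value))
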
+
+
+class MoonScriptLexer(LuaLexer):
+ """
+ For `MoonScript <http://moonscript.org>`_ source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "MoonScript"
+ aliases = ["moon", "moonscript"]
+ filenames = ["*.moon"]
+ mimetypes = ['text/x-moonscript', 'application/x-moonscript']
+
+ tokens = {
+ 'root': [
+ (r'#!(.*?)$', Comment.Preproc),
+ default('base'),
+ ],
+ 'base': [
+ ('--.*$', Comment.Single),
+ (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
+ (r'(?i)\d+e[+-]?\d+', Number.Float),
+ (r'(?i)0x[0-9a-f]*', Number.Hex),
+ (r'\d+', Number.Integer),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'(?s)\[(=*)\[.*?\]\1\]', String),
+ (r'(->|=>)', Name.Function),
+ (r':[a-zA-Z_]\w*', Name.Variable),
+ (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
+ (r'[;,]', Punctuation),
+ (r'[\[\]\{\}\(\)]', Keyword.Type),
+ (r'[a-zA-Z_]\w*:', Name.Variable),
+ (words((
+ 'class', 'extends', 'if', 'then', 'super', 'do', 'with',
+ 'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
+ 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
+ 'break'), suffix=r'\b'),
+ Keyword),
+ (r'(true|false|nil)\b', Keyword.Constant),
+ (r'(and|or|not)\b', Operator.Word),
+ (r'(self)\b', Name.Builtin.Pseudo),
+ (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
+ (r'[A-Z]\w*', Name.Class), # proper name
+ (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
+ ("'", String.Single, combined('stringescape', 'sqs')),
+ ('"', String.Double, combined('stringescape', 'dqs'))
+ ],
+ 'stringescape': [
+ (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
+ ],
+ 'sqs': [
+ ("'", String.Single, '#pop'),
+ (".", String)
+ ],
+ 'dqs': [
+ ('"', String.Double, '#pop'),
+ (".", String)
+ ]
+ }
+
+ def get_tokens_unprocessed(self, text):
+ # set . as Operator instead of Punctuation
+ for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
+ if token == Punctuation and value == ".":
+ token = Operator
+ yield index, token, value
+
+
+class ChaiscriptLexer(RegexLexer):
+ """
+ For `ChaiScript <http://chaiscript.com/>`_ source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'ChaiScript'
+ aliases = ['chai', 'chaiscript']
+ filenames = ['*.chai']
+ mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
+
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'^\#.*?\n', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ include('commentsandwhitespace'),
+ (r'\n', Text),
+ (r'[^\S\n]+', Text),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'[=+\-*/]', Operator),
+ (r'(for|in|while|do|break|return|continue|if|else|'
+ r'throw|try|catch'
+ r')\b', Keyword, 'slashstartsregex'),
+ (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(attr|def|fun)\b', Keyword.Reserved),
+ (r'(true|false)\b', Keyword.Constant),
+ (r'(eval|throw)\b', Name.Builtin),
+ (r'`\S+`', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"', String.Double, 'dqstring'),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ],
+ 'dqstring': [
+ (r'\${[^"}]+?}', String.Interpol),
+ (r'\$', String.Double),
+ (r'\\\\', String.Double),
+ (r'\\"', String.Double),
+ (r'[^\\\\\\"$]+', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ }
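
A short sketch of the string-interpolation handling above (assuming the 'chai' alias is registered): the ${...} part of a double-quoted literal is emitted as String.Interpol, the rest as String.Double.

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    chai_code = 'var x = 2\nprint("x is ${x + 1}")\n'
    for ttype, value in lex(chai_code, get_lexer_by_name('chai')):
        print(ttype, repr(value))
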
+
+
+class LSLLexer(RegexLexer):
+ """
+ For Second Life's Linden Scripting Language source code.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'LSL'
+ aliases = ['lsl']
+ filenames = ['*.lsl']
+ mimetypes = ['text/x-lsl']
+
+ flags = re.MULTILINE
+
+ lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
+ lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
+ lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
+ lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
+ lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b'
+ lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
+ lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[ABCD]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
+ lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
+ lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
+ lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
+ lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
+ lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
+ lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
+ lsl_invalid_illegal = r'\b(?:event)\b'
+ lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
+ lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
+ lsl_reserved_log = r'\b(?:print)\b'
+ lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-\/]=?'
+
+ tokens = {
+ 'root':
+ [
+ (r'//.*?\n', Comment.Single),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String.Double, 'string'),
+ (lsl_keywords, Keyword),
+ (lsl_types, Keyword.Type),
+ (lsl_states, Name.Class),
+ (lsl_events, Name.Builtin),
+ (lsl_functions_builtin, Name.Function),
+ (lsl_constants_float, Keyword.Constant),
+ (lsl_constants_integer, Keyword.Constant),
+ (lsl_constants_integer_boolean, Keyword.Constant),
+ (lsl_constants_rotation, Keyword.Constant),
+ (lsl_constants_string, Keyword.Constant),
+ (lsl_constants_vector, Keyword.Constant),
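+            # broken, deprecated, illegal and unimplemented names are flagged as errors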
+ (lsl_invalid_broken, Error),
+ (lsl_invalid_deprecated, Error),
+ (lsl_invalid_illegal, Error),
+ (lsl_invalid_unimplemented, Error),
+ (lsl_reserved_godmode, Keyword.Reserved),
+ (lsl_reserved_log, Keyword.Reserved),
+ (r'\b([a-zA-Z_]\w*)\b', Name.Variable),
+ (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
+ (r'(\d+\.\d*|\.\d+)', Number.Float),
+ (r'0[xX][0-9a-fA-F]+', Number.Hex),
+ (r'\d+', Number.Integer),
+ (lsl_operators, Operator),
+ (r':=?', Error),
+ (r'[,;{}\(\)\[\]]', Punctuation),
+ (r'\n+', Whitespace),
+ (r'\s+', Whitespace)
+ ],
+ 'comment':
+ [
+ (r'[^*/]+', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline)
+ ],
+ 'string':
+ [
+ (r'\\([nt"\\])', String.Escape),
+ (r'"', String.Double, '#pop'),
+ (r'\\.', Error),
+ (r'[^"\\]+', String.Double),
+ ]
+ }
+
+
+class AppleScriptLexer(RegexLexer):
+ """
+ For `AppleScript source code
+ <http://developer.apple.com/documentation/AppleScript/
+ Conceptual/AppleScriptLangGuide>`_,
+ including `AppleScript Studio
+ <http://developer.apple.com/documentation/AppleScript/
+ Reference/StudioReference>`_.
+ Contributed by Andreas Amann <aamann@mac.com>.
+
+ .. versionadded:: 1.0
+ """
+
+ name = 'AppleScript'
+ aliases = ['applescript']
+ filenames = ['*.applescript']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ Identifiers = r'[a-zA-Z]\w*'
+
+ # XXX: use words() for all of these
+ Literals = ('AppleScript', 'current application', 'false', 'linefeed',
+ 'missing value', 'pi', 'quote', 'result', 'return', 'space',
+ 'tab', 'text item delimiters', 'true', 'version')
+ Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
+ 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
+ 'real ', 'record ', 'reference ', 'RGB color ', 'script ',
+ 'text ', 'unit types', '(?:Unicode )?text', 'string')
+ BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
+ 'paragraph', 'word', 'year')
+ HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
+ 'aside from', 'at', 'below', 'beneath', 'beside',
+ 'between', 'for', 'given', 'instead of', 'on', 'onto',
+ 'out of', 'over', 'since')
+ Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
+ 'choose application', 'choose color', 'choose file( name)?',
+ 'choose folder', 'choose from list',
+ 'choose remote application', 'clipboard info',
+ 'close( access)?', 'copy', 'count', 'current date', 'delay',
+ 'delete', 'display (alert|dialog)', 'do shell script',
+ 'duplicate', 'exists', 'get eof', 'get volume settings',
+ 'info for', 'launch', 'list (disks|folder)', 'load script',
+ 'log', 'make', 'mount volume', 'new', 'offset',
+ 'open( (for access|location))?', 'path to', 'print', 'quit',
+ 'random number', 'read', 'round', 'run( script)?',
+ 'say', 'scripting components',
+ 'set (eof|the clipboard to|volume)', 'store script',
+ 'summarize', 'system attribute', 'system info',
+ 'the clipboard', 'time to GMT', 'write', 'quoted form')
+ References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
+ 'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
+ 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
+ 'before', 'behind', 'every', 'front', 'index', 'last',
+ 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
+ Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
+ "isn't", "isn't equal( to)?", "is not equal( to)?",
+ "doesn't equal", "does not equal", "(is )?greater than",
+ "comes after", "is not less than or equal( to)?",
+ "isn't less than or equal( to)?", "(is )?less than",
+ "comes before", "is not greater than or equal( to)?",
+ "isn't greater than or equal( to)?",
+ "(is )?greater than or equal( to)?", "is not less than",
+ "isn't less than", "does not come before",
+ "doesn't come before", "(is )?less than or equal( to)?",
+ "is not greater than", "isn't greater than",
+ "does not come after", "doesn't come after", "starts? with",
+ "begins? with", "ends? with", "contains?", "does not contain",
+ "doesn't contain", "is in", "is contained by", "is not in",
+ "is not contained by", "isn't contained by", "div", "mod",
+ "not", "(a )?(ref( to)?|reference to)", "is", "does")
+ Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
+ 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
+               'try', 'until', 'using terms from', 'while', 'with',
+ 'with timeout( of)?', 'with transaction', 'by', 'continue',
+ 'end', 'its?', 'me', 'my', 'return', 'of', 'as')
+ Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
+ Reserved = ('but', 'put', 'returning', 'the')
+ StudioClasses = ('action cell', 'alert reply', 'application', 'box',
+ 'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
+ 'clip view', 'color well', 'color-panel',
+ 'combo box( item)?', 'control',
+ 'data( (cell|column|item|row|source))?', 'default entry',
+ 'dialog reply', 'document', 'drag info', 'drawer',
+ 'event', 'font(-panel)?', 'formatter',
+ 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
+ 'movie( view)?', 'open-panel', 'outline view', 'panel',
+ 'pasteboard', 'plugin', 'popup button',
+ 'progress indicator', 'responder', 'save-panel',
+ 'scroll view', 'secure text field( cell)?', 'slider',
+ 'sound', 'split view', 'stepper', 'tab view( item)?',
+ 'table( (column|header cell|header view|view))',
+ 'text( (field( cell)?|view))?', 'toolbar( item)?',
+ 'user-defaults', 'view', 'window')
+ StudioEvents = ('accept outline drop', 'accept table drop', 'action',
+ 'activated', 'alert ended', 'awake from nib', 'became key',
+ 'became main', 'begin editing', 'bounds changed',
+ 'cell value', 'cell value changed', 'change cell value',
+ 'change item value', 'changed', 'child of item',
+ 'choose menu item', 'clicked', 'clicked toolbar item',
+ 'closed', 'column clicked', 'column moved',
+ 'column resized', 'conclude drop', 'data representation',
+ 'deminiaturized', 'dialog ended', 'document nib name',
+ 'double clicked', 'drag( (entered|exited|updated))?',
+ 'drop', 'end editing', 'exposed', 'idle', 'item expandable',
+ 'item value', 'item value changed', 'items changed',
+ 'keyboard down', 'keyboard up', 'launched',
+ 'load data representation', 'miniaturized', 'mouse down',
+ 'mouse dragged', 'mouse entered', 'mouse exited',
+ 'mouse moved', 'mouse up', 'moved',
+ 'number of browser rows', 'number of items',
+ 'number of rows', 'open untitled', 'opened', 'panel ended',
+ 'parameters updated', 'plugin loaded', 'prepare drop',
+ 'prepare outline drag', 'prepare outline drop',
+ 'prepare table drag', 'prepare table drop',
+ 'read from file', 'resigned active', 'resigned key',
+ 'resigned main', 'resized( sub views)?',
+ 'right mouse down', 'right mouse dragged',
+ 'right mouse up', 'rows changed', 'scroll wheel',
+ 'selected tab view item', 'selection changed',
+ 'selection changing', 'should begin editing',
+ 'should close', 'should collapse item',
+ 'should end editing', 'should expand item',
+ 'should open( untitled)?',
+ 'should quit( after last window closed)?',
+ 'should select column', 'should select item',
+ 'should select row', 'should select tab view item',
+ 'should selection change', 'should zoom', 'shown',
+ 'update menu item', 'update parameters',
+ 'update toolbar item', 'was hidden', 'was miniaturized',
+ 'will become active', 'will close', 'will dismiss',
+ 'will display browser cell', 'will display cell',
+ 'will display item cell', 'will display outline cell',
+ 'will finish launching', 'will hide', 'will miniaturize',
+ 'will move', 'will open', 'will pop up', 'will quit',
+ 'will resign active', 'will resize( sub views)?',
+ 'will select tab view item', 'will show', 'will zoom',
+ 'write to file', 'zoomed')
+ StudioCommands = ('animate', 'append', 'call method', 'center',
+ 'close drawer', 'close panel', 'display',
+ 'display alert', 'display dialog', 'display panel', 'go',
+ 'hide', 'highlight', 'increment', 'item for',
+ 'load image', 'load movie', 'load nib', 'load panel',
+ 'load sound', 'localized string', 'lock focus', 'log',
+ 'open drawer', 'path for', 'pause', 'perform action',
+ 'play', 'register', 'resume', 'scroll', 'select( all)?',
+ 'show', 'size to fit', 'start', 'step back',
+ 'step forward', 'stop', 'synchronize', 'unlock focus',
+ 'update')
+ StudioProperties = ('accepts arrow key', 'action method', 'active',
+ 'alignment', 'allowed identifiers',
+ 'allows branch selection', 'allows column reordering',
+ 'allows column resizing', 'allows column selection',
+ 'allows customization',
+ 'allows editing text attributes',
+ 'allows empty selection', 'allows mixed state',
+ 'allows multiple selection', 'allows reordering',
+ 'allows undo', 'alpha( value)?', 'alternate image',
+ 'alternate increment value', 'alternate title',
+ 'animation delay', 'associated file name',
+ 'associated object', 'auto completes', 'auto display',
+ 'auto enables items', 'auto repeat',
+ 'auto resizes( outline column)?',
+ 'auto save expanded items', 'auto save name',
+ 'auto save table columns', 'auto saves configuration',
+ 'auto scroll', 'auto sizes all columns to fit',
+ 'auto sizes cells', 'background color', 'bezel state',
+ 'bezel style', 'bezeled', 'border rect', 'border type',
+ 'bordered', 'bounds( rotation)?', 'box type',
+ 'button returned', 'button type',
+ 'can choose directories', 'can choose files',
+ 'can draw', 'can hide',
+ 'cell( (background color|size|type))?', 'characters',
+ 'class', 'click count', 'clicked( data)? column',
+ 'clicked data item', 'clicked( data)? row',
+ 'closeable', 'collating', 'color( (mode|panel))',
+ 'command key down', 'configuration',
+ 'content(s| (size|view( margins)?))?', 'context',
+ 'continuous', 'control key down', 'control size',
+ 'control tint', 'control view',
+ 'controller visible', 'coordinate system',
+ 'copies( on scroll)?', 'corner view', 'current cell',
+ 'current column', 'current( field)? editor',
+ 'current( menu)? item', 'current row',
+ 'current tab view item', 'data source',
+ 'default identifiers', 'delta (x|y|z)',
+ 'destination window', 'directory', 'display mode',
+ 'displayed cell', 'document( (edited|rect|view))?',
+ 'double value', 'dragged column', 'dragged distance',
+ 'dragged items', 'draws( cell)? background',
+ 'draws grid', 'dynamically scrolls', 'echos bullets',
+ 'edge', 'editable', 'edited( data)? column',
+ 'edited data item', 'edited( data)? row', 'enabled',
+ 'enclosing scroll view', 'ending page',
+ 'error handling', 'event number', 'event type',
+ 'excluded from windows menu', 'executable path',
+ 'expanded', 'fax number', 'field editor', 'file kind',
+ 'file name', 'file type', 'first responder',
+ 'first visible column', 'flipped', 'floating',
+ 'font( panel)?', 'formatter', 'frameworks path',
+ 'frontmost', 'gave up', 'grid color', 'has data items',
+ 'has horizontal ruler', 'has horizontal scroller',
+ 'has parent data item', 'has resize indicator',
+ 'has shadow', 'has sub menu', 'has vertical ruler',
+ 'has vertical scroller', 'header cell', 'header view',
+ 'hidden', 'hides when deactivated', 'highlights by',
+ 'horizontal line scroll', 'horizontal page scroll',
+ 'horizontal ruler view', 'horizontally resizable',
+ 'icon image', 'id', 'identifier',
+ 'ignores multiple clicks',
+ 'image( (alignment|dims when disabled|frame style|scaling))?',
+ 'imports graphics', 'increment value',
+ 'indentation per level', 'indeterminate', 'index',
+ 'integer value', 'intercell spacing', 'item height',
+ 'key( (code|equivalent( modifier)?|window))?',
+ 'knob thickness', 'label', 'last( visible)? column',
+ 'leading offset', 'leaf', 'level', 'line scroll',
+ 'loaded', 'localized sort', 'location', 'loop mode',
+                        'main( (bundle|menu|window))?', 'marker follows cell',
+ 'matrix mode', 'maximum( content)? size',
+ 'maximum visible columns',
+ 'menu( form representation)?', 'miniaturizable',
+ 'miniaturized', 'minimized image', 'minimized title',
+ 'minimum column width', 'minimum( content)? size',
+ 'modal', 'modified', 'mouse down state',
+ 'movie( (controller|file|rect))?', 'muted', 'name',
+ 'needs display', 'next state', 'next text',
+ 'number of tick marks', 'only tick mark values',
+ 'opaque', 'open panel', 'option key down',
+ 'outline table column', 'page scroll', 'pages across',
+ 'pages down', 'palette label', 'pane splitter',
+ 'parent data item', 'parent window', 'pasteboard',
+ 'path( (names|separator))?', 'playing',
+ 'plays every frame', 'plays selection only', 'position',
+ 'preferred edge', 'preferred type', 'pressure',
+ 'previous text', 'prompt', 'properties',
+ 'prototype cell', 'pulls down', 'rate',
+ 'released when closed', 'repeated',
+ 'requested print time', 'required file type',
+ 'resizable', 'resized column', 'resource path',
+ 'returns records', 'reuses columns', 'rich text',
+ 'roll over', 'row height', 'rulers visible',
+ 'save panel', 'scripts path', 'scrollable',
+ 'selectable( identifiers)?', 'selected cell',
+ 'selected( data)? columns?', 'selected data items?',
+ 'selected( data)? rows?', 'selected item identifier',
+ 'selection by rect', 'send action on arrow key',
+ 'sends action when done editing', 'separates columns',
+ 'separator item', 'sequence number', 'services menu',
+ 'shared frameworks path', 'shared support path',
+ 'sheet', 'shift key down', 'shows alpha',
+ 'shows state by', 'size( mode)?',
+ 'smart insert delete enabled', 'sort case sensitivity',
+ 'sort column', 'sort order', 'sort type',
+ 'sorted( data rows)?', 'sound', 'source( mask)?',
+ 'spell checking enabled', 'starting page', 'state',
+ 'string value', 'sub menu', 'super menu', 'super view',
+ 'tab key traverses cells', 'tab state', 'tab type',
+ 'tab view', 'table view', 'tag', 'target( printer)?',
+ 'text color', 'text container insert',
+ 'text container origin', 'text returned',
+ 'tick mark position', 'time stamp',
+ 'title(d| (cell|font|height|position|rect))?',
+ 'tool tip', 'toolbar', 'trailing offset', 'transparent',
+ 'treat packages as directories', 'truncated labels',
+ 'types', 'unmodified characters', 'update views',
+ 'use sort indicator', 'user defaults',
+ 'uses data source', 'uses ruler',
+ 'uses threaded animation',
+ 'uses title from previous column', 'value wraps',
+ 'version',
+ 'vertical( (line scroll|page scroll|ruler view))?',
+ 'vertically resizable', 'view',
+ 'visible( document rect)?', 'volume', 'width', 'window',
+ 'windows menu', 'wraps', 'zoomable', 'zoomed')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
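+            # "¬" at the end of a line continues the statement on the next line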
+ (u'¬\\n', String.Escape),
+ (r"'s\s+", Text), # This is a possessive, consider moving
+ (r'(--|#).*?$', Comment),
+ (r'\(\*', Comment.Multiline, 'comment'),
+ (r'[\(\){}!,.:]', Punctuation),
+ (u'(«)([^»]+)(»)',
+ bygroups(Text, Name.Builtin, Text)),
+ (r'\b((?:considering|ignoring)\s*)'
+ r'(application responses|case|diacriticals|hyphens|'
+ r'numeric strings|punctuation|white space)',
+ bygroups(Keyword, Name.Builtin)),
+ (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator),
+ (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
+ (r'^(\s*(?:on|end)\s+)'
+ r'(%s)' % '|'.join(StudioEvents[::-1]),
+ bygroups(Keyword, Name.Function)),
+ (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'\b(as )(%s)\b' % '|'.join(Classes),
+ bygroups(Keyword, Name.Class)),
+ (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
+ (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(Control), Keyword),
+ (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
+ (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
+ (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
+ (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
+ (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r'\b(%s)\b' % Identifiers, Name.Variable),
+ (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
+ (r'[-+]?\d+', Number.Integer),
+ ],
+ 'comment': [
+ ('\(\*', Comment.Multiline, '#push'),
+ ('\*\)', Comment.Multiline, '#pop'),
+ ('[^*(]+', Comment.Multiline),
+ ('[*(]', Comment.Multiline),
+ ],
+ }
+
+
+class RexxLexer(RegexLexer):
+ """
+ `Rexx <http://www.rexxinfo.org/>`_ is a scripting language available for
+    a wide range of different platforms, with its roots on mainframe
+    systems. It is popular for I/O- and data-based tasks and can act as a
+    glue language to bind different applications together.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Rexx'
+ aliases = ['rexx', 'arexx']
+ filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
+ mimetypes = ['text/x-rexx']
+ flags = re.IGNORECASE
+
+ tokens = {
+ 'root': [
+ (r'\s', Whitespace),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'"', String, 'string_double'),
+ (r"'", String, 'string_single'),
+ (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
+ (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
+ bygroups(Name.Function, Whitespace, Operator, Whitespace,
+ Keyword.Declaration)),
+ (r'([a-z_]\w*)(\s*)(:)',
+ bygroups(Name.Label, Whitespace, Operator)),
+ include('function'),
+ include('keyword'),
+ include('operator'),
+ (r'[a-z_]\w*', Text),
+ ],
+ 'function': [
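+            # built-in function names are only highlighted when followed by "("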
+ (words((
+ 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
+ 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
+ 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
+ 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
+ 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
+ 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
+ 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
+ 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
+ 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
+ 'xrange'), suffix=r'(\s*)(\()'),
+ bygroups(Name.Builtin, Whitespace, Operator)),
+ ],
+ 'keyword': [
+ (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
+ r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
+ r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
+ r'while)\b', Keyword.Reserved),
+ ],
+ 'operator': [
+ (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
+ r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
+ r'¬>>|¬>|¬|\.|,)', Operator),
+ ],
+ 'string_double': [
+ (r'[^"\n]+', String),
+ (r'""', String),
+ (r'"', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'string_single': [
+            (r'[^\'\n]+', String),
+ (r'\'\'', String),
+ (r'\'', String, '#pop'),
+ (r'\n', Text, '#pop'), # Stray linefeed also terminates strings.
+ ],
+ 'comment': [
+ (r'[^*]+', Comment.Multiline),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'\*', Comment.Multiline),
+ ]
+ }
+
+ _c = lambda s: re.compile(s, re.MULTILINE)
+ _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
+ _ADDRESS_PATTERN = _c(r'^\s*address\s+')
+ _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
+ _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
+ _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
+ _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
+ _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
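+    # (pattern, weight) pairs summed up by analyse_text() below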
+ PATTERNS_AND_WEIGHTS = (
+ (_ADDRESS_COMMAND_PATTERN, 0.2),
+ (_ADDRESS_PATTERN, 0.05),
+ (_DO_WHILE_PATTERN, 0.1),
+ (_ELSE_DO_PATTERN, 0.1),
+ (_IF_THEN_DO_PATTERN, 0.1),
+ (_PROCEDURE_PATTERN, 0.5),
+ (_PARSE_ARG_PATTERN, 0.2),
+ )
+
+ def analyse_text(text):
+ """
+        Check for initial comment and patterns that distinguish Rexx from other
+ C-like languages.
+ """
+ if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
+            # Header matches MVS Rexx requirements; this is certainly a Rexx
+ # script.
+ return 1.0
+ elif text.startswith('/*'):
+ # Header matches general Rexx requirements; the source code might
+ # still be any language using C comments such as C++, C# or Java.
+ lowerText = text.lower()
+ result = sum(weight
+ for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
+ if pattern.search(lowerText)) + 0.01
+ return min(result, 1.0)
+
+
+class MOOCodeLexer(RegexLexer):
+ """
+ For `MOOCode <http://www.moo.mud.org/>`_ (the MOO scripting
+ language).
+
+ .. versionadded:: 0.9
+ """
+ name = 'MOOCode'
+ filenames = ['*.moo']
+ aliases = ['moocode', 'moo']
+ mimetypes = ['text/x-moocode']
+
+ tokens = {
+ 'root': [
+ # Numbers
+ (r'(0|[1-9][0-9_]*)', Number.Integer),
+ # Strings
+ (r'"(\\\\|\\"|[^"])*"', String),
+ # exceptions
+ (r'(E_PERM|E_DIV)', Name.Exception),
+ # db-refs
+ (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity),
+ # Keywords
+ (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
+ r'|endwhile|break|continue|return|try'
+ r'|except|endtry|finally|in)\b', Keyword),
+ # builtins
+ (r'(random|length)', Name.Builtin),
+ # special variables
+ (r'(player|caller|this|args)', Name.Variable.Instance),
+ # skip whitespace
+ (r'\s+', Text),
+ (r'\n', Text),
+ # other operators
+ (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator),
+ # function call
+ (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)),
+ # variables
+ (r'([a-zA-Z_0-9]+)', Text),
+ ]
+ }
+
+
+class HybrisLexer(RegexLexer):
+ """
+ For `Hybris <http://www.hybris-lang.org>`_ source code.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Hybris'
+ aliases = ['hybris', 'hy']
+ filenames = ['*.hy', '*.hyb']
+ mimetypes = ['text/x-hybris', 'application/x-hybris']
+
+ flags = re.MULTILINE | re.DOTALL
+
+ tokens = {
+ 'root': [
+ # method names
+ (r'^(\s*(?:function|method|operator\s+)+?)'
+ r'([a-zA-Z_]\w*)'
+ r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
+ (r'[^\S\n]+', Text),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline),
+ (r'@[a-zA-Z_][\w\.]*', Name.Decorator),
+ (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
+ r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
+ (r'(extends|private|protected|public|static|throws|function|method|'
+ r'operator)\b', Keyword.Declaration),
+ (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
+ r'__INC_PATH__)\b', Keyword.Constant),
+ (r'(class|struct)(\s+)',
+ bygroups(Keyword.Declaration, Text), 'class'),
+ (r'(import|include)(\s+)',
+ bygroups(Keyword.Namespace, Text), 'import'),
+ (words((
+ 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
+ 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2',
+ 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp',
+ 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh',
+ 'isint', 'isfloat', 'ischar', 'isstring', 'isarray', 'ismap', 'isalias', 'typeof',
+ 'sizeof', 'toint', 'tostring', 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval',
+ 'var_names', 'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
+ 'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep',
+ 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink', 'dllcall', 'dllcall_argv',
+ 'dllclose', 'env', 'exec', 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill',
+ 'pthread_create', 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
+ 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen',
+ 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv',
+ 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open',
+ 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed',
+ 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write',
+ 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
+ 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size',
+ 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split',
+ 'trim', 'remove', 'contains', 'join'), suffix=r'\b'),
+ Name.Builtin),
+ (words((
+ 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
+ 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
+ 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
+ Keyword.Type),
+ (r'"(\\\\|\\"|[^"])*"', String),
+ (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
+ (r'(\.)([a-zA-Z_]\w*)',
+ bygroups(Operator, Name.Attribute)),
+ (r'[a-zA-Z_]\w*:', Name.Label),
+ (r'[a-zA-Z_\$]\w*', Name),
+ (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-f]+', Number.Hex),
+ (r'[0-9]+L?', Number.Integer),
+ (r'\n', Text),
+ ],
+ 'class': [
+ (r'[a-zA-Z_]\w*', Name.Class, '#pop')
+ ],
+ 'import': [
+ (r'[\w.]+\*?', Name.Namespace, '#pop')
+ ],
+ }
diff --git a/pygments/lexers/shell.py b/pygments/lexers/shell.py
index df9b56f4..b4d81446 100644
--- a/pygments/lexers/shell.py
+++ b/pygments/lexers/shell.py
@@ -31,7 +31,7 @@ class BashLexer(RegexLexer):
"""
name = 'Bash'
- aliases = ['bash', 'sh', 'ksh']
+ aliases = ['bash', 'sh', 'ksh', 'shell']
filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
'.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD']
mimetypes = ['application/x-sh', 'application/x-shellscript']
@@ -39,11 +39,15 @@ class BashLexer(RegexLexer):
tokens = {
'root': [
include('basic'),
- (r'\$\(\(', Keyword, 'math'),
- (r'\$\(', Keyword, 'paren'),
- (r'\${#?', Keyword, 'curly'),
(r'`', String.Backtick, 'backticks'),
include('data'),
+ include('interp'),
+ ],
+ 'interp': [
+ (r'\$\(\(', Keyword, 'math'),
+ (r'\$\(', Keyword, 'paren'),
+ (r'\${#?', String.Interpol, 'curly'),
+ (r'\$#?(\w+|.)', Name.Variable),
],
'basic': [
(r'\b(if|fi|else|while|do|done|for|then|return|function|case|'
@@ -65,22 +69,28 @@ class BashLexer(RegexLexer):
(r'&&|\|\|', Operator),
],
'data': [
- (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
- (r"(?s)\$?'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
+ (r'"', String.Double, 'string'),
+ (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
+ (r"(?s)'.*?'", String.Single),
(r';', Punctuation),
(r'&', Punctuation),
(r'\|', Punctuation),
(r'\s+', Text),
(r'\d+(?= |\Z)', Number),
(r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
- (r'\$#?(\w+|.)', Name.Variable),
(r'<', Text),
],
+ 'string': [
+ (r'"', String.Double, '#pop'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
+ include('interp'),
+ ],
'curly': [
- (r'}', Keyword, '#pop'),
+ (r'}', String.Interpol, '#pop'),
(r':-', Keyword),
(r'\w+', Name.Variable),
- (r'[^}:"\'`$]+', Punctuation),
+ (r'[^}:"\'`$\\]+', Punctuation),
(r':', Punctuation),
include('root'),
],
diff --git a/pygments/lexers/smalltalk.py b/pygments/lexers/smalltalk.py
new file mode 100644
index 00000000..4e78ac07
--- /dev/null
+++ b/pygments/lexers/smalltalk.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.smalltalk
+ ~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Smalltalk and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, bygroups, default
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SmalltalkLexer', 'NewspeakLexer']
+
+
+class SmalltalkLexer(RegexLexer):
+ """
+ For `Smalltalk <http://www.smalltalk.org/>`_ syntax.
+ Contributed by Stefan Matthias Aust.
+ Rewritten by Nils Winter.
+
+ .. versionadded:: 0.10
+ """
+ name = 'Smalltalk'
+ filenames = ['*.st']
+ aliases = ['smalltalk', 'squeak', 'st']
+ mimetypes = ['text/x-smalltalk']
+
+ tokens = {
+ 'root': [
+ (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
+ include('squeak fileout'),
+ include('whitespaces'),
+ include('method definition'),
+ (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
+ include('objects'),
+ (r'\^|\:=|\_', Operator),
+ # temporaries
+ (r'[\]({}.;!]', Text),
+ ],
+ 'method definition': [
+            # Not perfect: can't allow whitespace at the beginning and the
+            # end without breaking everything
+ (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
+ (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
+ bygroups(Name.Function, Text, Name.Variable, Text)),
+ ],
+ 'blockvariables': [
+ include('whitespaces'),
+ (r'(:)(\s*)(\w+)',
+ bygroups(Operator, Text, Name.Variable)),
+ (r'\|', Operator, '#pop'),
+ default('#pop'), # else pop
+ ],
+ 'literals': [
+ (r"'(''|[^'])*'", String, 'afterobject'),
+ (r'\$.', String.Char, 'afterobject'),
+ (r'#\(', String.Symbol, 'parenth'),
+ (r'\)', Text, 'afterobject'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
+ ],
+ '_parenth_helper': [
+ include('whitespaces'),
+ (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
+ (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
+ # literals
+ (r"'(''|[^'])*'", String),
+ (r'\$.', String.Char),
+ (r'#*\(', String.Symbol, 'inner_parenth'),
+ ],
+ 'parenth': [
+ # This state is a bit tricky since
+ # we can't just pop this state
+ (r'\)', String.Symbol, ('root', 'afterobject')),
+ include('_parenth_helper'),
+ ],
+ 'inner_parenth': [
+ (r'\)', String.Symbol, '#pop'),
+ include('_parenth_helper'),
+ ],
+ 'whitespaces': [
+ # skip whitespace and comments
+ (r'\s+', Text),
+ (r'"(""|[^"])*"', Comment),
+ ],
+ 'objects': [
+ (r'\[', Text, 'blockvariables'),
+ (r'\]', Text, 'afterobject'),
+ (r'\b(self|super|true|false|nil|thisContext)\b',
+ Name.Builtin.Pseudo, 'afterobject'),
+ (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
+ (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
+ (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
+ String.Symbol, 'afterobject'),
+ include('literals'),
+ ],
+ 'afterobject': [
+ (r'! !$', Keyword, '#pop'), # squeak chunk delimiter
+ include('whitespaces'),
+ (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
+ Name.Builtin, '#pop'),
+ (r'\b(new\b(?!:))', Name.Builtin),
+ (r'\:=|\_', Operator, '#pop'),
+ (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
+ (r'\b[a-zA-Z]+\w*', Name.Function),
+ (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
+ (r'\.', Punctuation, '#pop'),
+ (r';', Punctuation),
+ (r'[\])}]', Text),
+ (r'[\[({]', Text, '#pop'),
+ ],
+ 'squeak fileout': [
+ # Squeak fileout format (optional)
+ (r'^"(""|[^"])*"!', Keyword),
+ (r"^'(''|[^'])*'!", Keyword),
+ (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
+ (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
+ bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
+ (r'^(\w+)( subclass: )(#\w+)'
+ r'(\s+instanceVariableNames: )(.*?)'
+ r'(\s+classVariableNames: )(.*?)'
+ r'(\s+poolDictionaries: )(.*?)'
+ r'(\s+category: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
+ String, Keyword, String, Keyword, String, Keyword)),
+ (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
+ bygroups(Name.Class, Keyword, String, Keyword)),
+ (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
+ (r'! !$', Keyword),
+ ],
+ }
+
+
+class NewspeakLexer(RegexLexer):
+ """
+    For `Newspeak <http://newspeaklanguage.org/>`_ syntax.
+
+ .. versionadded:: 1.1
+ """
+ name = 'Newspeak'
+ filenames = ['*.ns2']
+ aliases = ['newspeak', ]
+ mimetypes = ['text/x-newspeak']
+
+ tokens = {
+ 'root': [
+ (r'\b(Newsqueak2)\b', Keyword.Declaration),
+ (r"'[^']*'", String),
+ (r'\b(class)(\s+)(\w+)(\s*)',
+ bygroups(Keyword.Declaration, Text, Name.Class, Text)),
+ (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
+ Keyword),
+ (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
+ bygroups(Name.Function, Text, Name.Variable)),
+ (r'(\w+)(\s*)(=)',
+ bygroups(Name.Attribute, Text, Operator)),
+ (r'<\w+>', Comment.Special),
+ include('expressionstat'),
+ include('whitespace')
+ ],
+
+ 'expressionstat': [
+ (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
+ (r'\d+', Number.Integer),
+ (r':\w+', Name.Variable),
+ (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
+ (r'\w+:', Name.Function),
+ (r'\w+', Name.Variable),
+ (r'\(|\)', Punctuation),
+ (r'\[|\]', Punctuation),
+ (r'\{|\}', Punctuation),
+
+ (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
+ (r'\.|;', Punctuation),
+ include('whitespace'),
+ include('literals'),
+ ],
+ 'literals': [
+ (r'\$.', String),
+ (r"'[^']*'", String),
+ (r"#'[^']*'", String.Symbol),
+ (r"#\w+:?", String.Symbol),
+ (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ (r'"[^"]*"', Comment)
+ ],
+ }
diff --git a/pygments/lexers/snobol.py b/pygments/lexers/snobol.py
new file mode 100644
index 00000000..97f614bd
--- /dev/null
+++ b/pygments/lexers/snobol.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.snobol
+ ~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for the SNOBOL language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['SnobolLexer']
+
+
+class SnobolLexer(RegexLexer):
+ """
+ Lexer for the SNOBOL4 programming language.
+
+ Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
+ Does not require spaces around binary operators.
+
+ .. versionadded:: 1.5
+ """
+
+ name = "Snobol"
+ aliases = ["snobol"]
+ filenames = ['*.snobol']
+ mimetypes = ['text/x-snobol']
+
+ tokens = {
+ # root state, start of line
+ # comments, continuation lines, and directives start in column 1
+ # as do labels
+ 'root': [
+ (r'\*.*\n', Comment),
+ (r'[\+\.] ', Punctuation, 'statement'),
+ (r'-.*\n', Comment),
+ (r'END\s*\n', Name.Label, 'heredoc'),
+ (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'),
+ (r'\s+', Text, 'statement'),
+ ],
+ # statement state, line after continuation or label
+ 'statement': [
+ (r'\s*\n', Text, '#pop'),
+ (r'\s+', Text),
+ (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
+ r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
+ r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
+ r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
+ Name.Builtin),
+ (r'[A-Za-z][\w\.]*', Name),
+ # ASCII equivalents of original operators
+ # | for the EBCDIC equivalent, ! likewise
+ # \ for EBCDIC negation
+ (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator),
+ (r'"[^"]*"', String),
+ (r"'[^']*'", String),
+ # Accept SPITBOL syntax for real numbers
+ # as well as Macro SNOBOL4
+ (r'[0-9]+(?=[^\.EeDd])', Number.Integer),
+ (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
+ # Goto
+ (r':', Punctuation, 'goto'),
+ (r'[\(\)<>,;]', Punctuation),
+ ],
+ # Goto block
+ 'goto': [
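+            # end of line pops past 'statement' back to 'root'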
+ (r'\s*\n', Text, "#pop:2"),
+ (r'\s+', Text),
+ (r'F|S', Keyword),
+ (r'(\()([A-Za-z][\w.]*)(\))',
+ bygroups(Punctuation, Name.Label, Punctuation))
+ ],
+ # everything after the END statement is basically one
+ # big heredoc.
+ 'heredoc': [
+ (r'.*\n', String.Heredoc)
+ ]
+ }
diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 7540f079..91e2237c 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -40,14 +40,14 @@
import re
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
from pygments.token import Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.lexers import get_lexer_by_name, ClassNotFound
from pygments.util import iteritems
from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
- PSEUDO_TYPES, PLPGSQL_KEYWORDS
+ PSEUDO_TYPES, PLPGSQL_KEYWORDS
__all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
@@ -57,6 +57,7 @@ line_re = re.compile('.*?\n')
language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
+
def language_callback(lexer, match):
"""Parse the content of a $-string using a lexer
@@ -102,7 +103,7 @@ class PostgresBase(object):
if lang.lower() == 'sql':
return get_lexer_by_name('postgresql', **self.options)
- tries = [ lang ]
+ tries = [lang]
if lang.startswith('pl'):
tries.append(lang[2:])
if lang.endswith('u'):
@@ -138,17 +139,17 @@ class PostgresLexer(PostgresBase, RegexLexer):
(r'\s+', Text),
(r'--.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(' + '|'.join([s.replace(" ", "\s+")
- for s in DATATYPES + PSEUDO_TYPES])
- + r')\b', Name.Builtin),
- (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+ (r'(' + '|'.join(s.replace(" ", "\s+")
+ for s in DATATYPES + PSEUDO_TYPES)
+ + r')\b', Name.Builtin),
+ (words(KEYWORDS, suffix=r'\b'), Keyword),
(r'[+*/<>=~!@#%^&|`?-]+', Operator),
(r'::', Operator), # cast
(r'\$\d+', Name.Variable),
(r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
(r'[0-9]+', Number.Integer),
(r"(E|U&)?'(''|[^'])*'", String.Single),
- (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
+ (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
(r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback),
(r'[a-z_]\w*', Name),
@@ -183,7 +184,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
for i, pattern in enumerate(tokens['root']):
if pattern[1] == Keyword:
tokens['root'][i] = (
- r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+ words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
Keyword)
del i
break
@@ -240,11 +241,14 @@ class lookahead(object):
def __init__(self, x):
self.iter = iter(x)
self._nextitem = None
+
def __iter__(self):
return self
+
def send(self, i):
self._nextitem = i
return i
+
def __next__(self):
if self._nextitem is not None:
ni = self._nextitem
@@ -305,12 +309,12 @@ class PostgresConsoleLexer(Lexer):
# TODO: better handle multiline comments at the end with
# a lexer with an external state?
if re_psql_command.match(curcode) \
- or re_end_command.search(curcode):
+ or re_end_command.search(curcode):
break
# Emit the combined stream of command and prompt(s)
for item in do_insertions(insertions,
- sql.get_tokens_unprocessed(curcode)):
+ sql.get_tokens_unprocessed(curcode)):
yield item
# Emit the output lines
@@ -326,7 +330,7 @@ class PostgresConsoleLexer(Lexer):
mmsg = re_message.match(line)
if mmsg is not None:
if mmsg.group(1).startswith("ERROR") \
- or mmsg.group(1).startswith("FATAL"):
+ or mmsg.group(1).startswith("FATAL"):
out_token = Generic.Error
yield (mmsg.start(1), Generic.Strong, mmsg.group(1))
yield (mmsg.start(2), out_token, mmsg.group(2))
@@ -351,91 +355,94 @@ class SqlLexer(RegexLexer):
(r'\s+', Text),
(r'--.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'multiline-comments'),
- (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|'
- r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|'
- r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|'
- r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|'
- r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|'
- r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|'
- r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|'
- r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|'
- r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|'
- r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|'
- r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|'
- r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|'
- r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|'
- r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|'
- r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|'
- r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|'
- r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|'
- r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|'
- r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|'
- r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|'
- r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|'
- r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|'
- r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|'
- r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|'
- r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|'
- r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|'
- r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|'
- r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|'
- r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|'
- r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|'
- r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|'
- r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|'
- r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|'
- r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|'
- r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|'
- r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|'
- r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|'
- r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|'
- r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|'
- r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|'
- r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|'
- r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|'
- r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|'
- r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|'
- r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|'
- r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|'
- r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|'
- r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|'
- r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|'
- r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|'
- r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|'
- r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|'
- r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|'
- r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|'
- r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|'
- r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|'
- r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|'
- r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|'
- r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|'
- r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|'
- r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|'
- r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|'
- r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|'
- r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|'
- r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|'
- r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|'
- r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|'
- r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|'
- r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|'
- r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|'
- r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|'
- r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|'
- r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|'
- r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|'
- r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword),
- (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|'
- r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|'
- r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b',
+ (words((
+ 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE',
+ 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE', 'AND', 'ANY', 'ARE', 'AS',
+ 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT', 'ASYMMETRIC', 'AT', 'ATOMIC',
+ 'AUTHORIZATION', 'AVG', 'BACKWARD', 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR',
+ 'BIT_LENGTH', 'BOTH', 'BREADTH', 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY',
+ 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN',
+ 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG',
+ 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK',
+ 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE', 'CLUSTER',
+ 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION', 'COLLATION_CATALOG',
+ 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN', 'COLUMN_NAME',
+ 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT', 'COMMIT',
+ 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT', 'CONNECTION',
+ 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRAINT_CATALOG',
+ 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA', 'CONSTRUCTOR', 'CONTAINS',
+ 'CONTINUE', 'CONVERSION', 'CONVERT', 'COPY', 'CORRESPONTING', 'COUNT',
+ 'CREATE', 'CREATEDB', 'CREATEUSER', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE',
+ 'CURRENT_PATH', 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP',
+ 'CURRENT_USER', 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE',
+ 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY',
+ 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED',
+ 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DEREF', 'DESC',
+ 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', 'DETERMINISTIC',
+ 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', 'DISTINCT', 'DO',
+ 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE',
+ 'EACH', 'ELSE', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY',
+ 'EXCEPTION', 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING',
+ 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FOR',
+ 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', 'FREEZE', 'FROM', 'FULL',
+ 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED',
+ 'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY',
+ 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', 'IN',
+ 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', 'INHERITS', 'INITIALIZE',
+ 'INITIALLY', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTANTIABLE',
+ 'INSTEAD', 'INTERSECT', 'INTO', 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN',
+ 'KEY', 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST',
+ 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', 'LISTEN', 'LOAD',
+ 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', 'LOCATOR', 'LOCK', 'LOWER',
+ 'MAP', 'MATCH', 'MAX', 'MAXVALUE', 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH',
+ 'MESSAGE_TEXT', 'METHOD', 'MIN', 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES',
+ 'MODIFY', 'MONTH', 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR',
+ 'NCLOB', 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT', 'NOTHING',
+ 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT', 'OCTET_LENGTH', 'OF', 'OFF',
+ 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY', 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS',
+ 'OR', 'ORDER', 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY', 'OVERRIDING',
+ 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE', 'PARAMATER_NAME',
+ 'PARAMATER_ORDINAL_POSITION', 'PARAMETER_SPECIFIC_CATALOG',
+ 'PARAMETER_SPECIFIC_NAME', 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL',
+ 'PASCAL', 'PENDANT', 'PLACING', 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX',
+ 'PREORDER', 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL',
+ 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES',
+ 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME', 'REPEATABLE', 'REPLACE', 'RESET',
+ 'RESTART', 'RESTRICT', 'RESULT', 'RETURN', 'RETURNED_LENGTH',
+ 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE', 'RETURNS', 'REVOKE', 'RIGHT',
+ 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE', 'ROUTINE_CATALOG', 'ROUTINE_NAME',
+ 'ROUTINE_SCHEMA', 'ROW', 'ROWS', 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA',
+ 'SCHEMA_NAME', 'SCOPE', 'SCROLL', 'SEARCH', 'SECOND', 'SECURITY', 'SELECT', 'SELF',
+ 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER', 'SET',
+ 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE', 'SOME', 'SOURCE', 'SPACE',
+ 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME', 'SQL', 'SQLCODE', 'SQLERROR',
+ 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG', 'STABLE', 'START', 'STATE', 'STATEMENT',
+ 'STATIC', 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE',
+ 'SUBCLASS_ORIGIN', 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM',
+                'SYSTEM_USER', 'TABLE', 'TABLE_NAME', 'TEMP', 'TEMPLATE', 'TEMPORARY', 'TERMINATE',
+ 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR', 'TIMEZONE_MINUTE', 'TO', 'TOAST',
+ 'TRAILING', 'TRANSATION', 'TRANSACTIONS_COMMITTED',
+ 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE', 'TRANSFORM',
+ 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', 'TRIGGER_CATALOG',
+ 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE',
+ 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN',
+ 'UNNAMED', 'UNNEST', 'UNTIL', 'UPDATE', 'UPPER', 'USAGE', 'USER',
+ 'USER_DEFINED_TYPE_CATALOG', 'USER_DEFINED_TYPE_NAME',
+ 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM', 'VALID', 'VALIDATOR', 'VALUES',
+ 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE',
+ 'WITH', 'WITHOUT', 'WORK', 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'CHARACTER', 'DATE',
+ 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER', 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL',
+ 'SERIAL', 'SMALLINT', 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
Name.Builtin),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'[0-9]+', Number.Integer),
# TODO: Backslash escapes?
(r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
- (r'[a-z_]\w*', Name),
+ (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL
+ (r'[a-z_][\w\$]*', Name), # allow $s in strings for Oracle
(r'[;:()\[\],\.]', Punctuation)
],
'multiline-comments': [
@@ -464,10 +471,9 @@ class MySqlLexer(RegexLexer):
(r'/\*', Comment.Multiline, 'multiline-comments'),
(r'[0-9]+', Number.Integer),
(r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float),
- # TODO: add backslash escapes
- (r"'(''|[^'])*'", String.Single),
- (r'"(""|[^"])*"', String.Double),
- (r"`(``|[^`])*`", String.Symbol),
+ (r"'(\\\\|\\'|''|[^'])*'", String.Single),
+ (r'"(\\\\|\\"|""|[^"])*"', String.Double),
+ (r"`(\\\\|\\`|``|[^`])*`", String.Symbol),
(r'[+*/<>=~!@#%^&|`?-]', Operator),
(r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|'
r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|'
diff --git a/pygments/lexers/tcl.py b/pygments/lexers/tcl.py
new file mode 100644
index 00000000..13d945d7
--- /dev/null
+++ b/pygments/lexers/tcl.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.tcl
+ ~~~~~~~~~~~~~~~~~~~
+
+ Lexers for Tcl and related languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from pygments.lexer import RegexLexer, include, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number
+from pygments.util import shebang_matches
+
+__all__ = ['TclLexer']
+
+
+class TclLexer(RegexLexer):
+ """
+ For Tcl source code.
+
+ .. versionadded:: 0.10
+ """
+
+ keyword_cmds_re = words((
+ 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error',
+ 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return',
+ 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable',
+ 'vwait', 'while'), prefix=r'\b', suffix=r'\b')
+
+ builtin_cmds_re = words((
+ 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict',
+ 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file',
+ 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp',
+ 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk',
+ 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc',
+ 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex',
+ 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan',
+ 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string',
+ 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b')
+
+ name = 'Tcl'
+ aliases = ['tcl']
+ filenames = ['*.tcl', '*.rvt']
+ mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl']
+
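+    # Helper building command rules for a given nesting context; the suffix
+    # selects the matching 'params-in-*' state so the closing delimiter pops
+    # back out of both the parameter state and the enclosing grouping state.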
+ def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""):
+ return [
+ (keyword_cmds_re, Keyword, 'params' + context),
+ (builtin_cmds_re, Name.Builtin, 'params' + context),
+ (r'([\w\.\-]+)', Name.Variable, 'params' + context),
+ (r'#', Comment, 'comment'),
+ ]
+
+ tokens = {
+ 'root': [
+ include('command'),
+ include('basic'),
+ include('data'),
+ (r'}', Keyword), # HACK: somehow we miscounted our braces
+ ],
+ 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re),
+ 'command-in-brace': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-brace"),
+ 'command-in-bracket': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-bracket"),
+ 'command-in-paren': _gen_command_rules(keyword_cmds_re,
+ builtin_cmds_re,
+ "-in-paren"),
+ 'basic': [
+ (r'\(', Keyword, 'paren'),
+ (r'\[', Keyword, 'bracket'),
+ (r'\{', Keyword, 'brace'),
+ (r'"', String.Double, 'string'),
+ (r'(eq|ne|in|ni)\b', Operator.Word),
+ (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator),
+ ],
+ 'data': [
+ (r'\s+', Text),
+ (r'0x[a-fA-F0-9]+', Number.Hex),
+ (r'0[0-7]+', Number.Oct),
+ (r'\d+\.\d+', Number.Float),
+ (r'\d+', Number.Integer),
+ (r'\$([\w\.\-\:]+)', Name.Variable),
+ (r'([\w\.\-\:]+)', Text),
+ ],
+ 'params': [
+ (r';', Keyword, '#pop'),
+ (r'\n', Text, '#pop'),
+ (r'(else|elseif|then)\b', Keyword),
+ include('basic'),
+ include('data'),
+ ],
+ 'params-in-brace': [
+ (r'}', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-paren': [
+ (r'\)', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'params-in-bracket': [
+ (r'\]', Keyword, ('#pop', '#pop')),
+ include('params')
+ ],
+ 'string': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double),
+ (r'"', String.Double, '#pop')
+ ],
+ 'string-square': [
+ (r'\[', String.Double, 'string-square'),
+ (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double),
+ (r'\]', String.Double, '#pop')
+ ],
+ 'brace': [
+ (r'}', Keyword, '#pop'),
+ include('command-in-brace'),
+ include('basic'),
+ include('data'),
+ ],
+ 'paren': [
+ (r'\)', Keyword, '#pop'),
+ include('command-in-paren'),
+ include('basic'),
+ include('data'),
+ ],
+ 'bracket': [
+ (r'\]', Keyword, '#pop'),
+ include('command-in-bracket'),
+ include('basic'),
+ include('data'),
+ ],
+ 'comment': [
+ (r'.*[^\\]\n', Comment, '#pop'),
+ (r'.*\\\n', Comment),
+ ],
+ }
+
+ def analyse_text(text):
+ return shebang_matches(text, r'(tcl)')
diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py
index 8d91d9d0..ee30b836 100644
--- a/pygments/lexers/templates.py
+++ b/pygments/lexers/templates.py
@@ -11,17 +11,18 @@
import re
-from pygments.lexers.web import \
- PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
-from pygments.lexers.agile import PythonLexer, PerlLexer
-from pygments.lexers.compiled import JavaLexer
-from pygments.lexers.jvm import TeaLangLexer
-from pygments.lexers.text import YamlLexer
+from pygments.lexers.html import HtmlLexer, XmlLexer
+from pygments.lexers.javascript import JavascriptLexer, LassoLexer
+from pygments.lexers.css import CssLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.python import PythonLexer
+from pygments.lexers.perl import PerlLexer
+from pygments.lexers.jvm import JavaLexer, TeaLangLexer
+from pygments.lexers.data import YamlLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
- include, using, this, default, combined
-from pygments.token import Error, Punctuation, \
- Text, Comment, Operator, Keyword, Name, String, Number, Other, Token, \
- Whitespace
+ include, using, this, default, combined
+from pygments.token import Error, Punctuation, Whitespace, \
+ Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
@@ -63,7 +64,7 @@ class ErbLexer(Lexer):
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
def __init__(self, **options):
- from pygments.lexers.agile import RubyLexer
+ from pygments.lexers.ruby import RubyLexer
self.ruby_lexer = RubyLexer(**options)
Lexer.__init__(self, **options)
@@ -106,7 +107,7 @@ class ErbLexer(Lexer):
data = tokens.pop()
r_idx = 0
for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(data):
+ self.ruby_lexer.get_tokens_unprocessed(data):
yield r_idx + idx, r_token, r_value
idx += len(data)
state = 2
@@ -119,7 +120,7 @@ class ErbLexer(Lexer):
yield idx, Comment.Preproc, tag[0]
r_idx = 0
for r_idx, r_token, r_value in \
- self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
+ self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
yield idx + 1 + r_idx, r_token, r_value
idx += len(tag)
state = 0
@@ -169,10 +170,11 @@ class SmartyLexer(RegexLexer):
],
'smarty': [
(r'\s+', Text),
- (r'\}', Comment.Preproc, '#pop'),
+ (r'{', Comment.Preproc, '#push'),
+ (r'}', Comment.Preproc, '#pop'),
(r'#[a-zA-Z_]\w*#', Name.Variable),
(r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
- (r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
+ (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
@@ -205,7 +207,7 @@ class VelocityLexer(RegexLexer):
name = 'Velocity'
aliases = ['velocity']
- filenames = ['*.vm','*.fhtml']
+ filenames = ['*.vm', '*.fhtml']
flags = re.MULTILINE | re.DOTALL
@@ -284,12 +286,12 @@ class VelocityHtmlLexer(DelegatingLexer):
name = 'HTML+Velocity'
aliases = ['html+velocity']
- alias_filenames = ['*.html','*.fhtml']
+ alias_filenames = ['*.html', '*.fhtml']
mimetypes = ['text/html+velocity']
def __init__(self, **options):
super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
- **options)
+ **options)
class VelocityXmlLexer(DelegatingLexer):
@@ -301,7 +303,7 @@ class VelocityXmlLexer(DelegatingLexer):
name = 'XML+Velocity'
aliases = ['xml+velocity']
- alias_filenames = ['*.xml','*.vm']
+ alias_filenames = ['*.xml', '*.vm']
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
@@ -653,7 +655,8 @@ class MakoHtmlLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
- **options)
+ **options)
+
class MakoXmlLexer(DelegatingLexer):
"""
@@ -669,7 +672,8 @@ class MakoXmlLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
- **options)
+ **options)
+
class MakoJavascriptLexer(DelegatingLexer):
"""
@@ -687,7 +691,8 @@ class MakoJavascriptLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
- MakoLexer, **options)
+ MakoLexer, **options)
+
class MakoCssLexer(DelegatingLexer):
"""
@@ -703,7 +708,7 @@ class MakoCssLexer(DelegatingLexer):
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
- **options)
+ **options)
# Genshi and Cheetah lexers courtesy of Matt Good.
@@ -1416,7 +1421,7 @@ class EvoqueLexer(RegexLexer):
String, Punctuation)),
# directives: evoque, overlay
# see doc for handling first name arg: /directives/evoque/
- #+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
+ # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
r'(.*?)((?(4)%)\})',
@@ -1442,6 +1447,7 @@ class EvoqueLexer(RegexLexer):
],
}
+
class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
@@ -1458,6 +1464,7 @@ class EvoqueHtmlLexer(DelegatingLexer):
super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
**options)
+
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
@@ -1474,6 +1481,7 @@ class EvoqueXmlLexer(DelegatingLexer):
super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
**options)
+
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
@@ -1554,7 +1562,7 @@ class ColdfusionMarkupLexer(RegexLexer):
(r'[^#<]+', Other),
(r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
Punctuation)),
- #(r'<cfoutput.*?>', Name.Builtin, '#push'),
+ # (r'<cfoutput.*?>', Name.Builtin, '#push'),
(r'</cfoutput.*?>', Name.Builtin, '#pop'),
include('tags'),
(r'(?s)<[^<>]*', Other),
@@ -1585,6 +1593,8 @@ class ColdfusionHtmlLexer(DelegatingLexer):
class ColdfusionCFCLexer(DelegatingLexer):
"""
Coldfusion markup/script components
+
+ .. versionadded:: 2.0
"""
name = 'Coldfusion CFC'
aliases = ['cfc']
@@ -1593,7 +1603,7 @@ class ColdfusionCFCLexer(DelegatingLexer):
def __init__(self, **options):
super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer, ColdfusionLexer,
- **options)
+ **options)
class SspLexer(DelegatingLexer):
@@ -1634,13 +1644,13 @@ class TeaTemplateRootLexer(RegexLexer):
(r'<%\S?', Keyword, 'sec'),
(r'[^<]+', Other),
(r'<', Other),
- ],
+ ],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
- ],
- }
+ ],
+ }
class TeaTemplateLexer(DelegatingLexer):
@@ -1773,12 +1783,15 @@ class LassoJavascriptLexer(DelegatingLexer):
rv += 0.2
return rv
+
class HandlebarsLexer(RegexLexer):
"""
Generic `handlebars <http://handlebarsjs.com/>` template lexer.
Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
Everything else is left for a delegating lexer.
+
+ .. versionadded:: 2.0
"""
name = "Handlebars"
@@ -1804,16 +1817,16 @@ class HandlebarsLexer(RegexLexer):
Keyword)),
# General {{#block}}
- (r'([\#/])(\w+)', bygroups(Name.Function, Name.Function)),
+ (r'([\#/])([\w-]+)', bygroups(Name.Function, Name.Function)),
# {{opt=something}}
- (r'(\w+)(=)', bygroups(Name.Attribute, Operator)),
+ (r'([\w-]+)(=)', bygroups(Name.Attribute, Operator)),
# borrowed from DjangoLexer
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z][\w-]*', Name.Variable),
- (r'\.\w+', Name.Variable),
+ (r'\.[\w-]+', Name.Variable),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
]
@@ -1824,6 +1837,8 @@ class HandlebarsHtmlLexer(DelegatingLexer):
"""
Subclass of the `HandlebarsLexer` that highlights unlexed data with the
`HtmlLexer`.
+
+ .. versionadded:: 2.0
"""
name = "HTML+Handlebars"
@@ -1912,14 +1927,14 @@ class LiquidLexer(RegexLexer):
'output': [
include('whitespace'),
- ('\}\}', Punctuation, '#pop'), # end of output
+ ('\}\}', Punctuation, '#pop'), # end of output
(r'\|', Punctuation, 'filters')
],
'filters': [
include('whitespace'),
- (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
+ (r'\}\}', Punctuation, ('#pop', '#pop')), # end of filters and output
(r'([^\s\|:]+)(:?)(\s*)',
bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
@@ -1992,22 +2007,22 @@ class LiquidLexer(RegexLexer):
'default-param-markup': [
include('param-markup'),
- (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
+ (r'.', Text) # fallback for switches / variables / un-quoted strings / ...
],
'variable-param-markup': [
include('param-markup'),
include('variable'),
- (r'.', Text) # fallback
+ (r'.', Text) # fallback
],
'tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
include('default-param-markup')
],
'variable-tag-markup': [
- (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
+ (r'%\}', Punctuation, ('#pop', '#pop')), # end of tag
include('variable-param-markup')
],
@@ -2031,7 +2046,7 @@ class LiquidLexer(RegexLexer):
(r'\d+', Number.Integer)
],
- 'generic': [ # decides for variable, string, keyword or number
+ 'generic': [ # decides for variable, string, keyword or number
include('keyword'),
include('string'),
include('number'),
diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py
new file mode 100644
index 00000000..2e769930
--- /dev/null
+++ b/pygments/lexers/testing.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.testing
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for testing languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include, bygroups, using, \
+ this, inherit, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Error
+
+__all__ = ['GherkinLexer']
+
+
+class GherkinLexer(RegexLexer):
+ """
+ For `Gherkin <http://github.com/aslakhellesoy/gherkin/>` syntax.
+
+ .. versionadded:: 1.2
+ """
+ name = 'Gherkin'
+ aliases = ['cucumber', 'gherkin']
+ filenames = ['*.feature']
+ mimetypes = ['text/x-gherkin']
+
+ feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$'
+ feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$'
+ examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$'
+ step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )'
+
+ tokens = {
+ 'comments': [
+ (r'^\s*#.*$', Comment),
+ ],
+ 'feature_elements' : [
+ (step_keywords, Keyword, "step_content_stack"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'feature_elements_on_stack' : [
+ (step_keywords, Keyword, "#pop:2"),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table': [
+ (r"\s+\|", Keyword, 'examples_table_header'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'examples_table_header': [
+ (r"\s+\|\s*$", Keyword, "#pop:2"),
+ include('comments'),
+ (r"\\\|", Name.Variable),
+ (r"\s*\|", Keyword),
+ (r"[^\|]", Name.Variable),
+ ],
+ 'scenario_sections_on_stack': [
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ "feature_elements_on_stack"),
+ ],
+ 'narrative': [
+ include('scenario_sections_on_stack'),
+ include('comments'),
+ (r"(\s|.)", Name.Function),
+ ],
+ 'table_vars': [
+ (r'(<[^>]+>)', Name.Variable),
+ ],
+ 'numbers': [
+ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String),
+ ],
+ 'string': [
+ include('table_vars'),
+ (r'(\s|.)', String),
+ ],
+ 'py_string': [
+ (r'"""', Keyword, "#pop"),
+ include('string'),
+ ],
+ 'step_content_root':[
+ (r"$", Keyword, "#pop"),
+ include('step_content'),
+ ],
+ 'step_content_stack':[
+ (r"$", Keyword, "#pop:2"),
+ include('step_content'),
+ ],
+ 'step_content':[
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ include('comments'),
+ (r'(\s|.)', Name.Function),
+ ],
+ 'table_content': [
+ (r"\s+\|\s*$", Keyword, "#pop"),
+ include('comments'),
+ (r"\\\|", String),
+ (r"\s*\|", Keyword),
+ include('string'),
+ ],
+ 'double_string': [
+ (r'"', Name.Function, "#pop"),
+ include('string'),
+ ],
+ 'root': [
+ (r'\n', Name.Function),
+ include('comments'),
+ (r'"""', Keyword, "py_string"),
+ (r'\s+\|', Keyword, 'table_content'),
+ (r'"', Name.Function, "double_string"),
+ include('table_vars'),
+ include('numbers'),
+ (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)),
+ (step_keywords, bygroups(Name.Function, Keyword),
+ 'step_content_root'),
+ (feature_keywords, bygroups(Keyword, Keyword, Name.Function),
+ 'narrative'),
+ (feature_element_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'feature_elements'),
+ (examples_keywords,
+ bygroups(Name.Function, Keyword, Keyword, Name.Function),
+ 'examples_table'),
+ (r'(\s|.)', Name.Function),
+ ]
+ }
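The GherkinLexer added above keeps its 'cucumber'/'gherkin' aliases and the '*.feature'
filename pattern, so existing lookups continue to resolve after the move into
pygments/lexers/testing.py. A small sketch of such a lookup (illustrative only, not part
of this changeset):

    from pygments import highlight
    from pygments.lexers import get_lexer_for_filename
    from pygments.formatters import TerminalFormatter

    feature = (
        "Feature: Addition\n"
        "  Scenario: Add two numbers\n"
        "    Given I have entered 50 into the calculator\n"
        "    When I press add\n"
        "    Then the result should be 120 on the screen\n"
    )
    # '*.feature' is declared in the filenames attribute of GherkinLexer above.
    lexer = get_lexer_for_filename('example.feature')
    print(highlight(feature, lexer, TerminalFormatter()))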
diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py
index 8de3ded7..3e543af8 100644
--- a/pygments/lexers/text.py
+++ b/pygments/lexers/text.py
@@ -9,2047 +9,17 @@
:license: BSD, see LICENSE for details.
"""
-import re
-from bisect import bisect
-
-from pygments.lexer import Lexer, LexerContext, RegexLexer, ExtendedRegexLexer, \
- bygroups, include, using, this, do_insertions, default
-from pygments.token import Punctuation, Text, Comment, Keyword, Name, String, \
- Generic, Operator, Number, Whitespace, Literal
-from pygments.util import get_bool_opt, ClassNotFound
-from pygments.lexers.agile import PythonLexer
-from pygments.lexers.other import BashLexer
-
-__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer',
- 'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer',
- 'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer',
- 'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer',
- 'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer',
- 'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer',
- 'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer',
- 'TodotxtLexer', 'DockerLexer']
-
-
-class IniLexer(RegexLexer):
- """
- Lexer for configuration files in INI style.
- """
-
- name = 'INI'
- aliases = ['ini', 'cfg', 'dosini']
- filenames = ['*.ini', '*.cfg']
- mimetypes = ['text/x-ini']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'\[.*?\]$', Keyword),
- (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)',
- bygroups(Name.Attribute, Text, Operator, Text, String))
- ]
- }
-
- def analyse_text(text):
- npos = text.find('\n')
- if npos < 3:
- return False
- return text[0] == '[' and text[npos-1] == ']'
-
-
-class RegeditLexer(RegexLexer):
- """
- Lexer for `Windows Registry
- <http://en.wikipedia.org/wiki/Windows_Registry#.REG_files>`_ files produced
- by regedit.
-
- .. versionadded:: 1.6
- """
-
- name = 'reg'
- aliases = ['registry']
- filenames = ['*.reg']
- mimetypes = ['text/x-windows-registry']
-
- tokens = {
- 'root': [
- (r'Windows Registry Editor.*', Text),
- (r'\s+', Text),
- (r'[;#].*', Comment.Single),
- (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$',
- bygroups(Keyword, Operator, Name.Builtin, Keyword)),
- # String keys, which obey somewhat normal escaping
- (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- # Bare keys (includes @)
- (r'(.*?)([ \t]*)(=)([ \t]*)',
- bygroups(Name.Attribute, Text, Operator, Text),
- 'value'),
- ],
- 'value': [
- (r'-', Operator, '#pop'), # delete value
- (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)',
- bygroups(Name.Variable, Punctuation, Number), '#pop'),
- # As far as I know, .reg files do not support line continuation.
- (r'.*', String, '#pop'),
- ]
- }
-
- def analyse_text(text):
- return text.startswith('Windows Registry Editor')
-
-
-class PropertiesLexer(RegexLexer):
- """
- Lexer for configuration files in Java's properties format.
-
- .. versionadded:: 1.4
- """
-
- name = 'Properties'
- aliases = ['properties', 'jproperties']
- filenames = ['*.properties']
- mimetypes = ['text/x-java-properties']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(?:[;#]|//).*$', Comment),
- (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)',
- bygroups(Name.Attribute, Text, Operator, Text, String)),
- ],
- }
-
-
-class SourcesListLexer(RegexLexer):
- """
- Lexer that highlights debian sources.list files.
-
- .. versionadded:: 0.7
- """
-
- name = 'Debian Sourcelist'
- aliases = ['sourceslist', 'sources.list', 'debsources']
- filenames = ['sources.list']
- mimetype = ['application/x-debian-sourceslist']
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'#.*?$', Comment),
- (r'^(deb(?:-src)?)(\s+)',
- bygroups(Keyword, Text), 'distribution')
- ],
- 'distribution': [
- (r'#.*?$', Comment, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\s$[]+', String),
- (r'\[', String.Other, 'escaped-distribution'),
- (r'\$', String),
- (r'\s+', Text, 'components')
- ],
- 'escaped-distribution': [
- (r'\]', String.Other, '#pop'),
- (r'\$\(ARCH\)', Name.Variable),
- (r'[^\]$]+', String.Other),
- (r'\$', String.Other)
- ],
- 'components': [
- (r'#.*?$', Comment, '#pop:2'),
- (r'$', Text, '#pop:2'),
- (r'\s+', Text),
- (r'\S+', Keyword.Pseudo),
- ]
- }
-
- def analyse_text(text):
- for line in text.split('\n'):
- line = line.strip()
- if not (line.startswith('#') or line.startswith('deb ') or
- line.startswith('deb-src ') or not line):
- return False
- return True
-
-
-class MakefileLexer(Lexer):
- """
- Lexer for BSD and GNU make extensions (lenient enough to handle both in
- the same file even).
-
- *Rewritten in Pygments 0.10.*
- """
-
- name = 'Makefile'
- aliases = ['make', 'makefile', 'mf', 'bsdmake']
- filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile']
- mimetypes = ['text/x-makefile']
-
- r_special = re.compile(r'^(?:'
- # BSD Make
- r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
- # GNU Make
- r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)')
- r_comment = re.compile(r'^\s*@?#')
-
- def get_tokens_unprocessed(self, text):
- ins = []
- lines = text.splitlines(True)
- done = ''
- lex = BaseMakefileLexer(**self.options)
- backslashflag = False
- for line in lines:
- if self.r_special.match(line) or backslashflag:
- ins.append((len(done), [(0, Comment.Preproc, line)]))
- backslashflag = line.strip().endswith('\\')
- elif self.r_comment.match(line):
- ins.append((len(done), [(0, Comment, line)]))
- else:
- done += line
- for item in do_insertions(ins, lex.get_tokens_unprocessed(done)):
- yield item
-
- def analyse_text(text):
- # Many makefiles have $(BIG_CAPS) style variables
- if re.search(r'\$\([A-Z_]+\)', text):
- return 0.1
-
-
-class BaseMakefileLexer(RegexLexer):
- """
- Lexer for simple Makefiles (no preprocessing).
-
- .. versionadded:: 0.10
- """
-
- name = 'Base Makefile'
- aliases = ['basemake']
- filenames = []
- mimetypes = []
-
- tokens = {
- 'root': [
- # recipes (need to allow spaces because of expandtabs)
- (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
- # special variables
- (r'\$[<@$+%?|*]', Keyword),
- (r'\s+', Text),
- (r'#.*?\n', Comment),
- (r'(export)(\s+)(?=[\w${}\t -]+\n)',
- bygroups(Keyword, Text), 'export'),
- (r'export\s+', Keyword),
- # assignment
- (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
- bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
- # strings
- (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double),
- (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single),
- # targets
- (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
- 'block-header'),
- # expansions
- (r'\$\(', Keyword, 'expansion'),
- ],
- 'expansion': [
- (r'[^$a-zA-Z_)]+', Text),
- (r'[a-zA-Z_]+', Name.Variable),
- (r'\$', Keyword),
- (r'\(', Keyword, '#push'),
- (r'\)', Keyword, '#pop'),
- ],
- 'export': [
- (r'[\w${}-]+', Name.Variable),
- (r'\n', Text, '#pop'),
- (r'\s+', Text),
- ],
- 'block-header': [
- (r'[,|]', Punctuation),
- (r'#.*?\n', Comment, '#pop'),
- (r'\\\n', Text), # line continuation
- (r'\$\(', Keyword, 'expansion'),
- (r'[a-zA-Z_]+', Name),
- (r'\n', Text, '#pop'),
- (r'.', Text),
- ],
- }
-
-
-class DiffLexer(RegexLexer):
- """
- Lexer for unified or context-style diffs or patches.
- """
-
- name = 'Diff'
- aliases = ['diff', 'udiff']
- filenames = ['*.diff', '*.patch']
- mimetypes = ['text/x-diff', 'text/x-patch']
-
- tokens = {
- 'root': [
- (r' .*\n', Text),
- (r'\+.*\n', Generic.Inserted),
- (r'-.*\n', Generic.Deleted),
- (r'!.*\n', Generic.Strong),
- (r'@.*\n', Generic.Subheading),
- (r'([Ii]ndex|diff).*\n', Generic.Heading),
- (r'=.*\n', Generic.Heading),
- (r'.*\n', Text),
- ]
- }
-
- def analyse_text(text):
- if text[:7] == 'Index: ':
- return True
- if text[:5] == 'diff ':
- return True
- if text[:4] == '--- ':
- return 0.9
-
-
-DPATCH_KEYWORDS = ['hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move',
- 'replace']
-
-class DarcsPatchLexer(RegexLexer):
- """
- DarcsPatchLexer is a lexer for the various versions of the darcs patch
- format. Examples of this format are derived by commands such as
- ``darcs annotate --patch`` and ``darcs send``.
-
- .. versionadded:: 0.10
- """
- name = 'Darcs Patch'
- aliases = ['dpatch']
- filenames = ['*.dpatch', '*.darcspatch']
-
- tokens = {
- 'root': [
- (r'<', Operator),
- (r'>', Operator),
- (r'{', Operator),
- (r'}', Operator),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text, Operator)),
- (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)',
- bygroups(Operator, Keyword, Name, Text, Name, Operator,
- Literal.Date, Text), 'comment'),
- (r'New patches:', Generic.Heading),
- (r'Context:', Generic.Heading),
- (r'Patch bundle hash:', Generic.Heading),
- (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS),
- bygroups(Text, Keyword, Text)),
- (r'\+', Generic.Inserted, "insert"),
- (r'-', Generic.Deleted, "delete"),
- (r'.*\n', Text),
- ],
- 'comment': [
- (r'[^\]].*\n', Comment),
- (r'\]', Operator, "#pop"),
- ],
- 'specialText': [ # darcs add [_CODE_] special operators for clarity
- (r'\n', Text, "#pop"), # line-based
- (r'\[_[^_]*_]', Operator),
- ],
- 'insert': [
- include('specialText'),
- (r'\[', Generic.Inserted),
- (r'[^\n\[]+', Generic.Inserted),
- ],
- 'delete': [
- include('specialText'),
- (r'\[', Generic.Deleted),
- (r'[^\n\[]+', Generic.Deleted),
- ],
- }
-
-
-class IrcLogsLexer(RegexLexer):
- """
- Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
- """
-
- name = 'IRC logs'
- aliases = ['irc']
- filenames = ['*.weechatlog']
- mimetypes = ['text/x-irclog']
-
- flags = re.VERBOSE | re.MULTILINE
- timestamp = r"""
- (
- # irssi / xchat and others
- (?: \[|\()? # Opening bracket or paren for the timestamp
- (?: # Timestamp
- (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits
- [T ])? # Date/time separator: T or space
- (?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits
- )
- (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
- |
- # weechat
- \d{4}\s\w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- |
- # xchat
- \w{3}\s\d{2}\s # Date
- \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
- )?
- """
- tokens = {
- 'root': [
- # log start/end
- (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
- # hack
- ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
- # normal msgs
- ("^" + timestamp + r"""
- (\s*<.*?>\s*) # Nick """,
- bygroups(Comment.Preproc, Name.Tag), 'msg'),
- # /me msgs
- ("^" + timestamp + r"""
- (\s*[*]\s+) # Star
- (\S+\s+.*?\n) # Nick + rest of message """,
- bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
- # join/part msgs
- ("^" + timestamp + r"""
- (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
- (\S+\s+) # Nick + Space
- (.*?\n) # Rest of message """,
- bygroups(Comment.Preproc, Keyword, String, Comment)),
- (r"^.*?\n", Text),
- ],
- 'msg': [
- (r"\S+:(?!//)", Name.Attribute), # Prefix
- (r".*\n", Text, '#pop'),
- ],
- }
-
-
-class BBCodeLexer(RegexLexer):
- """
- A lexer that highlights BBCode(-like) syntax.
-
- .. versionadded:: 0.6
- """
-
- name = 'BBCode'
- aliases = ['bbcode']
- mimetypes = ['text/x-bbcode']
-
- tokens = {
- 'root': [
- (r'[^[]+', Text),
- # tag/end tag begin
- (r'\[/?\w+', Keyword, 'tag'),
- # stray bracket
- (r'\[', Text),
- ],
- 'tag': [
- (r'\s+', Text),
- # attribute with value
- (r'(\w+)(=)("?[^\s"\]]+"?)',
- bygroups(Name.Attribute, Operator, String)),
- # tag argument (a la [color=green])
- (r'(=)("?[^\s"\]]+"?)',
- bygroups(Operator, String)),
- # tag end
- (r'\]', Keyword, '#pop'),
- ],
- }
-
-
-class TexLexer(RegexLexer):
- """
- Lexer for the TeX and LaTeX typesetting languages.
- """
-
- name = 'TeX'
- aliases = ['tex', 'latex']
- filenames = ['*.tex', '*.aux', '*.toc']
- mimetypes = ['text/x-tex', 'text/x-latex']
-
- tokens = {
- 'general': [
- (r'%.*?\n', Comment),
- (r'[{}]', Name.Builtin),
- (r'[&_^]', Name.Builtin),
- ],
- 'root': [
- (r'\\\[', String.Backtick, 'displaymath'),
- (r'\\\(', String, 'inlinemath'),
- (r'\$\$', String.Backtick, 'displaymath'),
- (r'\$', String, 'inlinemath'),
- (r'\\([a-zA-Z]+|.)', Keyword, 'command'),
- include('general'),
- (r'[^\\$%&_^{}]+', Text),
- ],
- 'math': [
- (r'\\([a-zA-Z]+|.)', Name.Variable),
- include('general'),
- (r'[0-9]+', Number),
- (r'[-=!+*/()\[\]]', Operator),
- (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin),
- ],
- 'inlinemath': [
- (r'\\\)', String, '#pop'),
- (r'\$', String, '#pop'),
- include('math'),
- ],
- 'displaymath': [
- (r'\\\]', String, '#pop'),
- (r'\$\$', String, '#pop'),
- (r'\$', Name.Builtin),
- include('math'),
- ],
- 'command': [
- (r'\[.*?\]', Name.Attribute),
- (r'\*', Keyword),
- default('#pop'),
- ],
- }
-
- def analyse_text(text):
- for start in ("\\documentclass", "\\input", "\\documentstyle",
- "\\relax"):
- if text[:len(start)] == start:
- return True
-
-
-class GroffLexer(RegexLexer):
- """
- Lexer for the (g)roff typesetting language, supporting groff
- extensions. Mainly useful for highlighting manpage sources.
-
- .. versionadded:: 0.6
- """
-
- name = 'Groff'
- aliases = ['groff', 'nroff', 'man']
- filenames = ['*.[1234567]', '*.man']
- mimetypes = ['application/x-troff', 'text/troff']
-
- tokens = {
- 'root': [
- (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'),
- (r'\.', Punctuation, 'request'),
- # Regular characters, slurp till we find a backslash or newline
- (r'[^\\\n]*', Text, 'textline'),
- ],
- 'textline': [
- include('escapes'),
- (r'[^\\\n]+', Text),
- (r'\n', Text, '#pop'),
- ],
- 'escapes': [
- # groff has many ways to write escapes.
- (r'\\"[^\n]*', Comment),
- (r'\\[fn]\w', String.Escape),
- (r'\\\(.{2}', String.Escape),
- (r'\\.\[.*\]', String.Escape),
- (r'\\.', String.Escape),
- (r'\\\n', Text, 'request'),
- ],
- 'request': [
- (r'\n', Text, '#pop'),
- include('escapes'),
- (r'"[^\n"]+"', String.Double),
- (r'\d+', Number),
- (r'\S+', String),
- (r'\s+', Text),
- ],
- }
-
- def analyse_text(text):
- if text[:1] != '.':
- return False
- if text[:3] == '.\\"':
- return True
- if text[:4] == '.TH ':
- return True
- if text[1:3].isalnum() and text[3].isspace():
- return 0.9
-
-
-class ApacheConfLexer(RegexLexer):
- """
- Lexer for configuration files following the Apache config file
- format.
-
- .. versionadded:: 0.6
- """
-
- name = 'ApacheConf'
- aliases = ['apacheconf', 'aconf', 'apache']
- filenames = ['.htaccess', 'apache.conf', 'apache2.conf']
- mimetypes = ['text/x-apacheconf']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(#.*?)$', Comment),
- (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)',
- bygroups(Name.Tag, Text, String, Name.Tag)),
- (r'([a-z]\w*)(\s+)',
- bygroups(Name.Builtin, Text), 'value'),
- (r'\.+', Text),
- ],
- 'value': [
- (r'$', Text, '#pop'),
- (r'[^\S\n]+', Text),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'\d+', Number),
- (r'/([a-z0-9][\w./-]+)', String.Other),
- (r'(on|off|none|any|all|double|email|dns|min|minimal|'
- r'os|productonly|full|emerg|alert|crit|error|warn|'
- r'notice|info|debug|registry|script|inetd|standalone|'
- r'user|group)\b', Keyword),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'[^\s"]+', Text)
- ]
- }
-
-
-class MoinWikiLexer(RegexLexer):
- """
- For MoinMoin (and Trac) Wiki markup.
-
- .. versionadded:: 0.7
- """
-
- name = 'MoinMoin/Trac Wiki markup'
- aliases = ['trac-wiki', 'moin']
- filenames = []
- mimetypes = ['text/x-trac-wiki']
- flags = re.MULTILINE | re.IGNORECASE
-
- tokens = {
- 'root': [
- (r'^#.*$', Comment),
- (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next
- # Titles
- (r'^(=+)([^=]+)(=+)(\s*#.+)?$',
- bygroups(Generic.Heading, using(this), Generic.Heading, String)),
- # Literal code blocks, with optional shebang
- (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'),
- (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting
- # Lists
- (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)),
- (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)),
- # Other Formatting
- (r'\[\[\w+.*?\]\]', Keyword), # Macro
- (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])',
- bygroups(Keyword, String, Keyword)), # Link
- (r'^----+$', Keyword), # Horizontal rules
- (r'[^\n\'\[{!_~^,|]+', Text),
- (r'\n', Text),
- (r'.', Text),
- ],
- 'codeblock': [
- (r'}}}', Name.Builtin, '#pop'),
- # these blocks are allowed to be nested in Trac, but not MoinMoin
- (r'{{{', Text, '#push'),
- (r'[^{}]+', Comment.Preproc), # slurp boring text
- (r'.', Comment.Preproc), # allow loose { or }
- ],
- }
-
-
-class RstLexer(RegexLexer):
- """
- For `reStructuredText <http://docutils.sf.net/rst.html>`_ markup.
-
- .. versionadded:: 0.7
-
- Additional options accepted:
-
- `handlecodeblocks`
- Highlight the contents of ``.. sourcecode:: language``,
- ``.. code:: language`` and ``.. code-block:: language``
- directives with a lexer for the given language (default:
- ``True``).
-
- .. versionadded:: 0.8
- """
- name = 'reStructuredText'
- aliases = ['rst', 'rest', 'restructuredtext']
- filenames = ['*.rst', '*.rest']
- mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"]
- flags = re.MULTILINE
-
- def _handle_sourcecode(self, match):
- from pygments.lexers import get_lexer_by_name
-
- # section header
- yield match.start(1), Punctuation, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator.Word, match.group(3)
- yield match.start(4), Punctuation, match.group(4)
- yield match.start(5), Text, match.group(5)
- yield match.start(6), Keyword, match.group(6)
- yield match.start(7), Text, match.group(7)
-
- # lookup lexer if wanted and existing
- lexer = None
- if self.handlecodeblocks:
- try:
- lexer = get_lexer_by_name(match.group(6).strip())
- except ClassNotFound:
- pass
- indention = match.group(8)
- indention_size = len(indention)
- code = (indention + match.group(9) + match.group(10) + match.group(11))
-
- # no lexer for this language. handle it like it was a code block
- if lexer is None:
- yield match.start(8), String, code
- return
-
- # highlight the lines with the lexer.
- ins = []
- codelines = code.splitlines(True)
- code = ''
- for line in codelines:
- if len(line) > indention_size:
- ins.append((len(code), [(0, Text, line[:indention_size])]))
- code += line[indention_size:]
- else:
- code += line
- for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)):
- yield item
-
- # from docutils.parsers.rst.states
- closers = u'\'")]}>\u2019\u201d\xbb!?'
- unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0'
- end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))'
- % (re.escape(unicode_delimiters),
- re.escape(closers)))
-
- tokens = {
- 'root': [
- # Heading with overline
- (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)'
- r'(.+)(\n)(\1)(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading,
- Text, Generic.Heading, Text)),
- # Plain heading
- (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|'
- r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)',
- bygroups(Generic.Heading, Text, Generic.Heading, Text)),
- # Bulleted lists
- (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered lists
- (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Numbered, but keep words at BOL from becoming lists
- (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)',
- bygroups(Text, Number, using(this, state='inline'))),
- # Line blocks
- (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)',
- bygroups(Text, Operator, using(this, state='inline'))),
- # Sourcecode directives
- (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)'
- r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)',
- _handle_sourcecode),
- # A directive
- (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Operator.Word, Punctuation, Text,
- using(this, state='inline'))),
- # A reference target
- (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A footnote/citation target
- (r'^( *\.\.)(\s*)(\[.+\])(.*?)$',
- bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))),
- # A substitution def
- (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))',
- bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word,
- Punctuation, Text, using(this, state='inline'))),
- # Comments
- (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc),
- # Field list
- (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)),
- (r'^( *)(:.*?:)([ \t]+)(.*?)$',
- bygroups(Text, Name.Class, Text, Name.Function)),
- # Definition list
- (r'^(\S.*(?<!::)\n)((?:(?: +.*)\n)+)',
- bygroups(using(this, state='inline'), using(this, state='inline'))),
- # Code blocks
- (r'(::)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\3.*|)\n)+)',
- bygroups(String.Escape, Text, String, String, Text, String)),
- include('inline'),
- ],
- 'inline': [
- (r'\\.', Text), # escape
- (r'``', String, 'literal'), # code
- (r'(`.+?)(<.+?>)(`__?)', # reference with inline target
- bygroups(String, String.Interpol, String)),
- (r'`.+?`__?', String), # reference
- (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?',
- bygroups(Name.Variable, Name.Attribute)), # role
- (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)',
- bygroups(Name.Attribute, Name.Variable)), # role (content first)
- (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis
- (r'\*.+?\*', Generic.Emph), # Emphasis
- (r'\[.*?\]_', String), # Footnote or citation
- (r'<.+?>', Name.Tag), # Hyperlink
- (r'[^\\\n\[*`:]+', Text),
- (r'.', Text),
- ],
- 'literal': [
- (r'[^`]+', String),
- (r'``' + end_string_suffix, String, '#pop'),
- (r'`', String),
- ]
- }
-
- def __init__(self, **options):
- self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True)
- RegexLexer.__init__(self, **options)
-
- def analyse_text(text):
- if text[:2] == '..' and text[2:3] != '.':
- return 0.3
- p1 = text.find("\n")
- p2 = text.find("\n", p1 + 1)
- if (p2 > -1 and # has two lines
- p1 * 2 + 1 == p2 and # they are the same length
- text[p1+1] in '-=' and # the next line both starts and ends with
- text[p1+1] == text[p2-1]): # ...a sufficiently high header
- return 0.5
-
-
-class VimLexer(RegexLexer):
- """
- Lexer for VimL script files.
-
- .. versionadded:: 0.8
- """
- name = 'VimL'
- aliases = ['vim']
- filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
- '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
- mimetypes = ['text/x-vim']
- flags = re.MULTILINE
-
- _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
-
- tokens = {
- 'root': [
- (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
- bygroups(using(this), Keyword, Text, Operator, Text, Text,
- using(PythonLexer), Text)),
- (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
- bygroups(using(this), Keyword, Text, using(PythonLexer))),
-
- (r'^\s*".*', Comment),
-
- (r'[ \t]+', Text),
- # TODO: regexes can have other delims
- (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
- (r'"(\\\\|\\"|[^\n"])*"', String.Double),
- (r"'(''|[^\n'])*'", String.Single),
-
- # Who decided that doublequote was a good comment character??
- (r'(?<=\s)"[^\-:.%#=*].*', Comment),
- (r'-?\d+', Number),
- (r'#[0-9a-f]{6}', Number.Hex),
- (r'^:', Punctuation),
- (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
- (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
- Keyword),
- (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
- (r'\b\w+\b', Name.Other), # These are postprocessed below
- (r'.', Text),
- ],
- }
- def __init__(self, **options):
- from pygments.lexers._vimbuiltins import command, option, auto
- self._cmd = command
- self._opt = option
- self._aut = auto
-
- RegexLexer.__init__(self, **options)
-
- def is_in(self, w, mapping):
- r"""
- It's kind of difficult to decide if something might be a keyword
- in VimL because it allows you to abbreviate them. In fact,
- 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
- valid ways to call it so rather than making really awful regexps
- like::
-
- \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
-
- we match `\b\w+\b` and then call is_in() on those tokens. See
- `scripts/get_vimkw.py` for how the lists are extracted.
- """
- p = bisect(mapping, (w,))
- if p > 0:
- if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
- mapping[p-1][1][:len(w)] == w: return True
- if p < len(mapping):
- return mapping[p][0] == w[:len(mapping[p][0])] and \
- mapping[p][1][:len(w)] == w
- return False
-
- def get_tokens_unprocessed(self, text):
- # TODO: builtins are only subsequent tokens on lines
- # and 'keywords' only happen at the beginning except
- # for :au ones
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text):
- if token is Name.Other:
- if self.is_in(value, self._cmd):
- yield index, Keyword, value
- elif self.is_in(value, self._opt) or \
- self.is_in(value, self._aut):
- yield index, Name.Builtin, value
- else:
- yield index, Text, value
- else:
- yield index, token, value
-
-
-class GettextLexer(RegexLexer):
- """
- Lexer for Gettext catalog files.
-
- .. versionadded:: 0.9
- """
- name = 'Gettext Catalog'
- aliases = ['pot', 'po']
- filenames = ['*.pot', '*.po']
- mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
-
- tokens = {
- 'root': [
- (r'^#,\s.*?$', Keyword.Type),
- (r'^#:\s.*?$', Keyword.Declaration),
- #(r'^#$', Comment),
- (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
- (r'^(")([A-Za-z-]+:)(.*")$',
- bygroups(String, Name.Property, String)),
- (r'^".*"$', String),
- (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
- bygroups(Name.Variable, Text, String)),
- (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
- bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
- ]
- }
-
-
-class SquidConfLexer(RegexLexer):
- """
- Lexer for `squid <http://www.squid-cache.org/>`_ configuration files.
-
- .. versionadded:: 0.9
- """
-
- name = 'SquidConf'
- aliases = ['squidconf', 'squid.conf', 'squid']
- filenames = ['squid.conf']
- mimetypes = ['text/x-squidconf']
- flags = re.IGNORECASE
-
- keywords = [
- "access_log", "acl", "always_direct", "announce_host",
- "announce_period", "announce_port", "announce_to", "anonymize_headers",
- "append_domain", "as_whois_server", "auth_param_basic",
- "authenticate_children", "authenticate_program", "authenticate_ttl",
- "broken_posts", "buffered_logs", "cache_access_log", "cache_announce",
- "cache_dir", "cache_dns_program", "cache_effective_group",
- "cache_effective_user", "cache_host", "cache_host_acl",
- "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high",
- "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer",
- "cache_peer_access", "cahce_replacement_policy", "cache_stoplist",
- "cache_stoplist_pattern", "cache_store_log", "cache_swap",
- "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db",
- "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir",
- "dead_peer_timeout", "debug_options", "delay_access", "delay_class",
- "delay_initial_bucket_level", "delay_parameters", "delay_pools",
- "deny_info", "dns_children", "dns_defnames", "dns_nameservers",
- "dns_testnames", "emulate_httpd_log", "err_html_text",
- "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port",
- "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width",
- "ftp_passive", "ftp_user", "half_closed_clients", "header_access",
- "header_replace", "hierarchy_stoplist", "high_response_time_warning",
- "high_page_fault_warning", "hosts_file", "htcp_port", "http_access",
- "http_anonymizer", "httpd_accel", "httpd_accel_host",
- "httpd_accel_port", "httpd_accel_uses_host_header",
- "httpd_accel_with_proxy", "http_port", "http_reply_access",
- "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout",
- "ident_lookup", "ident_lookup_access", "ident_timeout",
- "incoming_http_average", "incoming_icp_average", "inside_firewall",
- "ipcache_high", "ipcache_low", "ipcache_size", "local_domain",
- "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries",
- "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries",
- "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr",
- "mcast_miss_encode_key", "mcast_miss_port", "memory_pools",
- "memory_pools_limit", "memory_replacement_policy", "mime_table",
- "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops",
- "minimum_object_size", "minimum_retry_timeout", "miss_access",
- "negative_dns_ttl", "negative_ttl", "neighbor_timeout",
- "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period",
- "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy",
- "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl",
- "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp",
- "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min",
- "quick_abort_pct", "range_offset_limit", "read_timeout",
- "redirect_children", "redirect_program",
- "redirect_rewrites_host_header", "reference_age", "reference_age",
- "refresh_pattern", "reload_into_ims", "request_body_max_size",
- "request_size", "request_timeout", "shutdown_lifetime",
- "single_parent_bypass", "siteselect_timeout", "snmp_access",
- "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy",
- "store_avg_object_size", "store_objects_per_bucket",
- "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs",
- "tcp_incoming_address", "tcp_outgoing_address", "tcp_recv_bufsize",
- "test_reachability", "udp_hit_obj", "udp_hit_obj_size",
- "udp_incoming_address", "udp_outgoing_address", "unique_hostname",
- "unlinkd_program", "uri_whitespace", "useragent_log",
- "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port",
- ]
-
- opts = [
- "proxy-only", "weight", "ttl", "no-query", "default", "round-robin",
- "multicast-responder", "on", "off", "all", "deny", "allow", "via",
- "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2",
- "credentialsttl", "none", "disable", "offline_toggle", "diskd",
- ]
-
- actions = [
- "shutdown", "info", "parameter", "server_list", "client_list",
- r'squid\.conf',
- ]
-
- actions_stats = [
- "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns",
- "redirector", "io", "reply_headers", "filedescriptors", "netdb",
- ]
-
- actions_log = ["status", "enable", "disable", "clear"]
-
- acls = [
- "url_regex", "urlpath_regex", "referer_regex", "port", "proto",
- "req_mime_type", "rep_mime_type", "method", "browser", "user", "src",
- "dst", "time", "dstdomain", "ident", "snmp_community",
- ]
-
- ip_re = (
- r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|'
- r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|'
- r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|'
- r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?<!::):)){6}(?:[0-9a-f]{0,4}'
- r'(?:(?<=::)|(?<!::):)[0-9a-f]{0,4}(?:(?<=::)|(?<!:)|(?<=:)(?<!::):)|'
- r'(?:25[0-4]|2[0-4]\d|1\d\d|[1-9]?\d)(?:\.(?:25[0-4]|2[0-4]\d|1\d\d|'
- r'[1-9]?\d)){3}))'
- )
-
- def makelistre(list):
- return r'\b(?:' + '|'.join(list) + r')\b'
-
- tokens = {
- 'root': [
- (r'\s+', Whitespace),
- (r'#', Comment, 'comment'),
- (makelistre(keywords), Keyword),
- (makelistre(opts), Name.Constant),
- # Actions
- (makelistre(actions), String),
- (r'stats/'+makelistre(actions), String),
- (r'log/'+makelistre(actions)+r'=', String),
- (makelistre(acls), Keyword),
- (ip_re + r'(?:/(?:' + ip_re + r'|\b\d+\b))?', Number.Float),
- (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
- (r'\S+', Text),
- ],
- 'comment': [
- (r'\s*TAG:.*', String.Escape, '#pop'),
- (r'.*', Comment, '#pop'),
- ],
- }
-
-
-class DebianControlLexer(RegexLexer):
- """
- Lexer for Debian ``control`` files and ``apt-cache show <pkg>`` outputs.
-
- .. versionadded:: 0.9
- """
- name = 'Debian Control file'
- aliases = ['control', 'debcontrol']
- filenames = ['control']
-
- tokens = {
- 'root': [
- (r'^(Description)', Keyword, 'description'),
- (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'),
- (r'^((Build-)?Depends)', Keyword, 'depends'),
- (r'^((?:Python-)?Version)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^((?:Installed-)?Size)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$',
- bygroups(Keyword, Text, Number)),
- (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$',
- bygroups(Keyword, Whitespace, String)),
- ],
- 'maintainer': [
- (r'<[^>]+>', Generic.Strong),
- (r'<[^>]+>$', Generic.Strong, '#pop'),
- (r',\n?', Text),
- (r'.', Text),
- ],
- 'description': [
- (r'(.*)(Homepage)(: )(\S+)',
- bygroups(Text, String, Name, Name.Class)),
- (r':.*\n', Generic.Strong),
- (r' .*\n', Text),
- ('', Text, '#pop'),
- ],
- 'depends': [
- (r':\s*', Text),
- (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)),
- (r'\(', Text, 'depend_vers'),
- (r',', Text),
- (r'\|', Operator),
- (r'[\s]+', Text),
- (r'[}\)]\s*$', Text, '#pop'),
- (r'}', Text),
- (r'[^,]$', Name.Function, '#pop'),
- (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)),
- (r'\[.*?\]', Name.Entity),
- ],
- 'depend_vers': [
- (r'\),', Text, '#pop'),
- (r'\)[^,]', Text, '#pop:2'),
- (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number))
- ]
- }
-
-
-class YamlLexerContext(LexerContext):
- """Indentation context for the YAML lexer."""
-
- def __init__(self, *args, **kwds):
- super(YamlLexerContext, self).__init__(*args, **kwds)
- self.indent_stack = []
- self.indent = -1
- self.next_indent = 0
- self.block_scalar_indent = None
-
-
-class YamlLexer(ExtendedRegexLexer):
- """
- Lexer for `YAML <http://yaml.org/>`_, a human-friendly data serialization
- language.
-
- .. versionadded:: 0.11
- """
-
- name = 'YAML'
- aliases = ['yaml']
- filenames = ['*.yaml', '*.yml']
- mimetypes = ['text/x-yaml']
-
-
- def something(token_class):
- """Do not produce empty tokens."""
- def callback(lexer, match, context):
- text = match.group()
- if not text:
- return
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def reset_indent(token_class):
- """Reset the indentation levels."""
- def callback(lexer, match, context):
- text = match.group()
- context.indent_stack = []
- context.indent = -1
- context.next_indent = 0
- context.block_scalar_indent = None
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def save_indent(token_class, start=False):
- """Save a possible indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- extra = ''
- if start:
- context.next_indent = len(text)
- if context.next_indent < context.indent:
- while context.next_indent < context.indent:
- context.indent = context.indent_stack.pop()
- if context.next_indent > context.indent:
- extra = text[context.indent:]
- text = text[:context.indent]
- else:
- context.next_indent += len(text)
- if text:
- yield match.start(), token_class, text
- if extra:
- yield match.start()+len(text), token_class.Error, extra
- context.pos = match.end()
- return callback
-
- def set_indent(token_class, implicit=False):
- """Set the previously saved indentation level."""
- def callback(lexer, match, context):
- text = match.group()
- if context.indent < context.next_indent:
- context.indent_stack.append(context.indent)
- context.indent = context.next_indent
- if not implicit:
- context.next_indent += len(text)
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def set_block_scalar_indent(token_class):
- """Set an explicit indentation level for a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- context.block_scalar_indent = None
- if not text:
- return
- increment = match.group(1)
- if increment:
- current_indent = max(context.indent, 0)
- increment = int(increment)
- context.block_scalar_indent = current_indent + increment
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_empty_line(indent_token_class, content_token_class):
- """Process an empty line in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if (context.block_scalar_indent is None or
- len(text) <= context.block_scalar_indent):
- if text:
- yield match.start(), indent_token_class, text
- else:
- indentation = text[:context.block_scalar_indent]
- content = text[context.block_scalar_indent:]
- yield match.start(), indent_token_class, indentation
- yield (match.start()+context.block_scalar_indent,
- content_token_class, content)
- context.pos = match.end()
- return callback
-
- def parse_block_scalar_indent(token_class):
- """Process indentation spaces in a block scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if context.block_scalar_indent is None:
- if len(text) <= max(context.indent, 0):
- context.stack.pop()
- context.stack.pop()
- return
- context.block_scalar_indent = len(text)
- else:
- if len(text) < context.block_scalar_indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
- def parse_plain_scalar_indent(token_class):
- """Process indentation spaces in a plain scalar."""
- def callback(lexer, match, context):
- text = match.group()
- if len(text) <= context.indent:
- context.stack.pop()
- context.stack.pop()
- return
- if text:
- yield match.start(), token_class, text
- context.pos = match.end()
- return callback
-
-
-
- tokens = {
- # the root rules
- 'root': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # the '%YAML' directive
- (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'),
- # the %TAG directive
- (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'),
- # document start and document end indicators
- (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace),
- 'block-line'),
- # indentation spaces
- (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True),
- ('block-line', 'indentation')),
- ],
-
- # trailing whitespaces after directives or a block scalar indicator
- 'ignored-line': [
- # ignored whitespaces
- (r'[ ]+(?=#|$)', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # line break
- (r'\n', Text, '#pop:2'),
- ],
-
- # the %YAML directive
- 'yaml-directive': [
- # the version number
- (r'([ ]+)([0-9]+\.[0-9]+)',
- bygroups(Text, Number), 'ignored-line'),
- ],
-
-    # the %TAG directive
- 'tag-directive': [
- # a tag handle and the corresponding prefix
- (r'([ ]+)(!|![0-9A-Za-z_-]*!)'
- r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)',
- bygroups(Text, Keyword.Type, Text, Keyword.Type),
- 'ignored-line'),
- ],
-
- # block scalar indicators and indentation spaces
- 'indentation': [
- # trailing whitespaces are ignored
- (r'[ ]*$', something(Text), '#pop:2'),
-    # whitespaces preceding block collection indicators
- (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)),
- # block collection indicators
- (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)),
-    # the beginning of a block line
- (r'[ ]*', save_indent(Text), '#pop'),
- ],
-
- # an indented line in the block context
- 'block-line': [
- # the line end
- (r'[ ]*(?=#|$)', something(Text), '#pop'),
- # whitespaces separating tokens
- (r'[ ]+', Text),
- # tags, anchors and aliases,
- include('descriptors'),
- # block collections and scalars
- include('block-nodes'),
- # flow collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])',
- something(Name.Variable),
- 'plain-scalar-in-block-context'),
- ],
-
- # tags, anchors, aliases
- 'descriptors' : [
- # a full-form tag
- (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type),
- # a tag in the form '!', '!suffix' or '!handle!suffix'
- (r'!(?:[0-9A-Za-z_-]+)?'
- r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type),
- # an anchor
- (r'&[0-9A-Za-z_-]+', Name.Label),
- # an alias
- (r'\*[0-9A-Za-z_-]+', Name.Variable),
- ],
-
- # block collections and scalars
- 'block-nodes': [
- # implicit key
- (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)),
- # literal and folded scalars
- (r'[|>]', Punctuation.Indicator,
- ('block-scalar-content', 'block-scalar-header')),
- ],
-
- # flow collections and quoted scalars
- 'flow-nodes': [
- # a flow sequence
- (r'\[', Punctuation.Indicator, 'flow-sequence'),
- # a flow mapping
- (r'\{', Punctuation.Indicator, 'flow-mapping'),
- # a single-quoted scalar
- (r'\'', String, 'single-quoted-scalar'),
- # a double-quoted scalar
- (r'\"', String, 'double-quoted-scalar'),
- ],
-
- # the content of a flow collection
- 'flow-collection': [
- # whitespaces
- (r'[ ]+', Text),
- # line breaks
- (r'\n+', Text),
- # a comment
- (r'#[^\n]*', Comment.Single),
- # simple indicators
- (r'[?:,]', Punctuation.Indicator),
- # tags, anchors and aliases
- include('descriptors'),
- # nested collections and quoted scalars
- include('flow-nodes'),
- # a plain scalar
- (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])',
- something(Name.Variable),
- 'plain-scalar-in-flow-context'),
- ],
-
- # a flow sequence indicated by '[' and ']'
- 'flow-sequence': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\]', Punctuation.Indicator, '#pop'),
- ],
-
- # a flow mapping indicated by '{' and '}'
- 'flow-mapping': [
- # include flow collection rules
- include('flow-collection'),
- # the closing indicator
- (r'\}', Punctuation.Indicator, '#pop'),
- ],
-
- # block scalar lines
- 'block-scalar-content': [
- # line break
- (r'\n', Text),
- # empty line
- (r'^[ ]+$',
- parse_block_scalar_empty_line(Text, Name.Constant)),
- # indentation spaces (we may leave the state here)
- (r'^[ ]*', parse_block_scalar_indent(Text)),
- # line content
- (r'[^\n\r\f\v]+', Name.Constant),
- ],
-
-        # the header of a literal or folded scalar
- 'block-scalar-header': [
- # indentation indicator followed by chomping flag
- (r'([1-9])?[+-]?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- # chomping flag followed by indentation indicator
- (r'[+-]?([1-9])?(?=[ ]|$)',
- set_block_scalar_indent(Punctuation.Indicator),
- 'ignored-line'),
- ],
-
- # ignored and regular whitespaces in quoted scalars
- 'quoted-scalar-whitespaces': [
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- ],
-
- # single-quoted scalars
- 'single-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of the quote character
- (r'\'\'', String.Escape),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v\']+', String),
- # the closing quote
- (r'\'', String, '#pop'),
- ],
-
- # double-quoted scalars
- 'double-quoted-scalar': [
- # include whitespace and line break rules
- include('quoted-scalar-whitespaces'),
- # escaping of special characters
- (r'\\[0abt\tn\nvfre "\\N_LP]', String),
- # escape codes
- (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
- String.Escape),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v\"\\]+', String),
- # the closing quote
- (r'"', String, '#pop'),
- ],
-
- # the beginning of a new line while scanning a plain scalar
- 'plain-scalar-in-block-context-new-line': [
- # empty lines
- (r'^[ ]+$', Text),
- # line breaks
- (r'\n+', Text),
- # document start and document end indicators
- (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'),
- # indentation spaces (we may leave the block line state here)
- (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'),
- ],
-
- # a plain scalar in the block context
- 'plain-scalar-in-block-context': [
- # the scalar ends with the ':' indicator
- (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'),
- # the scalar ends with whitespaces followed by a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # trailing whitespaces are ignored
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
- # other whitespaces are a part of the value
- (r'[ ]+', Literal.Scalar.Plain),
- # regular non-whitespace characters
- (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain),
- ],
-
-        # a plain scalar in the flow context
- 'plain-scalar-in-flow-context': [
- # the scalar ends with an indicator character
- (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'),
- # the scalar ends with a comment
- (r'[ ]+(?=#)', Text, '#pop'),
- # leading and trailing whitespaces are ignored
- (r'^[ ]+', Text),
- (r'[ ]+$', Text),
- # line breaks are ignored
- (r'\n+', Text),
- # other whitespaces are a part of the value
- (r'[ ]+', Name.Variable),
- # regular non-whitespace characters
- (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable),
- ],
-
- }
-
- def get_tokens_unprocessed(self, text=None, context=None):
- if context is None:
- context = YamlLexerContext(text, 0)
- return super(YamlLexer, self).get_tokens_unprocessed(text, context)
-
-
-class LighttpdConfLexer(RegexLexer):
- """
- Lexer for `Lighttpd <http://lighttpd.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Lighttpd configuration file'
- aliases = ['lighty', 'lighttpd']
- filenames = []
- mimetypes = ['text/x-lighttpd-conf']
-
- tokens = {
- 'root': [
- (r'#.*\n', Comment.Single),
- (r'/\S*', Name), # pathname
- (r'[a-zA-Z._-]+', Keyword),
- (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number),
- (r'[0-9]+', Number),
- (r'=>|=~|\+=|==|=|\+', Operator),
- (r'\$[A-Z]+', Name.Builtin),
- (r'[(){}\[\],]', Punctuation),
- (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double),
- (r'\s+', Text),
- ],
-
- }
-
-
-class NginxConfLexer(RegexLexer):
- """
- Lexer for `Nginx <http://nginx.net/>`_ configuration files.
-
- .. versionadded:: 0.11
- """
- name = 'Nginx configuration file'
- aliases = ['nginx']
- filenames = []
- mimetypes = ['text/x-nginx-conf']
-
- tokens = {
- 'root': [
- (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)),
- (r'[^\s;#]+', Keyword, 'stmt'),
- include('base'),
- ],
- 'block': [
- (r'}', Punctuation, '#pop:2'),
- (r'[^\s;#]+', Keyword.Namespace, 'stmt'),
- include('base'),
- ],
- 'stmt': [
- (r'{', Punctuation, 'block'),
- (r';', Punctuation, '#pop'),
- include('base'),
- ],
- 'base': [
- (r'#.*\n', Comment.Single),
- (r'on|off', Name.Constant),
- (r'\$[^\s;#()]+', Name.Variable),
- (r'([a-z0-9.-]+)(:)([0-9]+)',
- bygroups(Name, Punctuation, Number.Integer)),
- (r'[a-z-]+/[a-z-+]+', String), # mimetype
- #(r'[a-zA-Z._-]+', Keyword),
- (r'[0-9]+[km]?\b', Number.Integer),
- (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)),
- (r'[:=~]', Punctuation),
- (r'[^\s;#{}$]+', String), # catch all
- (r'/[^\s;#]*', Name), # pathname
- (r'\s+', Text),
- (r'[$;]', Text), # leftover characters
- ],
- }
-
-
-class CMakeLexer(RegexLexer):
- """
- Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.
-
- .. versionadded:: 1.2
- """
- name = 'CMake'
- aliases = ['cmake']
- filenames = ['*.cmake', 'CMakeLists.txt']
- mimetypes = ['text/x-cmake']
-
- tokens = {
- 'root': [
- #(r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
- # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
- # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
- # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
- # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
- # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
- # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
- # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
- # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
- # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
- # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
- # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
- # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
- # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
- # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
- # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
- # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
- # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
- # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
- # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
- # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
- # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
- # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
- # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
- # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
- # r'COUNTARGS)\b', Name.Builtin, 'args'),
- (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
- Punctuation), 'args'),
- include('keywords'),
- include('ws')
- ],
- 'args': [
- (r'\(', Punctuation, '#push'),
- (r'\)', Punctuation, '#pop'),
- (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)),
- (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
- (r'(?s)".*?"', String.Double),
- (r'\\\S+', String),
- (r'[^\)$"# \t\n]+', String),
- (r'\n', Text), # explicitly legal
- include('keywords'),
- include('ws')
- ],
- 'string': [
-
- ],
- 'keywords': [
- (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
- r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
- ],
- 'ws': [
- (r'[ \t]+', Text),
- (r'#.*\n', Comment),
- ]
- }
-
- def analyse_text(text):
- exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$'
- if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
- return 0.8
- return 0.0
-
-
-class HttpLexer(RegexLexer):
- """
- Lexer for HTTP sessions.
-
- .. versionadded:: 1.5
- """
-
- name = 'HTTP'
- aliases = ['http']
-
- flags = re.DOTALL
-
- def header_callback(self, match):
- if match.group(1).lower() == 'content-type':
- content_type = match.group(5).strip()
- if ';' in content_type:
- content_type = content_type[:content_type.find(';')].strip()
- self.content_type = content_type
- yield match.start(1), Name.Attribute, match.group(1)
- yield match.start(2), Text, match.group(2)
- yield match.start(3), Operator, match.group(3)
- yield match.start(4), Text, match.group(4)
- yield match.start(5), Literal, match.group(5)
- yield match.start(6), Text, match.group(6)
-
- def continuous_header_callback(self, match):
- yield match.start(1), Text, match.group(1)
- yield match.start(2), Literal, match.group(2)
- yield match.start(3), Text, match.group(3)
-
- def content_callback(self, match):
- content_type = getattr(self, 'content_type', None)
- content = match.group()
- offset = match.start()
- if content_type:
- from pygments.lexers import get_lexer_for_mimetype
- try:
- lexer = get_lexer_for_mimetype(content_type)
- except ClassNotFound:
- pass
- else:
- for idx, token, value in lexer.get_tokens_unprocessed(content):
- yield offset + idx, token, value
- return
- yield offset, Text, content
-
- tokens = {
- 'root': [
- (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
- r'(HTTP)(/)(1\.[01])(\r?\n|$)',
- bygroups(Name.Function, Text, Name.Namespace, Text,
- Keyword.Reserved, Operator, Number, Text),
- 'headers'),
- (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
- bygroups(Keyword.Reserved, Operator, Number, Text, Number,
- Text, Name.Exception, Text),
- 'headers'),
- ],
- 'headers': [
- (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
- (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
- (r'\r?\n', Text, 'content')
- ],
- 'content': [
- (r'.+', content_callback)
- ]
- }
-
-
-class PyPyLogLexer(RegexLexer):
- """
- Lexer for PyPy log files.
-
- .. versionadded:: 1.5
- """
- name = "PyPy Log"
- aliases = ["pypylog", "pypy"]
- filenames = ["*.pypylog"]
- mimetypes = ['application/x-pypylog']
-
- tokens = {
- "root": [
- (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"),
- (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"),
- include("extra-stuff"),
- ],
- "jit-log": [
- (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"),
- (r"^\+\d+: ", Comment),
- (r"--end of the loop--", Comment),
- (r"[ifp]\d+", Name),
- (r"ptr\d+", Name),
- (r"(\()(\w+(?:\.\w+)?)(\))",
- bygroups(Punctuation, Name.Builtin, Punctuation)),
- (r"[\[\]=,()]", Punctuation),
- (r"(\d+\.\d+|inf|-inf)", Number.Float),
- (r"-?\d+", Number.Integer),
- (r"'.*'", String),
- (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name),
- (r"<.*?>+", Name.Builtin),
- (r"(label|debug_merge_point|jump|finish)", Name.Class),
- (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|"
- r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|"
- r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|"
- r"int_is_true|"
- r"uint_floordiv|uint_ge|uint_lt|"
- r"float_add|float_sub|float_mul|float_truediv|float_neg|"
- r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|"
- r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|"
- r"cast_int_to_float|cast_float_to_int|"
- r"force_token|quasiimmut_field|same_as|virtual_ref_finish|"
- r"virtual_ref|mark_opaque_ptr|"
- r"call_may_force|call_assembler|call_loopinvariant|"
- r"call_release_gil|call_pure|call|"
- r"new_with_vtable|new_array|newstr|newunicode|new|"
- r"arraylen_gc|"
- r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|"
- r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|"
- r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|"
- r"getfield_raw|setfield_gc|setfield_raw|"
- r"strgetitem|strsetitem|strlen|copystrcontent|"
- r"unicodegetitem|unicodesetitem|unicodelen|"
- r"guard_true|guard_false|guard_value|guard_isnull|"
- r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|"
- r"guard_not_forced|guard_no_exception|guard_not_invalidated)",
- Name.Builtin),
- include("extra-stuff"),
- ],
- "jit-backend-counts": [
- (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"),
- (r":", Punctuation),
- (r"\d+", Number),
- include("extra-stuff"),
- ],
- "extra-stuff": [
- (r"\s+", Text),
- (r"#.*?$", Comment),
- ],
- }
-
-
-class HxmlLexer(RegexLexer):
- """
- Lexer for `haXe build <http://haxe.org/doc/compiler>`_ files.
-
- .. versionadded:: 1.6
- """
- name = 'Hxml'
- aliases = ['haxeml', 'hxml']
- filenames = ['*.hxml']
-
- tokens = {
- 'root': [
-            # Separator
- (r'(--)(next)', bygroups(Punctuation, Generic.Heading)),
- # Compiler switches with one dash
- (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)),
-            # Compiler switches with two dashes
- (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|'
- r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)),
- # Targets and other options that take an argument
- (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|'
- r'cp|cmd)( +)(.+)',
- bygroups(Punctuation, Keyword, Whitespace, String)),
- # Options that take only numerical arguments
- (r'(-)(swf-version)( +)(\d+)',
- bygroups(Punctuation, Keyword, Number.Integer)),
-            # An option that defines the size, the fps and the background
-            # color of a flash movie
- (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})',
- bygroups(Punctuation, Keyword, Whitespace, Number.Integer,
- Punctuation, Number.Integer, Punctuation, Number.Integer,
- Punctuation, Number.Hex)),
-            # options with two dashes that take arguments
- (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)'
- r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)),
- # Single line comment, multiline ones are not allowed.
- (r'#.*', Comment.Single)
- ]
- }
-
-
-class EbnfLexer(RegexLexer):
- """
- Lexer for `ISO/IEC 14977 EBNF
- <http://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form>`_
- grammars.
-
- .. versionadded:: 2.0
- """
-
- name = 'EBNF'
- aliases = ['ebnf']
- filenames = ['*.ebnf']
- mimetypes = ['text/x-ebnf']
-
- tokens = {
- 'root': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'=', Operator, 'production'),
- ],
- 'production': [
- include('whitespace'),
- include('comment_start'),
- include('identifier'),
- (r'"[^"]*"', String.Double),
- (r"'[^']*'", String.Single),
- (r'(\?[^?]*\?)', Name.Entity),
- (r'[\[\]{}(),|]', Punctuation),
- (r'-', Operator),
- (r';', Punctuation, '#pop'),
- ],
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'comment_start': [
- (r'\(\*', Comment.Multiline, 'comment'),
- ],
- 'comment': [
- (r'[^*)]', Comment.Multiline),
- include('comment_start'),
- (r'\*\)', Comment.Multiline, '#pop'),
- (r'[*)]', Comment.Multiline),
- ],
- 'identifier': [
- (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword),
- ],
- }
-
-class TodotxtLexer(RegexLexer):
- """
- Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
-
- .. versionadded:: 2.0
- """
-
- name = 'Todotxt'
- aliases = ['todotxt']
- # *.todotxt is not a standard extension for Todo.txt files; including it
- # makes testing easier, and also makes autodetecting file type easier.
- filenames = ['todo.txt', '*.todotxt']
- mimetypes = ['text/x-todo']
-
-    ## Aliases mapping Todo.txt format concepts to standard token types
- CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
- IncompleteTaskText = Text # Incomplete tasks should look like plain text
-
- # Priority should have most emphasis to indicate importance of tasks
- Priority = Generic.Heading
- # Dates should have next most emphasis because time is important
- Date = Generic.Subheading
-
- # Project and context should have equal weight, and be in different colors
- Project = Generic.Error
- Context = String
-
- # If tag functionality is added, it should have the same weight as Project
- # and Context, and a different color. Generic.Traceback would work well.
-
- # Regex patterns for building up rules; dates, priorities, projects, and
- # contexts are all atomic
- # TODO: Make date regex more ISO 8601 compliant
- date_regex = r'\d{4,}-\d{2}-\d{2}'
- priority_regex = r'\([A-Z]\)'
- project_regex = r'\+\S+'
- context_regex = r'@\S+'
-
- # Compound regex expressions
- complete_one_date_regex = r'(x )(' + date_regex + r')'
- complete_two_date_regex = (complete_one_date_regex + r'( )(' +
- date_regex + r')')
- priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
-
- tokens = {
- # Should parse starting at beginning of line; each line is a task
- 'root': [
- ## Complete task entry points: two total:
- # 1. Complete task with two dates
- (complete_two_date_regex, bygroups(CompleteTaskText, Date,
- CompleteTaskText, Date),
- 'complete'),
- # 2. Complete task with one date
- (complete_one_date_regex, bygroups(CompleteTaskText, Date),
- 'complete'),
-
- ## Incomplete task entry points: six total:
- # 1. Priority plus date
- (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
- 'incomplete'),
- # 2. Priority only
- (priority_regex, Priority, 'incomplete'),
- # 3. Leading date
- (date_regex, Date, 'incomplete'),
- # 4. Leading context
- (context_regex, Context, 'incomplete'),
- # 5. Leading project
- (project_regex, Project, 'incomplete'),
- # 6. Non-whitespace catch-all
- ('\S+', IncompleteTaskText, 'incomplete'),
- ],
-
- # Parse a complete task
- 'complete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', CompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
- ('\S+', CompleteTaskText),
- # Tokenize whitespace not containing a newline
- ('\s+', CompleteTaskText),
- ],
-
- # Parse an incomplete task
- 'incomplete': [
- # Newline indicates end of task, should return to root
- (r'\s*\n', IncompleteTaskText, '#pop'),
- # Tokenize contexts and projects
- (context_regex, Context),
- (project_regex, Project),
- # Tokenize non-whitespace text
- ('\S+', IncompleteTaskText),
- # Tokenize whitespace not containing a newline
- ('\s+', IncompleteTaskText),
- ],
- }
-
-
-class DockerLexer(RegexLexer):
- """
- Lexer for `Docker <http://docker.io>`_ configuration files.
-
- .. versionadded:: 2.0
- """
- name = 'Docker'
- aliases = ['docker', 'dockerfile']
- filenames = ['Dockerfile', '*.docker']
- mimetypes = ['text/x-dockerfile-config']
-
- _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|'
- r'VOLUME|WORKDIR)')
-
- flags = re.IGNORECASE | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,),
- bygroups(Name.Keyword, Whitespace, Keyword)),
- (_keywords + r'\b', Keyword),
- (r'#.*', Comment),
- (r'.+', using(BashLexer)),
- ],
- }
+from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \
+ SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer
+from pygments.lexers.console import PyPyLogLexer
+from pygments.lexers.textedit import VimLexer
+from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \
+ TexLexer, GroffLexer
+from pygments.lexers.installers import DebianControlLexer, SourcesListLexer
+from pygments.lexers.make import MakefileLexer, BaseMakefileLexer, CMakeLexer
+from pygments.lexers.haxe import HxmlLexer
+from pygments.lexers.diff import DiffLexer, DarcsPatchLexer
+from pygments.lexers.data import YamlLexer
+from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer
+
+__all__ = []
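
The block of '+' lines just above turns the old monolithic module (presumably pygments/lexers/text.py, judging by the re-exported names) into a backwards-compatibility shim: it only re-imports the lexers from their new homes and exports nothing of its own. A minimal sketch of what that preserves for downstream code, assuming the module layout introduced by this patch:

    # old import path keeps working because the shim re-binds the name
    from pygments.lexers.text import YamlLexer
    # new canonical location after the split
    from pygments.lexers.data import YamlLexer as MovedYamlLexer

    assert YamlLexer is MovedYamlLexer  # same class object, just re-exported
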
diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py
new file mode 100644
index 00000000..04dd879a
--- /dev/null
+++ b/pygments/lexers/textedit.py
@@ -0,0 +1,169 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textedit
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for languages related to text processing.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from bisect import bisect
+
+from pygments.lexer import RegexLexer, include, default, bygroups, using, this
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+from pygments.lexers.python import PythonLexer
+
+__all__ = ['AwkLexer', 'VimLexer']
+
+
+class AwkLexer(RegexLexer):
+ """
+ For Awk scripts.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Awk'
+ aliases = ['awk', 'gawk', 'mawk', 'nawk']
+ filenames = ['*.awk']
+ mimetypes = ['application/x-awk']
+
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'#.*$', Comment.Single)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'\B', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|\|\||&&|in\b|\$|!?~|'
+ r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+ (r'(break|continue|do|while|exit|for|if|else|'
+ r'return)\b', Keyword, 'slashstartsregex'),
+ (r'function\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|'
+ r'length|match|split|sprintf|sub|substr|tolower|toupper|close|'
+ r'fflush|getline|next|nextfile|print|printf|strftime|systime|'
+ r'delete|system)\b', Keyword.Reserved),
+ (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|'
+ r'IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|'
+ r'SUBSEP)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class VimLexer(RegexLexer):
+ """
+ Lexer for VimL script files.
+
+ .. versionadded:: 0.8
+ """
+ name = 'VimL'
+ aliases = ['vim']
+ filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc',
+ '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc']
+ mimetypes = ['text/x-vim']
+ flags = re.MULTILINE
+
+ _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?'
+
+ tokens = {
+ 'root': [
+ (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)',
+ bygroups(using(this), Keyword, Text, Operator, Text, Text,
+ using(PythonLexer), Text)),
+ (r'^([ \t:]*)(' + _python + r')([ \t])(.*)',
+ bygroups(using(this), Keyword, Text, using(PythonLexer))),
+
+ (r'^\s*".*', Comment),
+
+ (r'[ \t]+', Text),
+ # TODO: regexes can have other delims
+ (r'/(\\\\|\\/|[^\n/])*/', String.Regex),
+ (r'"(\\\\|\\"|[^\n"])*"', String.Double),
+ (r"'(''|[^\n'])*'", String.Single),
+
+ # Who decided that doublequote was a good comment character??
+ (r'(?<=\s)"[^\-:.%#=*].*', Comment),
+ (r'-?\d+', Number),
+ (r'#[0-9a-f]{6}', Number.Hex),
+ (r'^:', Punctuation),
+ (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent.
+ (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b',
+ Keyword),
+ (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin),
+ (r'\b\w+\b', Name.Other), # These are postprocessed below
+ (r'.', Text),
+ ],
+ }
+
+ def __init__(self, **options):
+ from pygments.lexers._vim_builtins import command, option, auto
+ self._cmd = command
+ self._opt = option
+ self._aut = auto
+
+ RegexLexer.__init__(self, **options)
+
+ def is_in(self, w, mapping):
+ r"""
+ It's kind of difficult to decide if something might be a keyword
+ in VimL because it allows you to abbreviate them. In fact,
+ 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are
+ valid ways to call it so rather than making really awful regexps
+ like::
+
+ \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b
+
+ we match `\b\w+\b` and then call is_in() on those tokens. See
+ `scripts/get_vimkw.py` for how the lists are extracted.
+ """
+ p = bisect(mapping, (w,))
+ if p > 0:
+ if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \
+ mapping[p-1][1][:len(w)] == w:
+ return True
+ if p < len(mapping):
+ return mapping[p][0] == w[:len(mapping[p][0])] and \
+ mapping[p][1][:len(w)] == w
+ return False
+
+ def get_tokens_unprocessed(self, text):
+ # TODO: builtins are only subsequent tokens on lines
+ # and 'keywords' only happen at the beginning except
+ # for :au ones
+ for index, token, value in \
+ RegexLexer.get_tokens_unprocessed(self, text):
+ if token is Name.Other:
+ if self.is_in(value, self._cmd):
+ yield index, Keyword, value
+ elif self.is_in(value, self._opt) or \
+ self.is_in(value, self._aut):
+ yield index, Name.Builtin, value
+ else:
+ yield index, Text, value
+ else:
+ yield index, token, value
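
VimLexer.is_in() above replaces per-command regexps like \bab(?:b(?:r...)?)?\b with a bisect lookup over a sorted list of (minimal_abbreviation, full_name) pairs. A self-contained sketch of that behaviour, using a tiny hand-made table instead of the real ones in pygments/lexers/_vim_builtins.py:

    from bisect import bisect

    # hypothetical excerpt of a sorted (min_abbrev, full_name) command table
    command = [('ab', 'abbreviate'), ('abc', 'abclear'), ('ec', 'echo')]

    def is_in(w, mapping):
        # a word matches an entry when it starts with the entry's minimal
        # abbreviation and is itself a prefix of the entry's full name
        p = bisect(mapping, (w,))
        if p > 0 and mapping[p-1][0] == w[:len(mapping[p-1][0])] \
                and mapping[p-1][1][:len(w)] == w:
            return True
        if p < len(mapping):
            return (mapping[p][0] == w[:len(mapping[p][0])]
                    and mapping[p][1][:len(w)] == w)
        return False

    for word in ('ab', 'abbre', 'abbreviate', 'a', 'abz'):
        print(word, is_in(word, command))
    # -> ab True, abbre True, abbreviate True, a False, abz False
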
diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py
new file mode 100644
index 00000000..e3700e9f
--- /dev/null
+++ b/pygments/lexers/textfmts.py
@@ -0,0 +1,279 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.textfmts
+ ~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for various text formats.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, bygroups
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Generic, Literal
+from pygments.util import ClassNotFound
+
+__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer']
+
+
+class IrcLogsLexer(RegexLexer):
+ """
+ Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
+ """
+
+ name = 'IRC logs'
+ aliases = ['irc']
+ filenames = ['*.weechatlog']
+ mimetypes = ['text/x-irclog']
+
+ flags = re.VERBOSE | re.MULTILINE
+ timestamp = r"""
+ (
+ # irssi / xchat and others
+ (?: \[|\()? # Opening bracket or paren for the timestamp
+ (?: # Timestamp
+ (?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
+ (?:\d{1,4})
+ [T ])? # Date/time separator: T or space
+ (?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
+ (?: \d?\d [:.])
+ )
+ (?: \]|\))?\s+ # Closing bracket or paren for the timestamp
+ |
+ # weechat
+ \d{4}\s\w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ |
+ # xchat
+ \w{3}\s\d{2}\s # Date
+ \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
+ )?
+ """
+ tokens = {
+ 'root': [
+ # log start/end
+ (r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
+ # hack
+ ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
+ # normal msgs
+ ("^" + timestamp + r"""
+ (\s*<.*?>\s*) # Nick """,
+ bygroups(Comment.Preproc, Name.Tag), 'msg'),
+ # /me msgs
+ ("^" + timestamp + r"""
+ (\s*[*]\s+) # Star
+ (\S+\s+.*?\n) # Nick + rest of message """,
+ bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
+ # join/part msgs
+ ("^" + timestamp + r"""
+ (\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
+ (\S+\s+) # Nick + Space
+ (.*?\n) # Rest of message """,
+ bygroups(Comment.Preproc, Keyword, String, Comment)),
+ (r"^.*?\n", Text),
+ ],
+ 'msg': [
+ (r"\S+:(?!//)", Name.Attribute), # Prefix
+ (r".*\n", Text, '#pop'),
+ ],
+ }
+
+
+class GettextLexer(RegexLexer):
+ """
+ Lexer for Gettext catalog files.
+
+ .. versionadded:: 0.9
+ """
+ name = 'Gettext Catalog'
+ aliases = ['pot', 'po']
+ filenames = ['*.pot', '*.po']
+ mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
+
+ tokens = {
+ 'root': [
+ (r'^#,\s.*?$', Keyword.Type),
+ (r'^#:\s.*?$', Keyword.Declaration),
+ # (r'^#$', Comment),
+ (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
+ (r'^(")([A-Za-z-]+:)(.*")$',
+ bygroups(String, Name.Property, String)),
+ (r'^".*"$', String),
+ (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$',
+ bygroups(Name.Variable, Text, String)),
+ (r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
+ bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
+ ]
+ }
+
+
+class HttpLexer(RegexLexer):
+ """
+ Lexer for HTTP sessions.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'HTTP'
+ aliases = ['http']
+
+ flags = re.DOTALL
+
+ def header_callback(self, match):
+ if match.group(1).lower() == 'content-type':
+ content_type = match.group(5).strip()
+ if ';' in content_type:
+ content_type = content_type[:content_type.find(';')].strip()
+ self.content_type = content_type
+ yield match.start(1), Name.Attribute, match.group(1)
+ yield match.start(2), Text, match.group(2)
+ yield match.start(3), Operator, match.group(3)
+ yield match.start(4), Text, match.group(4)
+ yield match.start(5), Literal, match.group(5)
+ yield match.start(6), Text, match.group(6)
+
+ def continuous_header_callback(self, match):
+ yield match.start(1), Text, match.group(1)
+ yield match.start(2), Literal, match.group(2)
+ yield match.start(3), Text, match.group(3)
+
+ def content_callback(self, match):
+ content_type = getattr(self, 'content_type', None)
+ content = match.group()
+ offset = match.start()
+ if content_type:
+ from pygments.lexers import get_lexer_for_mimetype
+ try:
+ lexer = get_lexer_for_mimetype(content_type)
+ except ClassNotFound:
+ pass
+ else:
+ for idx, token, value in lexer.get_tokens_unprocessed(content):
+ yield offset + idx, token, value
+ return
+ yield offset, Text, content
+
+ tokens = {
+ 'root': [
+ (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
+ r'(HTTP)(/)(1\.[01])(\r?\n|$)',
+ bygroups(Name.Function, Text, Name.Namespace, Text,
+ Keyword.Reserved, Operator, Number, Text),
+ 'headers'),
+ (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)',
+ bygroups(Keyword.Reserved, Operator, Number, Text, Number,
+ Text, Name.Exception, Text),
+ 'headers'),
+ ],
+ 'headers': [
+ (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback),
+ (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback),
+ (r'\r?\n', Text, 'content')
+ ],
+ 'content': [
+ (r'.+', content_callback)
+ ]
+ }
+
+
+class TodotxtLexer(RegexLexer):
+ """
+ Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Todotxt'
+ aliases = ['todotxt']
+ # *.todotxt is not a standard extension for Todo.txt files; including it
+ # makes testing easier, and also makes autodetecting file type easier.
+ filenames = ['todo.txt', '*.todotxt']
+ mimetypes = ['text/x-todo']
+
+    # Aliases mapping Todo.txt format concepts to standard token types
+ CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
+ IncompleteTaskText = Text # Incomplete tasks should look like plain text
+
+ # Priority should have most emphasis to indicate importance of tasks
+ Priority = Generic.Heading
+ # Dates should have next most emphasis because time is important
+ Date = Generic.Subheading
+
+ # Project and context should have equal weight, and be in different colors
+ Project = Generic.Error
+ Context = String
+
+ # If tag functionality is added, it should have the same weight as Project
+ # and Context, and a different color. Generic.Traceback would work well.
+
+ # Regex patterns for building up rules; dates, priorities, projects, and
+ # contexts are all atomic
+ # TODO: Make date regex more ISO 8601 compliant
+ date_regex = r'\d{4,}-\d{2}-\d{2}'
+ priority_regex = r'\([A-Z]\)'
+ project_regex = r'\+\S+'
+ context_regex = r'@\S+'
+
+ # Compound regex expressions
+ complete_one_date_regex = r'(x )(' + date_regex + r')'
+ complete_two_date_regex = (complete_one_date_regex + r'( )(' +
+ date_regex + r')')
+ priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
+
+ tokens = {
+ # Should parse starting at beginning of line; each line is a task
+ 'root': [
+ # Complete task entry points: two total:
+ # 1. Complete task with two dates
+ (complete_two_date_regex, bygroups(CompleteTaskText, Date,
+ CompleteTaskText, Date),
+ 'complete'),
+ # 2. Complete task with one date
+ (complete_one_date_regex, bygroups(CompleteTaskText, Date),
+ 'complete'),
+
+ # Incomplete task entry points: six total:
+ # 1. Priority plus date
+ (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
+ 'incomplete'),
+ # 2. Priority only
+ (priority_regex, Priority, 'incomplete'),
+ # 3. Leading date
+ (date_regex, Date, 'incomplete'),
+ # 4. Leading context
+ (context_regex, Context, 'incomplete'),
+ # 5. Leading project
+ (project_regex, Project, 'incomplete'),
+ # 6. Non-whitespace catch-all
+ ('\S+', IncompleteTaskText, 'incomplete'),
+ ],
+
+ # Parse a complete task
+ 'complete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', CompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
+ ('\S+', CompleteTaskText),
+ # Tokenize whitespace not containing a newline
+ ('\s+', CompleteTaskText),
+ ],
+
+ # Parse an incomplete task
+ 'incomplete': [
+ # Newline indicates end of task, should return to root
+ (r'\s*\n', IncompleteTaskText, '#pop'),
+ # Tokenize contexts and projects
+ (context_regex, Context),
+ (project_regex, Project),
+ # Tokenize non-whitespace text
+ ('\S+', IncompleteTaskText),
+ # Tokenize whitespace not containing a newline
+ ('\s+', IncompleteTaskText),
+ ],
+ }
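
HttpLexer.content_callback() above is what makes the new textfmts module compose with the rest of Pygments: once a Content-Type header has been seen, the message body is re-lexed by whatever lexer get_lexer_for_mimetype() finds for it, falling back to plain Text otherwise. A minimal usage sketch (the JSON response below is an assumed example, not taken from the patch):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.textfmts import HttpLexer

    session = ('HTTP/1.1 200 OK\r\n'
               'Content-Type: application/json\r\n'
               '\r\n'
               '{"status": "ok", "count": 3}')

    # headers are lexed by HttpLexer itself; the body is delegated to the
    # JSON lexer because of the Content-Type header
    print(highlight(session, HttpLexer(), TerminalFormatter()))
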
diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py
new file mode 100644
index 00000000..ba942dbf
--- /dev/null
+++ b/pygments/lexers/theorem.py
@@ -0,0 +1,466 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.theorem
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for theorem-proving languages.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, default, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Generic
+
+__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
+
+
+class CoqLexer(RegexLexer):
+ """
+ For the `Coq <http://coq.inria.fr/>`_ theorem prover.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'Coq'
+ aliases = ['coq']
+ filenames = ['*.v']
+ mimetypes = ['text/x-coq']
+
+ keywords1 = (
+ # Vernacular commands
+ 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
+ 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
+ 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
+ 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
+ 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
+ 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
+ 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
+ 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
+ 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
+ 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
+ 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
+ 'outside', 'Check',
+ )
+ keywords2 = (
+ # Gallina
+ 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
+ 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
+ 'for', 'of', 'nosimpl', 'with', 'as',
+ )
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+ keywords4 = (
+ # Tactics
+ 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
+ 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
+ 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
+ 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
+ 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
+ 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
+ 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
+ 'split', 'left', 'right', 'autorewrite', 'tauto',
+ )
+ keywords5 = (
+ # Terminators
+ 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
+ 'assumption', 'solve', 'contradiction', 'discriminate',
+ )
+ keywords6 = (
+ # Control
+ 'do', 'last', 'first', 'try', 'idtac', 'repeat',
+ )
+ # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
+ # 'downto', 'else', 'end', 'exception', 'external', 'false',
+ # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
+ # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
+ # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
+ # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
+ # 'type', 'val', 'virtual', 'when', 'while', 'with'
+ keyopts = (
+ '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
+ '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
+ '<->', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
+ r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>',
+ r'/\\', r'\\/',
+ u'Π', u'λ',
+ )
+ operators = r'[!$%&*+\./:<=>?@^|~-]'
+ word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
+ prefix_syms = r'[!?~]'
+ infix_syms = r'[=<>@^|&+\*/$%-]'
+ primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list',
+ 'array')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'\(\*', Comment, 'comment'),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+ (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
+ (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
+ (r'\b([A-Z][\w\']*)', Name.Class),
+ (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
+ (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
+ (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
+ (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
+
+ (r"[^\W\d][\w']*", Name),
+
+ (r'\d[\d_]*', Number.Integer),
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+ (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
+
+ (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
+ String.Char),
+ (r"'.'", String.Char),
+ (r"'", Keyword), # a stray quote is another syntax element
+
+ (r'"', String.Double, 'string'),
+
+ (r'[~?][a-z][\w\']*:', Name.Variable),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'string': [
+ (r'[^"]+', String.Double),
+ (r'""', String.Double),
+ (r'"', String.Double, '#pop'),
+ ],
+ 'dotted': [
+ (r'\s+', Text),
+ (r'\.', Punctuation),
+ (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+ (r'[A-Z][\w\']*', Name.Class, '#pop'),
+ (r'[a-z][a-z0-9_\']*', Name, '#pop'),
+ default('#pop')
+ ],
+ }
+
+ def analyse_text(text):
+ if text.startswith('(*'):
+ return True
+
+
+class IsabelleLexer(RegexLexer):
+ """
+ For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Isabelle'
+ aliases = ['isabelle']
+ filenames = ['*.thy']
+ mimetypes = ['text/x-isabelle']
+
+ keyword_minor = (
+ 'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
+ 'class_instance', 'class_relation', 'code_module', 'congs',
+ 'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
+ 'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
+ 'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
+ 'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
+ 'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
+ 'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
+ 'type_constructor', 'unchecked', 'unsafe', 'where',
+ )
+
+ keyword_diag = (
+ 'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
+ 'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
+ 'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
+ 'print_abbrevs', 'print_antiquotations', 'print_attributes',
+ 'print_binds', 'print_bnfs', 'print_bundles',
+ 'print_case_translations', 'print_cases', 'print_claset',
+ 'print_classes', 'print_codeproc', 'print_codesetup',
+ 'print_coercions', 'print_commands', 'print_context',
+ 'print_defn_rules', 'print_dependencies', 'print_facts',
+ 'print_induct_rules', 'print_inductives', 'print_interps',
+ 'print_locale', 'print_locales', 'print_methods', 'print_options',
+ 'print_orders', 'print_quot_maps', 'print_quotconsts',
+ 'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
+ 'print_rules', 'print_simpset', 'print_state', 'print_statement',
+ 'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
+ 'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
+ 'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
+ 'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
+ 'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
+ )
+
+ keyword_thy = ('theory', 'begin', 'end')
+
+ keyword_section = ('header', 'chapter')
+
+ keyword_subsection = (
+ 'section', 'subsection', 'subsubsection', 'sect', 'subsect',
+ 'subsubsect',
+ )
+
+ keyword_theory_decl = (
+ 'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
+ 'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
+ 'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
+ 'code_abort', 'code_class', 'code_const', 'code_datatype',
+ 'code_identifier', 'code_include', 'code_instance', 'code_modulename',
+ 'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
+ 'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
+ 'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
+ 'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
+ 'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
+ 'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
+ 'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
+ 'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
+ 'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
+ 'lifting_forget', 'lifting_update', 'local_setup', 'locale',
+ 'method_setup', 'nitpick_params', 'no_adhoc_overloading',
+ 'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
+ 'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
+ 'overloading', 'parse_ast_translation', 'parse_translation',
+ 'partial_function', 'primcorec', 'primrec', 'primrec_new',
+ 'print_ast_translation', 'print_translation', 'quickcheck_generator',
+ 'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
+ 'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
+ 'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
+ 'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
+ 'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
+ 'text_raw', 'theorems', 'translations', 'type_notation',
+ 'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
+ 'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
+ 'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
+ 'bnf_axiomatization', 'cartouche', 'datatype_compat',
+ 'free_constructors', 'functor', 'nominal_function',
+ 'nominal_termination', 'permanent_interpretation',
+ 'binds', 'defining', 'smt2_status', 'term_cartouche',
+ 'boogie_file', 'datatype_compat', 'text_cartouche',
+ )
+
+ keyword_theory_script = ('inductive_cases', 'inductive_simps')
+
+ keyword_theory_goal = (
+ 'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
+ 'crunch', 'crunch_ignore',
+ 'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
+ 'lift_definition', 'nominal_inductive', 'nominal_inductive2',
+ 'nominal_primrec', 'pcpodef', 'primcorecursive',
+ 'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
+ 'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
+ 'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
+ 'theorem', 'typedef', 'wrap_free_constructors',
+ )
+
+ keyword_qed = ('by', 'done', 'qed')
+ keyword_abandon_proof = ('sorry', 'oops')
+
+ keyword_proof_goal = ('have', 'hence', 'interpret')
+
+ keyword_proof_block = ('next', 'proof')
+
+ keyword_proof_chain = (
+ 'finally', 'from', 'then', 'ultimately', 'with',
+ )
+
+ keyword_proof_decl = (
+ 'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
+ 'txt', 'txt_raw', 'unfolding', 'using', 'write',
+ )
+
+ keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
+
+ keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
+
+ keyword_proof_script = (
+ 'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
+ )
+
+ operators = (
+ '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
+ '+', '-', '!', '?',
+ )
+
+ proof_operators = ('{', '}', '.', '..')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'\(\*', Comment, 'comment'),
+ (r'\{\*', Comment, 'text'),
+
+ (words(operators), Operator),
+ (words(proof_operators), Operator.Word),
+
+ (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+
+ (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
+ (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
+
+ (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+
+ (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
+
+ (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
+
+ (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
+
+ (r'\\<\w*>', Text.Symbol),
+
+ (r"[^\W\d][.\w']*", Name),
+ (r"\?[^\W\d][.\w']*", Name),
+ (r"'[^\W\d][.\w']*", Name.Type),
+
+ (r'\d[\d_]*', Name), # display numbers as name
+ (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
+ (r'0[oO][0-7][0-7_]*', Number.Oct),
+ (r'0[bB][01][01_]*', Number.Bin),
+
+ (r'"', String, 'string'),
+ (r'`', String.Other, 'fact'),
+ ],
+ 'comment': [
+ (r'[^(*)]+', Comment),
+ (r'\(\*', Comment, '#push'),
+ (r'\*\)', Comment, '#pop'),
+ (r'[(*)]', Comment),
+ ],
+ 'text': [
+ (r'[^\*\}]+', Comment),
+ (r'\*\}', Comment, '#pop'),
+ (r'\*', Comment),
+ (r'\}', Comment),
+ ],
+ 'string': [
+ (r'[^"\\]+', String),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\"', String),
+ (r'\\', String),
+ (r'"', String, '#pop'),
+ ],
+ 'fact': [
+ (r'[^`\\]+', String.Other),
+ (r'\\<\w*>', String.Symbol),
+ (r'\\`', String.Other),
+ (r'\\', String.Other),
+ (r'`', String.Other, '#pop'),
+ ],
+ }
+
+
+class LeanLexer(RegexLexer):
+ """
+ For the `Lean <https://github.com/leanprover/lean>`_
+ theorem prover.
+
+ .. versionadded:: 2.0
+ """
+ name = 'Lean'
+ aliases = ['lean']
+ filenames = ['*.lean']
+ mimetypes = ['text/x-lean']
+
+ flags = re.MULTILINE | re.UNICODE
+
+ keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
+ 'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
+ 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
+ 'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
+ 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
+ 'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
+ 'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
+ 'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends')
+
+ keywords2 = (
+ 'forall', 'exists', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
+ 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc'
+ )
+
+ keywords3 = (
+ # Sorts
+ 'Type', 'Prop',
+ )
+
+ keywords4 = (
+ # Tactics
+ 'apply', 'and_then', 'or_else', 'append', 'interleave', 'par', 'fixpoint', 'repeat',
+ 'at_most', 'discard', 'focus_at', 'rotate', 'try_for', 'now', 'assumption', 'eassumption',
+ 'state', 'intro', 'generalize', 'exact', 'unfold', 'beta', 'trace', 'focus', 'repeat1',
+ 'determ', 'destruct', 'try', 'auto', 'intros'
+ )
+
+ operators = (
+ '!=', '#', '&', '&&', '*', '+', '-', '/', '#', '@',
+ '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
+ '<-', '=', '==', '>', '_', '`', '|', '||', '~', '=>', '<=', '>=',
+ '/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
+ u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈'
+ )
+
+ word_operators = ('and', 'or', 'not', 'iff', 'eq')
+
+ punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')
+
+ primitives = ('unit', 'int', 'bool', 'string', 'char', 'list',
+ 'array', 'prod', 'sum', 'pair', 'real', 'nat', 'num', 'path')
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ (r'\b(false|true)\b|\(\)|\[\]', Name.Builtin.Pseudo),
+ (r'/-', Comment, 'comment'),
+ (r'--.*?$', Comment.Single),
+ (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
+ (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
+ (words(operators), Name.Builtin.Pseudo),
+ (words(word_operators, prefix=r'\b', suffix=r'\b'), Name.Builtin.Pseudo),
+ (words(punctuation), Operator),
+ (words(primitives, prefix=r'\b', suffix=r'\b'), Keyword.Type),
+ (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
+ u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
+ u"\u207f-\u2089\u2090-\u209c\u2100-\u214f]*", Name),
+ (r'\d+', Number.Integer),
+ (r'"', String.Double, 'string'),
+ (r'[~?][a-z][\w\']*:', Name.Variable)
+ ],
+ 'comment': [
+ # Multiline Comments
+ (r'[^/-]', Comment.Multiline),
+ (r'/-', Comment.Multiline, '#push'),
+ (r'-/', Comment.Multiline, '#pop'),
+ (r'[/-]', Comment.Multiline)
+ ],
+ 'string': [
+ (r'[^\\"]+', String.Double),
+ (r'\\[n"\\]', String.Escape),
+ ('"', String.Double, '#pop'),
+ ],
+ }
diff --git a/pygments/lexers/urbi.py b/pygments/lexers/urbi.py
new file mode 100644
index 00000000..b8ac1516
--- /dev/null
+++ b/pygments/lexers/urbi.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.urbi
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for UrbiScript language.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import ExtendedRegexLexer, words
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation
+
+__all__ = ['UrbiscriptLexer']
+
+
+class UrbiscriptLexer(ExtendedRegexLexer):
+ """
+ For UrbiScript source code.
+
+ .. versionadded:: 1.5
+ """
+
+ name = 'UrbiScript'
+ aliases = ['urbiscript']
+ filenames = ['*.u']
+ mimetypes = ['application/x-urbiscript']
+
+ flags = re.DOTALL
+
+ # TODO
+ # - handle Experimental and deprecated tags with specific tokens
+ # - handle Angles and Durations with specific tokens
+
+ def blob_callback(lexer, match, ctx):
+ text_before_blob = match.group(1)
+ blob_start = match.group(2)
+ blob_size_str = match.group(3)
+ blob_size = int(blob_size_str)
+ yield match.start(), String, text_before_blob
+ ctx.pos += len(text_before_blob)
+
+        # if blob size doesn't match the blob format (example: "\B(2)(aaa)")
+ # yield blob as a string
+ if ctx.text[match.end() + blob_size] != ")":
+ result = "\\B(" + blob_size_str + ")("
+ yield match.start(), String, result
+ ctx.pos += len(result)
+ return
+
+        # if the blob is well formatted, yield it as Escape
+ blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")"
+ yield match.start(), String.Escape, blob_text
+ ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")"
+
+ tokens = {
+ 'root': [
+ (r'\s+', Text),
+ # comments
+ (r'//.*?\n', Comment),
+ (r'/\*', Comment.Multiline, 'comment'),
+ (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
+ (words((
+ 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
+ 'continue', 'default', 'else', 'enum', 'every', 'external',
+ 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
+ 'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
+ 'waituntil', 'whenever', 'while'), suffix=r'\b'),
+ Keyword),
+ (words((
+ 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
+ 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
+ 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
+ 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
+ 'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
+ 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
+ Keyword.Reserved),
+            # deprecated keywords, use a meaningful token when available
+ (r'(emit|foreach|internal|loopn|static)\b', Keyword),
+            # ignored keywords, use a meaningful token when available
+ (r'(private|protected|public)\b', Keyword),
+ (r'(var|do|const|function|class)\b', Keyword.Declaration),
+ (r'(true|false|nil|void)\b', Keyword.Constant),
+ (words((
+ 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
+ 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
+ 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
+ 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
+ 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
+ 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
+ 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
+ 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
+ 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
+ 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
+ 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
+ 'UValue', 'UVar'), suffix=r'\b'),
+ Name.Builtin),
+ (r'(?:this)\b', Name.Builtin.Pseudo),
+ # don't match single | and &
+ (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
+ (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
+ Operator.Word),
+ (r'[{}\[\]()]+', Punctuation),
+ (r'(?:;|\||,|&|\?|!)+', Punctuation),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ # Float, Integer, Angle and Duration
+ (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
+ r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
+ # handle binary blob in strings
+ (r'"', String.Double, "string.double"),
+ (r"'", String.Single, "string.single"),
+ ],
+ 'string.double': [
+ (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
+ (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'),
+ ],
+ 'string.single': [
+ (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
+ (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'),
+ ],
+ # from http://pygments.org/docs/lexerdevelopment/#changing-states
+ 'comment': [
+ (r'[^*/]', Comment.Multiline),
+ (r'/\*', Comment.Multiline, '#push'),
+ (r'\*/', Comment.Multiline, '#pop'),
+ (r'[*/]', Comment.Multiline),
+ ]
+ }
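
UrbiscriptLexer needs ExtendedRegexLexer and the blob_callback above because UrbiScript string literals may embed raw binary blobs of the form \B(<size>)(<bytes>), whose payload may legally contain quotes and backslashes. The callback therefore advances the lexer position past the payload instead of trusting the regex alone. A small sketch with an assumed snippet (the quote inside the blob would otherwise terminate the string):

    from pygments.lexers.urbi import UrbiscriptLexer

    code = 'var data = "head \\B(4)(ab"d) tail";'
    for token, value in UrbiscriptLexer().get_tokens(code):
        print(token, repr(value))
    # the \B(4)(ab"d) span is emitted as a single String.Escape token
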
diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py
index cd619596..8db0da5b 100644
--- a/pygments/lexers/web.py
+++ b/pygments/lexers/web.py
@@ -3,4508 +3,22 @@
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
- Lexers for web-related languages and markup.
+ Just export previously exported lexers.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
-import copy
-
-from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \
- include, this, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
- Number, Other, Punctuation, Literal
-from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
- html_doctype_matches, unirange, iteritems
-from pygments.lexers.agile import RubyLexer
-from pygments.lexers.compiled import ScalaLexer
-
-
-__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer',
- 'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
- 'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer',
- 'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer',
- 'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer',
- 'DtdLexer', 'DartLexer', 'LassoLexer', 'QmlLexer', 'TypeScriptLexer',
- 'KalLexer', 'CirruLexer', 'MaskLexer', 'ZephirLexer', 'SlimLexer']
-
-
-class JavascriptLexer(RegexLexer):
- """
- For JavaScript source code.
- """
-
- name = 'JavaScript'
- aliases = ['js', 'javascript']
- filenames = ['*.js', ]
- mimetypes = ['application/javascript', 'application/x-javascript',
- 'text/x-javascript', 'text/javascript', ]
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class JsonLexer(RegexLexer):
- """
- For JSON data structures.
-
- .. versionadded:: 1.5
- """
-
- name = 'JSON'
- aliases = ['json']
- filenames = ['*.json']
- mimetypes = [ 'application/json', ]
-
- # integer part of a number
- int_part = r'-?(0|[1-9]\d*)'
-
- # fractional part of a number
- frac_part = r'\.\d+'
-
- # exponential part of a number
- exp_part = r'[eE](\+|-)?\d+'
-
-
- flags = re.DOTALL
- tokens = {
- 'whitespace': [
- (r'\s+', Text),
- ],
-
- # represents a simple terminal value
- 'simplevalue': [
- (r'(true|false|null)\b', Keyword.Constant),
- (('%(int_part)s(%(frac_part)s%(exp_part)s|'
- '%(exp_part)s|%(frac_part)s)') % vars(),
- Number.Float),
- (int_part, Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- ],
-
-
- # the right hand side of an object, after the attribute name
- 'objectattribute': [
- include('value'),
- (r':', Punctuation),
- # comma terminates the attribute but expects more
- (r',', Punctuation, '#pop'),
- # a closing bracket terminates the entire object, so pop twice
- (r'}', Punctuation, ('#pop', '#pop')),
- ],
-
- # a json object - { attr, attr, ... }
- 'objectvalue': [
- include('whitespace'),
- (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'),
- (r'}', Punctuation, '#pop'),
- ],
-
- # json array - [ value, value, ... ]
- 'arrayvalue': [
- include('whitespace'),
- include('value'),
- (r',', Punctuation),
- (r']', Punctuation, '#pop'),
- ],
-
- # a json value - either a simple value or a complex value (object or array)
- 'value': [
- include('whitespace'),
- include('simplevalue'),
- (r'{', Punctuation, 'objectvalue'),
- (r'\[', Punctuation, 'arrayvalue'),
- ],
-
-
- # the root of a json document should be a value
- 'root': [
- include('value'),
- ],
-
- }
-
+from pygments.lexers.html import HtmlLexer, DtdLexer, XmlLexer, XsltLexer, \
+ HamlLexer, ScamlLexer, JadeLexer
+from pygments.lexers.css import CssLexer, SassLexer, ScssLexer
+from pygments.lexers.javascript import JavascriptLexer, LiveScriptLexer, \
+ DartLexer, TypeScriptLexer, LassoLexer, ObjectiveJLexer, CoffeeScriptLexer
+from pygments.lexers.actionscript import ActionScriptLexer, \
+ ActionScript3Lexer, MxmlLexer
+from pygments.lexers.php import PhpLexer
+from pygments.lexers.webmisc import DuelLexer, XQueryLexer, SlimLexer, QmlLexer
+from pygments.lexers.data import JsonLexer
JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5
-
-class ActionScriptLexer(RegexLexer):
- """
- For ActionScript source code.
-
- .. versionadded:: 0.9
- """
-
- name = 'ActionScript'
- aliases = ['as', 'actionscript']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript', 'text/x-actionscript',
- 'text/actionscript']
-
- flags = re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
- (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
- (r'[{}\[\]();.]+', Punctuation),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
- r'switch)\b', Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
- Keyword.Constant),
- (r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
- r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
- r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
- r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
- r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
- r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
- r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
- r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
- r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
- r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
- r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
- r'EventPhase|ExternalInterface|FileFilter|FileReference|'
- r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
- r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
- r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|'
- r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput'
- r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
- r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
- r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
- r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
- r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
- r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
- r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
- r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
- r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
- r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
- r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
- r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
- r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
- r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
- r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
- r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
- r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
- r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
- r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
- r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
- r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
- r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
- Name.Builtin),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b',Name.Function),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class ActionScript3Lexer(RegexLexer):
- """
- For ActionScript 3 source code.
-
- .. versionadded:: 0.11
- """
-
- name = 'ActionScript 3'
- aliases = ['as3', 'actionscript3']
- filenames = ['*.as']
- mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
- 'text/actionscript3']
-
- identifier = r'[$a-zA-Z_]\w*'
- typeidentifier = identifier + '(?:\.<\w+>)?'
-
- flags = re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'(function\s+)(' + identifier + r')(\s*)(\()',
- bygroups(Keyword.Declaration, Name.Function, Text, Operator),
- 'funcparams'),
- (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r')',
- bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
- Keyword.Type)),
- (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
- bygroups(Keyword, Text, Name.Namespace, Text)),
- (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
- bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
- (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
- (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
- r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
- r'switch|import|include|as|is)\b',
- Keyword),
- (r'(class|public|final|internal|native|override|private|protected|'
- r'static|import|extends|implements|interface|intrinsic|return|super|'
- r'dynamic|function|const|get|namespace|package|set)\b',
- Keyword.Declaration),
- (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
- r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
- r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
- r'unescape)\b', Name.Function),
- (identifier, Name),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-f]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator),
- ],
- 'funcparams': [
- (r'\s+', Text),
- (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
- typeidentifier + r'|\*)(\s*)',
- bygroups(Text, Punctuation, Name, Text, Operator, Text,
- Keyword.Type, Text), 'defval'),
- (r'\)', Operator, 'type')
- ],
- 'type': [
- (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
- bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
- (r'\s*', Text, '#pop:2')
- ],
- 'defval': [
- (r'(=)(\s*)([^(),]+)(\s*)(,?)',
- bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
- (r',?', Operator, '#pop')
- ]
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text):
- return 0.3
- return 0
-
-
-class CssLexer(RegexLexer):
- """
- For CSS (Cascading Style Sheets).
- """
-
- name = 'CSS'
- aliases = ['css']
- filenames = ['*.css']
- mimetypes = ['text/css']
-
- tokens = {
- 'root': [
- include('basics'),
- ],
- 'basics': [
- (r'\s+', Text),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'{', Punctuation, 'content'),
- (r'\:[\w-]+', Name.Decorator),
- (r'\.[\w-]+', Name.Class),
- (r'\#[\w-]+', Name.Function),
- (r'@[\w-]+', Keyword, 'atrule'),
- (r'[\w-]+', Name.Tag),
- (r'[~\^\*!%&$\[\]\(\)<>\|+=@:;,./?-]', Operator),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single)
- ],
- 'atrule': [
- (r'{', Punctuation, 'atcontent'),
- (r';', Punctuation, '#pop'),
- include('basics'),
- ],
- 'atcontent': [
- include('basics'),
- (r'}', Punctuation, '#pop:2'),
- ],
- 'content': [
- (r'\s+', Text),
- (r'}', Punctuation, '#pop'),
- (r'url\(.*?\)', String.Other),
- (r'^@.*?$', Comment.Preproc),
- (r'(azimuth|background-attachment|background-color|'
- r'background-image|background-position|background-repeat|'
- r'background|border-bottom-color|border-bottom-style|'
- r'border-bottom-width|border-left-color|border-left-style|'
- r'border-left-width|border-right|border-right-color|'
- r'border-right-style|border-right-width|border-top-color|'
- r'border-top-style|border-top-width|border-bottom|'
- r'border-collapse|border-left|border-width|border-color|'
- r'border-spacing|border-style|border-top|border|caption-side|'
- r'clear|clip|color|content|counter-increment|counter-reset|'
- r'cue-after|cue-before|cue|cursor|direction|display|'
- r'elevation|empty-cells|float|font-family|font-size|'
- r'font-size-adjust|font-stretch|font-style|font-variant|'
- r'font-weight|font|height|letter-spacing|line-height|'
- r'list-style-type|list-style-image|list-style-position|'
- r'list-style|margin-bottom|margin-left|margin-right|'
- r'margin-top|margin|marker-offset|marks|max-height|max-width|'
- r'min-height|min-width|opacity|orphans|outline-color|'
- r'outline-style|outline-width|outline|overflow(?:-x|-y)?|padding-bottom|'
- r'padding-left|padding-right|padding-top|padding|page|'
- r'page-break-after|page-break-before|page-break-inside|'
- r'pause-after|pause-before|pause|pitch-range|pitch|'
- r'play-during|position|quotes|richness|right|size|'
- r'speak-header|speak-numeral|speak-punctuation|speak|'
- r'speech-rate|stress|table-layout|text-align|text-decoration|'
- r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
- r'vertical-align|visibility|voice-family|volume|white-space|'
- r'widows|width|word-spacing|z-index|bottom|'
- r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
- r'behind|below|bidi-override|blink|block|bolder|bold|both|'
- r'capitalize|center-left|center-right|center|circle|'
- r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
- r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
- r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
- r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
- r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
- r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
- r'inherit|inline-table|inline|inset|inside|invert|italic|'
- r'justify|katakana-iroha|katakana|landscape|larger|large|'
- r'left-side|leftwards|left|level|lighter|line-through|list-item|'
- r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
- r'lower|low|medium|message-box|middle|mix|monospace|'
- r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
- r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
- r'open-quote|outset|outside|overline|pointer|portrait|px|'
- r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
- r'rightwards|s-resize|sans-serif|scroll|se-resize|'
- r'semi-condensed|semi-expanded|separate|serif|show|silent|'
- r'slower|slow|small-caps|small-caption|smaller|soft|solid|'
- r'spell-out|square|static|status-bar|super|sw-resize|'
- r'table-caption|table-cell|table-column|table-column-group|'
- r'table-footer-group|table-header-group|table-row|'
- r'table-row-group|text-bottom|text-top|text|thick|thin|'
- r'transparent|ultra-condensed|ultra-expanded|underline|'
- r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
- r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
- r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
- (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
- r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
- r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
- r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
- r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
- r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
- r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
- r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
- r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
- r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
- r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
- r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
- r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
- r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
- r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
- r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
- r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
- r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
- r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
- r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
- r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
- r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
- r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
- r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
- r'blueviolet|peachpuff)\b', Name.Builtin),
- (r'\!important', Comment.Preproc),
- (r'/\*(?:.|\n)*?\*/', Comment),
- (r'\#[a-zA-Z0-9]{1,6}', Number),
- (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|pt|pc|in|mm|cm|ex|s)\b', Number),
- # Separate regex for percentages, as can't do word boundaries with %
- (r'[\.-]?[0-9]*[\.]?[0-9]+%', Number),
- (r'-?[0-9]+', Number),
- (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
- (r'[\[\]();]+', Punctuation),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'[a-zA-Z_]\w*', Name)
- ]
- }
-
-
-class ObjectiveJLexer(RegexLexer):
- """
- For Objective-J source code with preprocessor directives.
-
- .. versionadded:: 1.3
- """
-
- name = 'Objective-J'
- aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
- filenames = ['*.j']
- mimetypes = ['text/x-objective-j']
-
- #: optional Comment or Whitespace
- _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- include('whitespace'),
-
- # function definition
- (r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' + _ws + '{)',
- bygroups(using(this), using(this, state='function_signature'),
- using(this))),
-
- # class definition
- (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
- 'classname'),
- (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
- 'forward_classname'),
- (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),
-
- include('statements'),
- ('[{\(\)}]', Punctuation),
- (';', Punctuation),
- ],
- 'whitespace': [
- (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
- bygroups(Comment.Preproc, Text, String.Double)),
- (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
- bygroups(Comment.Preproc, Text, String.Double)),
-
- (r'#if\s+0', Comment.Preproc, 'if0'),
- (r'#', Comment.Preproc, 'macro'),
-
- (r'\n', Text),
- (r'\s+', Text),
- (r'\\\n', Text), # line continuation
- (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
- (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
- (r'<!--', Comment),
- ],
- 'slashstartsregex': [
- include('whitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop'),
- ],
- 'badregex': [
- (r'\n', Text, '#pop'),
- ],
- 'statements': [
- (r'(L|@)?"', String, 'string'),
- (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
- String.Char),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
- (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
- (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
- (r'0[0-7]+[Ll]?', Number.Oct),
- (r'\d+[Ll]?', Number.Integer),
-
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
-
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
- r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),
-
- (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-
- (r'(@selector|@private|@protected|@public|@encode|'
- r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
- r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),
-
- (r'(int|long|float|short|double|char|unsigned|signed|void|'
- r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
- Keyword.Type),
-
- (r'(self|super)\b', Name.Builtin),
-
- (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
- r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
- r'SQRT2)\b', Keyword.Constant),
-
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
-
- (r'([$a-zA-Z_]\w*)(' + _ws + r')(?=\()',
- bygroups(Name.Function, using(this))),
-
- (r'[$a-zA-Z_]\w*', Name),
- ],
- 'classname' : [
- # interface definition that inherits
- (r'([a-zA-Z_]\w*)(' + _ws + r':' + _ws +
- r')([a-zA-Z_]\w*)?',
- bygroups(Name.Class, using(this), Name.Class), '#pop'),
- # interface definition for a category
- (r'([a-zA-Z_]\w*)(' + _ws + r'\()([a-zA-Z_]\w*)(\))',
- bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
- # simple interface / implementation
- (r'([a-zA-Z_]\w*)', Name.Class, '#pop'),
- ],
- 'forward_classname' : [
- (r'([a-zA-Z_]\w*)(\s*,\s*)',
- bygroups(Name.Class, Text), '#push'),
- (r'([a-zA-Z_]\w*)(\s*;?)',
- bygroups(Name.Class, Text), '#pop'),
- ],
- 'function_signature': [
- include('whitespace'),
-
- # start of a selector w/ parameters
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), 'function_parameters'),
-
- # no-param function
- (r'(\(' + _ws + r')' # open paren
- r'([a-zA-Z_]\w+)' # return type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # function name
- bygroups(using(this), Keyword.Type, using(this),
- Name.Function), "#pop"),
-
- # no return type given, start of a selector w/ parameters
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- bygroups (Name.Function), 'function_parameters'),
-
- # no return type given, no-param function
- (r'([$a-zA-Z_]\w+)', # function name
- bygroups(Name.Function), "#pop"),
-
- ('', Text, '#pop'),
- ],
- 'function_parameters': [
- include('whitespace'),
-
- # parameters
- (r'(\(' + _ws + ')' # open paren
- r'([^\)]+)' # type
- r'(' + _ws + r'\)' + _ws + r')' # close paren
- r'([$a-zA-Z_]\w+)', # param name
- bygroups(using(this), Keyword.Type, using(this), Text)),
-
- # one piece of a selector name
- (r'([$a-zA-Z_]\w+' + _ws + r':)', # function name
- Name.Function),
-
- # smallest possible selector piece
- (r'(:)', Name.Function),
-
- # var args
- (r'(,' + _ws + r'\.\.\.)', using(this)),
-
- # param name
- (r'([$a-zA-Z_]\w+)', Text),
- ],
- 'expression' : [
- (r'([$a-zA-Z_]\w*)(\()', bygroups(Name.Function,
- Punctuation)),
- (r'(\))', Punctuation, "#pop"),
- ],
- 'string': [
- (r'"', String, '#pop'),
- (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
- (r'[^\\"\n]+', String), # all other characters
- (r'\\\n', String), # line continuation
- (r'\\', String), # stray backslash
- ],
- 'macro': [
- (r'[^/\n]+', Comment.Preproc),
- (r'/[*](.|\n)*?[*]/', Comment.Multiline),
- (r'//.*?\n', Comment.Single, '#pop'),
- (r'/', Comment.Preproc),
- (r'(?<=\\)\n', Comment.Preproc),
- (r'\n', Comment.Preproc, '#pop'),
- ],
- 'if0': [
- (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
- (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
- (r'.*?\n', Comment),
- ]
- }
-
- def analyse_text(text):
- if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
- # special directive found in most Objective-J files
- return True
- return False
-
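(Aside: `analyse_text` hooks like the one above feed Pygments' lexer guessing; the return value is read as a confidence between 0.0 and 1.0, with `True` counting as 1.0. A minimal sketch of how that gets exercised, using a made-up sample string:)

```python
from pygments.lexers import guess_lexer

# ObjectiveJLexer.analyse_text returns True (read as 1.0) when it sees the
# @import directive, so guess_lexer should prefer it for this sample.
sample = '@import <Foundation/Foundation.j>\n\nfunction main() { return 0; }\n'
print(guess_lexer(sample).name)  # expected: 'Objective-J'
```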
-
-class HtmlLexer(RegexLexer):
- """
- For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
- by the appropriate lexer.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
- mimetypes = ['text/html', 'application/xhtml+xml']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
- (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
- # note: this allows tag names not used in HTML like <x:with-dash>;
- # this is to support yet-unknown template engines and the like
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
- (r'[\w:-]+', Name.Attribute),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'script-content': [
- (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
- ],
- 'style-content': [
- (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
- (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
- ],
- 'attr': [
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if html_doctype_matches(text):
- return 0.5
-
-
-class PhpLexer(RegexLexer):
- """
- For `PHP <http://www.php.net/>`_ source code.
- For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
- Additional options accepted:
-
- `startinline`
- If given and ``True`` the lexer starts highlighting with
- php code (i.e.: no starting ``<?php`` required). The default
- is ``False``.
- `funcnamehighlighting`
- If given and ``True``, highlight builtin function names
- (default: ``True``).
- `disabledmodules`
- If given, must be a list of module names whose function names
- should not be highlighted. By default all modules are highlighted
- except the special ``'unknown'`` module that includes functions
- that are known to php but are undocumented.
-
- To get a list of allowed modules have a look into the
- `_phpbuiltins` module:
-
- .. sourcecode:: pycon
-
- >>> from pygments.lexers._phpbuiltins import MODULES
- >>> MODULES.keys()
- ['PHP Options/Info', 'Zip', 'dba', ...]
-
- In fact the names of those modules match the module names from
- the php documentation.
- """
-
- name = 'PHP'
- aliases = ['php', 'php3', 'php4', 'php5']
- filenames = ['*.php', '*.php[345]', '*.inc']
- mimetypes = ['text/x-php']
-
- # Note that a backslash is included in the following two patterns
- # PHP uses a backslash as a namespace separator
- _ident_char = r'[\\\w]|[^\x00-\x7f]'
- _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
- _ident_end = r'(?:' + _ident_char + ')*'
- _ident_inner = _ident_begin + _ident_end
-
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
- tokens = {
- 'root': [
- (r'<\?(php)?', Comment.Preproc, 'php'),
- (r'[^<]+', Other),
- (r'<', Other)
- ],
- 'php': [
- (r'\?>', Comment.Preproc, '#pop'),
- (r'<<<(\'?)(' + _ident_inner + ')\1\n.*?\n\2\;?\n', String),
- (r'\s+', Text),
- (r'#.*?\n', Comment.Single),
- (r'//.*?\n', Comment.Single),
- # put the empty comment here, it is otherwise seen as
- # the start of a docstring
- (r'/\*\*/', Comment.Multiline),
- (r'/\*\*.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(->|::)(\s*)(' + _ident_inner + ')',
- bygroups(Operator, Text, Name.Attribute)),
- (r'[~!%^&*+=|:.<>/?@-]+', Operator),
- (r'[\[\]{}();,]+', Punctuation),
- (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
- (r'(function)(\s+)(&?)(\s*)',
- bygroups(Keyword, Text, Operator, Text), 'functionname'),
- (r'(const)(\s+)(' + _ident_inner + ')',
- bygroups(Keyword, Text, Name.Constant)),
- (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
- r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
- r'FALSE|print|for|require|continue|foreach|require_once|'
- r'declare|return|default|static|do|switch|die|stdClass|'
- r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
- r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
- r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
- r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
- r'implements|public|private|protected|abstract|clone|try|'
- r'catch|throw|this|use|namespace|trait|yield|'
- r'finally)\b', Keyword),
- (r'(true|false|null)\b', Keyword.Constant),
- (r'\$\{\$+' + _ident_inner + '\}', Name.Variable),
- (r'\$+' + _ident_inner, Name.Variable),
- (_ident_inner, Name.Other),
- (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
- (r'\d+[eE][+-]?[0-9]+', Number.Float),
- (r'0[0-7]+', Number.Oct),
- (r'0[xX][a-f0-9]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'0b[01]+', Number.Bin),
- (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
- (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
- (r'"', String.Double, 'string'),
- ],
- 'classname': [
- (_ident_inner, Name.Class, '#pop')
- ],
- 'functionname': [
- (_ident_inner, Name.Function, '#pop')
- ],
- 'string': [
- (r'"', String.Double, '#pop'),
- (r'[^{$"\\]+', String.Double),
- (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
- (r'\$' + _ident_inner + '(\[\S+?\]|->' + _ident_inner + ')?',
- String.Interpol),
- (r'(\{\$\{)(.*?)(\}\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\{)(\$.*?)(\})',
- bygroups(String.Interpol, using(this, _startinline=True),
- String.Interpol)),
- (r'(\$\{)(\S+)(\})',
- bygroups(String.Interpol, Name.Variable, String.Interpol)),
- (r'[${\\]+', String.Double)
- ],
- }
-
- def __init__(self, **options):
- self.funcnamehighlighting = get_bool_opt(
- options, 'funcnamehighlighting', True)
- self.disabledmodules = get_list_opt(
- options, 'disabledmodules', ['unknown'])
- self.startinline = get_bool_opt(options, 'startinline', False)
-
- # private option argument for the lexer itself
- if '_startinline' in options:
- self.startinline = options.pop('_startinline')
-
- # collect activated functions in a set
- self._functions = set()
- if self.funcnamehighlighting:
- from pygments.lexers._phpbuiltins import MODULES
- for key, value in iteritems(MODULES):
- if key not in self.disabledmodules:
- self._functions.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.startinline:
- stack.append('php')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if token is Name.Other:
- if value in self._functions:
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if re.search(r'<\?(?!xml)', text):
- rv += 0.3
- if '?>' in text:
- rv += 0.1
- return rv
-
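(Aside: to tie the PhpLexer options documented above to actual use, `startinline=True` makes the lexer highlight bare PHP without a leading `<?php` tag. A rough usage sketch; the snippet and the formatter choice are arbitrary:)

```python
from pygments import highlight
from pygments.lexers import PhpLexer
from pygments.formatters import NullFormatter

code = '$greeting = "hello"; echo strtoupper($greeting);'
# Without startinline=True, everything before a <?php tag is emitted as
# plain text; with it, highlighting starts in PHP mode right away.
print(highlight(code, PhpLexer(startinline=True), NullFormatter()))
```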
-
-class DtdLexer(RegexLexer):
- """
- A lexer for DTDs (Document Type Definitions).
-
- .. versionadded:: 1.5
- """
-
- flags = re.MULTILINE | re.DOTALL
-
- name = 'DTD'
- aliases = ['dtd']
- filenames = ['*.dtd']
- mimetypes = ['application/xml-dtd']
-
- tokens = {
- 'root': [
- include('common'),
-
- (r'(<!ELEMENT)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'element'),
- (r'(<!ATTLIST)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'attlist'),
- (r'(<!ENTITY)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Entity), 'entity'),
- (r'(<!NOTATION)(\s+)(\S+)',
- bygroups(Keyword, Text, Name.Tag), 'notation'),
- (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections
- bygroups(Keyword, Name.Entity, Text, Keyword)),
-
- (r'(<!DOCTYPE)(\s+)([^>\s]+)',
- bygroups(Keyword, Text, Name.Tag)),
- (r'PUBLIC|SYSTEM', Keyword.Constant),
- (r'[\[\]>]', Keyword),
- ],
-
- 'common': [
- (r'\s+', Text),
- (r'(%|&)[^;]*;', Name.Entity),
- ('<!--', Comment, 'comment'),
- (r'[(|)*,?+]', Operator),
- (r'"[^"]*"', String.Double),
- (r'\'[^\']*\'', String.Single),
- ],
-
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
-
- 'element': [
- include('common'),
- (r'EMPTY|ANY|#PCDATA', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Tag),
- (r'>', Keyword, '#pop'),
- ],
-
- 'attlist': [
- include('common'),
- (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION',
- Keyword.Constant),
- (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant),
- (r'xml:space|xml:lang', Keyword.Reserved),
- (r'[^>\s\|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
-
- 'entity': [
- include('common'),
- (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Entity),
- (r'>', Keyword, '#pop'),
- ],
-
- 'notation': [
- include('common'),
- (r'SYSTEM|PUBLIC', Keyword.Constant),
- (r'[^>\s\|()?+*,]+', Name.Attribute),
- (r'>', Keyword, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if not looks_like_xml(text) and \
- ('<!ELEMENT' in text or '<!ATTLIST' in text or '<!ENTITY' in text):
- return 0.8
-
-class XmlLexer(RegexLexer):
- """
- Generic lexer for XML (eXtensible Markup Language).
- """
-
- flags = re.MULTILINE | re.DOTALL | re.UNICODE
-
- name = 'XML'
- aliases = ['xml']
- filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd',
- '*.wsdl', '*.wsf']
- mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
- 'application/rss+xml', 'application/atom+xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
- def analyse_text(text):
- if looks_like_xml(text):
- return 0.5
-
-
-class XsltLexer(XmlLexer):
- '''
- A lexer for XSLT.
-
- .. versionadded:: 0.10
- '''
-
- name = 'XSLT'
- aliases = ['xslt']
- filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc
- mimetypes = ['application/xsl+xml', 'application/xslt+xml']
-
- EXTRA_KEYWORDS = set([
- 'apply-imports', 'apply-templates', 'attribute',
- 'attribute-set', 'call-template', 'choose', 'comment',
- 'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
- 'for-each', 'if', 'import', 'include', 'key', 'message',
- 'namespace-alias', 'number', 'otherwise', 'output', 'param',
- 'preserve-space', 'processing-instruction', 'sort',
- 'strip-space', 'stylesheet', 'template', 'text', 'transform',
- 'value-of', 'variable', 'when', 'with-param'
- ])
-
- def get_tokens_unprocessed(self, text):
- for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
- m = re.match('</?xsl:([^>]*)/?>?', value)
-
- if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
- yield index, Keyword, value
- else:
- yield index, token, value
-
- def analyse_text(text):
- if looks_like_xml(text) and '<xsl' in text:
- return 0.8
-
-
-class MxmlLexer(RegexLexer):
- """
- For MXML markup.
- Nested AS3 in <script> tags is highlighted by the appropriate lexer.
-
- .. versionadded:: 1.1
- """
- flags = re.MULTILINE | re.DOTALL
- name = 'MXML'
- aliases = ['mxml']
- filenames = ['*.mxml']
- mimetypes = ['text/xml', 'application/xml']
-
- tokens = {
- 'root': [
- ('[^<&]+', Text),
- (r'&\S*?;', Name.Entity),
- (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
- bygroups(String, using(ActionScript3Lexer), String)),
- ('<!--', Comment, 'comment'),
- (r'<\?.*?\?>', Comment.Preproc),
- ('<![^>]*>', Comment.Preproc),
- (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
- (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
- ],
- 'comment': [
- ('[^-]+', Comment),
- ('-->', Comment, '#pop'),
- ('-', Comment),
- ],
- 'tag': [
- (r'\s+', Text),
- (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
- (r'/?\s*>', Name.Tag, '#pop'),
- ],
- 'attr': [
- ('\s+', Text),
- ('".*?"', String, '#pop'),
- ("'.*?'", String, '#pop'),
- (r'[^\s>]+', String, '#pop'),
- ],
- }
-
-
-class HaxeLexer(ExtendedRegexLexer):
- """
- For Haxe source code (http://haxe.org/).
-
- .. versionadded:: 1.3
- """
-
- name = 'Haxe'
- aliases = ['hx', 'haxe', 'hxsl']
- filenames = ['*.hx', '*.hxsl']
- mimetypes = ['text/haxe', 'text/x-haxe', 'text/x-hx']
-
- # keywords extracted from lexer.mll in the haxe compiler source
- keyword = (r'(?:function|class|static|var|if|else|while|do|for|'
- r'break|return|continue|extends|implements|import|'
- r'switch|case|default|public|private|try|untyped|'
- r'catch|new|this|throw|extern|enum|in|interface|'
- r'cast|override|dynamic|typedef|package|'
- r'inline|using|null|true|false|abstract)\b')
-
- # idtype in lexer.mll
- typeid = r'_*[A-Z]\w*'
-
- # combined ident and dollar and idtype
- ident = r'(?:_*[a-z]\w*|_+[0-9]\w*|' + typeid + '|_+|\$\w+)'
-
- binop = (r'(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|'
- r'!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|'
- r'/|\-|=>|=)')
-
- # ident except keywords
- ident_no_keyword = r'(?!' + keyword + ')' + ident
-
- flags = re.DOTALL | re.MULTILINE
-
- preproc_stack = []
-
- def preproc_callback(self, match, ctx):
- proc = match.group(2)
-
- if proc == 'if':
- # store the current stack
- self.preproc_stack.append(ctx.stack[:])
- elif proc in ['else', 'elseif']:
- # restore the stack back to right before #if
- if self.preproc_stack: ctx.stack = self.preproc_stack[-1][:]
- elif proc == 'end':
- # remove the saved stack of previous #if
- if self.preproc_stack: self.preproc_stack.pop()
-
- # #if and #elseif should be followed by an expr
- if proc in ['if', 'elseif']:
- ctx.stack.append('preproc-expr')
-
- # #error can optionally be followed by the error msg
- if proc in ['error']:
- ctx.stack.append('preproc-error')
-
- yield match.start(), Comment.Preproc, '#' + proc
- ctx.pos = match.end()
-
-
- tokens = {
- 'root': [
- include('spaces'),
- include('meta'),
- (r'(?:package)\b', Keyword.Namespace, ('semicolon', 'package')),
- (r'(?:import)\b', Keyword.Namespace, ('semicolon', 'import')),
- (r'(?:using)\b', Keyword.Namespace, ('semicolon', 'using')),
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, 'abstract'),
- (r'(?:class|interface)\b', Keyword.Declaration, 'class'),
- (r'(?:enum)\b', Keyword.Declaration, 'enum'),
- (r'(?:typedef)\b', Keyword.Declaration, 'typedef'),
-
- # top-level expression
- # although it is not supported in Haxe, it is common to write
- # expressions in web pages; the positive lookahead here prevents
- # an infinite loop at EOF
- (r'(?=.)', Text, 'expr-statement'),
- ],
-
- # space/tab/comment/preproc
- 'spaces': [
- (r'\s+', Text),
- (r'//[^\n\r]*', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'(#)(if|elseif|else|end|error)\b', preproc_callback),
- ],
-
- 'string-single-interpol': [
- (r'\$\{', String.Interpol, ('string-interpol-close', 'expr')),
- (r'\$\$', String.Escape),
- (r'\$(?=' + ident + ')', String.Interpol, 'ident'),
- include('string-single'),
- ],
-
- 'string-single': [
- (r"'", String.Single, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Single),
- ],
-
- 'string-double': [
- (r'"', String.Double, '#pop'),
- (r'\\.', String.Escape),
- (r'.', String.Double),
- ],
-
- 'string-interpol-close': [
- (r'\$'+ident, String.Interpol),
- (r'\}', String.Interpol, '#pop'),
- ],
-
- 'package': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'import': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\*', Keyword), # wildcard import
- (r'\.', Punctuation, 'import-ident'),
- (r'in', Keyword.Namespace, 'ident'),
- default('#pop'),
- ],
-
- 'import-ident': [
- include('spaces'),
- (r'\*', Keyword, '#pop'), # wildcard import
- (ident, Name.Namespace, '#pop'),
- ],
-
- 'using': [
- include('spaces'),
- (ident, Name.Namespace),
- (r'\.', Punctuation, 'import-ident'),
- default('#pop'),
- ],
-
- 'preproc-error': [
- (r'\s+', Comment.Preproc),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- default('#pop'),
- ],
-
- 'preproc-expr': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc, ('#pop', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, '#pop'),
-
- # Float
- (r'\.[0-9]+', Number.Float),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float),
- (r'[0-9]+\.[0-9]+', Number.Float),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- 'preproc-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\)', Comment.Preproc, '#pop'),
- ('', Text, 'preproc-expr-in-parenthesis'),
- ],
-
- 'preproc-expr-chain': [
- (r'\s+', Comment.Preproc),
- (binop, Comment.Preproc, ('#pop', 'preproc-expr-in-parenthesis')),
- default('#pop'),
- ],
-
- # same as 'preproc-expr' but able to chain 'preproc-expr-chain'
- 'preproc-expr-in-parenthesis': [
- (r'\s+', Comment.Preproc),
- (r'\!', Comment.Preproc),
- (r'\(', Comment.Preproc,
- ('#pop', 'preproc-expr-chain', 'preproc-parenthesis')),
-
- (ident, Comment.Preproc, ('#pop', 'preproc-expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'preproc-expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'preproc-expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'preproc-expr-chain')),
-
- # String
- (r"'", String.Single,
- ('#pop', 'preproc-expr-chain', 'string-single')),
- (r'"', String.Double,
- ('#pop', 'preproc-expr-chain', 'string-double')),
- ],
-
- 'abstract' : [
- include('spaces'),
- default(('#pop', 'abstract-body', 'abstract-relation',
- 'abstract-opaque', 'type-param-constraint', 'type-name')),
- ],
-
- 'abstract-body' : [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'class-body')),
- ],
-
- 'abstract-opaque' : [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close', 'type')),
- default('#pop'),
- ],
-
- 'abstract-relation': [
- include('spaces'),
- (r'(?:to|from)', Keyword.Declaration, 'type'),
- (r',', Punctuation),
- default('#pop'),
- ],
-
- 'meta': [
- include('spaces'),
- (r'@', Name.Decorator, ('meta-body', 'meta-ident', 'meta-colon')),
- ],
-
- # optional colon
- 'meta-colon': [
- include('spaces'),
- (r':', Name.Decorator, '#pop'),
- default('#pop'),
- ],
-
- # same as 'ident' but sets the token to Name.Decorator instead of Name
- 'meta-ident': [
- include('spaces'),
- (ident, Name.Decorator, '#pop'),
- ],
-
- 'meta-body': [
- include('spaces'),
- (r'\(', Name.Decorator, ('#pop', 'meta-call')),
- default('#pop'),
- ],
-
- 'meta-call': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- default(('#pop', 'meta-call-sep', 'expr')),
- ],
-
- 'meta-call-sep': [
- include('spaces'),
- (r'\)', Name.Decorator, '#pop'),
- (r',', Punctuation, ('#pop', 'meta-call')),
- ],
-
- 'typedef': [
- include('spaces'),
- default(('#pop', 'typedef-body', 'type-param-constraint',
- 'type-name')),
- ],
-
- 'typedef-body': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'optional-semicolon', 'type')),
- ],
-
- 'enum': [
- include('spaces'),
- default(('#pop', 'enum-body', 'bracket-open',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'enum-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (ident_no_keyword, Name, ('enum-member', 'type-param-constraint')),
- ],
-
- 'enum-member': [
- include('spaces'),
- (r'\(', Punctuation,
- ('#pop', 'semicolon', 'flag', 'function-param')),
- default(('#pop', 'semicolon', 'flag')),
- ],
-
- 'class': [
- include('spaces'),
- default(('#pop', 'class-body', 'bracket-open', 'extends',
- 'type-param-constraint', 'type-name')),
- ],
-
- 'extends': [
- include('spaces'),
- (r'(?:extends|implements)\b', Keyword.Declaration, 'type'),
- (r',', Punctuation), # the comma is made optional here, since haxe2
- # requires the comma but haxe3 does not allow it
- default('#pop'),
- ],
-
- 'bracket-open': [
- include('spaces'),
- (r'\{', Punctuation, '#pop'),
- ],
-
- 'bracket-close': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'class-body': [
- include('spaces'),
- include('meta'),
- (r'\}', Punctuation, '#pop'),
- (r'(?:static|public|private|override|dynamic|inline|macro)\b',
- Keyword.Declaration),
- default('class-member'),
- ],
-
- 'class-member': [
- include('spaces'),
- (r'(var)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'var')),
- (r'(function)\b', Keyword.Declaration,
- ('#pop', 'optional-semicolon', 'class-method')),
- ],
-
- # local function, anonymous or not
- 'function-local': [
- include('spaces'),
- (r'(' + ident_no_keyword + ')?', Name.Function,
- ('#pop', 'optional-expr', 'flag', 'function-param',
- 'parenthesis-open', 'type-param-constraint')),
- ],
-
- 'optional-expr': [
- include('spaces'),
- include('expr'),
- default('#pop'),
- ],
-
- 'class-method': [
- include('spaces'),
- (ident, Name.Function, ('#pop', 'optional-expr', 'flag',
- 'function-param', 'parenthesis-open',
- 'type-param-constraint')),
- ],
-
- # function arguments
- 'function-param': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (ident_no_keyword, Name,
- ('#pop', 'function-param-sep', 'assign', 'flag')),
- ],
-
- 'function-param-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'function-param')),
- ],
-
- 'prop-get-set': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'prop-get-set-opt', 'comma', 'prop-get-set-opt')),
- default('#pop'),
- ],
-
- 'prop-get-set-opt': [
- include('spaces'),
- (r'(?:default|null|never|dynamic|get|set)\b', Keyword, '#pop'),
- (ident_no_keyword, Text, '#pop'), #custom getter/setter
- ],
-
- 'expr-statement': [
- include('spaces'),
- # the semicolon is made optional here, just to avoid checking whether
- # the last token is a bracket or not.
- default(('#pop', 'optional-semicolon', 'expr')),
- ],
-
- 'expr': [
- include('spaces'),
- (r'@', Name.Decorator, ('#pop', 'optional-expr', 'meta-body',
- 'meta-ident', 'meta-colon')),
- (r'(?:\+\+|\-\-|~(?!/)|!|\-)', Operator),
- (r'\(', Punctuation, ('#pop', 'expr-chain', 'parenthesis')),
- (r'(?:static|public|private|override|dynamic|inline)\b',
- Keyword.Declaration),
- (r'(?:function)\b', Keyword.Declaration, ('#pop', 'expr-chain',
- 'function-local')),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket')),
- (r'(?:true|false|null)\b', Keyword.Constant, ('#pop', 'expr-chain')),
- (r'(?:this)\b', Keyword, ('#pop', 'expr-chain')),
- (r'(?:cast)\b', Keyword, ('#pop', 'expr-chain', 'cast')),
- (r'(?:try)\b', Keyword, ('#pop', 'catch', 'expr')),
- (r'(?:var)\b', Keyword.Declaration, ('#pop', 'var')),
- (r'(?:new)\b', Keyword, ('#pop', 'expr-chain', 'new')),
- (r'(?:switch)\b', Keyword, ('#pop', 'switch')),
- (r'(?:if)\b', Keyword, ('#pop', 'if')),
- (r'(?:do)\b', Keyword, ('#pop', 'do')),
- (r'(?:while)\b', Keyword, ('#pop', 'while')),
- (r'(?:for)\b', Keyword, ('#pop', 'for')),
- (r'(?:untyped|throw)\b', Keyword),
- (r'(?:return)\b', Keyword, ('#pop', 'optional-expr')),
- (r'(?:macro)\b', Keyword, ('#pop', 'macro')),
- (r'(?:continue|break)\b', Keyword, '#pop'),
- (r'(?:\$\s*[a-z]\b|\$(?!'+ident+'))', Name, ('#pop', 'dollar')),
- (ident_no_keyword, Name, ('#pop', 'expr-chain')),
-
- # Float
- (r'\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.[0-9]+', Number.Float, ('#pop', 'expr-chain')),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, ('#pop', 'expr-chain')),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, ('#pop', 'expr-chain')),
- (r'[0-9]+', Number.Integer, ('#pop', 'expr-chain')),
-
- # String
- (r"'", String.Single, ('#pop', 'expr-chain', 'string-single-interpol')),
- (r'"', String.Double, ('#pop', 'expr-chain', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gimsu]*', String.Regex, ('#pop', 'expr-chain')),
-
- # Array
- (r'\[', Punctuation, ('#pop', 'expr-chain', 'array-decl')),
- ],
-
- 'expr-chain': [
- include('spaces'),
- (r'(?:\+\+|\-\-)', Operator),
- (binop, Operator, ('#pop', 'expr')),
- (r'(?:in)\b', Keyword, ('#pop', 'expr')),
- (r'\?', Operator, ('#pop', 'expr', 'ternary', 'expr')),
- (r'(\.)(' + ident_no_keyword + ')', bygroups(Punctuation, Name)),
- (r'\[', Punctuation, 'array-access'),
- (r'\(', Punctuation, 'call'),
- default('#pop'),
- ],
-
- # macro reification
- 'macro': [
- include('spaces'),
- include('meta'),
- (r':', Punctuation, ('#pop', 'type')),
-
- (r'(?:extern|private)\b', Keyword.Declaration),
- (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')),
- (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'class')),
- (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')),
- (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')),
-
- default(('#pop', 'expr')),
- ],
-
- # cast can be written as "cast expr" or "cast(expr, type)"
- 'cast': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'parenthesis-close',
- 'cast-type', 'expr')),
- default(('#pop', 'expr')),
- ],
-
- # optionally give a type as the 2nd argument of cast()
- 'cast-type': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- 'catch': [
- include('spaces'),
- (r'(?:catch)\b', Keyword, ('expr', 'function-param',
- 'parenthesis-open')),
- default('#pop'),
- ],
-
- # do-while loop
- 'do': [
- include('spaces'),
- default(('#pop', 'do-while', 'expr')),
- ],
-
- # the while after do
- 'do-while': [
- include('spaces'),
- (r'(?:while)\b', Keyword, ('#pop', 'parenthesis',
- 'parenthesis-open')),
- ],
-
- 'while': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'for': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'expr', 'parenthesis')),
- ],
-
- 'if': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'else', 'optional-semicolon', 'expr',
- 'parenthesis')),
- ],
-
- 'else': [
- include('spaces'),
- (r'(?:else)\b', Keyword, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- 'switch': [
- include('spaces'),
- default(('#pop', 'switch-body', 'bracket-open', 'expr')),
- ],
-
- 'switch-body': [
- include('spaces'),
- (r'(?:case|default)\b', Keyword, ('case-block', 'case')),
- (r'\}', Punctuation, '#pop'),
- ],
-
- 'case': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- default(('#pop', 'case-sep', 'case-guard', 'expr')),
- ],
-
- 'case-sep': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'case')),
- ],
-
- 'case-guard': [
- include('spaces'),
- (r'(?:if)\b', Keyword, ('#pop', 'parenthesis', 'parenthesis-open')),
- default('#pop'),
- ],
-
- # optionally multiple exprs under a case
- 'case-block': [
- include('spaces'),
- (r'(?!(?:case|default)\b|\})', Keyword, 'expr-statement'),
- default('#pop'),
- ],
-
- 'new': [
- include('spaces'),
- default(('#pop', 'call', 'parenthesis-open', 'type')),
- ],
-
- 'array-decl': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- default(('#pop', 'array-decl-sep', 'expr')),
- ],
-
- 'array-decl-sep': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'array-decl')),
- ],
-
- 'array-access': [
- include('spaces'),
- default(('#pop', 'array-access-close', 'expr')),
- ],
-
- 'array-access-close': [
- include('spaces'),
- (r'\]', Punctuation, '#pop'),
- ],
-
- 'comma': [
- include('spaces'),
- (r',', Punctuation, '#pop'),
- ],
-
- 'colon': [
- include('spaces'),
- (r':', Punctuation, '#pop'),
- ],
-
- 'semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- ],
-
- 'optional-semicolon': [
- include('spaces'),
- (r';', Punctuation, '#pop'),
- default('#pop'),
- ],
-
- # identifier that CAN be a Haxe keyword
- 'ident': [
- include('spaces'),
- (ident, Name, '#pop'),
- ],
-
- 'dollar': [
- include('spaces'),
- (r'\{', Punctuation, ('#pop', 'expr-chain', 'bracket-close', 'expr')),
- default(('#pop', 'expr-chain')),
- ],
-
- 'type-name': [
- include('spaces'),
- (typeid, Name, '#pop'),
- ],
-
- 'type-full-name': [
- include('spaces'),
- (r'\.', Punctuation, 'ident'),
- default('#pop'),
- ],
-
- 'type': [
- include('spaces'),
- (r'\?', Punctuation),
- (ident, Name, ('#pop', 'type-check', 'type-full-name')),
- (r'\{', Punctuation, ('#pop', 'type-check', 'type-struct')),
- (r'\(', Punctuation, ('#pop', 'type-check', 'type-parenthesis')),
- ],
-
- 'type-parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'type')),
- ],
-
- 'type-check': [
- include('spaces'),
- (r'->', Punctuation, ('#pop', 'type')),
- (r'<(?!=)', Punctuation, 'type-param'),
- default('#pop'),
- ],
-
- 'type-struct': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r'\?', Punctuation),
- (r'>', Punctuation, ('comma', 'type')),
- (ident_no_keyword, Name, ('#pop', 'type-struct-sep', 'type', 'colon')),
- include('class-body'),
- ],
-
- 'type-struct-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-struct')),
- ],
-
- # type-param can be a normal type or a constant literal...
- 'type-param-type': [
- # Float
- (r'\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]*[eE][\+\-]?[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.[0-9]+', Number.Float, '#pop'),
- (r'[0-9]+\.(?!' + ident + '|\.\.)', Number.Float, '#pop'),
-
- # Int
- (r'0x[0-9a-fA-F]+', Number.Hex, '#pop'),
- (r'[0-9]+', Number.Integer, '#pop'),
-
- # String
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
-
- # EReg
- (r'~/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex, '#pop'),
-
- # Array
- (r'\[', Operator, ('#pop', 'array-decl')),
-
- include('type'),
- ],
-
- # type-param part of a type
- # i.e. the <A,B> part in Map<A,B>
- 'type-param': [
- include('spaces'),
- default(('#pop', 'type-param-sep', 'type-param-type')),
- ],
-
- 'type-param-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param')),
- ],
-
- # optional type-param that may include constraint
- # ie. <T:Constraint, T2:(ConstraintA,ConstraintB)>
- 'type-param-constraint': [
- include('spaces'),
- (r'<(?!=)', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- default('#pop'),
- ],
-
- 'type-param-constraint-sep': [
- include('spaces'),
- (r'>', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'type-param-constraint-sep',
- 'type-param-constraint-flag', 'type-name')),
- ],
-
- # the optional constraint inside type-param
- 'type-param-constraint-flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type-param-constraint-flag-type')),
- default('#pop'),
- ],
-
- 'type-param-constraint-flag-type': [
- include('spaces'),
- (r'\(', Punctuation, ('#pop', 'type-param-constraint-flag-type-sep',
- 'type')),
- default(('#pop', 'type')),
- ],
-
- 'type-param-constraint-flag-type-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, 'type'),
- ],
-
- # a parenthesized expr that contains exactly one expr
- 'parenthesis': [
- include('spaces'),
- default(('#pop', 'parenthesis-close', 'flag', 'expr')),
- ],
-
- 'parenthesis-open': [
- include('spaces'),
- (r'\(', Punctuation, '#pop'),
- ],
-
- 'parenthesis-close': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- ],
-
- 'var': [
- include('spaces'),
- (ident_no_keyword, Text, ('#pop', 'var-sep', 'assign', 'flag', 'prop-get-set')),
- ],
-
- # optionally, more var declarations
- 'var-sep': [
- include('spaces'),
- (r',', Punctuation, ('#pop', 'var')),
- default('#pop'),
- ],
-
- # optional assignment
- 'assign': [
- include('spaces'),
- (r'=', Operator, ('#pop', 'expr')),
- default('#pop'),
- ],
-
- # optional type flag
- 'flag': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'type')),
- default('#pop'),
- ],
-
- # colon as part of a ternary operator (?:)
- 'ternary': [
- include('spaces'),
- (r':', Operator, '#pop'),
- ],
-
- # function call
- 'call': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- default(('#pop', 'call-sep', 'expr')),
- ],
-
- # after a call param
- 'call-sep': [
- include('spaces'),
- (r'\)', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'call')),
- ],
-
- # a bracket can open either a block or an object
- 'bracket': [
- include('spaces'),
- (r'(?!(?:\$\s*[a-z]\b|\$(?!'+ident+')))' + ident_no_keyword, Name,
- ('#pop', 'bracket-check')),
- (r"'", String.Single, ('#pop', 'bracket-check', 'string-single')),
- (r'"', String.Double, ('#pop', 'bracket-check', 'string-double')),
- default(('#pop', 'block')),
- ],
-
- 'bracket-check': [
- include('spaces'),
- (r':', Punctuation, ('#pop', 'object-sep', 'expr')),  # is an object
- default(('#pop', 'block', 'optional-semicolon', 'expr-chain')),  # is a block
- ],
-
- # code block
- 'block': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default('expr-statement'),
- ],
-
- # object: key-value pairs
- 'object': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- default(('#pop', 'object-sep', 'expr', 'colon', 'ident-or-string'))
- ],
-
- # a key of an object
- 'ident-or-string': [
- include('spaces'),
- (ident_no_keyword, Name, '#pop'),
- (r"'", String.Single, ('#pop', 'string-single')),
- (r'"', String.Double, ('#pop', 'string-double')),
- ],
-
- # after a key-value pair in object
- 'object-sep': [
- include('spaces'),
- (r'\}', Punctuation, '#pop'),
- (r',', Punctuation, ('#pop', 'object')),
- ],
-
- }
-
- def analyse_text(text):
- if re.match(r'\w+\s*:\s*\w', text): return 0.3
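- # analyse_text() is the hook used by pygments.lexers.guess_lexer();
- # returning 0.3 is only a weak hint that "name: value"-looking input
- # may be Haxe.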
-
-
-def _indentation(lexer, match, ctx):
- indentation = match.group(0)
- yield match.start(), Text, indentation
- ctx.last_indentation = indentation
- ctx.pos = match.end()
-
- if hasattr(ctx, 'block_state') and ctx.block_state and \
- indentation.startswith(ctx.block_indentation) and \
- indentation != ctx.block_indentation:
- ctx.stack.append(ctx.block_state)
- else:
- ctx.block_state = None
- ctx.block_indentation = None
- ctx.stack.append('content')
-
-def _starts_block(token, state):
- def callback(lexer, match, ctx):
- yield match.start(), token, match.group(0)
-
- if hasattr(ctx, 'last_indentation'):
- ctx.block_indentation = ctx.last_indentation
- else:
- ctx.block_indentation = ''
-
- ctx.block_state = state
- ctx.pos = match.end()
-
- return callback
-
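- # Rough sketch of how the two helpers above cooperate (illustrative
- # comment only): _starts_block() records the indentation of the line that
- # opened a block together with the state to use for its body, and
- # _indentation() re-enters that saved state for every following line that
- # is indented more deeply, e.g. in Haml:
- #
- #   :javascript          <- _starts_block(Name.Decorator, 'filter-block')
- #     alert('hi');       <- deeper indent, so lexed in 'filter-block'
- #   %p back to content   <- indentation resets, block state is cleared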
-
-class HamlLexer(ExtendedRegexLexer):
- """
- For Haml markup.
-
- .. versionadded:: 1.3
- """
-
- name = 'Haml'
- aliases = ['haml']
- filenames = ['*.haml']
- mimetypes = ['text/x-haml']
-
- flags = re.IGNORECASE
- # Haml can include " |\n" anywhere,
- # which is ignored and used to wrap long lines.
- # To accommodate this, use this custom faux dot instead.
- _dot = r'(?: \|\n(?=.* \|)|.)'
-
- # In certain places, a comma at the end of the line
- # allows line wrapping as well.
- _comma_dot = r'(?:,\s*\n|' + _dot + ')'
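- # For example (illustrative only), rules built with _dot / _comma_dot keep
- # matching across a wrapped Haml line such as:
- #
- #   %p= some_helper("a fairly long argument", |
- #     "continued on the next line")           |
- #
- # because " |\n" (when the following line is also wrapped) counts as just
- # another character of the logical line.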
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'haml-comment-block'), '#pop'),
- (r'(-)(' + _comma_dot + r'*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)),
- (r'\[' + _dot + '*?\]', using(RubyLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'haml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
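- # Minimal usage sketch for the lexer above, using the standard pygments
- # entry points (highlight and HtmlFormatter are provided elsewhere in
- # pygments, not in this module):
- #
- #   from pygments import highlight
- #   from pygments.lexers import HamlLexer
- #   from pygments.formatters import HtmlFormatter
- #   print(highlight("%p Hello", HamlLexer(), HtmlFormatter()))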
-
-common_sass_tokens = {
- 'value': [
- (r'[ \t]+', Text),
- (r'[!$][\w-]+', Name.Variable),
- (r'url\(', String.Other, 'string-url'),
- (r'[a-z_-][\w-]*(?=\()', Name.Function),
- (r'(azimuth|background-attachment|background-color|'
- r'background-image|background-position|background-repeat|'
- r'background|border-bottom-color|border-bottom-style|'
- r'border-bottom-width|border-left-color|border-left-style|'
- r'border-left-width|border-right|border-right-color|'
- r'border-right-style|border-right-width|border-top-color|'
- r'border-top-style|border-top-width|border-bottom|'
- r'border-collapse|border-left|border-width|border-color|'
- r'border-spacing|border-style|border-top|border|caption-side|'
- r'clear|clip|color|content|counter-increment|counter-reset|'
- r'cue-after|cue-before|cue|cursor|direction|display|'
- r'elevation|empty-cells|float|font-family|font-size|'
- r'font-size-adjust|font-stretch|font-style|font-variant|'
- r'font-weight|font|height|letter-spacing|line-height|'
- r'list-style-type|list-style-image|list-style-position|'
- r'list-style|margin-bottom|margin-left|margin-right|'
- r'margin-top|margin|marker-offset|marks|max-height|max-width|'
- r'min-height|min-width|opacity|orphans|outline|outline-color|'
- r'outline-style|outline-width|overflow|padding-bottom|'
- r'padding-left|padding-right|padding-top|padding|page|'
- r'page-break-after|page-break-before|page-break-inside|'
- r'pause-after|pause-before|pause|pitch|pitch-range|'
- r'play-during|position|quotes|richness|right|size|'
- r'speak-header|speak-numeral|speak-punctuation|speak|'
- r'speech-rate|stress|table-layout|text-align|text-decoration|'
- r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
- r'vertical-align|visibility|voice-family|volume|white-space|'
- r'widows|width|word-spacing|z-index|bottom|left|'
- r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
- r'behind|below|bidi-override|blink|block|bold|bolder|both|'
- r'capitalize|center-left|center-right|center|circle|'
- r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
- r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
- r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
- r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
- r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
- r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
- r'inherit|inline-table|inline|inset|inside|invert|italic|'
- r'justify|katakana-iroha|katakana|landscape|larger|large|'
- r'left-side|leftwards|level|lighter|line-through|list-item|'
- r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
- r'lower|low|medium|message-box|middle|mix|monospace|'
- r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
- r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
- r'open-quote|outset|outside|overline|pointer|portrait|px|'
- r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
- r'rightwards|s-resize|sans-serif|scroll|se-resize|'
- r'semi-condensed|semi-expanded|separate|serif|show|silent|'
- r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
- r'spell-out|square|static|status-bar|super|sw-resize|'
- r'table-caption|table-cell|table-column|table-column-group|'
- r'table-footer-group|table-header-group|table-row|'
- r'table-row-group|text|text-bottom|text-top|thick|thin|'
- r'transparent|ultra-condensed|ultra-expanded|underline|'
- r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
- r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
- r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
- (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
- r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
- r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
- r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
- r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
- r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
- r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
- r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
- r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
- r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
- r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
- r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
- r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
- r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
- r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
- r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
- r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
- r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
- r'plum|darkgoldenrod|sandybrown|magenta|tan|'
- r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
- r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
- r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
- r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
- r'lightyellow|lavenderblush|linen|mediumaquamarine|'
- r'blueviolet|peachpuff)\b', Name.Entity),
- (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
- r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
- (r'\!(important|default)', Name.Exception),
- (r'(true|false)', Name.Pseudo),
- (r'(and|or|not)', Operator.Word),
- (r'/\*', Comment.Multiline, 'inline-comment'),
- (r'//[^\n]*', Comment.Single),
- (r'\#[a-z0-9]{1,6}', Number.Hex),
- (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
- (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
- (r'[\[\]()]+', Punctuation),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- (r'[a-z_-][\w-]*', Name),
- ],
-
- 'interpolation': [
- (r'\}', String.Interpol, '#pop'),
- include('value'),
- ],
-
- 'selector': [
- (r'[ \t]+', Text),
- (r'\:', Name.Decorator, 'pseudo-class'),
- (r'\.', Name.Class, 'class'),
- (r'\#', Name.Namespace, 'id'),
- (r'[\w-]+', Name.Tag),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'&', Keyword),
- (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
- (r'"', String.Double, 'string-double'),
- (r"'", String.Single, 'string-single'),
- ],
-
- 'string-double': [
- (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'"', String.Double, '#pop'),
- ],
-
- 'string-single': [
- (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"'", String.Double, '#pop'),
- ],
-
- 'string-url': [
- (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
- (r'#\{', String.Interpol, 'interpolation'),
- (r'\)', String.Other, '#pop'),
- ],
-
- 'pseudo-class': [
- (r'[\w-]+', Name.Decorator),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'class': [
- (r'[\w-]+', Name.Class),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'id': [
- (r'[\w-]+', Name.Namespace),
- (r'#\{', String.Interpol, 'interpolation'),
- default('#pop'),
- ],
-
- 'for': [
- (r'(from|to|through)', Operator.Word),
- include('value'),
- ],
-}
-
-class SassLexer(ExtendedRegexLexer):
- """
- For Sass stylesheets.
-
- .. versionadded:: 1.3
- """
-
- name = 'Sass'
- aliases = ['sass']
- filenames = ['*.sass']
- mimetypes = ['text/x-sass']
-
- flags = re.IGNORECASE
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'content': [
- (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
- 'root'),
- (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
- 'root'),
- (r'@import', Keyword, 'import'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'=[\w-]+', Name.Function, 'value'),
- (r'\+[\w-]+', Name.Decorator, 'value'),
- (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
- bygroups(Name.Variable, Operator), 'value'),
- (r':', Name.Attribute, 'old-style-attr'),
- (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
- default('selector'),
- ],
-
- 'single-comment': [
- (r'.+', Comment.Single),
- (r'\n', Text, 'root'),
- ],
-
- 'multi-comment': [
- (r'.+', Comment.Multiline),
- (r'\n', Text, 'root'),
- ],
-
- 'import': [
- (r'[ \t]+', Text),
- (r'\S+', String),
- (r'\n', Text, 'root'),
- ],
-
- 'old-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*=', Operator, 'value'),
- default('value'),
- ],
-
- 'new-style-attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*[=:]', Operator, 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in iteritems(common_sass_tokens):
- tokens[group] = copy.copy(common)
- tokens['value'].append((r'\n', Text, 'root'))
- tokens['selector'].append((r'\n', Text, 'root'))
-
-
-class ScssLexer(RegexLexer):
- """
- For SCSS stylesheets.
- """
-
- name = 'SCSS'
- aliases = ['scss']
- filenames = ['*.scss']
- mimetypes = ['text/x-scss']
-
- flags = re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'@import', Keyword, 'value'),
- (r'@for', Keyword, 'for'),
- (r'@(debug|warn|if|while)', Keyword, 'value'),
- (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
- (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator), 'value'),
- (r'@extend', Keyword, 'selector'),
- (r'@[\w-]+', Keyword, 'selector'),
- (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator), 'value'),
- (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
- (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
- default('selector'),
- ],
-
- 'attr': [
- (r'[^\s:="\[]+', Name.Attribute),
- (r'#{', String.Interpol, 'interpolation'),
- (r'[ \t]*:', Operator, 'value'),
- ],
-
- 'inline-comment': [
- (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
- (r'#\{', String.Interpol, 'interpolation'),
- (r"\*/", Comment, '#pop'),
- ],
- }
- for group, common in iteritems(common_sass_tokens):
- tokens[group] = copy.copy(common)
- tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
- tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
-
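- # Both stylesheet lexers above share common_sass_tokens and differ only in
- # how a value or selector ends: indentation-based Sass returns to 'root' on
- # a newline, while brace-based SCSS treats newlines as plain text and
- # returns to 'root' on ';', '{' or '}' (see the two copy loops above).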
-
-class CoffeeScriptLexer(RegexLexer):
- """
- For `CoffeeScript`_ source code.
-
- .. _CoffeeScript: http://coffeescript.org
-
- .. versionadded:: 1.3
- """
-
- name = 'CoffeeScript'
- aliases = ['coffee-script', 'coffeescript', 'coffee']
- filenames = ['*.coffee']
- mimetypes = ['text/coffeescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'multilineregex': [
- (r'[^/#]+', String.Regex),
- (r'///([gim]+\b|\B)', String.Regex, '#pop'),
- (r'#{', String.Interpol, 'interpoling_string'),
- (r'[/#]', String.Regex),
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'///', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- default('#pop'),
- ],
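- # 'slashstartsregex' above decides whether a following '/' opens a regex
- # literal: it consumes /.../ or ///.../// as String.Regex (a '/' followed
- # by a space is excluded by the (?! ) lookahead) and otherwise pops back,
- # leaving '/' to be lexed as the division operator; so  a / b  is division
- # while  x = /b/  highlights a regex.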
- 'root': [
- # this next expr leads to infinite loops root -> slashstartsregex
- #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
- r'\|\||\\(?=\n)|'
- r'(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'(?:\([^()]*\))?\s*[=-]>', Name.Function),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![\.\$])(for|own|in|of|while|until|'
- r'loop|break|return|continue|'
- r'switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
- (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w\.:\$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w\.:\$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w\$]*', Name.Other, 'slashstartsregex'),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all CoffeeScript strings are multi-line.
- # hash marks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class KalLexer(RegexLexer):
- """
- For `Kal`_ source code.
-
- .. _Kal: http://rzimmerman.github.io/kal
-
- .. versionadded:: 2.0
- """
-
- name = 'Kal'
- aliases = ['kal']
- filenames = ['*.kal']
- mimetypes = ['text/kal', 'application/kal']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'###[^#].*?###', Comment.Multiline),
- (r'#(?!##[^#]).*?\n', Comment.Single),
- ],
- 'functiondef': [
- (r'[$a-zA-Z_][\w\$]*\s*', Name.Function, '#pop'),
- include('commentsandwhitespace'),
- ],
- 'classdef': [
- (r'\binherits\s+from\b', Keyword),
- (r'[$a-zA-Z_][\w\$]*\s*\n', Name.Class, '#pop'),
- (r'[$a-zA-Z_][\w\$]*\s*', Name.Class),
- include('commentsandwhitespace'),
- ],
- 'listcomprehension': [
- (r'\]', Punctuation, '#pop'),
- (r'\b(property|value)\b', Keyword),
- include('root'),
- ],
- 'waitfor': [
- (r'\n', Punctuation, '#pop'),
- (r'\bfrom\b', Keyword),
- include('root'),
- ],
- 'root': [
- include('commentsandwhitespace'),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex),
- (r'\?|:|_(?=\n)|==?|!=|-(?!>)|[<>+*/-]=?',
- Operator),
- (r'\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|'
- r'\bbut\b|\bbitwise\b|\bmod\b|\^|\bxor\b|\bexists\b|\bdoesnt\s+exist\b',
- Operator.Word),
- (r'(?:\([^()]+\))?\s*>', Name.Function),
- (r'[{(]', Punctuation),
- (r'\[', Punctuation, 'listcomprehension'),
- (r'[})\]\.\,]', Punctuation),
- (r'\b(function|method|task)\b', Keyword.Declaration, 'functiondef'),
- (r'\bclass\b', Keyword.Declaration, 'classdef'),
- (r'\b(safe\s+)?wait\s+for\b', Keyword, 'waitfor'),
- (r'\b(me|this)(\.[$a-zA-Z_][\w\.\$]*)?\b', Name.Variable.Instance),
- (r'(?<![\.\$])(for(\s+(parallel|series))?|in|of|while|until|'
- r'break|return|continue|'
- r'when|if|unless|else|otherwise|except\s+when|'
- r'throw|raise|fail\s+with|try|catch|finally|new|delete|'
- r'typeof|instanceof|super|run\s+in\s+parallel|'
- r'inherits\s+from)\b', Keyword),
- (r'(?<![\.\$])(true|false|yes|no|on|off|null|nothing|none|'
- r'NaN|Infinity|undefined)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window|'
- r'print)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w\.\$]*\s*(:|[\+\-\*\/]?\=)?\b', Name.Variable),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all Kal strings are multi-line.
- # hashmarks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#{', String.Interpol, "interpoling_string"),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#{', String.Interpol, "interpoling_string"),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class LiveScriptLexer(RegexLexer):
- """
- For `LiveScript`_ source code.
-
- .. _LiveScript: http://gkz.github.com/LiveScript/
-
- .. versionadded:: 1.6
- """
-
- name = 'LiveScript'
- aliases = ['live-script', 'livescript']
- filenames = ['*.ls']
- mimetypes = ['text/livescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'#.*?\n', Comment.Single),
- ],
- 'multilineregex': [
- include('commentsandwhitespace'),
- (r'//([gim]+\b|\B)', String.Regex, '#pop'),
- (r'/', String.Regex),
- (r'[^/#]+', String.Regex)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'//', String.Regex, ('#pop', 'multilineregex')),
- (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- default('#pop'),
- ],
- 'root': [
- # this next expr leads to infinite loops root -> slashstartsregex
- #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
- r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
- (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
- r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
- r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
- r'[+*`%&\|\^/])=?',
- Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
- r'return|continue|switch|when|then|if|unless|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
- r'extends|this|class|by|const|var|to|til)\b', Keyword,
- 'slashstartsregex'),
- (r'(?<![\.\$])(true|false|yes|no|on|off|'
- r'null|NaN|Infinity|undefined|void)\b',
- Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
- Name.Builtin),
- (r'[$a-zA-Z_][\w\.\-:\$]*\s*[:=]\s', Name.Variable,
- 'slashstartsregex'),
- (r'@[$a-zA-Z_][\w\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
- 'slashstartsregex'),
- (r'@', Name.Other, 'slashstartsregex'),
- (r'@?[$a-zA-Z_][\w\-]*', Name.Other, 'slashstartsregex'),
- (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
- (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
- ('"""', String, 'tdqs'),
- ("'''", String, 'tsqs'),
- ('"', String, 'dqs'),
- ("'", String, 'sqs'),
- (r'\\\S+', String),
- (r'<\[.*?\]>', String),
- ],
- 'strings': [
- (r'[^#\\\'"]+', String),
- # note that all LiveScript strings are multi-line.
- # hash marks, quotes and backslashes must be parsed one at a time
- ],
- 'interpoling_string' : [
- (r'}', String.Interpol, "#pop"),
- include('root')
- ],
- 'dqs': [
- (r'"', String, '#pop'),
- (r'\\.|\'', String), # double-quoted strings don't need ' escapes
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings')
- ],
- 'sqs': [
- (r"'", String, '#pop'),
- (r'#|\\.|"', String), # single quoted strings don't need " escapses
- include('strings')
- ],
- 'tdqs': [
- (r'"""', String, '#pop'),
- (r'\\.|\'|"', String), # no need to escape quotes in triple-string
- (r'#{', String.Interpol, "interpoling_string"),
- (r'#', String),
- include('strings'),
- ],
- 'tsqs': [
- (r"'''", String, '#pop'),
- (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
- include('strings')
- ],
- }
-
-
-class DuelLexer(RegexLexer):
- """
- Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
- See http://duelengine.org/.
- See http://jsonml.org/jbst/.
-
- .. versionadded:: 1.4
- """
-
- name = 'Duel'
- aliases = ['duel', 'jbst', 'jsonml+bst']
- filenames = ['*.duel','*.jbst']
- mimetypes = ['text/x-duel','text/x-jbst']
-
- flags = re.DOTALL
-
- tokens = {
- 'root': [
- (r'(<%[@=#!:]?)(.*?)(%>)',
- bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
- (r'(<%\$)(.*?)(:)(.*?)(%>)',
- bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
- (r'(<%--)(.*?)(--%>)',
- bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
- (r'(<script.*?>)(.*?)(</script>)',
- bygroups(using(HtmlLexer),
- using(JavascriptLexer), using(HtmlLexer))),
- (r'(.+?)(?=<)', using(HtmlLexer)),
- (r'.+', using(HtmlLexer)),
- ],
- }
-
-
-class ScamlLexer(ExtendedRegexLexer):
- """
- For `Scaml markup <http://scalate.fusesource.org/>`_. Scaml is Haml for Scala.
-
- .. versionadded:: 1.4
- """
-
- name = 'Scaml'
- aliases = ['scaml']
- filenames = ['*.scaml']
- mimetypes = ['text/x-scaml']
-
- flags = re.IGNORECASE
- # Scaml does not yet support the " |\n" notation to
- # wrap long lines. Once it does, use the custom faux
- # dot instead.
- # _dot = r'(?: \|\n(?=.* \|)|.)'
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'%[\w:-]+', Name.Tag, 'tag'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class JadeLexer(ExtendedRegexLexer):
- """
- For Jade markup.
- Jade is a variant of Scaml, see:
- http://scalate.fusesource.org/documentation/scaml-reference.html
-
- .. versionadded:: 1.4
- """
-
- name = 'Jade'
- aliases = ['jade']
- filenames = ['*.jade']
- mimetypes = ['text/x-jade']
-
- flags = re.IGNORECASE
- _dot = r'.'
-
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'[&!]?==', Punctuation, 'plain'),
- (r'([&!]?[=~])(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)), 'root'),
- default('plain'),
- ],
-
- 'content': [
- include('css'),
- (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
- (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
- bygroups(Comment, Comment.Special, Comment),
- '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
- '#pop'),
- (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
- 'scaml-comment-block'), '#pop'),
- (r'(-@\s*)(import)?(' + _dot + r'*\n)',
- bygroups(Punctuation, Keyword, using(ScalaLexer)),
- '#pop'),
- (r'(-)(' + _dot + r'*\n)',
- bygroups(Punctuation, using(ScalaLexer)),
- '#pop'),
- (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
- '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- (r'\|', Text, 'eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
- (r'\[' + _dot + '*?\]', using(ScalaLexer)),
- (r'\(', Text, 'html-attributes'),
- (r'/[ \t]*\n', Punctuation, '#pop:2'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'\s+', Text),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'\)', Text, '#pop'),
- ],
-
- 'html-attribute-value': [
- (r'[ \t]+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r'@\w+', Name.Variable.Instance, '#pop'),
- (r'\$\w+', Name.Variable.Global, '#pop'),
- (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
- (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
- ],
-
- 'html-comment-block': [
- (_dot + '+', Comment),
- (r'\n', Text, 'root'),
- ],
-
- 'scaml-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
-
- 'filter-block': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
- (r'(#\{)(' + _dot + '*?)(\})',
- bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
- }
-
-
-class XQueryLexer(ExtendedRegexLexer):
- """
- An XQuery lexer, parsing a stream and outputting the tokens needed to
- highlight XQuery code.
-
- .. versionadded:: 1.4
- """
- name = 'XQuery'
- aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
- filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
- mimetypes = ['text/xquery', 'application/xquery']
-
- xquery_parse_state = []
-
- # FIX UNICODE LATER
- #ncnamestartchar = (
- # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
- # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
- # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
- # ur"[\u10000-\uEFFFF]"
- #)
- ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
- # FIX UNICODE LATER
- #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
- # ur"[\u203F-\u2040]")
- ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
- ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
- pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
- pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
- pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
- prefixedname = "%s:%s" % (ncname, ncname)
- unprefixedname = ncname
- qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
-
- entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
- charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
-
- stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
- stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
-
- # FIX UNICODE LATER
- #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
- #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
- # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
- #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
- # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
- aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
-
-
- # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
- # aposattrcontentchar
- #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
-
- flags = re.DOTALL | re.MULTILINE | re.UNICODE
-
- def punctuation_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def operator_root_callback(lexer, match, ctx):
- yield match.start(), Operator, match.group(1)
- # transition to root always - don't pop off stack
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def popstate_tag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append(lexer.xquery_parse_state.pop())
- ctx.pos = match.end()
-
- def popstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- next_state = lexer.xquery_parse_state.pop()
- if next_state == 'occurrenceindicator':
- if re.match("[?*+]+", match.group(2)):
- yield match.start(), Punctuation, match.group(2)
- ctx.stack.append('operator')
- ctx.pos = match.end()
- else:
- ctx.stack.append('operator')
- ctx.pos = match.end(1)
- else:
- ctx.stack.append(next_state)
- ctx.pos = match.end(1)
-
- def popstate_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- # if we have run out of our state stack, pop whatever is on the pygments
- # state stack
- if len(lexer.xquery_parse_state) == 0:
- ctx.stack.pop()
- elif len(ctx.stack) > 1:
- ctx.stack.append(lexer.xquery_parse_state.pop())
- else:
- # not sure this is ever needed, but as a fallback, default back to root
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_element_content_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('element_content')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.pos = match.end()
-
- def pushstate_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append(ctx.state.pop)
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_order_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_root_validate_withmode(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Keyword, match.group(3)
- ctx.stack = ['root']
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('processing_instruction')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_cdata_section_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('cdata_section')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('element_content')
- ctx.pos = match.end()
-
- def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
- yield match.start(), String.Doc, match.group(1)
- ctx.stack.append('xml_comment')
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
- def pushstate_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('kindtest')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtestforpi')
- ctx.pos = match.end()
-
- def pushstate_operator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('occurrenceindicator')
- ctx.stack.append('kindtest')
- ctx.pos = match.end()
-
- def pushstate_operator_starttag_callback(lexer, match, ctx):
- yield match.start(), Name.Tag, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack.append('start_tag')
- ctx.pos = match.end()
-
- def pushstate_operator_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']#.append('root')
- ctx.pos = match.end()
-
- def pushstate_operator_root_construct_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.stack = ['root']
- ctx.pos = match.end()
-
- def pushstate_root_callback(lexer, match, ctx):
- yield match.start(), Punctuation, match.group(1)
- cur_state = ctx.stack.pop()
- lexer.xquery_parse_state.append(cur_state)
- ctx.stack = ['root']#.append('root')
- ctx.pos = match.end()
-
- def pushstate_operator_attribute_callback(lexer, match, ctx):
- yield match.start(), Name.Attribute, match.group(1)
- ctx.stack.append('operator')
- ctx.pos = match.end()
-
- def pushstate_operator_callback(lexer, match, ctx):
- yield match.start(), Keyword, match.group(1)
- yield match.start(), Text, match.group(2)
- yield match.start(), Punctuation, match.group(3)
- lexer.xquery_parse_state.append('operator')
- ctx.pos = match.end()
-
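- # Note on the callbacks above: xquery_parse_state is a second, lexer-level
- # stack kept alongside the pygments context stack. The pushstate_*
- # callbacks record which state to resume (e.g. 'operator' or
- # 'element_content'), usually before jumping to 'root', and
- # popstate_callback restores it when the matching '}' is reached, so a
- # nested enclosed expression such as  <a>{ if ($x) then 1 else 2 }</a>
- # drops back into the right construction state.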
- tokens = {
- 'comment': [
- # xquery comments
- (r'(:\))', Comment, '#pop'),
- (r'(\(:)', Comment, '#push'),
- (r'[^:)]', Comment),
- (r'([^:)]|:|\))', Comment),
- ],
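- # the '#push'/'#pop' pair above lets (: ... :) comments nest, e.g.
- #   (: outer (: inner :) still part of the outer comment :)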
- 'whitespace': [
- (r'\s+', Text),
- ],
- 'operator': [
- include('whitespace'),
- (r'(\})', popstate_callback),
- (r'\(:', Comment, 'comment'),
-
- (r'(\{)', pushstate_root_callback),
- (r'then|else|external|at|div|except', Keyword, 'root'),
- (r'order by', Keyword, 'root'),
- (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
- (r'and|or', Operator.Word, 'root'),
- (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
- Operator.Word, 'root'),
- (r'return|satisfies|to|union|where|preserve\s+strip',
- Keyword, 'root'),
- (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)',
- operator_root_callback),
- (r'(::|;|\[|//|/|,)',
- punctuation_root_callback),
- (r'(castable|cast)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(instance)(\s+)(of)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(treat)(\s+)(as)\b',
- bygroups(Keyword, Text, Keyword), 'itemtype'),
- (r'(case|as)\b', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)',
- bygroups(Punctuation, Text, Keyword), 'itemtype'),
- (r'\$', Name.Variable, 'varname'),
- (r'(for|let)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- #(r'\)|\?|\]', Punctuation, '#push'),
- (r'\)|\?|\]', Punctuation),
- (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
- (r'ascending|descending|default', Keyword, '#push'),
- (r'external', Keyword),
- (r'collation', Keyword, 'uritooperator'),
- # finally catch all string literals and stay in operator state
- (stringdouble, String.Double),
- (stringsingle, String.Single),
-
- (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
- ],
- 'uritooperator': [
- (stringdouble, String.Double, '#pop'),
- (stringsingle, String.Single, '#pop'),
- ],
- 'namespacedecl': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
- (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r',', Punctuation),
- (r'=', Operator),
- (r';', Punctuation, 'root'),
- (ncname, Name.Namespace),
- ],
- 'namespacekeyword': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double, 'namespacedecl'),
- (stringsingle, String.Single, 'namespacedecl'),
- (r'inherit|no-inherit', Keyword, 'root'),
- (r'namespace', Keyword, 'namespacedecl'),
- (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
- (r'preserve|no-preserve', Keyword),
- (r',', Punctuation),
- ],
- 'varname': [
- (r'\(:', Comment, 'comment'),
- (qname, Name.Variable, 'operator'),
- ],
- 'singletype': [
- (r'\(:', Comment, 'comment'),
- (ncname + r'(:\*)', Name.Variable, 'operator'),
- (qname, Name.Variable, 'operator'),
- ],
- 'itemtype': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\$', Punctuation, 'varname'),
- (r'(void)(\s*)(\()(\s*)(\))',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
- (r'(element|attribute|schema-element|schema-attribute|comment|text|'
- r'node|binary|document-node|empty-sequence)(\s*)(\()',
- pushstate_occurrenceindicator_kindtest_callback),
- # MarkLogic-specific type?
- (r'(processing-instruction)(\s*)(\()',
- bygroups(Keyword, Text, Punctuation),
- ('occurrenceindicator', 'kindtestforpi')),
- (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
- bygroups(Keyword, Text, Punctuation, Text, Punctuation),
- 'occurrenceindicator'),
- (r'\(\#', Punctuation, 'pragma'),
- (r';', Punctuation, '#pop'),
- (r'then|else', Keyword, '#pop'),
- (r'(at)(\s+)(' + stringdouble + ')',
- bygroups(Keyword, Text, String.Double), 'namespacedecl'),
- (r'(at)(\s+)(' + stringsingle + ')',
- bygroups(Keyword, Text, String.Single), 'namespacedecl'),
- (r'except|intersect|in|is|return|satisfies|to|union|where',
- Keyword, 'root'),
- (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
- (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
- (r'external|at', Keyword, 'root'),
- (r'(stable)(\s+)(order)(\s+)(by)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
- (r'(castable|cast)(\s+)(as)',
- bygroups(Keyword, Text, Keyword), 'singletype'),
- (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
- (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
- (r'case|as', Keyword, 'itemtype'),
- (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
- (ncname + r':\*', Keyword.Type, 'operator'),
- (qname, Keyword.Type, 'occurrenceindicator'),
- ],
- 'kindtest': [
- (r'\(:', Comment, 'comment'),
- (r'{', Punctuation, 'root'),
- (r'(\))([*+?]?)', popstate_kindtest_callback),
- (r'\*', Name, 'closekindtest'),
- (qname, Name, 'closekindtest'),
- (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
- ],
- 'kindtestforpi': [
- (r'\(:', Comment, 'comment'),
- (r'\)', Punctuation, '#pop'),
- (ncname, Name.Variable),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- ],
- 'closekindtest': [
- (r'\(:', Comment, 'comment'),
- (r'(\))', popstate_callback),
- (r',', Punctuation),
- (r'(\{)', pushstate_operator_root_callback),
- (r'\?', Punctuation),
- ],
- 'xml_comment': [
- (r'(-->)', popstate_xmlcomment_callback),
- (r'[^-]{1,2}', Literal),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'processing_instruction': [
- (r'\s+', Text, 'processing_instruction_content'),
- (r'\?>', String.Doc, '#pop'),
- (pitarget, Name),
- ],
- 'processing_instruction_content': [
- (r'\?>', String.Doc, '#pop'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'cdata_section': [
- (r']]>', String.Doc, '#pop'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- ],
- 'start_tag': [
- include('whitespace'),
- (r'(/>)', popstate_tag_callback),
- (r'>', Name.Tag, 'element_content'),
- (r'"', Punctuation, 'quot_attribute_content'),
- (r"'", Punctuation, 'apos_attribute_content'),
- (r'=', Operator),
- (qname, Name.Tag),
- ],
- 'quot_attribute_content': [
- (r'"', Punctuation, 'start_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'""', Name.Attribute),
- (quotattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'apos_attribute_content': [
- (r"'", Punctuation, 'start_tag'),
- (r'\{', Punctuation, 'root'),
- (r"''", Name.Attribute),
- (aposattrcontentchar, Name.Attribute),
- (entityref, Name.Attribute),
- (charref, Name.Attribute),
- (r'\{\{|\}\}', Name.Attribute),
- ],
- 'element_content': [
- (r'</', Name.Tag, 'end_tag'),
- (r'(\{)', pushstate_root_callback),
- (r'(<!--)', pushstate_element_content_xmlcomment_callback),
- (r'(<\?)', pushstate_element_content_processing_instruction_callback),
- (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
- (r'(<)', pushstate_element_content_starttag_callback),
- (elementcontentchar, Literal),
- (entityref, Literal),
- (charref, Literal),
- (r'\{\{|\}\}', Literal),
- ],
- 'end_tag': [
- include('whitespace'),
- (r'(>)', popstate_tag_callback),
- (qname, Name.Tag),
- ],
- 'xmlspace_decl': [
- (r'\(:', Comment, 'comment'),
- (r'preserve|strip', Keyword, '#pop'),
- ],
- 'declareordering': [
- (r'\(:', Comment, 'comment'),
- include('whitespace'),
- (r'ordered|unordered', Keyword, '#pop'),
- ],
- 'xqueryversion': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (stringdouble, String.Double),
- (stringsingle, String.Single),
- (r'encoding', Keyword),
- (r';', Punctuation, '#pop'),
- ],
- 'pragma': [
- (qname, Name.Variable, 'pragmacontents'),
- ],
- 'pragmacontents': [
- (r'#\)', Punctuation, 'operator'),
- (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
- unirange(0x10000, 0x10ffff), Literal),
- (r'(\s+)', Text),
- ],
- 'occurrenceindicator': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
- (r'\*|\?|\+', Operator, 'operator'),
- (r':=', Operator, 'root'),
- default('operator'),
- ],
- 'option': [
- include('whitespace'),
- (qname, Name.Variable, '#pop'),
- ],
- 'qname_braren': [
- include('whitespace'),
- (r'(\{)', pushstate_operator_root_callback),
- (r'(\()', Punctuation, 'root'),
- ],
- 'element_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'attribute_qname': [
- (qname, Name.Variable, 'root'),
- ],
- 'root': [
- include('whitespace'),
- (r'\(:', Comment, 'comment'),
-
- # handle operator state
- # order on numbers matters - handle most complex first
- (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+)[eE][\+\-]?\d+', Number.Float, 'operator'),
- (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
- (r'(\d+)', Number.Integer, 'operator'),
- (r'(\.\.|\.|\))', Punctuation, 'operator'),
- (r'(declare)(\s+)(construction)',
- bygroups(Keyword, Text, Keyword), 'operator'),
- (r'(declare)(\s+)(default)(\s+)(order)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
- (ncname + ':\*', Name, 'operator'),
- ('\*:'+ncname, Name.Tag, 'operator'),
- ('\*', Name.Tag, 'operator'),
- (stringdouble, String.Double, 'operator'),
- (stringsingle, String.Single, 'operator'),
-
- (r'(\})', popstate_callback),
-
- #NAMESPACE DECL
- (r'(declare)(\s+)(default)(\s+)(collation)',
- bygroups(Keyword, Text, Keyword, Text, Keyword)),
- (r'(module|declare)(\s+)(namespace)',
- bygroups(Keyword, Text, Keyword), 'namespacedecl'),
- (r'(declare)(\s+)(base-uri)',
- bygroups(Keyword, Text, Keyword), 'namespacedecl'),
-
- #NAMESPACE KEYWORD
- (r'(declare)(\s+)(default)(\s+)(element|function)',
- bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
- (r'(import)(\s+)(schema|module)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
- (r'(declare)(\s+)(copy-namespaces)',
- bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
-
- #VARNAMEs
- (r'(for|let|some|every)(\s+)(\$)',
- bygroups(Keyword, Text, Name.Variable), 'varname'),
- (r'\$', Name.Variable, 'varname'),
- (r'(declare)(\s+)(variable)(\s+)(\$)',
- bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
-
- #ITEMTYPE
- (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
-
- (r'(element|attribute|schema-element|schema-attribute|comment|'
- r'text|node|document-node|empty-sequence)(\s+)(\()',
- pushstate_operator_kindtest_callback),
-
- (r'(processing-instruction)(\s+)(\()',
- pushstate_operator_kindtestforpi_callback),
-
- (r'(<!--)', pushstate_operator_xmlcomment_callback),
-
- (r'(<\?)', pushstate_operator_processing_instruction_callback),
-
- (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
-
- # (r'</', Name.Tag, 'end_tag'),
- (r'(<)', pushstate_operator_starttag_callback),
-
- (r'(declare)(\s+)(boundary-space)',
- bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
-
- (r'(validate)(\s+)(lax|strict)',
- pushstate_operator_root_validate_withmode),
- (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
- (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
- (r'(element|attribute)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
-
- (r'(document|text|processing-instruction|comment)(\s*)(\{)',
- pushstate_operator_root_construct_callback),
- #ATTRIBUTE
- (r'(attribute)(\s+)(?=' + qname + r')',
- bygroups(Keyword, Text), 'attribute_qname'),
- #ELEMENT
- (r'(element)(\s+)(?=' +qname+ r')',
- bygroups(Keyword, Text), 'element_qname'),
- #PROCESSING_INSTRUCTION
- (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
- bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
- 'operator'),
-
- (r'(declare|define)(\s+)(function)',
- bygroups(Keyword, Text, Keyword)),
-
- (r'(\{)', pushstate_operator_root_callback),
-
- (r'(unordered|ordered)(\s*)(\{)',
- pushstate_operator_order_callback),
-
- (r'(declare)(\s+)(ordering)',
- bygroups(Keyword, Text, Keyword), 'declareordering'),
-
- (r'(xquery)(\s+)(version)',
- bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
-
- (r'(\(#)', Punctuation, 'pragma'),
-
- # sometimes return can occur in root state
- (r'return', Keyword),
-
- (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
- 'option'),
-
- #URI LITERALS - single and double quoted
- (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
- (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
-
- (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
- bygroups(Keyword, Punctuation)),
- (r'(descendant|following-sibling|following|parent|preceding-sibling'
- r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
-
- (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
-
- (r'then|else', Keyword),
-
- # MarkLogic specific
- (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
- (r'(catch)(\s*)(\()(\$)',
- bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
-
- (r'(@'+qname+')', Name.Attribute),
- (r'(@'+ncname+')', Name.Attribute),
- (r'@\*:'+ncname, Name.Attribute),
- (r'(@)', Name.Attribute),
-
- (r'//|/|\+|-|;|,|\(|\)', Punctuation),
-
- # STANDALONE QNAMES
- (qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
- (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
- (qname, Name.Tag, 'operator'),
- ]
- }
-
-
-class DartLexer(RegexLexer):
- """
- For `Dart <http://dartlang.org/>`_ source code.
-
- .. versionadded:: 1.5
- """
-
- name = 'Dart'
- aliases = ['dart']
- filenames = ['*.dart']
- mimetypes = ['text/x-dart']
-
- flags = re.MULTILINE | re.DOTALL
-
- tokens = {
- 'root': [
- include('string_literal'),
- (r'#!(.*?)$', Comment.Preproc),
- (r'\b(import|export)\b', Keyword, 'import_decl'),
- (r'\b(library|source|part of|part)\b', Keyword),
- (r'[^\S\n]+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'\b(class)\b(\s+)',
- bygroups(Keyword.Declaration, Text), 'class'),
- (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
- r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
- Keyword),
- (r'\b(abstract|const|extends|factory|final|get|implements|'
- r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
- (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b', Keyword.Type),
- (r'\b(false|null|true)\b', Keyword.Constant),
- (r'[~!%^&*+=|?:<>/-]|as\b', Operator),
- (r'[a-zA-Z_$]\w*:', Name.Label),
- (r'[a-zA-Z_$]\w*', Name),
- (r'[(){}\[\],.;]', Punctuation),
- (r'0[xX][0-9a-fA-F]+', Number.Hex),
- # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
- (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
- (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
- (r'\n', Text)
- # pseudo-keyword negate intentionally left out
- ],
- 'class': [
- (r'[a-zA-Z_$]\w*', Name.Class, '#pop')
- ],
- 'import_decl': [
- include('string_literal'),
- (r'\s+', Text),
- (r'\b(as|show|hide)\b', Keyword),
- (r'[a-zA-Z_$]\w*', Name),
- (r'\,', Punctuation),
- (r'\;', Punctuation, '#pop')
- ],
- 'string_literal': [
- # Raw strings.
- (r'r"""([\s|\S]*?)"""', String.Double),
- (r"r'''([\s|\S]*?)'''", String.Single),
- (r'r"(.*?)"', String.Double),
- (r"r'(.*?)'", String.Single),
- # Normal Strings.
- (r'"""', String.Double, 'string_double_multiline'),
- (r"'''", String.Single, 'string_single_multiline'),
- (r'"', String.Double, 'string_double'),
- (r"'", String.Single, 'string_single')
- ],
- 'string_common': [
- (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
- String.Escape),
- (r'(\$)([a-zA-Z_]\w*)', bygroups(String.Interpol, Name)),
- (r'(\$\{)(.*?)(\})',
- bygroups(String.Interpol, using(this), String.Interpol))
- ],
- 'string_double': [
- (r'"', String.Double, '#pop'),
- (r'[^\"$\\\n]+', String.Double),
- include('string_common'),
- (r'\$+', String.Double)
- ],
- 'string_double_multiline': [
- (r'"""', String.Double, '#pop'),
- (r'[^\"$\\]+', String.Double),
- include('string_common'),
- (r'(\$|\")+', String.Double)
- ],
- 'string_single': [
- (r"'", String.Single, '#pop'),
- (r"[^\'$\\\n]+", String.Single),
- include('string_common'),
- (r'\$+', String.Single)
- ],
- 'string_single_multiline': [
- (r"'''", String.Single, '#pop'),
- (r'[^\'$\\]+', String.Single),
- include('string_common'),
- (r'(\$|\')+', String.Single)
- ]
- }
-
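The DartLexer above registers `*.dart` under `filenames`, so it can be looked up by file name rather than by alias. A minimal sketch of that lookup (the file name is made up for illustration):

    from pygments.lexers import get_lexer_for_filename

    # Resolves to DartLexer via the '*.dart' pattern declared on the class.
    lexer = get_lexer_for_filename('example.dart')
    print(lexer.name)   # 'Dart'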
-
-class TypeScriptLexer(RegexLexer):
- """
- For `TypeScript <http://typescriptlang.org/>`_ source code.
-
- .. versionadded:: 1.6
- """
-
- name = 'TypeScript'
- aliases = ['ts']
- filenames = ['*.ts']
- mimetypes = ['text/x-typescript']
-
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- # Match stuff like: module name {...}
- (r'\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)',
- bygroups(Keyword.Reserved, Text, Name.Other, Text), 'slashstartsregex'),
- # Match variable type keywords
- (r'\b(string|bool|number)\b', Keyword.Type),
- # Match stuff like: constructor
- (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
- # Match stuff like: super(argument, list)
- (r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
- bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
- # Match stuff like: function() {...}
- (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
- # Match stuff like: (function: return type)
- (r'([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)',
- bygroups(Name.Other, Text, Keyword.Type)),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class LassoLexer(RegexLexer):
- """
- For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
- syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
- HTML, use the `LassoHtmlLexer`.
-
- Additional options accepted:
-
- `builtinshighlighting`
- If given and ``True``, highlight builtin types, traits, methods, and
- members (default: ``True``).
- `requiredelimiters`
- If given and ``True``, only highlight code between delimiters as Lasso
- (default: ``False``).
-
- .. versionadded:: 1.6
- """
-
- name = 'Lasso'
- aliases = ['lasso', 'lassoscript']
- filenames = ['*.lasso', '*.lasso[89]']
- alias_filenames = ['*.incl', '*.inc', '*.las']
- mimetypes = ['text/x-lasso']
- flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-
- tokens = {
- 'root': [
- (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
- (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
- ('delimiters', 'anglebrackets')),
- (r'<(!--.*?-->)?', Other, 'delimiters'),
- (r'\s+', Other),
- default(('delimiters', 'lassofile')),
- ],
- 'delimiters': [
- (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
- (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
- (r'\[', Comment.Preproc, 'squarebrackets'),
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
- (r'<(!--.*?-->)?', Other),
- (r'[^[<]+', Other),
- ],
- 'nosquarebrackets': [
- (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
- (r'<', Other),
- (r'[^<]+', Other),
- ],
- 'noprocess': [
- (r'\[/noprocess\]', Comment.Preproc, '#pop'),
- (r'\[', Other),
- (r'[^[]', Other),
- ],
- 'squarebrackets': [
- (r'\]', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'anglebrackets': [
- (r'\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'lassofile': [
- (r'\]|\?>', Comment.Preproc, '#pop'),
- include('lasso'),
- ],
- 'whitespacecomments': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*\*!.*?\*/', String.Doc),
- (r'/\*.*?\*/', Comment.Multiline),
- ],
- 'lasso': [
- # whitespace/comments
- include('whitespacecomments'),
-
- # literals
- (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
- (r'0x[\da-f]+', Number.Hex),
- (r'\d+', Number.Integer),
- (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
- (r"'", String.Single, 'singlestring'),
- (r'"', String.Double, 'doublestring'),
- (r'`[^`]*`', String.Backtick),
-
- # names
- (r'\$[a-z_][\w.]*', Name.Variable),
- (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance),
- (r"(\.)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
- (r"(self)(\s*->\s*)('[a-z_][\w.]*')",
- bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
- (r'(\.\.?)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Name.Builtin.Pseudo, Name.Other.Member)),
- (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)',
- bygroups(Operator, Name.Other.Member)),
- (r'(self|inherited)\b', Name.Builtin.Pseudo),
- (r'-[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
- r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
- r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
- r'Error_InvalidDatabase|Error_InvalidPassword|'
- r'Error_InvalidUsername|Error_ModuleNotFound|'
- r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
- r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
- r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
- r'Error_UpdateError)\b', Name.Exception),
-
- # definitions
- (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)),
- (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])',
- bygroups(Keyword.Declaration, Text, Name.Class, Operator,
- Name.Function), 'signature'),
- (r'(define)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
- (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*\())', bygroups(Keyword, Text, Name.Function),
- 'signature'),
- (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)',
- bygroups(Keyword, Text, Name.Function)),
-
- # keywords
- (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant),
- (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration),
- (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
- r'null|bytes|list|queue|set|stack|staticarray|tie)\b', Keyword.Type),
- (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
- (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
- (r'require\b', Keyword, 'requiresection'),
- (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)),
- (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
- r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
- r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
- r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
- r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
- r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
- r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
- r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
- r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
- r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
- r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
- r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
- r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
- r'Tag_Name|ascending|average|by|define|descending|do|equals|'
- r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
- r'min|on|order|parent|protected|provide|public|require|returnhome|'
- r'skip|split_thread|sum|take|thread|to|trait|type|where|with|'
- r'yield|yieldhome)\b',
- bygroups(Punctuation, Keyword)),
-
- # other
- (r',', Punctuation, 'commamember'),
- (r'(and|or|not)\b', Operator.Word),
- (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))',
- bygroups(Name, Name.Label, Operator)),
- (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
- (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b',
- bygroups(Operator, Operator.Word)),
- (r':=|[-+*/%=<>&|!?\\]+', Operator),
- (r'[{}():;,@^]', Punctuation),
- ],
- 'singlestring': [
- (r"'", String.Single, '#pop'),
- (r"[^'\\]+", String.Single),
- include('escape'),
- (r"\\", String.Single),
- ],
- 'doublestring': [
- (r'"', String.Double, '#pop'),
- (r'[^"\\]+', String.Double),
- include('escape'),
- (r'\\', String.Double),
- ],
- 'escape': [
- (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
- r'[abefnrtv?\"\'\\]|$)', String.Escape),
- ],
- 'signature': [
- (r'=>', Operator, '#pop'),
- (r'\)', Punctuation, '#pop'),
- (r'[(,]', Punctuation, 'parameter'),
- include('lasso'),
- ],
- 'parameter': [
- (r'\)', Punctuation, '#pop'),
- (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
- (r'\.\.\.', Name.Builtin.Pseudo),
- include('lasso'),
- ],
- 'requiresection': [
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'),
- (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name),
- (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r',', Punctuation),
- include('whitespacecomments'),
- ],
- 'requiresignature': [
- (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
- (r'\)', Punctuation, '#pop:2'),
- (r'-?[a-z_][\w.]*', Name.Attribute),
- (r'::\s*[a-z_][\w.]*', Name.Label),
- (r'\.\.\.', Name.Builtin.Pseudo),
- (r'[(,]', Punctuation),
- include('whitespacecomments'),
- ],
- 'commamember': [
- (r'(([a-z_][\w.]*=?|[-+*/%])'
- r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))',
- Name.Function, 'signature'),
- include('whitespacecomments'),
- default('#pop'),
- ],
- }
-
- def __init__(self, **options):
- self.builtinshighlighting = get_bool_opt(
- options, 'builtinshighlighting', True)
- self.requiredelimiters = get_bool_opt(
- options, 'requiredelimiters', False)
-
- self._builtins = set()
- self._members = set()
- if self.builtinshighlighting:
- from pygments.lexers._lassobuiltins import BUILTINS, MEMBERS
- for key, value in iteritems(BUILTINS):
- self._builtins.update(value)
- for key, value in iteritems(MEMBERS):
- self._members.update(value)
- RegexLexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- stack = ['root']
- if self.requiredelimiters:
- stack.append('delimiters')
- for index, token, value in \
- RegexLexer.get_tokens_unprocessed(self, text, stack):
- if (token is Name.Other and value.lower() in self._builtins or
- token is Name.Other.Member and
- value.lower().rstrip('=') in self._members):
- yield index, Name.Builtin, value
- continue
- yield index, token, value
-
- def analyse_text(text):
- rv = 0.0
- if 'bin/lasso9' in text:
- rv += 0.8
- if re.search(r'<\?(=|lasso)|\A\[', text, re.I):
- rv += 0.4
- if re.search(r'local\(', text, re.I):
- rv += 0.4
- if '?>' in text:
- rv += 0.1
- return rv
-
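The two options documented in the LassoLexer docstring are read in `__init__` via `get_bool_opt` and change how much of the input is treated as Lasso. A minimal sketch of passing them (the sample string is made up):

    from pygments.lexers import LassoLexer

    # Only text between Lasso delimiters is processed as Lasso, and builtin
    # names are left as plain Name tokens instead of Name.Builtin.
    lexer = LassoLexer(requiredelimiters=True, builtinshighlighting=False)
    for index, token, value in lexer.get_tokens_unprocessed('[output: "hi"]'):
        print(index, token, value)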
-
-class QmlLexer(RegexLexer):
- """
- For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
- .. versionadded:: 1.6
- """
-
- # QML is based on javascript, so much of this is taken from the
- # JavascriptLexer above.
-
- name = 'QML'
- aliases = ['qml']
- filenames = ['*.qml',]
- mimetypes = [ 'application/x-qml',]
-
-
- # pasted from JavascriptLexer, with some additions
- flags = re.DOTALL
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'<!--', Comment),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- (r'(?=/)', Text, ('#pop', 'badregex')),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root' : [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
-
- # QML insertions
- (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*',Keyword.Declaration,
- 'slashstartsregex'),
- (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:',Keyword, 'slashstartsregex'),
-
- # the rest from JavascriptLexer
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
- r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'this)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
- r'extends|final|float|goto|implements|import|int|interface|long|native|'
- r'package|private|protected|public|short|static|super|synchronized|throws|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
- r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
- r'decodeURIComponent|encodeURI|encodeURIComponent|'
- r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_]\w*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-
-class CirruLexer(RegexLexer):
- """
- Syntax rules of Cirru can be found at:
- http://grammar.cirru.org/
-
- * using ``()`` to mark up blocks, but limited to a single line
- * using ``""`` to mark up strings, with ``\`` allowed as an escape
- * using ``$`` as a shorthand for ``()`` until the end of the indentation level or a ``)``
- * using indentation to create nesting
-
- .. versionadded:: 2.0
- """
-
- name = 'Cirru'
- aliases = ['cirru']
- filenames = ['*.cirru', '*.cr']
- mimetypes = ['text/x-cirru']
- flags = re.MULTILINE
-
- tokens = {
- 'string': [
- (r'[^"\\\n]', String),
- (r'\\', String.Escape, 'escape'),
- (r'"', String, '#pop'),
- ],
- 'escape': [
- (r'.', String.Escape, '#pop'),
- ],
- 'function': [
- (r'[\w-][^\s\(\)\"]*', Name.Function, '#pop'),
- (r'\)', Operator, '#pop'),
- (r'(?=\n)', Text, '#pop'),
- (r'\(', Operator, '#push'),
- (r'"', String, ('#pop', 'string')),
- (r'\s+', Text.Whitespace),
- (r'\,', Operator, '#pop'),
- ],
- 'line': [
- (r'^\B', Text.Whitespace, 'function'),
- (r'\$', Operator, 'function'),
- (r'\(', Operator, 'function'),
- (r'\)', Operator),
- (r'(?=\n)', Text, '#pop'),
- (r'\n', Text, '#pop'),
- (r'"', String, 'string'),
- (r'\s+', Text.Whitespace),
- (r'[\d\.]+', Number),
- (r'[\w-][^\"\(\)\s]*', Name.Variable),
- (r'--', Comment.Single)
- ],
- 'root': [
- (r'^\s*', Text.Whitespace, ('line', 'function')),
- (r'^\s+$', Text.Whitespace),
- ]
- }
-
-
-class MaskLexer(RegexLexer):
- """
- For `Mask <http://github.com/atmajs/MaskJS>`__ markup.
-
- .. versionadded:: 2.0
- """
- name = 'Mask'
- aliases = ['mask']
- filenames = ['*.mask']
- mimetypes = ['text/x-mask']
-
- flags = re.MULTILINE | re.IGNORECASE | re.DOTALL
- tokens = {
- 'root': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline),
- (r'[\{\};>]', Punctuation),
- (r"'''", String, 'string-trpl-single'),
- (r'"""', String, 'string-trpl-double'),
- (r"'", String, 'string-single'),
- (r'"', String, 'string-double'),
- (r'([\w-]+)', Name.Tag, 'node'),
- (r'([^\.#;{>\s]+)', Name.Class, 'node'),
- (r'(#[\w-]+)', Name.Function, 'node'),
- (r'(\.[\w-]+)', Name.Variable.Class, 'node')
- ],
- 'string-base': [
- (r'\\.', String.Escape),
- (r'~\[', String.Interpol, 'interpolation'),
- (r'.', String.Single),
- ],
- 'string-single':[
- (r"'", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-double':[
- (r'"', String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-single':[
- (r"'''", String.Single, '#pop'),
- include('string-base')
- ],
- 'string-trpl-double':[
- (r'"""', String.Single, '#pop'),
- include('string-base')
- ],
- 'interpolation': [
- (r'\]', String.Interpol, '#pop'),
- (r'\s*:', String.Interpol, 'expression'),
- (r'\s*\w+:', Name.Other),
- (r'[^\]]+', String.Interpol)
- ],
- 'expression': [
- (r'[^\]]+', using(JavascriptLexer), '#pop')
- ],
- 'node': [
- (r'\s+', Text),
- (r'\.', Name.Variable.Class, 'node-class'),
- (r'\#', Name.Function, 'node-id'),
- (r'style[ \t]*=', Name.Attribute, 'node-attr-style-value'),
- (r'[\w:-]+[ \t]*=', Name.Attribute, 'node-attr-value'),
- (r'[\w:-]+', Name.Attribute),
- (r'[>{;]', Punctuation, '#pop')
- ],
- 'node-class': [
- (r'[\w-]+', Name.Variable.Class),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-id': [
- (r'[\w-]+', Name.Function),
- (r'~\[', String.Interpol, 'interpolation'),
- default('#pop')
- ],
- 'node-attr-value':[
- (r'\s+', Text),
- (r'\w+', Name.Variable, '#pop'),
- (r"'", String, 'string-single-pop2'),
- (r'"', String, 'string-double-pop2'),
- default('#pop')
- ],
- 'node-attr-style-value':[
- (r'\s+', Text),
- (r"'", String.Single, 'css-single-end'),
- (r'"', String.Single, 'css-double-end'),
- include('node-attr-value')
- ],
- 'css-base': [
- (r'\s+', Text),
- (r";", Punctuation),
- (r"[\w\-]+\s*:", Name.Builtin)
- ],
- 'css-single-end': [
- include('css-base'),
- (r"'", String.Single, '#pop:2'),
- (r"[^;']+", Name.Entity)
- ],
- 'css-double-end': [
- include('css-base'),
- (r'"', String.Single, '#pop:2'),
- (r"[^;\"]+", Name.Entity)
- ],
- 'string-single-pop2':[
- (r"'", String.Single, '#pop:2'),
- include('string-base')
- ],
- 'string-double-pop2':[
- (r'"', String.Single, '#pop:2'),
- include('string-base')
- ],
- }
-
-
-class ZephirLexer(RegexLexer):
- """
- For `Zephir language <http://zephir-lang.com/>`_ source code.
-
- Zephir is a compiled high-level language aimed
- at creating C extensions for PHP.
-
- .. versionadded:: 2.0
- """
-
- name = 'Zephir'
- aliases = ['zephir']
- filenames = ['*.zep']
-
- zephir_keywords = [ 'fetch', 'echo', 'isset', 'empty']
- zephir_type = [ 'bit', 'bits' , 'string' ]
-
- flags = re.DOTALL | re.MULTILINE
-
- tokens = {
- 'commentsandwhitespace': [
- (r'\s+', Text),
- (r'//.*?\n', Comment.Single),
- (r'/\*.*?\*/', Comment.Multiline)
- ],
- 'slashstartsregex': [
- include('commentsandwhitespace'),
- (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
- r'([gim]+\b|\B)', String.Regex, '#pop'),
- default('#pop')
- ],
- 'badregex': [
- (r'\n', Text, '#pop')
- ],
- 'root': [
- (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
- include('commentsandwhitespace'),
- (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
- r'(<<|>>>?|==?|!=?|->|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
- (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
- (r'[})\].]', Punctuation),
- (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
- r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
- r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
- r'empty)\b', Keyword, 'slashstartsregex'),
- (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
- (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
- r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
- r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
- r'transient|volatile)\b', Keyword.Reserved),
- (r'(true|false|null|undefined)\b', Keyword.Constant),
- (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
- r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
- r'window)\b', Name.Builtin),
- (r'[$a-zA-Z_][\w\\]*', Name.Other),
- (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
- (r'0x[0-9a-fA-F]+', Number.Hex),
- (r'[0-9]+', Number.Integer),
- (r'"(\\\\|\\"|[^"])*"', String.Double),
- (r"'(\\\\|\\'|[^'])*'", String.Single),
- ]
- }
-
-class SlimLexer(ExtendedRegexLexer):
- """
- For Slim markup.
- """
-
- name = 'Slim'
- aliases = ['slim']
- filenames = ['*.slim']
- mimetypes = ['text/x-slim']
-
- flags = re.IGNORECASE
- _dot = r'(?: \|\n(?=.* \|)|.)'
- tokens = {
- 'root': [
- (r'[ \t]*\n', Text),
- (r'[ \t]*', _indentation),
- ],
-
- 'css': [
- (r'\.[\w:-]+', Name.Class, 'tag'),
- (r'\#[\w:-]+', Name.Function, 'tag'),
- ],
-
- 'eval-or-plain': [
- (r'([ \t]*==?)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- 'root'),
- (r'[ \t]+[\w:-]+(?=[=])', Name.Attribute, 'html-attributes'),
- (r'', Text, 'plain'),
- ],
-
- 'content': [
- include('css'),
- (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
- (r'(-)(.*\n)',
- bygroups(Punctuation, using(RubyLexer)),
- '#pop'),
- (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
- (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
- (r'[\w:-]+', Name.Tag, 'tag'),
- include('eval-or-plain'),
- ],
-
- 'tag': [
- include('css'),
- (r'[<>]{1,2}(?=[ \t=])', Punctuation),
- (r'[ \t]+\n', Punctuation, '#pop:2'),
- include('eval-or-plain'),
- ],
-
- 'plain': [
- (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
- (r'(#\{)(.*?)(\})',
- bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
- (r'\n', Text, 'root'),
- ],
-
- 'html-attributes': [
- (r'=', Punctuation),
- (r'"[^\"]+"', using(RubyLexer), 'tag'),
- (r'\'[^\']+\'', using(RubyLexer), 'tag'),
- (r'[\w]+', Text, 'tag'),
- ],
-
- 'slim-comment-block': [
- (_dot + '+', Comment.Preproc),
- (r'\n', Text, 'root'),
- ],
- }
+__all__ = []
diff --git a/pygments/lexers/webmisc.py b/pygments/lexers/webmisc.py
new file mode 100644
index 00000000..fe0fae64
--- /dev/null
+++ b/pygments/lexers/webmisc.py
@@ -0,0 +1,922 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers.webmisc
+ ~~~~~~~~~~~~~~~~~~~~~~~
+
+ Lexers for misc. web stuff.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \
+ default, using
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+ Number, Punctuation, Literal
+from pygments.util import unirange
+
+from pygments.lexers.css import _indentation, _starts_block
+from pygments.lexers.html import HtmlLexer
+from pygments.lexers.javascript import JavascriptLexer
+from pygments.lexers.ruby import RubyLexer
+
+__all__ = ['DuelLexer', 'SlimLexer', 'XQueryLexer', 'QmlLexer', 'CirruLexer']
+
+
+class DuelLexer(RegexLexer):
+ """
+ Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
+ See http://duelengine.org/.
+ See http://jsonml.org/jbst/.
+
+ .. versionadded:: 1.4
+ """
+
+ name = 'Duel'
+ aliases = ['duel', 'jbst', 'jsonml+bst']
+ filenames = ['*.duel', '*.jbst']
+ mimetypes = ['text/x-duel', 'text/x-jbst']
+
+ flags = re.DOTALL
+
+ tokens = {
+ 'root': [
+ (r'(<%[@=#!:]?)(.*?)(%>)',
+ bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
+ (r'(<%\$)(.*?)(:)(.*?)(%>)',
+ bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
+ (r'(<%--)(.*?)(--%>)',
+ bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
+ (r'(<script.*?>)(.*?)(</script>)',
+ bygroups(using(HtmlLexer),
+ using(JavascriptLexer), using(HtmlLexer))),
+ (r'(.+?)(?=<)', using(HtmlLexer)),
+ (r'.+', using(HtmlLexer)),
+ ],
+ }
+
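Like any other Pygments lexer, the classes in this new module plug straight into the standard highlight API. A minimal sketch, assuming the 'duel' alias declared on DuelLexer below (the markup sample is made up):

    from pygments import highlight
    from pygments.lexers import get_lexer_by_name
    from pygments.formatters import HtmlFormatter

    sample = '<div><%= model.title %></div>'
    print(highlight(sample, get_lexer_by_name('duel'), HtmlFormatter()))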
+
+class XQueryLexer(ExtendedRegexLexer):
+ """
+ An XQuery lexer, parsing a stream and outputting the tokens needed to
+ highlight XQuery code.
+
+ .. versionadded:: 1.4
+ """
+ name = 'XQuery'
+ aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
+ filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
+ mimetypes = ['text/xquery', 'application/xquery']
+
+ xquery_parse_state = []
+
+ # FIX UNICODE LATER
+ # ncnamestartchar = (
+ # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
+ # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
+ # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
+ # ur"[\u10000-\uEFFFF]"
+ # )
+ ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
+ # FIX UNICODE LATER
+ # ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
+ # ur"[\u203F-\u2040]")
+ ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
+ ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
+ pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
+ pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
+ pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
+ prefixedname = "%s:%s" % (ncname, ncname)
+ unprefixedname = ncname
+ qname = "(?:%s|%s)" % (prefixedname, unprefixedname)
+
+ entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)'
+ charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)'
+
+ stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")'
+ stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')"
+
+ # FIX UNICODE LATER
+ # elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+ # quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|'
+ # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]'
+ # aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|'
+ # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]')
+ aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]'
+
+ # CHAR elements - fix the above elementcontentchar, quotattrcontentchar,
+ # aposattrcontentchar
+ # x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+
+ flags = re.DOTALL | re.MULTILINE | re.UNICODE
+
+ def punctuation_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def operator_root_callback(lexer, match, ctx):
+ yield match.start(), Operator, match.group(1)
+ # transition to root always - don't pop off stack
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def popstate_tag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ ctx.pos = match.end()
+
+ def popstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ next_state = lexer.xquery_parse_state.pop()
+ if next_state == 'occurrenceindicator':
+ if re.match("[?*+]+", match.group(2)):
+ yield match.start(), Punctuation, match.group(2)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+ else:
+ ctx.stack.append('operator')
+ ctx.pos = match.end(1)
+ else:
+ ctx.stack.append(next_state)
+ ctx.pos = match.end(1)
+
+ def popstate_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ # if we have run out of our state stack, pop whatever is on the pygments
+ # state stack
+ if len(lexer.xquery_parse_state) == 0:
+ ctx.stack.pop()
+ elif len(ctx.stack) > 1:
+ ctx.stack.append(lexer.xquery_parse_state.pop())
+ else:
+ # I don't know if I'll need this, but just in case, default back to root
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_element_content_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('element_content')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.pos = match.end()
+
+ def pushstate_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append(ctx.state.pop)
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_order_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_validate_withmode(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Keyword, match.group(3)
+ ctx.stack = ['root']
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_processing_instruction_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('processing_instruction')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_cdata_section_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('cdata_section')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_element_content_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('element_content')
+ ctx.pos = match.end()
+
+ def pushstate_operator_xmlcomment_callback(lexer, match, ctx):
+ yield match.start(), String.Doc, match.group(1)
+ ctx.stack.append('xml_comment')
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('kindtest')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtestforpi_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtestforpi')
+ ctx.pos = match.end()
+
+ def pushstate_operator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('occurrenceindicator')
+ ctx.stack.append('kindtest')
+ ctx.pos = match.end()
+
+ def pushstate_operator_starttag_callback(lexer, match, ctx):
+ yield match.start(), Name.Tag, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack.append('start_tag')
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_root_construct_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_root_callback(lexer, match, ctx):
+ yield match.start(), Punctuation, match.group(1)
+ cur_state = ctx.stack.pop()
+ lexer.xquery_parse_state.append(cur_state)
+ ctx.stack = ['root']
+ ctx.pos = match.end()
+
+ def pushstate_operator_attribute_callback(lexer, match, ctx):
+ yield match.start(), Name.Attribute, match.group(1)
+ ctx.stack.append('operator')
+ ctx.pos = match.end()
+
+ def pushstate_operator_callback(lexer, match, ctx):
+ yield match.start(), Keyword, match.group(1)
+ yield match.start(), Text, match.group(2)
+ yield match.start(), Punctuation, match.group(3)
+ lexer.xquery_parse_state.append('operator')
+ ctx.pos = match.end()
+
+ tokens = {
+ 'comment': [
+ # xquery comments
+ (r'(:\))', Comment, '#pop'),
+ (r'(\(:)', Comment, '#push'),
+ (r'[^:)]', Comment),
+ (r'([^:)]|:|\))', Comment),
+ ],
+ 'whitespace': [
+ (r'\s+', Text),
+ ],
+ 'operator': [
+ include('whitespace'),
+ (r'(\})', popstate_callback),
+ (r'\(:', Comment, 'comment'),
+
+ (r'(\{)', pushstate_root_callback),
+ (r'then|else|external|at|div|except', Keyword, 'root'),
+ (r'order by', Keyword, 'root'),
+ (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
+ (r'and|or', Operator.Word, 'root'),
+ (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)',
+ Operator.Word, 'root'),
+ (r'return|satisfies|to|union|where|preserve\s+strip',
+ Keyword, 'root'),
+ (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\|\||\||:=|=)',
+ operator_root_callback),
+ (r'(::|;|\[|//|/|,)',
+ punctuation_root_callback),
+ (r'(castable|cast)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(instance)(\s+)(of)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(treat)(\s+)(as)\b',
+ bygroups(Keyword, Text, Keyword), 'itemtype'),
+ (r'(case|as)\b', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)',
+ bygroups(Punctuation, Text, Keyword), 'itemtype'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(for|let)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ # (r'\)|\?|\]', Punctuation, '#push'),
+ (r'\)|\?|\]', Punctuation),
+ (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
+ (r'ascending|descending|default', Keyword, '#push'),
+ (r'external', Keyword),
+ (r'collation', Keyword, 'uritooperator'),
+ # finally catch all string literals and stay in operator state
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+
+ (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
+ ],
+ 'uritooperator': [
+ (stringdouble, String.Double, '#pop'),
+ (stringsingle, String.Single, '#pop'),
+ ],
+ 'namespacedecl': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)),
+ (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r',', Punctuation),
+ (r'=', Operator),
+ (r';', Punctuation, 'root'),
+ (ncname, Name.Namespace),
+ ],
+ 'namespacekeyword': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double, 'namespacedecl'),
+ (stringsingle, String.Single, 'namespacedecl'),
+ (r'inherit|no-inherit', Keyword, 'root'),
+ (r'namespace', Keyword, 'namespacedecl'),
+ (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
+ (r'preserve|no-preserve', Keyword),
+ (r',', Punctuation),
+ ],
+ 'varname': [
+ (r'\(:', Comment, 'comment'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'singletype': [
+ (r'\(:', Comment, 'comment'),
+ (ncname + r'(:\*)', Name.Variable, 'operator'),
+ (qname, Name.Variable, 'operator'),
+ ],
+ 'itemtype': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\$', Punctuation, 'varname'),
+ (r'(void)(\s*)(\()(\s*)(\))',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'),
+ (r'(element|attribute|schema-element|schema-attribute|comment|text|'
+ r'node|binary|document-node|empty-sequence)(\s*)(\()',
+ pushstate_occurrenceindicator_kindtest_callback),
+ # MarkLogic-specific type?
+ (r'(processing-instruction)(\s*)(\()',
+ bygroups(Keyword, Text, Punctuation),
+ ('occurrenceindicator', 'kindtestforpi')),
+ (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
+ bygroups(Keyword, Text, Punctuation, Text, Punctuation),
+ 'occurrenceindicator'),
+ (r'\(\#', Punctuation, 'pragma'),
+ (r';', Punctuation, '#pop'),
+ (r'then|else', Keyword, '#pop'),
+ (r'(at)(\s+)(' + stringdouble + ')',
+ bygroups(Keyword, Text, String.Double), 'namespacedecl'),
+ (r'(at)(\s+)(' + stringsingle + ')',
+ bygroups(Keyword, Text, String.Single), 'namespacedecl'),
+ (r'except|intersect|in|is|return|satisfies|to|union|where',
+ Keyword, 'root'),
+ (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
+ (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|\||\|', Operator, 'root'),
+ (r'external|at', Keyword, 'root'),
+ (r'(stable)(\s+)(order)(\s+)(by)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
+ (r'(castable|cast)(\s+)(as)',
+ bygroups(Keyword, Text, Keyword), 'singletype'),
+ (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
+ (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
+ (r'case|as', Keyword, 'itemtype'),
+ (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+ (ncname + r':\*', Keyword.Type, 'operator'),
+ (qname, Keyword.Type, 'occurrenceindicator'),
+ ],
+ 'kindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'{', Punctuation, 'root'),
+ (r'(\))([*+?]?)', popstate_kindtest_callback),
+ (r'\*', Name, 'closekindtest'),
+ (qname, Name, 'closekindtest'),
+ (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
+ ],
+ 'kindtestforpi': [
+ (r'\(:', Comment, 'comment'),
+ (r'\)', Punctuation, '#pop'),
+ (ncname, Name.Variable),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ ],
+ 'closekindtest': [
+ (r'\(:', Comment, 'comment'),
+ (r'(\))', popstate_callback),
+ (r',', Punctuation),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'\?', Punctuation),
+ ],
+ 'xml_comment': [
+ (r'(-->)', popstate_xmlcomment_callback),
+ (r'[^-]{1,2}', Literal),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'processing_instruction': [
+ (r'\s+', Text, 'processing_instruction_content'),
+ (r'\?>', String.Doc, '#pop'),
+ (pitarget, Name),
+ ],
+ 'processing_instruction_content': [
+ (r'\?>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'cdata_section': [
+ (r']]>', String.Doc, '#pop'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ ],
+ 'start_tag': [
+ include('whitespace'),
+ (r'(/>)', popstate_tag_callback),
+ (r'>', Name.Tag, 'element_content'),
+ (r'"', Punctuation, 'quot_attribute_content'),
+ (r"'", Punctuation, 'apos_attribute_content'),
+ (r'=', Operator),
+ (qname, Name.Tag),
+ ],
+ 'quot_attribute_content': [
+ (r'"', Punctuation, 'start_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'""', Name.Attribute),
+ (quotattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'apos_attribute_content': [
+ (r"'", Punctuation, 'start_tag'),
+ (r'\{', Punctuation, 'root'),
+ (r"''", Name.Attribute),
+ (aposattrcontentchar, Name.Attribute),
+ (entityref, Name.Attribute),
+ (charref, Name.Attribute),
+ (r'\{\{|\}\}', Name.Attribute),
+ ],
+ 'element_content': [
+ (r'</', Name.Tag, 'end_tag'),
+ (r'(\{)', pushstate_root_callback),
+ (r'(<!--)', pushstate_element_content_xmlcomment_callback),
+ (r'(<\?)', pushstate_element_content_processing_instruction_callback),
+ (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
+ (r'(<)', pushstate_element_content_starttag_callback),
+ (elementcontentchar, Literal),
+ (entityref, Literal),
+ (charref, Literal),
+ (r'\{\{|\}\}', Literal),
+ ],
+ 'end_tag': [
+ include('whitespace'),
+ (r'(>)', popstate_tag_callback),
+ (qname, Name.Tag),
+ ],
+ 'xmlspace_decl': [
+ (r'\(:', Comment, 'comment'),
+ (r'preserve|strip', Keyword, '#pop'),
+ ],
+ 'declareordering': [
+ (r'\(:', Comment, 'comment'),
+ include('whitespace'),
+ (r'ordered|unordered', Keyword, '#pop'),
+ ],
+ 'xqueryversion': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (stringdouble, String.Double),
+ (stringsingle, String.Single),
+ (r'encoding', Keyword),
+ (r';', Punctuation, '#pop'),
+ ],
+ 'pragma': [
+ (qname, Name.Variable, 'pragmacontents'),
+ ],
+ 'pragmacontents': [
+ (r'#\)', Punctuation, 'operator'),
+ (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+ unirange(0x10000, 0x10ffff), Literal),
+ (r'(\s+)', Text),
+ ],
+ 'occurrenceindicator': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+ (r'\*|\?|\+', Operator, 'operator'),
+ (r':=', Operator, 'root'),
+ default('operator'),
+ ],
+ 'option': [
+ include('whitespace'),
+ (qname, Name.Variable, '#pop'),
+ ],
+ 'qname_braren': [
+ include('whitespace'),
+ (r'(\{)', pushstate_operator_root_callback),
+ (r'(\()', Punctuation, 'root'),
+ ],
+ 'element_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'attribute_qname': [
+ (qname, Name.Variable, 'root'),
+ ],
+ 'root': [
+ include('whitespace'),
+ (r'\(:', Comment, 'comment'),
+
+ # handle operator state
+ # order on numbers matters - handle most complex first
+ (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+)[eE][\+\-]?\d+', Number.Float, 'operator'),
+ (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
+ (r'(\d+)', Number.Integer, 'operator'),
+ (r'(\.\.|\.|\))', Punctuation, 'operator'),
+ (r'(declare)(\s+)(construction)',
+ bygroups(Keyword, Text, Keyword), 'operator'),
+ (r'(declare)(\s+)(default)(\s+)(order)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
+ (ncname + ':\*', Name, 'operator'),
+ ('\*:'+ncname, Name.Tag, 'operator'),
+ ('\*', Name.Tag, 'operator'),
+ (stringdouble, String.Double, 'operator'),
+ (stringsingle, String.Single, 'operator'),
+
+ (r'(\})', popstate_callback),
+
+ # NAMESPACE DECL
+ (r'(declare)(\s+)(default)(\s+)(collation)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword)),
+ (r'(module|declare)(\s+)(namespace)',
+ bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+ (r'(declare)(\s+)(base-uri)',
+ bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+
+ # NAMESPACE KEYWORD
+ (r'(declare)(\s+)(default)(\s+)(element|function)',
+ bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
+ (r'(import)(\s+)(schema|module)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+ (r'(declare)(\s+)(copy-namespaces)',
+ bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
+
+ # VARNAMEs
+ (r'(for|let|some|every)(\s+)(\$)',
+ bygroups(Keyword, Text, Name.Variable), 'varname'),
+ (r'\$', Name.Variable, 'varname'),
+ (r'(declare)(\s+)(variable)(\s+)(\$)',
+ bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+
+ # ITEMTYPE
+ (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+ (r'(element|attribute|schema-element|schema-attribute|comment|'
+ r'text|node|document-node|empty-sequence)(\s+)(\()',
+ pushstate_operator_kindtest_callback),
+
+ (r'(processing-instruction)(\s+)(\()',
+ pushstate_operator_kindtestforpi_callback),
+
+ (r'(<!--)', pushstate_operator_xmlcomment_callback),
+
+ (r'(<\?)', pushstate_operator_processing_instruction_callback),
+
+ (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),
+
+ # (r'</', Name.Tag, 'end_tag'),
+ (r'(<)', pushstate_operator_starttag_callback),
+
+ (r'(declare)(\s+)(boundary-space)',
+ bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),
+
+ (r'(validate)(\s+)(lax|strict)',
+ pushstate_operator_root_validate_withmode),
+ (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
+ (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+ (r'(element|attribute)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+
+ (r'(document|text|processing-instruction|comment)(\s*)(\{)',
+ pushstate_operator_root_construct_callback),
+ # ATTRIBUTE
+ (r'(attribute)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'attribute_qname'),
+ # ELEMENT
+ (r'(element)(\s+)(?=' + qname + r')',
+ bygroups(Keyword, Text), 'element_qname'),
+ # PROCESSING_INSTRUCTION
+ (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
+ bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
+ 'operator'),
+
+ (r'(declare|define)(\s+)(function)',
+ bygroups(Keyword, Text, Keyword)),
+
+ (r'(\{)', pushstate_operator_root_callback),
+
+ (r'(unordered|ordered)(\s*)(\{)',
+ pushstate_operator_order_callback),
+
+ (r'(declare)(\s+)(ordering)',
+ bygroups(Keyword, Text, Keyword), 'declareordering'),
+
+ (r'(xquery)(\s+)(version)',
+ bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),
+
+ (r'(\(#)', Punctuation, 'pragma'),
+
+ # sometimes return can occur in root state
+ (r'return', Keyword),
+
+ (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
+ 'option'),
+
+ # URI LITERALS - single and double quoted
+ (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
+ (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),
+
+ (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)(::)',
+ bygroups(Keyword, Punctuation)),
+ (r'(descendant|following-sibling|following|parent|preceding-sibling'
+ r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),
+
+ (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
+
+ (r'then|else', Keyword),
+
+ # ML specific
+ (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
+ (r'(catch)(\s*)(\()(\$)',
+ bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),
+
+ (r'(@'+qname+')', Name.Attribute),
+ (r'(@'+ncname+')', Name.Attribute),
+ (r'@\*:'+ncname, Name.Attribute),
+ (r'(@)', Name.Attribute),
+
+ (r'//|/|\+|-|;|,|\(|\)', Punctuation),
+
+ # STANDALONE QNAMES
+ (qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
+ (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
+ (qname, Name.Tag, 'operator'),
+ ]
+ }
+
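Because XQueryLexer is an ExtendedRegexLexer, the callbacks above drive the state machine by mutating `ctx.stack` and `ctx.pos` and by pushing return states onto `xquery_parse_state`. From the outside it is still used like any other lexer; a minimal sketch, assuming the module path introduced by this commit (the query string is made up):

    from pygments.lexers.webmisc import XQueryLexer

    query = 'for $x in (1, 2, 3) return $x * 2'
    for index, token, value in XQueryLexer().get_tokens_unprocessed(query):
        print(index, token, value)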
+
+class QmlLexer(RegexLexer):
+ """
+ For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
+
+ .. versionadded:: 1.6
+ """
+
+ # QML is based on JavaScript, so much of this is taken from the
+ # JavascriptLexer in pygments.lexers.javascript.
+
+ name = 'QML'
+ aliases = ['qml']
+ filenames = ['*.qml']
+ mimetypes = ['application/x-qml']
+
+ # pasted from JavascriptLexer, with some additions
+ flags = re.DOTALL
+ tokens = {
+ 'commentsandwhitespace': [
+ (r'\s+', Text),
+ (r'<!--', Comment),
+ (r'//.*?\n', Comment.Single),
+ (r'/\*.*?\*/', Comment.Multiline)
+ ],
+ 'slashstartsregex': [
+ include('commentsandwhitespace'),
+ (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
+ r'([gim]+\b|\B)', String.Regex, '#pop'),
+ (r'(?=/)', Text, ('#pop', 'badregex')),
+ default('#pop')
+ ],
+ 'badregex': [
+ (r'\n', Text, '#pop')
+ ],
+ 'root': [
+ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
+ include('commentsandwhitespace'),
+ (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
+ (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
+ (r'[})\].]', Punctuation),
+
+ # QML insertions
+ (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*', Keyword.Declaration,
+ 'slashstartsregex'),
+ (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:', Keyword, 'slashstartsregex'),
+
+ # the rest from JavascriptLexer
+ (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
+ r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
+ r'this)\b', Keyword, 'slashstartsregex'),
+ (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
+ (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
+ r'extends|final|float|goto|implements|import|int|interface|long|native|'
+ r'package|private|protected|public|short|static|super|synchronized|throws|'
+ r'transient|volatile)\b', Keyword.Reserved),
+ (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
+ (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
+ r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
+ r'decodeURIComponent|encodeURI|encodeURIComponent|'
+ r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
+ r'window)\b', Name.Builtin),
+ (r'[$a-zA-Z_]\w*', Name.Other),
+ (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
+ (r'0x[0-9a-fA-F]+', Number.Hex),
+ (r'[0-9]+', Number.Integer),
+ (r'"(\\\\|\\"|[^"])*"', String.Double),
+ (r"'(\\\\|\\'|[^'])*'", String.Single),
+ ]
+ }
+
+
+class CirruLexer(RegexLexer):
+ """
+ Syntax rules of Cirru can be found at:
+ http://grammar.cirru.org/
+
+ * using ``()`` to mark up blocks, but limited to a single line
+ * using ``""`` to mark up strings, with ``\`` allowed as an escape
+ * using ``$`` as a shorthand for ``()`` until the end of the indentation level or a ``)``
+ * using indentation to create nesting
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Cirru'
+ aliases = ['cirru']
+ filenames = ['*.cirru', '*.cr']
+ mimetypes = ['text/x-cirru']
+ flags = re.MULTILINE
+
+ tokens = {
+ 'string': [
+ (r'[^"\\\n]', String),
+ (r'\\', String.Escape, 'escape'),
+ (r'"', String, '#pop'),
+ ],
+ 'escape': [
+ (r'.', String.Escape, '#pop'),
+ ],
+ 'function': [
+ (r'[\w-][^\s\(\)\"]*', Name.Function, '#pop'),
+ (r'\)', Operator, '#pop'),
+ (r'(?=\n)', Text, '#pop'),
+ (r'\(', Operator, '#push'),
+ (r'"', String, ('#pop', 'string')),
+ (r'\s+', Text.Whitespace),
+ (r'\,', Operator, '#pop'),
+ ],
+ 'line': [
+ (r'^\B', Text.Whitespace, 'function'),
+ (r'\$', Operator, 'function'),
+ (r'\(', Operator, 'function'),
+ (r'\)', Operator),
+ (r'(?=\n)', Text, '#pop'),
+ (r'\n', Text, '#pop'),
+ (r'"', String, 'string'),
+ (r'\s+', Text.Whitespace),
+ (r'[\d\.]+', Number),
+ (r'[\w-][^\"\(\)\s]*', Name.Variable),
+ (r'--', Comment.Single)
+ ],
+ 'root': [
+ (r'^\s*', Text.Whitespace, ('line', 'function')),
+ (r'^\s+$', Text.Whitespace),
+ ]
+ }
+
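The bullet list in the CirruLexer docstring is easiest to see on a tiny snippet; the Cirru sample below is my own illustration of the ``$`` shorthand, not taken from the grammar site:

    from pygments import highlight
    from pygments.lexers import CirruLexer
    from pygments.formatters import TerminalFormatter

    # '$' opens an implicit group that runs to the end of the line,
    # so this is roughly equivalent to 'print (add 1 2)'.
    sample = 'print $ add 1 2\n'
    print(highlight(sample, CirruLexer(), TerminalFormatter()))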
+
+class SlimLexer(ExtendedRegexLexer):
+ """
+ For Slim markup.
+
+ .. versionadded:: 2.0
+ """
+
+ name = 'Slim'
+ aliases = ['slim']
+ filenames = ['*.slim']
+ mimetypes = ['text/x-slim']
+
+ flags = re.IGNORECASE
+ _dot = r'(?: \|\n(?=.* \|)|.)'
+ tokens = {
+ 'root': [
+ (r'[ \t]*\n', Text),
+ (r'[ \t]*', _indentation),
+ ],
+
+ 'css': [
+ (r'\.[\w:-]+', Name.Class, 'tag'),
+ (r'\#[\w:-]+', Name.Function, 'tag'),
+ ],
+
+ 'eval-or-plain': [
+ (r'([ \t]*==?)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ 'root'),
+ (r'[ \t]+[\w:-]+(?=[=])', Name.Attribute, 'html-attributes'),
+ (r'', Text, 'plain'),
+ ],
+
+ 'content': [
+ include('css'),
+ (r'[\w:-]+:[ \t]*\n', Text, 'plain'),
+ (r'(-)(.*\n)',
+ bygroups(Punctuation, using(RubyLexer)),
+ '#pop'),
+ (r'\|' + _dot + r'*\n', _starts_block(Text, 'plain'), '#pop'),
+ (r'/' + _dot + r'*\n', _starts_block(Comment.Preproc, 'slim-comment-block'), '#pop'),
+ (r'[\w:-]+', Name.Tag, 'tag'),
+ include('eval-or-plain'),
+ ],
+
+ 'tag': [
+ include('css'),
+ (r'[<>]{1,2}(?=[ \t=])', Punctuation),
+ (r'[ \t]+\n', Punctuation, '#pop:2'),
+ include('eval-or-plain'),
+ ],
+
+ 'plain': [
+ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
+ (r'(#\{)(.*?)(\})',
+ bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
+ (r'\n', Text, 'root'),
+ ],
+
+ 'html-attributes': [
+ (r'=', Punctuation),
+ (r'"[^\"]+"', using(RubyLexer), 'tag'),
+ (r'\'[^\']+\'', using(RubyLexer), 'tag'),
+ (r'[\w]+', Text, 'tag'),
+ ],
+
+ 'slim-comment-block': [
+ (_dot + '+', Comment.Preproc),
+ (r'\n', Text, 'root'),
+ ],
+ }
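SlimLexer, MaskLexer, and DuelLexer all hand whole match groups to another lexer with the `using()` helper imported at the top of this file. A minimal toy lexer showing the same delegation pattern (the class and token names are made up and not part of this change):

    from pygments.lexer import RegexLexer, bygroups, using
    from pygments.lexers.ruby import RubyLexer
    from pygments.token import Punctuation, Text

    class TinyEvalLexer(RegexLexer):
        """Lines starting with '=' are highlighted as Ruby, as in SlimLexer."""
        name = 'TinyEval'
        tokens = {
            'root': [
                # The '=' marker is Punctuation; the rest of the line is
                # re-lexed by RubyLexer via using().
                (r'(=)(.*\n)', bygroups(Punctuation, using(RubyLexer))),
                (r'.+\n', Text),
            ],
        }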
diff --git a/pygments/regexopt.py b/pygments/regexopt.py
new file mode 100644
index 00000000..ec048309
--- /dev/null
+++ b/pygments/regexopt.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+"""
+ pygments.regexopt
+ ~~~~~~~~~~~~~~~~~
+
+ An algorithm that generates optimized regexes for matching long lists of
+ literal strings.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+from re import escape
+from os.path import commonprefix
+from itertools import groupby
+from operator import itemgetter
+
+CS_ESCAPE = re.compile(r'[\^\\\-\]]')
+FIRST_ELEMENT = itemgetter(0)
+
+
+def make_charset(letters):
+ return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']'
+
+
+def regex_opt_inner(strings, open_paren):
+ """Return a regex that matches any string in the sorted list of strings."""
+ close_paren = open_paren and ')' or ''
+ # print strings, repr(open_paren)
+ if not strings:
+ # print '-> nothing left'
+ return ''
+ first = strings[0]
+ if len(strings) == 1:
+ # print '-> only 1 string'
+ return open_paren + escape(first) + close_paren
+ if not first:
+ # print '-> first string empty'
+ return open_paren + regex_opt_inner(strings[1:], '(?:') \
+ + '?' + close_paren
+ if len(first) == 1:
+ # multiple one-char strings? make a charset
+ oneletter = []
+ rest = []
+ for s in strings:
+ if len(s) == 1:
+ oneletter.append(s)
+ else:
+ rest.append(s)
+ if len(oneletter) > 1: # do we have more than one oneletter string?
+ if rest:
+ # print '-> 1-character + rest'
+ return open_paren + regex_opt_inner(rest, '') + '|' \
+ + make_charset(oneletter) + close_paren
+ # print '-> only 1-character'
+ return make_charset(oneletter)
+ prefix = commonprefix(strings)
+ if prefix:
+ plen = len(prefix)
+ # we have a prefix for all strings
+ # print '-> prefix:', prefix
+ return open_paren + escape(prefix) \
+ + regex_opt_inner([s[plen:] for s in strings], '(?:') \
+ + close_paren
+ # is there a suffix?
+ strings_rev = [s[::-1] for s in strings]
+ suffix = commonprefix(strings_rev)
+ if suffix:
+ slen = len(suffix)
+ # print '-> suffix:', suffix[::-1]
+ return open_paren \
+ + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \
+ + escape(suffix[::-1]) + close_paren
+ # recurse on common 1-string prefixes
+ # print '-> last resort'
+ return open_paren + \
+ '|'.join(regex_opt_inner(list(group[1]), '')
+ for group in groupby(strings, lambda s: s[0] == first[0])) \
+ + close_paren
+
+
+def regex_opt(strings, prefix='', suffix=''):
+ """Return a compiled regex that matches any string in the given list.
+
+ The strings to match must be literal strings, not regexes. They will be
+ regex-escaped.
+
+ *prefix* and *suffix* are pre- and appended to the final regex.
+ """
+ strings = sorted(strings)
+ return prefix + regex_opt_inner(strings, '(') + suffix
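+
+# Illustrative sketch of the output (an assumption, not part of the original
+# file): for strings such as ['fog', 'foo', 'foobar'] the common prefix 'fo'
+# is factored out and the one-letter remainders become a charset, so
+# regex_opt should produce something along the lines of '(fo(?:obar|[go]))'
+# instead of the naive 'fog|foo|foobar'.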
diff --git a/pygments/unistring.py b/pygments/unistring.py
index 3db3ae8e..c2a171dd 100644
--- a/pygments/unistring.py
+++ b/pygments/unistring.py
@@ -99,11 +99,8 @@ if __name__ == '__main__':
categories = {}
- f = open(__file__)
- try:
- content = f.read()
- finally:
- f.close()
+ with open(__file__) as fp:
+ content = fp.read()
header = content[:content.find('Cc =')]
footer = content[content.find("def combine("):]
diff --git a/pygments/util.py b/pygments/util.py
index 5dc6981f..8376a67f 100644
--- a/pygments/util.py
+++ b/pygments/util.py
@@ -26,9 +26,7 @@ tag_re = re.compile(r'<(.+?)(\s.*?)?>.*?</.+?>(?uism)')
class ClassNotFound(ValueError):
- """
- If one of the get_*_by_* functions didn't find a matching class.
- """
+ """Raised if one of the lookup functions didn't find a matching class."""
class OptionError(Exception):
@@ -104,10 +102,7 @@ def docstring_headline(obj):
def make_analysator(f):
- """
- Return a static text analysation function that
- returns float values.
- """
+ """Return a static text analyser function that returns float values."""
def text_analyse(text):
try:
rv = f(text)
@@ -124,8 +119,7 @@ def make_analysator(f):
def shebang_matches(text, regex):
- """
- Check if the given regular expression matches the last part of the
+ """Check if the given regular expression matches the last part of the
shebang if one exists.
>>> from pygments.util import shebang_matches
@@ -170,8 +164,8 @@ def shebang_matches(text, regex):
def doctype_matches(text, regex):
- """
- Check if the doctype matches a regular expression (if present).
+ """Check if the doctype matches a regular expression (if present).
+
Note that this method only checks the first part of a DOCTYPE.
eg: 'html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
"""
@@ -183,17 +177,13 @@ def doctype_matches(text, regex):
def html_doctype_matches(text):
- """
- Check if the file looks like it has a html doctype.
- """
+ """Check if the file looks like it has a html doctype."""
return doctype_matches(text, r'html\s+PUBLIC\s+"-//W3C//DTD X?HTML.*')
_looks_like_xml_cache = {}
def looks_like_xml(text):
- """
- Check if a doctype exists or if we have some tags.
- """
+ """Check if a doctype exists or if we have some tags."""
key = hash(text)
try:
return _looks_like_xml_cache[key]
@@ -216,9 +206,7 @@ def _surrogatepair(c):
return (0xd7c0 + (c >> 10), (0xdc00 + (c & 0x3ff)))
def unirange(a, b):
- """
- Returns a regular expression string to match the given non-BMP range.
- """
+ """Returns a regular expression string to match the given non-BMP range."""
if b < a:
raise ValueError("Bad character range")
if a < 0x10000 or b < 0x10000:
@@ -255,6 +243,82 @@ def unirange(a, b):
return u'(?:' + u'|'.join(buf) + u')'
+
+def format_lines(var_name, seq, raw=False, indent_level=0):
+ """Formats a sequence of strings for output."""
+ lines = []
+ base_indent = ' ' * indent_level * 4
+ inner_indent = ' ' * (indent_level + 1) * 4
+ lines.append(base_indent + var_name + ' = (')
+ if raw:
+ # These should be preformatted reprs of, say, tuples.
+ for i in seq:
+ lines.append(inner_indent + i + ',')
+ else:
+ for i in seq:
+ # Force use of single quotes
+ r = repr(i + '"')
+ lines.append(inner_indent + r[:-2] + r[-1] + ',')
+ lines.append(base_indent + ')')
+ return '\n'.join(lines)
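+
+# Rough sketch of the intended shape (illustrative, not from the original
+# file): format_lines('KEYWORDS', ['if', 'else'], indent_level=1) should emit
+#     KEYWORDS = (
+#         'if',
+#         'else',
+#     )
+# i.e. a tuple literal ready to paste into a generated *_builtins module.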
+
+
+class Future(object):
+ """Generic class to defer some work.
+
+ Handled specially in RegexLexerMeta, to support regex string construction at
+ first use.
+ """
+ def get(self):
+ raise NotImplementedError
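+
+# Hedged sketch of a concrete Future (hypothetical subclass, for illustration
+# only):
+#
+#     class DeferredWords(Future):
+#         def get(self):
+#             return regex_opt(['foo', 'bar'])
+#
+# RegexLexerMeta is expected to call .get() when the token definitions are
+# first compiled, so the (potentially expensive) regex is built lazily.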
+
+
+def guess_decode(text):
+ """Decode *text* with guessed encoding.
+
+ First try UTF-8; this should fail for non-UTF-8 encodings.
+ Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ try:
+ text = text.decode('utf-8')
+ return text, 'utf-8'
+ except UnicodeDecodeError:
+ try:
+ import locale
+ prefencoding = locale.getpreferredencoding()
+ text = text.decode(prefencoding)
+ return text, prefencoding
+ except (UnicodeDecodeError, LookupError):
+ text = text.decode('latin1')
+ return text, 'latin1'
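+
+# Usage sketch (illustrative): guess_decode(b'caf\xc3\xa9') should return
+# (u'café', 'utf-8'), while bytes that are not valid UTF-8 fall through to
+# the preferred locale encoding and finally to latin-1, which never fails.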
+
+
+def guess_decode_from_terminal(text, term):
+ """Decode *text* coming from terminal *term*.
+
+ First try the terminal encoding, if given.
+ Then try UTF-8. Then try the preferred locale encoding.
+ Fall back to latin-1, which always works.
+ """
+ if getattr(term, 'encoding', None):
+ try:
+ text = text.decode(term.encoding)
+ except UnicodeDecodeError:
+ pass
+ else:
+ return text, term.encoding
+ return guess_decode(text)
+
+
+def terminal_encoding(term):
+ """Return our best guess of encoding for the given *term*."""
+ if getattr(term, 'encoding', None):
+ return term.encoding
+ import locale
+ return locale.getpreferredencoding()
+
+
# Python 2/3 compatibility
if sys.version_info < (3, 0):
@@ -279,6 +343,7 @@ else:
itervalues = dict.values
from io import StringIO, BytesIO
+
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
diff --git a/scripts/check_sources.py b/scripts/check_sources.py
index 94c2c15d..5f233887 100755
--- a/scripts/check_sources.py
+++ b/scripts/check_sources.py
@@ -23,8 +23,10 @@ from os.path import join, splitext, abspath
checkers = {}
+
def checker(*suffixes, **kwds):
only_pkg = kwds.pop('only_pkg', False)
+
def deco(func):
for suffix in suffixes:
checkers.setdefault(suffix, []).append(func)
@@ -38,8 +40,6 @@ copyright_re = re.compile(r'^ :copyright: Copyright 2006-2014 by '
r'the Pygments team, see AUTHORS\.$', re.UNICODE)
copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' %
(name_mail_re, name_mail_re), re.UNICODE)
-coding_re = re.compile(r'coding[:=]\s*([-\w.]+)')
-not_ix_re = re.compile(r'\bnot\s+\S+?\s+i[sn]\s\S+')
is_const_re = re.compile(r'if.*?==\s+(None|False|True)\b')
misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING
@@ -48,44 +48,30 @@ misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING
@checker('.py')
def check_syntax(fn, lines):
+ if '#!/' in lines[0]:
+ lines = lines[1:]
+ if 'coding:' in lines[0]:
+ lines = lines[1:]
try:
- compile(''.join(lines), fn, "exec")
+ compile('\n'.join(lines), fn, "exec")
except SyntaxError as err:
yield 0, "not compilable: %s" % err
@checker('.py')
def check_style_and_encoding(fn, lines):
- encoding = 'ascii'
for lno, line in enumerate(lines):
- if len(line) > 90:
+ if len(line) > 110:
yield lno+1, "line too long"
- m = not_ix_re.search(line)
- if m:
- yield lno+1, '"' + m.group() + '"'
if is_const_re.search(line):
yield lno+1, 'using == None/True/False'
- if lno < 2:
- co = coding_re.search(line)
- if co:
- encoding = co.group(1)
- try:
- line.decode(encoding)
- except AttributeError:
- # Python 3 - encoding was already checked
- pass
- except UnicodeDecodeError as err:
- yield lno+1, "not decodable: %s\n Line: %r" % (err, line)
- except LookupError as err:
- yield 0, "unknown encoding: %s" % encoding
- encoding = 'latin1'
@checker('.py', only_pkg=True)
def check_fileheader(fn, lines):
# line number correction
c = 1
- if lines[0:1] == ['#!/usr/bin/env python\n']:
+ if lines[0:1] == ['#!/usr/bin/env python']:
lines = lines[1:]
c = 2
@@ -94,31 +80,28 @@ def check_fileheader(fn, lines):
for lno, l in enumerate(lines):
llist.append(l)
if lno == 0:
- if l == '# -*- coding: rot13 -*-\n':
- # special-case pony package
- return
- elif l != '# -*- coding: utf-8 -*-\n':
+ if l != '# -*- coding: utf-8 -*-':
yield 1, "missing coding declaration"
elif lno == 1:
- if l != '"""\n' and l != 'r"""\n':
+ if l != '"""' and l != 'r"""':
yield 2, 'missing docstring begin (""")'
else:
docopen = True
elif docopen:
- if l == '"""\n':
+ if l == '"""':
# end of docstring
if lno <= 4:
yield lno+c, "missing module name in docstring"
break
- if l != "\n" and l[:4] != ' ' and docopen:
+ if l != "" and l[:4] != ' ' and docopen:
yield lno+c, "missing correct docstring indentation"
if lno == 2:
# if not in package, don't check the module name
modname = fn[:-3].replace('/', '.').replace('.__init__', '')
while modname:
- if l.lower()[4:-1] == modname:
+ if l.lower()[4:] == modname:
break
modname = '.'.join(modname.split('.')[1:])
else:
@@ -133,7 +116,7 @@ def check_fileheader(fn, lines):
# check for copyright and license fields
license = llist[-2:-1]
- if license != [" :license: BSD, see LICENSE for details.\n"]:
+ if license != [" :license: BSD, see LICENSE for details."]:
yield 0, "no correct license info"
ci = -3
@@ -176,16 +159,19 @@ def main(argv):
for root, dirs, files in os.walk(path):
if '.hg' in dirs:
dirs.remove('.hg')
+ if 'examplefiles' in dirs:
+ dirs.remove('examplefiles')
if '-i' in opts and abspath(root) in opts['-i']:
del dirs[:]
continue
# XXX: awkward: for the Makefile call: don't check non-package
# files for file headers
- in_pocoo_pkg = root.startswith('./pygments')
+ in_pygments_pkg = root.startswith('./pygments')
for fn in files:
fn = join(root, fn)
- if fn[:2] == './': fn = fn[2:]
+ if fn[:2] == './':
+ fn = fn[2:]
if '-i' in opts and abspath(fn) in opts['-i']:
continue
@@ -199,15 +185,14 @@ def main(argv):
print("Checking %s..." % fn)
try:
- f = open(fn, 'r')
- lines = list(f)
+ lines = open(fn, 'rb').read().decode('utf-8').splitlines()
except (IOError, OSError) as err:
print("%s: cannot open: %s" % (fn, err))
num += 1
continue
for checker in checkerlist:
- if not in_pocoo_pkg and checker.only_pkg:
+ if not in_pygments_pkg and checker.only_pkg:
continue
for lno, msg in checker(fn, lines):
print(u"%s:%d: %s" % (fn, lno, msg), file=out)
diff --git a/scripts/debug_lexer.py b/scripts/debug_lexer.py
new file mode 100755
index 00000000..dfc28ce2
--- /dev/null
+++ b/scripts/debug_lexer.py
@@ -0,0 +1,233 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+ Lexing error finder
+ ~~~~~~~~~~~~~~~~~~~
+
+ For the source files given on the command line, display
+ the text where Error tokens are being generated, along
+ with some context.
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import os
+import sys
+
+# always prefer Pygments from source if it exists
+srcpath = os.path.join(os.path.dirname(__file__), '..')
+if os.path.isdir(os.path.join(srcpath, 'pygments')):
+ sys.path.insert(0, srcpath)
+
+
+from pygments.lexer import RegexLexer, ProfilingRegexLexer, ProfilingRegexLexerMeta
+from pygments.lexers import get_lexer_by_name, find_lexer_class, \
+ find_lexer_class_for_filename
+from pygments.token import Error, Text, _TokenType
+from pygments.cmdline import _parse_options
+
+
+class DebuggingRegexLexer(RegexLexer):
+ """Make the state stack, position and current match instance attributes."""
+
+ def get_tokens_unprocessed(self, text, stack=('root',)):
+ """
+ Split ``text`` into (tokentype, text) pairs.
+
+ ``stack`` is the initial stack (default: ``['root']``)
+ """
+ self.pos = 0
+ tokendefs = self._tokens
+ self.statestack = list(stack)
+ statetokens = tokendefs[self.statestack[-1]]
+ while 1:
+ for rexmatch, action, new_state in statetokens:
+ self.m = m = rexmatch(text, self.pos)
+ if m:
+ if action is not None:
+ if type(action) is _TokenType:
+ yield self.pos, action, m.group()
+ else:
+ for item in action(self, m):
+ yield item
+ self.pos = m.end()
+ if new_state is not None:
+ # state transition
+ if isinstance(new_state, tuple):
+ for state in new_state:
+ if state == '#pop':
+ self.statestack.pop()
+ elif state == '#push':
+ self.statestack.append(self.statestack[-1])
+ else:
+ self.statestack.append(state)
+ elif isinstance(new_state, int):
+ # pop
+ del self.statestack[new_state:]
+ elif new_state == '#push':
+ self.statestack.append(self.statestack[-1])
+ else:
+ assert False, 'wrong state def: %r' % new_state
+ statetokens = tokendefs[self.statestack[-1]]
+ break
+ else:
+ try:
+ if text[self.pos] == '\n':
+ # at EOL, reset state to 'root'
+ self.pos += 1
+ self.statestack = ['root']
+ statetokens = tokendefs['root']
+ yield self.pos, Text, u'\n'
+ continue
+ yield self.pos, Error, text[self.pos]
+ self.pos += 1
+ except IndexError:
+ break
+
+
+def main(fn, lexer=None, options={}):
+ if lexer is not None:
+ lxcls = get_lexer_by_name(lexer).__class__
+ else:
+ lxcls = find_lexer_class_for_filename(os.path.basename(fn))
+ if lxcls is None:
+ name, rest = fn.split('_', 1)
+ lxcls = find_lexer_class(name)
+ if lxcls is None:
+ raise AssertionError('no lexer found for file %r' % fn)
+ debug_lexer = False
+ if profile:
+ # does not work for e.g. ExtendedRegexLexers
+ if lxcls.__bases__ == (RegexLexer,):
+ # yes we can! (change the metaclass)
+ lxcls.__class__ = ProfilingRegexLexerMeta
+ lxcls.__bases__ = (ProfilingRegexLexer,)
+ lxcls._prof_sort_index = profsort
+ else:
+ if lxcls.__bases__ == (RegexLexer,):
+ lxcls.__bases__ = (DebuggingRegexLexer,)
+ debug_lexer = True
+ elif lxcls.__bases__ == (DebuggingRegexLexer,):
+ # already debugged before
+ debug_lexer = True
+ else:
+ # HACK: ExtendedRegexLexer subclasses will only partially work here.
+ lxcls.__bases__ = (DebuggingRegexLexer,)
+ debug_lexer = True
+
+ lx = lxcls(**options)
+ lno = 1
+ if fn == '-':
+ text = sys.stdin.read()
+ else:
+ with open(fn, 'rb') as fp:
+ text = fp.read().decode('utf-8')
+ text = text.strip('\n') + '\n'
+ tokens = []
+ states = []
+
+ def show_token(tok, state):
+ reprs = list(map(repr, tok))
+ print(' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0], end=' ')
+ if debug_lexer:
+ print(' ' + ' ' * (29-len(reprs[0])) + repr(state), end=' ')
+ print()
+
+ for type, val in lx.get_tokens(text):
+ lno += val.count('\n')
+ if type == Error:
+ print('Error parsing', fn, 'on line', lno)
+ print('Previous tokens' + (debug_lexer and ' and states' or '') + ':')
+ if showall:
+ for tok, state in zip(tokens, states):
+ show_token(tok, state)
+ else:
+ for i in range(max(len(tokens) - num, 0), len(tokens)):
+ if debug_lexer:
+ show_token(tokens[i], states[i])
+ else:
+ show_token(tokens[i], None)
+ print('Error token:')
+ l = len(repr(val))
+ print(' ' + repr(val), end=' ')
+ if debug_lexer and hasattr(lx, 'statestack'):
+ print(' ' * (60-l) + repr(lx.statestack), end=' ')
+ print()
+ print()
+ return 1
+ tokens.append((type, val))
+ if debug_lexer:
+ if hasattr(lx, 'statestack'):
+ states.append(lx.statestack[:])
+ else:
+ states.append(None)
+ if showall:
+ for tok, state in zip(tokens, states):
+ show_token(tok, state)
+ return 0
+
+
+def print_help():
+ print('''\
+Pygments development helper to quickly debug lexers.
+
+ scripts/debug_lexer.py [options] file ...
+
+Give one or more filenames to lex them and display possible error tokens
+and/or profiling info. Files are assumed to be encoded in UTF-8.
+
+Selecting lexer and options:
+
+ -l NAME use lexer named NAME (default is to guess from
+ the given filenames)
+ -O OPTIONSTR use lexer options parsed from OPTIONSTR
+
+Debugging lexing errors:
+
+ -n N show the last N tokens on error
+ -a always show all lexed tokens (default is only
+ to show them when an error occurs)
+
+Profiling:
+
+ -p use the ProfilingRegexLexer to profile regexes
+ instead of the debugging lexer
+ -s N sort profiling output by column N (default is
+ column 4, the time per call)
+''')
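+
+# Example invocation (an assumption, based on the options documented above):
+#
+#   scripts/debug_lexer.py -l python -n 5 path/to/file.py
+#
+# would lex the file with the Python lexer and, on the first Error token,
+# show the last five tokens (and the state stack) seen before it.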
+
+num = 10
+showall = False
+lexer = None
+options = {}
+profile = False
+profsort = 4
+
+if __name__ == '__main__':
+ import getopt
+ opts, args = getopt.getopt(sys.argv[1:], 'n:l:apO:s:h')
+ for opt, val in opts:
+ if opt == '-n':
+ num = int(val)
+ elif opt == '-a':
+ showall = True
+ elif opt == '-l':
+ lexer = val
+ elif opt == '-p':
+ profile = True
+ elif opt == '-s':
+ profsort = int(val)
+ elif opt == '-O':
+ options = _parse_options([val])
+ elif opt == '-h':
+ print_help()
+ sys.exit(0)
+ ret = 0
+ if not args:
+ print_help()
+ for f in args:
+ ret += main(f, lexer, options)
+ sys.exit(bool(ret))
diff --git a/scripts/find_codetags.py b/scripts/find_codetags.py
index f8204e6e..5063f61f 100755
--- a/scripts/find_codetags.py
+++ b/scripts/find_codetags.py
@@ -39,12 +39,12 @@ def escape_html(text):
def process_file(store, filename):
try:
- f = open(filename, 'r')
+ fp = open(filename, 'r')
except (IOError, OSError):
return False
llmatch = 0
- try:
- for lno, line in enumerate(f):
+ with fp:
+ for lno, line in enumerate(fp):
# just some random heuristics to filter out binary files
if lno < 100 and binary_re.search(line):
return False
@@ -69,8 +69,6 @@ def process_file(store, filename):
continue
llmatch = 0
return True
- finally:
- f.close()
def main():
@@ -198,13 +196,13 @@ td { padding: 2px 5px 2px 5px;
'<td class="tag %%(tag)s">%%(tag)s</td>'
'<td class="who">%%(who)s</td><td class="what">%%(what)s</td></tr>')
- f = open(output, 'w')
table = '\n'.join(TABLE % fname +
'\n'.join(TR % (no % 2,) % entry
for no, entry in enumerate(store[fname]))
for fname in sorted(store))
- f.write(HTML % (', '.join(map(abspath, args)), table))
- f.close()
+
+ with open(output, 'w') as fp:
+ fp.write(HTML % (', '.join(map(abspath, args)), table))
print("Report written to %s." % output)
return 0
diff --git a/scripts/find_error.py b/scripts/find_error.py
index 7aaa9bee..ba0b76f1 100755..120000
--- a/scripts/find_error.py
+++ b/scripts/find_error.py
@@ -1,173 +1 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-"""
- Lexing error finder
- ~~~~~~~~~~~~~~~~~~~
-
- For the source files given on the command line, display
- the text where Error tokens are being generated, along
- with some context.
-
- :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from __future__ import print_function
-
-import os
-import sys
-
-# always prefer Pygments from source if exists
-srcpath = os.path.join(os.path.dirname(__file__), '..')
-if os.path.isdir(os.path.join(srcpath, 'pygments')):
- sys.path.insert(0, srcpath)
-
-
-from pygments.lexer import RegexLexer
-from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
-from pygments.token import Error, Text, _TokenType
-from pygments.cmdline import _parse_options
-
-
-class DebuggingRegexLexer(RegexLexer):
- """Make the state stack, position and current match instance attributes."""
-
- def get_tokens_unprocessed(self, text, stack=('root',)):
- """
- Split ``text`` into (tokentype, text) pairs.
-
- ``stack`` is the inital stack (default: ``['root']``)
- """
- self.pos = 0
- tokendefs = self._tokens
- self.statestack = list(stack)
- statetokens = tokendefs[self.statestack[-1]]
- while 1:
- for rexmatch, action, new_state in statetokens:
- self.m = m = rexmatch(text, self.pos)
- if m:
- if type(action) is _TokenType:
- yield self.pos, action, m.group()
- else:
- for item in action(self, m):
- yield item
- self.pos = m.end()
- if new_state is not None:
- # state transition
- if isinstance(new_state, tuple):
- for state in new_state:
- if state == '#pop':
- self.statestack.pop()
- elif state == '#push':
- self.statestack.append(self.statestack[-1])
- else:
- self.statestack.append(state)
- elif isinstance(new_state, int):
- # pop
- del self.statestack[new_state:]
- elif new_state == '#push':
- self.statestack.append(self.statestack[-1])
- else:
- assert False, 'wrong state def: %r' % new_state
- statetokens = tokendefs[self.statestack[-1]]
- break
- else:
- try:
- if text[self.pos] == '\n':
- # at EOL, reset state to 'root'
- self.pos += 1
- self.statestack = ['root']
- statetokens = tokendefs['root']
- yield self.pos, Text, u'\n'
- continue
- yield self.pos, Error, text[self.pos]
- self.pos += 1
- except IndexError:
- break
-
-
-def main(fn, lexer=None, options={}):
- if lexer is not None:
- lx = get_lexer_by_name(lexer)
- else:
- try:
- lx = get_lexer_for_filename(os.path.basename(fn), **options)
- except ValueError:
- try:
- name, rest = fn.split('_', 1)
- lx = get_lexer_by_name(name, **options)
- except ValueError:
- raise AssertionError('no lexer found for file %r' % fn)
- debug_lexer = False
- # does not work for e.g. ExtendedRegexLexers
- if lx.__class__.__bases__ == (RegexLexer,):
- lx.__class__.__bases__ = (DebuggingRegexLexer,)
- debug_lexer = True
- elif lx.__class__.__bases__ == (DebuggingRegexLexer,):
- # already debugged before
- debug_lexer = True
- lno = 1
- text = open(fn, 'U').read()
- text = text.strip('\n') + '\n'
- tokens = []
- states = []
-
- def show_token(tok, state):
- reprs = map(repr, tok)
- print(' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0], end=' ')
- if debug_lexer:
- print(' ' + ' ' * (29-len(reprs[0])) + repr(state), end=' ')
- print()
-
- for type, val in lx.get_tokens(text):
- lno += val.count('\n')
- if type == Error:
- print('Error parsing', fn, 'on line', lno)
- print('Previous tokens' + (debug_lexer and ' and states' or '') + ':')
- if showall:
- for tok, state in map(None, tokens, states):
- show_token(tok, state)
- else:
- for i in range(max(len(tokens) - num, 0), len(tokens)):
- show_token(tokens[i], states[i])
- print('Error token:')
- l = len(repr(val))
- print(' ' + repr(val), end=' ')
- if debug_lexer and hasattr(lx, 'statestack'):
- print(' ' * (60-l) + repr(lx.statestack), end=' ')
- print()
- print()
- return 1
- tokens.append((type, val))
- if debug_lexer:
- if hasattr(lx, 'statestack'):
- states.append(lx.statestack[:])
- else:
- states.append(None)
- if showall:
- for tok, state in map(None, tokens, states):
- show_token(tok, state)
- return 0
-
-
-num = 10
-showall = False
-lexer = None
-options = {}
-
-if __name__ == '__main__':
- import getopt
- opts, args = getopt.getopt(sys.argv[1:], 'n:l:aO:')
- for opt, val in opts:
- if opt == '-n':
- num = int(val)
- elif opt == '-a':
- showall = True
- elif opt == '-l':
- lexer = val
- elif opt == '-O':
- options = _parse_options([val])
- ret = 0
- for f in args:
- ret += main(f, lexer, options)
- sys.exit(bool(ret))
+debug_lexer.py
\ No newline at end of file
diff --git a/scripts/get_vimkw.py b/scripts/get_vimkw.py
index 4ea302f4..fc4d5ec6 100644
--- a/scripts/get_vimkw.py
+++ b/scripts/get_vimkw.py
@@ -1,13 +1,42 @@
from __future__ import print_function
+
import re
+from pygments.util import format_lines
+
r_line = re.compile(r"^(syn keyword vimCommand contained|syn keyword vimOption "
r"contained|syn keyword vimAutoEvent contained)\s+(.*)")
r_item = re.compile(r"(\w+)(?:\[(\w+)\])?")
+HEADER = '''\
+# -*- coding: utf-8 -*-
+"""
+ pygments.lexers._vim_builtins
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ This file is autogenerated by scripts/get_vimkw.py
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+# Split up in multiple functions so it's importable by jython, which has a
+# per-method size limit.
+'''
+
+METHOD = '''\
+def _get%(key)s():
+%(body)s
+ return var
+%(key)s = _get%(key)s()
+'''
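+
+# Sketch of the generated output for a given key (illustrative; the exact
+# keyword tuples come from the vim syntax file):
+#
+#     def _getoption():
+#         var = (
+#             ('inoremap','inoremap'),
+#             ...
+#         )
+#         return var
+#     option = _getoption()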
+
def getkw(input, output):
out = file(output, 'w')
+ # Write the module header (template copied from an existing builtins file).
+ print(HEADER, file=out)
+
output_info = {'command': [], 'option': [], 'auto': []}
for line in file(input):
m = r_line.match(line)
@@ -29,9 +58,10 @@ def getkw(input, output):
output_info['option'].append("('inoremap','inoremap')")
output_info['option'].append("('vnoremap','vnoremap')")
- for a, b in output_info.items():
- b.sort()
- print('%s=[%s]' % (a, ','.join(b)), file=out)
+ for key, keywordlist in output_info.items():
+ keywordlist.sort()
+ body = format_lines('var', keywordlist, raw=True, indent_level=1)
+ print(METHOD % locals(), file=out)
def is_keyword(w, keywords):
for i in range(len(w), 0, -1):
@@ -40,4 +70,5 @@ def is_keyword(w, keywords):
return False
if __name__ == "__main__":
- getkw("/usr/share/vim/vim73/syntax/vim.vim", "temp.py")
+ getkw("/usr/share/vim/vim74/syntax/vim.vim",
+ "pygments/lexers/_vim_builtins.py")
diff --git a/setup.py b/setup.py
index a0b2e90b..f1bbfbb2 100755
--- a/setup.py
+++ b/setup.py
@@ -1,27 +1,20 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-"""
- Pygments
+"""Pygments
~~~~~~~~
Pygments is a syntax highlighting package written in Python.
- It is a generic syntax highlighter for general use in all kinds of software
- such as forum systems, wikis or other applications that need to prettify
- source code. Highlights are:
+ It is a generic syntax highlighter suitable for use in code hosting, forums,
+ wikis or other applications that need to prettify source code. Highlights
+ are:
- * a wide range of common languages and markup formats is supported
+ * a wide range of over 300 languages and other text formats is supported
* special attention is paid to details, increasing quality by a fair amount
* support for new languages and formats are added easily
* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image \
formats that PIL supports and ANSI sequences
* it is usable as a command-line tool and as a library
- * ... and it highlights even Brainfuck!
-
- The `Pygments tip`_ is installable with ``easy_install Pygments==dev``.
-
- .. _Pygments tip:
- http://bitbucket.org/birkenfeld/pygments-main/get/default.zip#egg=Pygments-dev
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
diff --git a/tests/examplefiles/99_bottles_of_beer.chpl b/tests/examplefiles/99_bottles_of_beer.chpl
index f73be7b1..47fcaaf6 100644
--- a/tests/examplefiles/99_bottles_of_beer.chpl
+++ b/tests/examplefiles/99_bottles_of_beer.chpl
@@ -112,7 +112,50 @@ c = nil;
c = new Oval(r=1.0, r2=2.0);
writeln("Area of oval: " + c.area());
+// This is a valid decimal integer:
+var x = 0000000000012;
+
union U {
var i: int;
var r: real;
}
+
+// chapel ranges are awesome.
+var r1 = 1..10, // 1 2 3 4 5 6 7 8 9 10
+ r2 = 10..1, // no values in this range
+ r3 = 1..10 by -1, // 10 9 8 7 6 5 4 3 2 1
+ r4 = 1..10 by 2, // 1 3 5 7 9
+ r5 = 1..10 by 2 align 0, // 2 4 6 8 10
+ r6 = 1..10 by 2 align 2, // 2 4 6 8 10
+ r7 = 1..10 # 3, // 1 2 3
+ r8 = 1..10 # -2, // 9 10
+ r9 = 1..100 # 10 by 2, // 1 3 5 7 9
+ ra = 1..100 by 2 # 10, // 1 3 5 7 9 11 13 15 17 19
+ rb = 1.. # 100 by 10; // 1 11 21 31 41 51 61 71 81 91
+
+// create a variable with default initialization
+var myVarWithoutInit: real = noinit;
+myVarWithoutInit = 1.0;
+
+// Chapel has <~> operator for read and write I/O operations.
+class IntPair {
+ var x: int;
+ var y: int;
+ proc readWriteThis(f) {
+ f <~> x <~> new ioLiteral(",") <~> y <~> new ioNewline();
+ }
+}
+var ip = new IntPair(17,2);
+write(ip);
+
+var targetDom: {1..10},
+ target: [targetDom] int;
+coforall i in targetDom with (ref target) {
+ targetDom[i] = i ** 3;
+}
+
+var wideOpen = 0o777,
+ mememe = 0o600,
+ clique_y = 0O660,
+ zeroOct = 0o0,
+ minPosOct = 0O1;
diff --git a/tests/examplefiles/Errors.scala b/tests/examplefiles/Errors.scala
index 67198c05..7af70280 100644
--- a/tests/examplefiles/Errors.scala
+++ b/tests/examplefiles/Errors.scala
@@ -11,6 +11,11 @@ String
val foo_+ = "foo plus"
val foo_⌬⌬ = "double benzene"
+ // Test some interpolated strings
+ val mu = s"${if (true) "a:b" else "c" {with "braces"}}"
+ val mu2 = f"${if (true) "a:b" else "c" {with "braces"}}"
+ val raw = raw"a raw\nstring\"with escaped quotes"
+
def main(argv: Array[String]) {
println(⌘.interface + " " + foo_+ + " " + foo_⌬⌬ )
}
diff --git a/tests/examplefiles/all.nit b/tests/examplefiles/all.nit
new file mode 100644
index 00000000..d4e1ddfa
--- /dev/null
+++ b/tests/examplefiles/all.nit
@@ -0,0 +1,1986 @@
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import gtk
+
+class CalculatorContext
+ var result : nullable Float = null
+
+ var last_op : nullable Char = null
+
+ var current : nullable Float = null
+ var after_point : nullable Int = null
+
+ fun push_op( op : Char )
+ do
+ apply_last_op_if_any
+ if op == 'C' then
+ self.result = 0.0
+ last_op = null
+ else
+ last_op = op # store for next push_op
+ end
+
+ # prepare next current
+ after_point = null
+ current = null
+ end
+
+ fun push_digit( digit : Int )
+ do
+ var current = current
+ if current == null then current = 0.0
+
+ var after_point = after_point
+ if after_point == null then
+ current = current * 10.0 + digit.to_f
+ else
+ current = current + digit.to_f * 10.0.pow(after_point.to_f)
+ self.after_point -= 1
+ end
+
+ self.current = current
+ end
+
+ fun switch_to_decimals
+ do
+ if self.current == null then current = 0.0
+ if after_point != null then return
+
+ after_point = -1
+ end
+
+ fun apply_last_op_if_any
+ do
+ var op = last_op
+
+ var result = result
+ if result == null then result = 0.0
+
+ var current = current
+ if current == null then current = 0.0
+
+ if op == null then
+ result = current
+ else if op == '+' then
+ result = result + current
+ else if op == '-' then
+ result = result - current
+ else if op == '/' then
+ result = result / current
+ else if op == '*' then
+ result = result * current
+ end
+ self.result = result
+ self.current = null
+ end
+end
+
+class CalculatorGui
+ super GtkCallable
+
+ var win : GtkWindow
+ var container : GtkGrid
+
+ var lbl_disp : GtkLabel
+ var but_eq : GtkButton
+ var but_dot : GtkButton
+
+ var context = new CalculatorContext
+
+ redef fun signal( sender, user_data )
+ do
+ var after_point = context.after_point
+ if after_point == null then
+ after_point = 0
+ else
+ after_point = (after_point.abs)
+ end
+
+ if user_data isa Char then # is an operation
+ var c = user_data
+ if c == '.' then
+ but_dot.sensitive= false
+ context.switch_to_decimals
+ lbl_disp.text = "{context.current.to_i}."
+ else
+ but_dot.sensitive= true
+ context.push_op( c )
+
+ var s = context.result.to_precision_native(6)
+ var index : nullable Int = null
+ for i in s.length.times do
+ var chiffre = s.chars[i]
+ if chiffre == '0' and index == null then
+ index = i
+ else if chiffre != '0' then
+ index = null
+ end
+ end
+ if index != null then
+ s = s.substring(0, index)
+ if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1)
+ end
+ lbl_disp.text = s
+ end
+ else if user_data isa Int then # is a number
+ var n = user_data
+ context.push_digit( n )
+ lbl_disp.text = context.current.to_precision_native(after_point)
+ end
+ end
+
+ init
+ do
+ init_gtk
+
+ win = new GtkWindow( 0 )
+
+ container = new GtkGrid(5,5,true)
+ win.add( container )
+
+ lbl_disp = new GtkLabel( "_" )
+ container.attach( lbl_disp, 0, 0, 5, 1 )
+
+ # digits
+ for n in [0..9] do
+ var but = new GtkButton.with_label( n.to_s )
+ but.request_size( 64, 64 )
+ but.signal_connect( "clicked", self, n )
+ if n == 0 then
+ container.attach( but, 0, 4, 1, 1 )
+ else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 )
+ end
+
+ # operators
+ var r = 1
+ for op in ['+', '-', '*', '/' ] do
+ var but = new GtkButton.with_label( op.to_s )
+ but.request_size( 64, 64 )
+ but.signal_connect( "clicked", self, op )
+ container.attach( but, 3, r, 1, 1 )
+ r+=1
+ end
+
+ # =
+ but_eq = new GtkButton.with_label( "=" )
+ but_eq.request_size( 64, 64 )
+ but_eq.signal_connect( "clicked", self, '=' )
+ container.attach( but_eq, 4, 3, 1, 2 )
+
+ # .
+ but_dot = new GtkButton.with_label( "." )
+ but_dot.request_size( 64, 64 )
+ but_dot.signal_connect( "clicked", self, '.' )
+ container.attach( but_dot, 1, 4, 1, 1 )
+
+ #C
+ var but_c = new GtkButton.with_label( "C" )
+ but_c.request_size( 64, 64 )
+ but_c.signal_connect("clicked", self, 'C')
+ container.attach( but_c, 2, 4, 1, 1 )
+
+ win.show_all
+ end
+end
+
+# context tests
+var context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 2 )
+context.push_op( '+' )
+context.push_digit( 3 )
+context.push_op( '*' )
+context.push_digit( 2 )
+context.push_op( '=' )
+var r = context.result.to_precision( 2 )
+assert r == "30.00" else print r
+
+context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 4 )
+context.switch_to_decimals
+context.push_digit( 1 )
+context.push_op( '*' )
+context.push_digit( 3 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "42.30" else print r
+
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_digit( 1 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "53.30" else print r
+
+context = new CalculatorContext
+context.push_digit( 4 )
+context.push_digit( 2 )
+context.switch_to_decimals
+context.push_digit( 3 )
+context.push_op( '/' )
+context.push_digit( 3 )
+context.push_op( '=' )
+r = context.result.to_precision( 2 )
+assert r == "14.10" else print r
+
+#test multiple decimals
+context = new CalculatorContext
+context.push_digit( 5 )
+context.push_digit( 0 )
+context.switch_to_decimals
+context.push_digit( 1 )
+context.push_digit( 2 )
+context.push_digit( 3 )
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_op( '=' )
+r = context.result.to_precision( 3 )
+assert r == "51.123" else print r
+
+#test 'C' button
+context = new CalculatorContext
+context.push_digit( 1 )
+context.push_digit( 0 )
+context.push_op( '+' )
+context.push_digit( 1 )
+context.push_digit( 0 )
+context.push_op( '=' )
+context.push_op( 'C' )
+r = context.result.to_precision( 1 )
+assert r == "0.0" else print r
+
+# graphical application
+
+if "NIT_TESTING".environ != "true" then
+ var app = new CalculatorGui
+ run_gtk
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample has been implemented to show you how simple it is to play
+# with native callbacks (C) from a high-level NIT program.
+
+module callback_chimpanze
+import callback_monkey
+
+class Chimpanze
+ super MonkeyActionCallable
+
+ fun create
+ do
+ var monkey = new Monkey
+ print "Hum, I'm sleeping ..."
+ # Invoking method which will take some time to compute, and
+ # will be back in wokeUp method with information.
+ # - Callback method defined in MonkeyActionCallable Interface
+ monkey.wokeUpAction(self, "Hey, I'm awake.")
+ end
+
+ # Inherit callback method, defined by MonkeyActionCallable interface
+ # - Back of wokeUpAction method
+ redef fun wokeUp( sender:Monkey, message:Object )
+ do
+ print message
+ end
+end
+
+var m = new Chimpanze
+m.create
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This sample has been implemented to show you how simple it is to play
+# with native callbacks (C) from a high-level NIT program.
+
+module callback_monkey
+
+in "C header" `{
+ #include <stdio.h>
+ #include <stdlib.h>
+
+ typedef struct {
+ int id;
+ int age;
+ } CMonkey;
+
+ typedef struct {
+ MonkeyActionCallable toCall;
+ Object message;
+ } MonkeyAction;
+`}
+
+in "C body" `{
+ // Method which reproduces a callback answer
+ // Please note that a function pointer is only used to reproduce the callback
+ void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data)
+ {
+ sleep(2);
+ callbackFunc( mkey, data );
+ }
+
+ // Return of the background treatment; will be redirected to the callback function
+ void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data )
+ {
+ // To call your method, the signature must be written like this:
+ // <Interface Name>_<Method>...
+ MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message );
+ }
+`}
+
+# Implementable interface to get callback in defined methods
+interface MonkeyActionCallable
+ fun wokeUp( sender:Monkey, message: Object) is abstract
+end
+
+# Defining my object type Monkey, which is, at a low level, a pointer to a C struct (CMonkey)
+extern class Monkey `{ CMonkey * `}
+
+ new `{
+ CMonkey *monkey = malloc( sizeof(CMonkey) );
+ monkey->age = 10;
+ monkey->id = 1;
+ return monkey;
+ `}
+
+ # Object method which will get a callback in wokeUp method, defined in MonkeyActionCallable interface
+ # Must be defined as Nit/C method because of C call inside
+ fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{
+
+ // Allocating memory to keep reference of received parameters :
+ // - Object receiver
+ // - Message
+ MonkeyAction *data = malloc( sizeof(MonkeyAction) );
+
+ // Incrementing reference counter to prevent from releasing
+ MonkeyActionCallable_incr_ref( toCall );
+ Object_incr_ref( message );
+
+ data->toCall = toCall;
+ data->message = message;
+
+ // Calling method which reproduce a callback by passing :
+ // - Receiver
+ // - Function pointer to object return method
+ // - Datas
+ cbMonkey( recv, &nit_monkey_callback_func, data );
+ `}
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Implementation of circular lists
+# This example shows the usage of generics and, to some extent, the specialisation of collections.
+module circular_list
+
+# Sequences of elements implemented with a double-linked circular list
+class CircularList[E]
+ # Like standard Array or LinkedList, CircularList is a Sequence.
+ super Sequence[E]
+
+ # The first node of the list if any
+ # The special case of an empty list is handled by a null node
+ private var node: nullable CLNode[E] = null
+
+ redef fun iterator do return new CircularListIterator[E](self)
+
+ redef fun first do return self.node.item
+
+ redef fun push(e)
+ do
+ var new_node = new CLNode[E](e)
+ var n = self.node
+ if n == null then
+ # the first node
+ self.node = new_node
+ else
+ # not the first one, so attach nodes correctly.
+ var old_last_node = n.prev
+ new_node.next = n
+ new_node.prev = old_last_node
+ old_last_node.next = new_node
+ n.prev = new_node
+ end
+ end
+
+ redef fun pop
+ do
+ var n = self.node
+ assert n != null
+ var prev = n.prev
+ if prev == n then
+ # the only node
+ self.node = null
+ return n.item
+ end
+ # not the only one, so detach nodes correctly.
+ var prev_prev = prev.prev
+ n.prev = prev_prev
+ prev_prev.next = n
+ return prev.item
+ end
+
+ redef fun unshift(e)
+ do
+ # Circularity has benefits.
+ push(e)
+ self.node = self.node.prev
+ end
+
+ redef fun shift
+ do
+ # Circularity has benefits.
+ self.node = self.node.next
+ return self.pop
+ end
+
+ # Move the first element to the last position, the second to the first, etc.
+ fun rotate
+ do
+ var n = self.node
+ if n == null then return
+ self.node = n.next
+ end
+
+ # Sort the list using the Josephus algorithm.
+ fun josephus(step: Int)
+ do
+ var res = new CircularList[E]
+ while not self.is_empty do
+ # count 'step'
+ for i in [1..step[ do self.rotate
+ # kill
+ var x = self.shift
+ res.add(x)
+ end
+ self.node = res.node
+ end
+end
+
+# Nodes of a CircularList
+private class CLNode[E]
+ # The current item
+ var item: E
+
+ # The next item in the circular list.
+ # Because of circularity, there is always a next;
+ # so by default let it be self
+ var next: CLNode[E] = self
+
+ # The previous item in the circular list.
+ # Coherence between next and previous nodes has to be maintained by the
+ # circular list.
+ var prev: CLNode[E] = self
+end
+
+# An iterator of a CircularList.
+private class CircularListIterator[E]
+ super IndexedIterator[E]
+
+ redef var index: Int
+
+ # The current node pointed.
+ # Is null if the list is empty.
+ var node: nullable CLNode[E]
+
+ # The list iterated.
+ var list: CircularList[E]
+
+ redef fun is_ok
+ do
+ # Empty lists are not OK.
+ # Pointing again the first node is not OK.
+ return self.node != null and (self.index == 0 or self.node != self.list.node)
+ end
+
+ redef fun next
+ do
+ self.node = self.node.next
+ self.index += 1
+ end
+
+ redef fun item do return self.node.item
+
+ init(list: CircularList[E])
+ do
+ self.node = list.node
+ self.list = list
+ self.index = 0
+ end
+end
+
+var i = new CircularList[Int]
+i.add_all([1, 2, 3, 4, 5, 6, 7])
+print i.first
+print i.join(":")
+
+i.push(8)
+print i.shift
+print i.pop
+i.unshift(0)
+print i.join(":")
+
+i.josephus(3)
+print i.join(":")
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module beefs up the clock module by allowing a clock to be comparable.
+# It shows the usage of class refinement
+module clock_more
+
+import clock
+
+redef class Clock
+ # Clocks are now comparable
+ super Comparable
+
+ # Comparison of a clock only makes sense with another clock
+ redef type OTHER: Clock
+
+ redef fun <(o)
+ do
+ # Note: < is the only abstract method of Comparable.
+ # All other operators and methods rely on < and ==.
+ return self.total_minutes < o.total_minutes
+ end
+end
+
+var c1 = new Clock(8, 12)
+var c2 = new Clock(8, 13)
+var c3 = new Clock(9, 13)
+
+print "{c1}<{c2}? {c1<c2}"
+print "{c1}<={c2}? {c1<=c2}"
+print "{c1}>{c2}? {c1>c2}"
+print "{c1}>={c2}? {c1>=c2}"
+print "{c1}<=>{c2}? {c1<=>c2}"
+print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}"
+print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}"
+print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}"
+
+print "-"
+
+c1.minutes += 1
+
+print "{c1}<{c2}? {c1<c2}"
+print "{c1}<={c2}? {c1<=c2}"
+print "{c1}>{c2}? {c1>c2}"
+print "{c1}>={c2}? {c1>=c2}"
+print "{c1}<=>{c2}? {c1<=>c2}"
+print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}"
+print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}"
+print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module provides a simple wall clock.
+# It is an example of getters and setters.
+# A beefed-up module is available in clock_more
+module clock
+
+# A simple wall clock with 60 minutes and 12 hours.
+class Clock
+ # total number of minutes from 0 to 719
+ var total_minutes: Int
+ # Note: only the read access is public, the write access is private.
+
+ # number of minutes in the current hour (from 0 to 59)
+ fun minutes: Int do return self.total_minutes % 60
+
+ # set the number of minutes in the current hour.
+ # if m < 0 or m >= 60, the hour will be changed accordingly
+ fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m
+
+ # number of hours (from 0 to 11)
+ fun hours: Int do return self.total_minutes / 60
+
+ # set the number of hours
+ # the minutes will not be updated
+ fun hours=(h: Int) do self.total_minutes = h * 60 + minutes
+
+ # the position of the hour arrow in the [0..60[ interval
+ fun hour_pos: Int do return total_minutes / 12
+
+ # replace the arrow of hours (from 0 to 59).
+ # the hours and the minutes will be updated.
+ fun hour_pos=(h: Int) do self.total_minutes = h * 12
+
+ redef fun to_s do return "{hours}:{minutes}"
+
+ fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes
+
+ init(hours, minutes: Int) do self.reset(hours, minutes)
+
+ redef fun ==(o)
+ do
+ # Note: o is a nullable Object, a type test is required
+ # Thanks to adaptive typing, there is no downcast
+ # i.e. the code is safe!
+ return o isa Clock and self.total_minutes == o.total_minutes
+ end
+end
+
+var c = new Clock(10,50)
+print "It's {c} o'clock."
+
+c.minutes += 22
+print "Now it's {c} o'clock."
+
+print "The short arrow in on the {c.hour_pos/5} and the long arrow in on the {c.minutes/5}."
+
+c.hours -= 2
+print "Now it's {c} o'clock."
+
+var c2 = new Clock(9, 11)
+print "It's {c2} on the second clock."
+print "The two clocks are synchronized: {c == c2}."
+c2.minutes += 1
+print "It's now {c2} on the second clock."
+print "The two clocks are synchronized: {c == c2}."
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Sample of the Curl module.
+module curl_http
+
+import curl
+
+# Small class to represent an Http Fetcher
+class MyHttpFetcher
+ super CurlCallbacks
+
+ var curl: Curl
+ var our_body: String = ""
+
+ init(curl: Curl) do self.curl = curl
+
+ # Release curl object
+ fun destroy do self.curl.destroy
+
+ # Header callback
+ redef fun header_callback(line: String) do
+ # We keep this callback silent for testing purposes
+ #if not line.has_prefix("Date:") then print "Header_callback : {line}"
+ end
+
+ # Body callback
+ redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}"
+
+ # Stream callback - Cf : No one is registered
+ redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}"
+end
+
+
+# Program
+if args.length < 2 then
+ print "Usage: curl_http <method wished [POST, GET, GET_FILE]> <target url>"
+else
+ var curl = new Curl
+ var url = args[1]
+ var request = new CurlHTTPRequest(url, curl)
+
+ # HTTP Get Request
+ if args[0] == "GET" then
+ request.verbose = false
+ var getResponse = request.execute
+
+ if getResponse isa CurlResponseSuccess then
+ print "Status code : {getResponse.status_code}"
+ print "Body : {getResponse.body_str}"
+ else if getResponse isa CurlResponseFailed then
+ print "Error code : {getResponse.error_code}"
+ print "Error msg : {getResponse.error_msg}"
+ end
+
+ # HTTP Post Request
+ else if args[0] == "POST" then
+ var myHttpFetcher = new MyHttpFetcher(curl)
+ request.delegate = myHttpFetcher
+
+ var postDatas = new HeaderMap
+ postDatas["Bugs Bunny"] = "Daffy Duck"
+ postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*"
+ postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one"
+ request.datas = postDatas
+ request.verbose = false
+ var postResponse = request.execute
+
+ print "Our body from the callback : {myHttpFetcher.our_body}"
+
+ if postResponse isa CurlResponseSuccess then
+ print "*** Answer ***"
+ print "Status code : {postResponse.status_code}"
+ print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}"
+ else if postResponse isa CurlResponseFailed then
+ print "Error code : {postResponse.error_code}"
+ print "Error msg : {postResponse.error_msg}"
+ end
+
+ # HTTP Get to file Request
+ else if args[0] == "GET_FILE" then
+ var headers = new HeaderMap
+ headers["Accept"] = "Moo"
+ request.headers = headers
+ request.verbose = false
+ var downloadResponse = request.download_to_file(null)
+
+ if downloadResponse isa CurlFileResponseSuccess then
+ print "*** Answer ***"
+ print "Status code : {downloadResponse.status_code}"
+ print "Size downloaded : {downloadResponse.size_download}"
+ else if downloadResponse isa CurlResponseFailed then
+ print "Error code : {downloadResponse.error_code}"
+ print "Error msg : {downloadResponse.error_msg}"
+ end
+ # Program logic
+ else
+ print "Usage : Method[POST, GET, GET_FILE]"
+ end
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Mail sender sample using the Curl module
+module curl_mail
+
+import curl
+
+var curl = new Curl
+var mail_request = new CurlMailRequest(curl)
+
+# Networks
+var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword")
+if response isa CurlResponseFailed then
+ print "Error code : {response.error_code}"
+ print "Error msg : {response.error_msg}"
+end
+
+# Headers
+mail_request.from = "Billy Bob"
+mail_request.to = ["user@example.org"]
+mail_request.cc = ["bob@example.org"]
+mail_request.bcc = null
+
+var headers_body = new HeaderMap
+headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\""
+headers_body["Content-Transfer-Encoding:"] = "quoted-printable"
+mail_request.headers_body = headers_body
+
+# Content
+mail_request.body = "<h1>Here you can write HTML stuff.</h1>"
+mail_request.subject = "Hello From My Nit Program"
+
+# Others
+mail_request.verbose = false
+
+# Send mail
+response = mail_request.execute
+if response isa CurlResponseFailed then
+ print "Error code : {response.error_code}"
+ print "Error msg : {response.error_msg}"
+else if response isa CurlMailResponseSuccess then
+ print "Mail Sent"
+else
+ print "Unknown Curl Response type"
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2012-2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Draws an arithmetic operation to the terminal
+module draw_operation
+
+redef enum Int
+ fun n_chars: Int `{
+ int c;
+ if ( abs(recv) >= 10 )
+ c = 1+(int)log10f( (float)abs(recv) );
+ else
+ c = 1;
+ if ( recv < 0 ) c ++;
+ return c;
+ `}
+end
+
+redef enum Char
+ fun as_operator(a, b: Int): Int
+ do
+ if self == '+' then return a + b
+ if self == '-' then return a - b
+ if self == '*' then return a * b
+ if self == '/' then return a / b
+ if self == '%' then return a % b
+ abort
+ end
+
+ fun override_dispc: Bool
+ do
+ return self == '+' or self == '-' or self == '*' or self == '/' or self == '%'
+ end
+
+ fun lines(s: Int): Array[Line]
+ do
+ if self == '+' then
+ return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)]
+ else if self == '-' then
+ return [new Line(new P(0,s/2),1,0,s)]
+ else if self == '*' then
+ var lines = new Array[Line]
+ for y in [1..s-1[ do
+ lines.add( new Line(new P(1,y), 1,0,s-2) )
+ end
+ return lines
+ else if self == '/' then
+ return [new Line(new P(s-1,0), -1,1, s )]
+ else if self == '%' then
+ var q4 = s/4
+ var lines = [new Line(new P(s-1,0),-1,1,s)]
+ for l in [0..q4[ do
+ lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ])
+ end
+ return lines
+ else if self == '1' then
+ return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s),
+ new Line( new P(s/2,0),-1,1,s/2)]
+ else if self == '2' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '3' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)]
+ else if self == '4' then
+ return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '5' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '6' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '7' then
+ var tl = new P(0,0)
+ var tr = new P(s-1,0)
+ return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)]
+ else if self == '8' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '9' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2),
+ new Line( new P(0,s/2), 1,0,s)]
+ else if self == '0' then
+ return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s),
+ new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)]
+ end
+ return new Array[Line]
+ end
+end
+
+class P
+ var x : Int
+ var y : Int
+end
+
+redef class String
+	# The 'hack' parameter works around a bug in the evaluation software
+ fun draw(dispc: Char, size, gap: Int, hack: Bool)
+ do
+ var w = size * length +(length-1)*gap
+ var h = size
+ var map = new Array[Array[Char]]
+ for x in [0..w[ do
+ map[x] = new Array[Char].filled_with( ' ', h )
+ end
+
+ var ci = 0
+ for c in self.chars do
+ var local_dispc
+ if c.override_dispc then
+ local_dispc = c
+ else
+ local_dispc = dispc
+ end
+
+ var lines = c.lines( size )
+ for line in lines do
+ var x = line.o.x+ci*size
+ x += ci*gap
+ var y = line.o.y
+ for s in [0..line.len[ do
+ assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}"
+ map[x][y] = local_dispc
+ x += line.step_x
+ y += line.step_y
+ end
+ end
+
+ ci += 1
+ end
+
+ if hack then
+ for c in [0..size[ do
+ map[c][0] = map[map.length-size+c][0]
+ map[map.length-size+c][0] = ' '
+ end
+ end
+
+ for y in [0..h[ do
+ for x in [0..w[ do
+ printn map[x][y]
+ end
+ print ""
+ end
+ end
+end
+
+class Line
+ var o : P
+ var step_x : Int
+ var step_y : Int
+ var len : Int
+end
+
+var a
+var b
+var op_char
+var disp_char
+var disp_size
+var disp_gap
+
+if "NIT_TESTING".environ == "true" then
+ a = 567
+ b = 13
+ op_char = '*'
+ disp_char = 'O'
+ disp_size = 8
+ disp_gap = 1
+else
+ printn "Left operand: "
+ a = gets.to_i
+
+ printn "Right operand: "
+ b = gets.to_i
+
+ printn "Operator (+, -, *, /, %): "
+ op_char = gets.chars[0]
+
+ printn "Char to display: "
+ disp_char = gets.chars[0]
+
+ printn "Size of text: "
+ disp_size = gets.to_i
+
+ printn "Space between digits: "
+ disp_gap = gets.to_i
+end
+
+var result = op_char.as_operator( a, b )
+
+var len_a = a.n_chars
+var len_b = b.n_chars
+var len_res = result.n_chars
+var max_len = len_a.max( len_b.max( len_res ) ) + 1
+
+# draw first line
+var d = max_len - len_a
+var line_a = ""
+for i in [0..d[ do line_a += " "
+line_a += a.to_s
+line_a.draw( disp_char, disp_size, disp_gap, false )
+
+print ""
+# draw second line
+d = max_len - len_b-1
+var line_b = op_char.to_s
+for i in [0..d[ do line_b += " "
+line_b += b.to_s
+line_b.draw( disp_char, disp_size, disp_gap, false )
+
+# draw -----
+print ""
+for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do
+ printn "_"
+end
+print ""
+print ""
+
+# draw result
+d = max_len - len_res
+var line_res = ""
+for i in [0..d[ do line_res += " "
+line_res += result.to_s
+line_res.draw( disp_char, disp_size, disp_gap, false )
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Example using the privileges module to drop privileges from root
+module drop_privileges
+
+import privileges
+
+# basic command line options
+var opts = new OptionContext
+var opt_ug = new OptionUserAndGroup.for_dropping_privileges
+opt_ug.mandatory = true
+opts.add_option(opt_ug)
+
+# parse and check command line options
+opts.parse(args)
+if not opts.errors.is_empty then
+ print opts.errors
+ print "Usage: drop_privileges [options]"
+ opts.usage
+ exit 1
+end
+
+# original user
+print "before {sys.uid}:{sys.gid}"
+
+# make the switch
+var user_group = opt_ug.value
+assert user_group != null
+user_group.drop_privileges
+
+# final user
+print "after {sys.uid}:{sys.egid}"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2012-2013 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module illustrates some uses of the FFI, specifically
+# how to use extern methods, that is, how to implement a Nit method in C.
+module extern_methods
+
+redef enum Int
+	# Returns the self'th Fibonacci number
+ # implemented here in C for optimization purposes
+ fun fib : Int import fib `{
+ if ( recv < 2 )
+ return recv;
+ else
+ return Int_fib( recv-1 ) + Int_fib( recv-2 );
+ `}
+
+ # System call to sleep for "self" seconds
+ fun sleep `{
+ sleep( recv );
+ `}
+
+	# Return atan2( self, x ) from libmath
+ fun atan_with( x : Int ) : Float `{
+ return atan2( recv, x );
+ `}
+
+	# This method calls back to Nit methods from C code.
+	# From the C code, it will use:
+ # * the local fib method
+ # * the + operator, a method of Int
+ # * to_s, a method of all objects
+ # * String.to_cstring, a method of String to return an equivalent char*
+ fun foo import fib, +, to_s, String.to_cstring `{
+ long recv_fib = Int_fib( recv );
+ long recv_plus_fib = Int__plus( recv, recv_fib );
+
+ String nit_string = Int_to_s( recv_plus_fib );
+ char *c_string = String_to_cstring( nit_string );
+
+ printf( "from C: self + fib(self) = %s\n", c_string );
+ `}
+
+ # Equivalent to foo but written in pure Nit
+ fun bar do print "from Nit: self + fib(self) = {self+self.fib}"
+end
+
+print 12.fib
+
+print "sleeping 1 second..."
+1.sleep
+
+print 100.atan_with( 200 )
+8.foo
+8.bar
+
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A simple example of refinement where a method is added to the integer class.
+module fibonacci
+
+redef class Int
+	# Calculate the self-th element of the Fibonacci sequence.
+ fun fibonacci: Int
+ do
+ if self < 2 then
+ return 1
+ else
+ return (self-2).fibonacci + (self-1).fibonacci
+ end
+ end
+end
+
+# Print usage and exit.
+fun usage
+do
+	print "Usage: fibonacci <integer>"
+ exit 0
+end
+
+# Main part
+if args.length != 1 then
+ usage
+end
+print args.first.to_i.fibonacci
+print "hello world"
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import html
+
+class NitHomepage
+ super HTMLPage
+
+ redef fun head do
+ add("meta").attr("charset", "utf-8")
+ add("title").text("Nit")
+ add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon")
+ add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css")
+ add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css")
+ end
+
+ redef fun body do
+ open("article").add_class("page")
+ open("section").add_class("pageheader")
+ add_html("<a id='toptitle_first' class='toptitle'>the</a><a id='toptitle_second' class='toptitle' href=''>Nit</a><a id='toptitle_third' class='toptitle' href=''>Programming Language</a>")
+ open("header").add_class("header")
+ open("div").add_class("topsubtitle")
+ add("p").text("A Fun Language for Serious Programming")
+ close("div")
+ close("header")
+ close("section")
+
+ open("div").attr("id", "pagebody")
+ open("section").attr("id", "content")
+ add("h1").text("# What is Nit?")
+ add("p").text("Nit is an object-oriented programming language. The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.")
+ add("p").text("So, what does the famous hello world program look like, in Nit?")
+ add_html("<pre><tt><span class='normal'>print </span><span class='string'>'Hello, World!'</span></tt></pre>")
+
+ add("h1").text("# Feature Highlights")
+ add("h2").text("Usability")
+ add("p").text("Nit's goal is to be usable by real programmers for real projects")
+
+ open("ul")
+ open("li")
+ add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle")
+ close("li")
+ add("li").text("Script-like language without verbosity nor cryptic statements")
+ add("li").text("Painless static types: static typing should help programmers")
+ add("li").text("Efficient development, efficient execution, efficient evolution.")
+ close("ul")
+
+ add("h2").text("Robustness")
+ add("p").text("Nit will help you to write bug-free programs")
+
+ open("ul")
+ add("li").text("Strong static typing")
+ add("li").text("No more NullPointerException")
+ close("ul")
+
+ add("h2").text("Object-Oriented")
+ add("p").text("Nit's guideline is to follow the most powerful OO principles")
+
+ open("ul")
+ open("li")
+ add("a").attr("href", "./everything_is_an_object/").text("Everything is an object")
+ close("li")
+ open("li")
+ add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance")
+ close("li")
+ open("li")
+ add("a").attr("href", "./refinement/").text("Open classes")
+ close("li")
+ open("li")
+ add("a").attr("href", "./virtual_types/").text("Virtual types")
+ close("li")
+ close("ul")
+
+
+ add("h1").text("# Getting Started")
+ add("p").text("Get Nit from its Git repository:")
+
+ add_html("<pre><code>$ git clone http://nitlanguage.org/nit.git</code></pre>")
+ add("p").text("Build the compiler (may be long):")
+ add_html("<pre><code>$ cd nit\n")
+ add_html("$ make</code></pre>")
+ add("p").text("Compile a program:")
+ add_html("<pre><code>$ bin/nitc examples/hello_world.nit</code></pre>")
+ add("p").text("Execute the program:")
+ add_html("<pre><code>$ ./hello_world</code></pre>")
+ close("section")
+ close("div")
+ close("article")
+ end
+end
+
+var page = new NitHomepage
+page.write_to stdout
+page.write_to_file("nit.html")
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# An example that defines and uses stacks of integers.
+# The implementation is done with a simple linked list.
+# It features: free constructors, nullable types and some adaptive typing.
+module int_stack
+
+# A stack of integers implemented by a simple linked list.
+# Note that this is only a toy class: a real linked list would benefit from using
+# generics and from implementing interfaces, like Collection, from the standard library.
+class IntStack
+ # The head node of the list.
+ # Null means that the stack is empty.
+ private var head: nullable ISNode = null
+
+	# Add a new integer on top of the stack.
+ fun push(val: Int)
+ do
+ self.head = new ISNode(val, self.head)
+ end
+
+ # Remove and return the last pushed integer.
+ # Return null if the stack is empty.
+ fun pop: nullable Int
+ do
+ var head = self.head
+ if head == null then return null
+		# Note: the following accesses are statically safe because of the
+		# previous 'if'.
+ var val = head.val
+ self.head = head.next
+ return val
+ end
+
+ # Return the sum of all integers of the stack.
+ # Return 0 if the stack is empty.
+ fun sumall: Int
+ do
+ var sum = 0
+ var cur = self.head
+ while cur != null do
+			# Note: the following accesses are statically safe because of
+			# the condition of the 'while'.
+ sum += cur.val
+ cur = cur.next
+ end
+ return sum
+ end
+
+ # Note: Because all attributes have a default value, a free constructor
+ # "init()" is implicitly defined.
+end
+
+# A node of an IntStack
+private class ISNode
+ # The integer value stored in the node.
+ var val: Int
+
+ # The next node, if any.
+ var next: nullable ISNode
+
+ # Note: A free constructor "init(val: Int, next: nullable ISNode)" is
+ # implicitly defined.
+end
+
+var l = new IntStack
+l.push(1)
+l.push(2)
+l.push(3)
+
+print l.sumall
+
+# Note: the 'for' control structure cannot be used on IntStack in its current state.
+# It requires a more advanced topic (a hypothetical sketch is given after the loop below).
+# In the meantime, why not use the 'loop' control structure?
+loop
+ var i = l.pop
+ if i == null then break
+ # The following is statically safe because of the previous 'if'.
+ print i * 10
+end
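+
+# A hypothetical sketch (not part of the original example) of how an iterator
+# could be provided for IntStack, assuming Nit's standard Iterator protocol
+# (`is_ok`, `item` and `next`); hooking it into the standard Iterator/Collection
+# hierarchy is what would ultimately let 'for' traverse the stack.
+#
+#     class IntStackIterator
+#         var node: nullable ISNode
+#         fun is_ok: Bool do return self.node != null
+#         fun item: Int do return self.node.as(not null).val
+#         fun next do self.node = self.node.as(not null).next
+#     end
+#
+#     redef class IntStack
+#         fun iterator: IntStackIterator do return new IntStackIterator(self.head)
+#     end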
+
+# Note: 'or else' is used to give an alternative to a null expression.
+l.push(5)
+print l.pop or else 0 # l.pop gives 5, so print 5
+print l.pop or else 0 # l.pop gives null, so print the alternative: 0
+
+
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2014 Alexis Laferrière <alexis.laf@xymus.net>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide.
+#
+# Code reference:
+# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c
+module opengles2_hello_triangle
+
+import glesv2
+import egl
+import mnit_linux # for sdl
+import x11
+
+if "NIT_TESTING".environ == "true" then exit(0)
+
+var window_width = 800
+var window_height = 600
+
+#
+## SDL
+#
+var sdl_display = new SDLDisplay(window_width, window_height)
+var sdl_wm_info = new SDLSystemWindowManagerInfo
+var x11_window_handle = sdl_wm_info.x11_window_handle
+
+#
+## X11
+#
+var x_display = x_open_default_display
+assert x_display != 0 else print "x11 fail"
+
+#
+## EGL
+#
+var egl_display = new EGLDisplay(x_display)
+assert egl_display.is_valid else print "EGL display is not valid"
+egl_display.initialize
+
+print "EGL version: {egl_display.version}"
+print "EGL vendor: {egl_display.vendor}"
+print "EGL extensions: {egl_display.extensions.join(", ")}"
+print "EGL client APIs: {egl_display.client_apis.join(", ")}"
+
+assert egl_display.is_valid else print egl_display.error
+
+var config_chooser = new EGLConfigChooser
+#config_chooser.surface_type_egl
+config_chooser.blue_size = 8
+config_chooser.green_size = 8
+config_chooser.red_size = 8
+#config_chooser.alpha_size = 8
+#config_chooser.depth_size = 8
+#config_chooser.stencil_size = 8
+#config_chooser.sample_buffers = 1
+config_chooser.close
+
+var configs = config_chooser.choose(egl_display)
+assert configs != null else print "choosing config failed: {egl_display.error}"
+assert not configs.is_empty else print "no EGL config"
+
+print "{configs.length} EGL configs available"
+for config in configs do
+ var attribs = config.attribs(egl_display)
+ print "* caveats: {attribs.caveat}"
+ print " conformant to: {attribs.conformant}"
+ print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}"
+ print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}"
+end
+
+var config = configs.first
+
+var format = config.attribs(egl_display).native_visual_id
+
+# TODO android part
+# Opengles1Display_midway_init(recv, format);
+
+var surface = egl_display.create_window_surface(config, x11_window_handle, [0])
+assert surface.is_ok else print egl_display.error
+
+var context = egl_display.create_context(config)
+assert context.is_ok else print egl_display.error
+
+var make_current_res = egl_display.make_current(surface, surface, context)
+assert make_current_res
+
+var width = surface.attribs(egl_display).width
+var height = surface.attribs(egl_display).height
+print "Width: {width}"
+print "Height: {height}"
+
+assert egl_bind_opengl_es_api else print "eglBindAPI failed: {egl_display.error}"
+
+#
+## GLESv2
+#
+
+print "Can compile shaders? {gl_shader_compiler}"
+assert_no_gl_error
+
+assert gl_shader_compiler else print "Cannot compile shaders"
+
+# gl program
+print gl_error.to_s
+var program = new GLProgram
+if not program.is_ok then
+ print "Program is not ok: {gl_error.to_s}\nLog:"
+ print program.info_log
+ abort
+end
+assert_no_gl_error
+
+# vertex shader
+var vertex_shader = new GLVertexShader
+assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}"
+vertex_shader.source = """
+attribute vec4 vPosition;
+void main()
+{
+ gl_Position = vPosition;
+} """
+vertex_shader.compile
+assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}"
+assert_no_gl_error
+
+# fragment shader
+var fragment_shader = new GLFragmentShader
+assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}"
+fragment_shader.source = """
+precision mediump float;
+void main()
+{
+ gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
+}
+"""
+fragment_shader.compile
+assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}"
+assert_no_gl_error
+
+program.attach_shader vertex_shader
+program.attach_shader fragment_shader
+program.bind_attrib_location(0, "vPosition")
+program.link
+assert program.is_linked else print "Linking failed: {program.info_log}"
+assert_no_gl_error
+
+# draw!
+var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0]
+var vertex_array = new VertexArray(0, 3, vertices)
+vertex_array.attrib_pointer
+gl_clear_color(0.5, 0.0, 0.5, 1.0)
+for i in [0..10000[ do
+ printn "."
+ assert_no_gl_error
+ gl_viewport(0, 0, width, height)
+ gl_clear_color_buffer
+ program.use
+ vertex_array.enable
+ vertex_array.draw_arrays_triangles
+ egl_display.swap_buffers(surface)
+end
+
+# delete
+program.delete
+vertex_shader.delete
+fragment_shader.delete
+
+#
+## EGL
+#
+# close
+egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none)
+egl_display.destroy_context(context)
+egl_display.destroy_surface(surface)
+
+#
+## SDL
+#
+# close
+sdl_display.destroy
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# How to print arguments of the command line.
+module print_arguments
+
+for a in args do
+ print a
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2004-2008 Jean Privat <jean@pryen.org>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A procedural program (without explicit class definition).
+# This program manipulates arrays of integers.
+module procedural_array
+
+# The sum of the elements of `a'.
+# Uses a 'for' control structure.
+fun array_sum(a: Array[Int]): Int
+do
+ var sum = 0
+ for i in a do
+ sum = sum + i
+ end
+ return sum
+end
+
+# The sum of the elements of `a' (alternative version).
+# Uses a 'while' control structure.
+fun array_sum_alt(a: Array[Int]): Int
+do
+ var sum = 0
+ var i = 0
+ while i < a.length do
+ sum = sum + a[i]
+ i = i + 1
+ end
+ return sum
+end
+
+# The main part of the program.
+var a = [10, 5, 8, 9]
+print(array_sum(a))
+print(array_sum_alt(a))
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Client sample using the Socket module, which connects to the server sample.
+module socket_client
+
+import socket
+
+if args.length < 2 then
+ print "Usage : socket_client <host> <port>"
+ return
+end
+
+var s = new Socket.client(args[0], args[1].to_i)
+print "[HOST ADDRESS] : {s.address}"
+print "[HOST] : {s.host}"
+print "[PORT] : {s.port}"
+print "Connecting ... {s.connected}"
+if s.connected then
+ print "Writing ... Hello server !"
+ s.write("Hello server !")
+ print "[Response from server] : {s.read(100)}"
+ print "Closing ..."
+ s.close
+end
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Matthieu Lucas <lucasmatthieu@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Server sample using the Socket module, which allows clients to connect
+module socket_server
+
+import socket
+
+if args.is_empty then
+ print "Usage : socket_server <port>"
+ return
+end
+
+var socket = new Socket.server(args[0].to_i, 1)
+print "[PORT] : {socket.port.to_s}"
+
+var clients = new Array[Socket]
+var max = socket
+loop
+ var fs = new SocketObserver(true, true, true)
+ fs.readset.set(socket)
+
+ for c in clients do fs.readset.set(c)
+
+ if fs.select(max, 4, 0) == 0 then
+		print "Error occurred in select {sys.errno.strerror}"
+ break
+ end
+
+ if fs.readset.is_set(socket) then
+ var ns = socket.accept
+ print "Accepting {ns.address} ... "
+ print "[Message from {ns.address}] : {ns.read(100)}"
+ ns.write("Goodbye client.")
+ print "Closing {ns.address} ..."
+ ns.close
+ end
+end
+
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import template
+
+### Here, definition of the specific templates
+
+# The root template for composers
+class TmplComposers
+ super Template
+
+ # Short list of composers
+ var composers = new Array[TmplComposer]
+
+	# Detailed list of composers
+ var composer_details = new Array[TmplComposerDetail]
+
+	# Add a composer to both lists
+ fun add_composer(firstname, lastname: String, birth, death: Int)
+ do
+ composers.add(new TmplComposer(lastname))
+ composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death))
+ end
+
+ redef fun rendering do
+ add """
+COMPOSERS
+=========
+"""
+ add_all composers
+ add """
+
+DETAILS
+=======
+"""
+ add_all composer_details
+ end
+end
+
+# A composer in the short list of composers
+class TmplComposer
+ super Template
+
+ # Short name
+ var name: String
+
+ init(name: String) do self.name = name
+
+ redef fun rendering do add "- {name}\n"
+end
+
+# A composer in the detailed list of composers
+class TmplComposerDetail
+ super Template
+
+ var firstname: String
+ var lastname: String
+ var birth: Int
+ var death: Int
+
+ init(firstname, lastname: String, birth, death: Int) do
+ self.firstname = firstname
+ self.lastname = lastname
+ self.birth = birth
+ self.death = death
+ end
+
+ redef fun rendering do add """
+
+COMPOSER: {{{firstname}}} {{{lastname}}}
+BIRTH...: {{{birth}}}
+DEATH...: {{{death}}}
+"""
+
+end
+
+### Here a simple usage of the templates
+
+var f = new TmplComposers
+f.add_composer("Johann Sebastian", "Bach", 1685, 1750)
+f.add_composer("George Frideric", "Handel", 1685, 1759)
+f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791)
+f.write_to(stdout)
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2014 Lucas Bajolet <r4pass@hotmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Sample module for a minimal chat server using Websockets on port 8088
+module websocket_server
+
+import websocket
+
+var sock = new WebSocket(8088, 1)
+
+var msg: String
+
+if sock.listener.eof then
+ print sys.errno.strerror
+end
+
+sock.accept
+
+while not sock.listener.eof do
+ if not sock.connected then sock.accept
+ if sys.stdin.poll_in then
+ msg = gets
+ printn "Received message : {msg}"
+ if msg == "exit" then sock.close
+ if msg == "disconnect" then sock.disconnect_client
+ sock.write(msg)
+ end
+ if sock.can_read(10) then
+ msg = sock.read_line
+ if msg != "" then print msg
+ end
+end
+
diff --git a/tests/examplefiles/docker.docker b/tests/examplefiles/docker.docker
new file mode 100644
index 00000000..d65385b6
--- /dev/null
+++ b/tests/examplefiles/docker.docker
@@ -0,0 +1,5 @@
+maintainer First O'Last
+
+run echo \
+ 123 $bar
+# comment
diff --git a/tests/examplefiles/example.cob b/tests/examplefiles/example.cob
index 3f65e498..92d2e300 100644
--- a/tests/examplefiles/example.cob
+++ b/tests/examplefiles/example.cob
@@ -2617,940 +2617,4 @@ GC0710 88 Token-Is-Reserved-Word VALUE " ".
*****************************************************************
** Perform all program-wide initialization operations **
*****************************************************************
- 101-Establish-Working-Env.
- MOVE TRIM(Src-Filename,Leading) TO Src-Filename
- ACCEPT Env-TEMP
- FROM ENVIRONMENT "TEMP"
- END-ACCEPT
- ACCEPT Lines-Per-Page-ENV
- FROM ENVIRONMENT "OCXREF_LINES"
- END-ACCEPT
- INSPECT Src-Filename REPLACING ALL "\" BY "/"
- INSPECT Env-TEMP REPLACING ALL "\" BY "/"
- MOVE Src-Filename TO Program-Path
- MOVE Program-Path TO Heading-2
- CALL "C$JUSTIFY"
- USING Heading-2, "Right"
- END-CALL
- MOVE LENGTH(TRIM(Src-Filename,Trailing)) TO I
- MOVE 0 TO J
- PERFORM UNTIL Src-Filename(I:1) = '/'
- OR I = 0
- SUBTRACT 1 FROM I
- ADD 1 TO J
- END-PERFORM
- UNSTRING Src-Filename((I + 1):J) DELIMITED BY "."
- INTO Filename, Dummy
- END-UNSTRING
- STRING TRIM(Env-TEMP,Trailing)
- "/"
- TRIM(Filename,Trailing)
- ".i"
- DELIMITED SIZE
- INTO Expanded-Src-Filename
- END-STRING
- STRING Program-Path(1:I)
- TRIM(Filename,Trailing)
- ".lst"
- DELIMITED SIZE
- INTO Report-Filename
- END-STRING
- IF Lines-Per-Page-ENV NOT = SPACES
- MOVE NUMVAL(Lines-Per-Page-ENV) TO Lines-Per-Page
- ELSE
- MOVE 60 TO Lines-Per-Page
- END-IF
- ACCEPT Todays-Date
- FROM DATE YYYYMMDD
- END-ACCEPT
- MOVE Todays-Date TO H1X-Date
- H1S-Date
- MOVE "????????????..." TO SPI-Current-Program-ID
- MOVE SPACES TO SPI-Current-Verb
- Held-Reference
- MOVE "Y" TO F-First-Record
- .
- /
- 200-Execute-cobc SECTION.
- 201-Build-Cmd.
- STRING "cobc -E "
- TRIM(Program-Path, Trailing)
- " > "
- TRIM(Expanded-Src-Filename,Trailing)
- DELIMITED SIZE
- INTO Cmd
- END-STRING
- CALL "SYSTEM"
- USING Cmd
- END-CALL
- IF RETURN-CODE NOT = 0
- DISPLAY
- "Cross-reference terminated by previous errors"
- UPON SYSERR
- END-DISPLAY
- GOBACK
- END-IF
- .
-
- 209-Exit.
- EXIT
- .
- /
- 300-Tokenize-Source SECTION.
- 301-Driver.
- OPEN INPUT Expand-Code
- MOVE SPACES TO Expand-Code-Rec
- MOVE 256 TO Src-Ptr
- MOVE 0 TO Num-UserNames
- SPI-Current-Line-No
- MOVE "?" TO SPI-Current-Division
-GC0710 MOVE "N" TO F-Verb-Has-Been-Found.
- PERFORM FOREVER
- PERFORM 310-Get-Token
- IF Token-Is-EOF
- EXIT PERFORM
- END-IF
- MOVE UPPER-CASE(SPI-Current-Token)
- TO SPI-Current-Token-UC
- IF Token-Is-Verb
- MOVE SPI-Current-Token-UC TO SPI-Current-Verb
- SPI-Prior-Token
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- RELEASE Sort-Rec
- END-IF
- END-IF
- EVALUATE TRUE
- WHEN In-IDENTIFICATION-DIVISION
- PERFORM 320-IDENTIFICATION-DIVISION
- WHEN In-ENVIRONMENT-DIVISION
- PERFORM 330-ENVIRONMENT-DIVISION
- WHEN In-DATA-DIVISION
- PERFORM 340-DATA-DIVISION
- WHEN In-PROCEDURE-DIVISION
- PERFORM 350-PROCEDURE-DIVISION
- END-EVALUATE
- IF Token-Is-Key-Word
- MOVE SPI-Current-Token-UC TO SPI-Prior-Token
- END-IF
- IF F-Token-Ended-Sentence = "Y"
- AND SPI-Current-Division NOT = "I"
- MOVE SPACES TO SPI-Prior-Token
- SPI-Current-Verb
- END-IF
-
- END-PERFORM
- CLOSE Expand-Code
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 310-Get-Token.
- *>-- Position to 1st non-blank character
- MOVE F-Token-Ended-Sentence TO F-Last-Token-Ended-Sent
- MOVE "N" TO F-Token-Ended-Sentence
- PERFORM UNTIL Expand-Code-Rec(Src-Ptr : 1) NOT = SPACE
- IF Src-Ptr > 255
- READ Expand-Code AT END
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- RELEASE Sort-Rec
- END-IF
- SET Token-Is-EOF TO TRUE
- MOVE 0 TO SPI-Current-Line-No
- EXIT PARAGRAPH
- END-READ
- IF ECR-1 = "#"
- PERFORM 311-Control-Record
- ELSE
- PERFORM 312-Expand-Code-Record
- END-IF
- ELSE
- ADD 1 TO Src-Ptr
- END-IF
- END-PERFORM
- *>-- Extract token string
- MOVE Expand-Code-Rec(Src-Ptr : 1) TO SPI-Current-Char
- MOVE Expand-Code-Rec(Src-Ptr + 1: 1) TO SPI-Next-Char
- IF SPI-Current-Char = "."
- ADD 1 TO Src-Ptr
- MOVE SPI-Current-Char TO SPI-Current-Token
- MOVE SPACE TO SPI-Token-Type
- MOVE "Y" TO F-Token-Ended-Sentence
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Punct
- AND SPI-Current-Char = "="
- AND SPI-Current-Division = "P"
- ADD 1 TO Src-Ptr
- MOVE "EQUALS" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Punct *> So subscripts don't get flagged w/ "*"
- AND SPI-Current-Char = "("
- AND SPI-Current-Division = "P"
- MOVE SPACES TO SPI-Prior-Token
- END-IF
- IF Current-Char-Is-Punct
- ADD 1 TO Src-Ptr
- MOVE SPI-Current-Char TO SPI-Current-Token
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Quote
- ADD 1 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Current-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Alpha TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-X AND Next-Char-Is-Quote
- ADD 2 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Next-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF Current-Char-Is-Z AND Next-Char-Is-Quote
- ADD 2 TO Src-Ptr
- UNSTRING Expand-Code-Rec
- DELIMITED BY SPI-Next-Char
- INTO SPI-Current-Token
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Expand-Code-Rec(Src-Ptr : 1) = "."
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- SET Token-Is-Literal-Alpha TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF F-Processing-PICTURE = "Y"
- UNSTRING Expand-Code-Rec
- DELIMITED BY ". " OR " "
- INTO SPI-Current-Token
- DELIMITER IN Delim
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Delim = ". "
- MOVE "Y" TO F-Token-Ended-Sentence
- ADD 1 TO Src-Ptr
- END-IF
- IF UPPER-CASE(SPI-Current-Token) = "IS"
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- ELSE
- MOVE "N" TO F-Processing-PICTURE
- MOVE SPACE TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- END-IF
- UNSTRING Expand-Code-Rec
- DELIMITED BY ". " OR " " OR "=" OR "(" OR ")" OR "*"
- OR "/" OR "&" OR ";" OR "," OR "<"
- OR ">" OR ":"
- INTO SPI-Current-Token
- DELIMITER IN Delim
- WITH POINTER Src-Ptr
- END-UNSTRING
- IF Delim = ". "
- MOVE "Y" TO F-Token-Ended-Sentence
- END-IF
- IF Delim NOT = ". " AND " "
- SUBTRACT 1 FROM Src-Ptr
- END-IF
- *>-- Classify Token
- MOVE UPPER-CASE(SPI-Current-Token) TO Search-Token
- IF Search-Token = "EQUAL" OR "EQUALS"
- MOVE "EQUALS" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- END-IF
- SEARCH ALL Reserved-Word
- WHEN RW-Word (RW-Idx) = Search-Token
- MOVE RW-Type (RW-Idx) TO SPI-Token-Type
-GC0710 IF Token-Is-Verb
-GC0710 MOVE "Y" TO F-Verb-Has-Been-Found
-GC0710 END-IF
- EXIT PARAGRAPH
- END-SEARCH
- *>-- Not a reserved word, must be a user name
- SET Token-Is-Identifier TO TRUE *> NEEDS EXPANSION!!!!
- PERFORM 313-Check-For-Numeric-Token
- IF Token-Is-Literal-Number
- IF (F-Last-Token-Ended-Sent = "Y")
- AND (SPI-Current-Division = "D")
- MOVE "LEVEL #" TO SPI-Current-Token
- MOVE "K" TO SPI-Token-Type
- EXIT PARAGRAPH
- ELSE
- EXIT PARAGRAPH
- END-IF
- END-IF
- EXIT PARAGRAPH
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 311-Control-Record.
- UNSTRING ECR-2-256
- DELIMITED BY '"'
- INTO PIC-X10, PIC-X256, Dummy
- END-UNSTRING
- INSPECT PIC-X10 REPLACING ALL '"' BY SPACE
- COMPUTE I = NUMVAL(PIC-X10) - 1
- IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing)
- MOVE I TO SPI-Current-Line-No
- SET In-Main-Module TO TRUE
- IF Saved-Section NOT = SPACES
- MOVE Saved-Section TO SPI-Current-Section
- END-IF
- ELSE
- SET In-Copybook TO TRUE
- IF Saved-Section = SPACES
- MOVE SPI-Current-Section TO Saved-Section
- END-IF
- MOVE LENGTH(TRIM(PIC-X256,Trailing)) TO I
- MOVE 0 TO J
- PERFORM UNTIL PIC-X256(I:1) = '/'
- OR I = 0
- SUBTRACT 1 FROM I
- ADD 1 TO J
- END-PERFORM
- UNSTRING PIC-X256((I + 1):J) DELIMITED BY "."
- INTO Filename, Dummy
- END-UNSTRING
- MOVE "[" TO SPI-CS-1
- MOVE Filename TO SPI-CS-2-14
- IF SPI-CS-11-14 NOT = SPACES
- MOVE "..." TO SPI-CS-11-14
- END-IF
- MOVE "]" TO SPI-CS-15
- END-IF
- MOVE SPACES TO Expand-Code-Rec *> Force another READ
- MOVE 256 TO Src-Ptr
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 312-Expand-Code-Record.
- MOVE 1 TO Src-Ptr
- IF In-Main-Module
- ADD 1 To SPI-Current-Line-No
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 313-Check-For-Numeric-Token.
- MOVE SPI-Current-Token TO PIC-X32
- INSPECT PIC-X32
- REPLACING TRAILING SPACES BY "0"
- IF PIC-X32 IS NUMERIC *> Simple Unsigned Integer
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- IF PIC-X32(1:1) = "+" OR "-"
- MOVE "0" TO PIC-X32(1:1)
- END-IF
- MOVE 0 TO Tally
- INSPECT PIC-X32
- TALLYING Tally FOR ALL "."
- IF Tally = 1
- INSPECT PIC-X32 REPLACING ALL "." BY "0"
- END-IF
- IF PIC-X32 IS NUMERIC
- SET Token-Is-Literal-Number TO TRUE
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 320-IDENTIFICATION-DIVISION.
-GC0710 MOVE "N" TO F-Verb-Has-Been-Found
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF SPI-Prior-Token = "PROGRAM-ID"
- MOVE SPACES TO SPI-Prior-Token
- MOVE SPI-Current-Token TO SPI-Current-Program-ID
- IF SPI-CP-13-15 NOT = SPACES
- MOVE "..." TO SPI-CP-13-15
- END-IF
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 330-ENVIRONMENT-DIVISION.
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION"
- MOVE SPI-Prior-Token TO SPI-Current-Section
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Identifier
- PERFORM 361-Release-Ref
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 340-DATA-DIVISION.
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "SECTION"
- MOVE SPI-Prior-Token TO SPI-Current-Section
- EXIT PARAGRAPH
- END-IF
- IF (SPI-Current-Token = "PIC" OR "PICTURE")
- AND (Token-Is-Key-Word)
- MOVE "Y" TO F-Processing-PICTURE
- EXIT PARAGRAPH
- END-IF
-GC0710 IF Token-Is-Reserved-Word
-GC0710 AND SPI-Prior-Token = "LEVEL #"
-GC0710 MOVE SPACES TO SPI-Prior-Token
-GC0710 EXIT PARAGRAPH
-GC0710 END-IF
- IF Token-Is-Identifier
- EVALUATE SPI-Prior-Token
- WHEN "FD"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "SD"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "LEVEL #"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "INDEXED"
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- WHEN "USING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- EXIT PARAGRAPH
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 350-PROCEDURE-DIVISION.
- IF SPI-Current-Section NOT = "PROCEDURE"
- MOVE "PROCEDURE" TO SPI-Current-Section
- END-IF
-GC0710 IF SPI-Current-Token-UC = "PROGRAM"
-GC0710 AND SPI-Prior-Token = "END"
-GC0710 MOVE "?" TO SPI-Current-Division
-GC0710 EXIT PARAGRAPH
-GC0710 END-IF
- IF Token-Is-Key-Word AND SPI-Current-Token = "DIVISION"
- MOVE SPI-Prior-Token TO SPI-Current-Division
- EXIT PARAGRAPH
- END-IF
- IF SPI-Current-Verb = SPACES
-GC0710 AND F-Verb-Has-Been-Found = "Y"
- IF Token-Is-Identifier
- PERFORM 360-Release-Def
- MOVE SPACES TO SPI-Prior-Token
- END-IF
- EXIT PARAGRAPH
- END-IF
- IF NOT Token-Is-Identifier
- EXIT PARAGRAPH
- END-IF
- EVALUATE SPI-Current-Verb
- WHEN "ACCEPT"
- PERFORM 351-ACCEPT
- WHEN "ADD"
- PERFORM 351-ADD
- WHEN "ALLOCATE"
- PERFORM 351-ALLOCATE
- WHEN "CALL"
- PERFORM 351-CALL
- WHEN "COMPUTE"
- PERFORM 351-COMPUTE
- WHEN "DIVIDE"
- PERFORM 351-DIVIDE
- WHEN "FREE"
- PERFORM 351-FREE
- WHEN "INITIALIZE"
- PERFORM 351-INITIALIZE
- WHEN "INSPECT"
- PERFORM 351-INSPECT
- WHEN "MOVE"
- PERFORM 351-MOVE
- WHEN "MULTIPLY"
- PERFORM 351-MULTIPLY
- WHEN "PERFORM"
- PERFORM 351-PERFORM
- WHEN "SET"
- PERFORM 351-SET
- WHEN "STRING"
- PERFORM 351-STRING
- WHEN "SUBTRACT"
- PERFORM 351-SUBTRACT
- WHEN "TRANSFORM"
- PERFORM 351-TRANSFORM
- WHEN "UNSTRING"
- PERFORM 351-UNSTRING
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ACCEPT.
- EVALUATE SPI-Prior-Token
- WHEN "ACCEPT"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ADD.
- EVALUATE SPI-Prior-Token
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN "TO"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-ALLOCATE.
- EVALUATE SPI-Prior-Token
- WHEN "ALLOCATE"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "RETURNING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-CALL.
- EVALUATE SPI-Prior-Token
- WHEN "RETURNING"
- PERFORM 362-Release-Upd
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-COMPUTE.
- EVALUATE SPI-Prior-Token
- WHEN "COMPUTE"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-DIVIDE.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 363-Set-Upd
- MOVE Sort-Rec TO Held-Reference
- WHEN "GIVING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference To Sort-Rec
- MOVE SPACES To Held-Reference
- SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- PERFORM 362-Release-Upd
- WHEN "REMAINDER"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-FREE.
- PERFORM 362-Release-Upd
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-INITIALIZE.
- EVALUATE SPI-Prior-Token
- WHEN "INITIALIZE"
- PERFORM 362-Release-Upd
- WHEN "REPLACING"
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-INSPECT.
- EVALUATE SPI-Prior-Token
- WHEN "INSPECT"
- PERFORM 364-Set-Ref
- MOVE SPACES TO Held-Reference
- MOVE SPACES TO SPI-Prior-Token
- WHEN "TALLYING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "REPLACING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- MOVE "*" TO SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- MOVE SPACES TO SPI-Prior-Token
- WHEN "CONVERTING"
- IF Held-Reference NOT = SPACES
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- MOVE "*" TO SR-Ref-Flag
- RELEASE Sort-Rec
- END-IF
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-MOVE.
- EVALUATE SPI-Prior-Token
- WHEN "TO"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-MULTIPLY.
- EVALUATE SPI-Prior-Token
- WHEN "BY"
- PERFORM 363-Set-Upd
- MOVE Sort-Rec TO Held-Reference
- WHEN "GIVING"
- MOVE Held-Reference TO Sort-Rec
- MOVE SPACES TO Held-Reference
- SR-Ref-Flag
- RELEASE Sort-Rec
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-PERFORM.
- EVALUATE SPI-Prior-Token
- WHEN "VARYING"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN "AFTER"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-SET.
- EVALUATE SPI-Prior-Token
- WHEN "SET"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-STRING.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- WHEN "POINTER"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-SUBTRACT.
- EVALUATE SPI-Prior-Token
- WHEN "GIVING"
- PERFORM 362-Release-Upd
- WHEN "FROM"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-TRANSFORM.
- EVALUATE SPI-Prior-Token
- WHEN "TRANSFORM"
- PERFORM 362-Release-Upd
- MOVE SPACES TO SPI-Prior-Token
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 351-UNSTRING.
- EVALUATE SPI-Prior-Token
- WHEN "INTO"
- PERFORM 362-Release-Upd
- WHEN "DELIMITER"
- PERFORM 362-Release-Upd
- WHEN "COUNT"
- PERFORM 362-Release-Upd
- WHEN "POINTER"
- PERFORM 362-Release-Upd
- WHEN "TALLYING"
- PERFORM 362-Release-Upd
- WHEN OTHER
- PERFORM 361-Release-Ref
- END-EVALUATE
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 360-Release-Def.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Def
- MOVE 0 TO SR-Line-No-Ref
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 361-Release-Ref.
- PERFORM 364-Set-Ref
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 362-Release-Upd.
- PERFORM 363-Set-Upd
- RELEASE Sort-Rec
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 363-Set-Upd.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Ref
- MOVE "*" TO SR-Ref-Flag
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 364-Set-Ref.
- MOVE SPACES TO Sort-Rec
- MOVE SPI-Current-Program-ID TO SR-Prog-ID
- MOVE SPI-Current-Token-UC TO SR-Token-UC
- MOVE SPI-Current-Token TO SR-Token
- MOVE SPI-Current-Section TO SR-Section
- MOVE SPI-Current-Line-No TO SR-Line-No-Ref
- .
- /
- 400-Produce-Xref-Listing SECTION.
- 401-Init.
- MOVE SPACES TO Detail-Line-X
- Group-Indicators
- MOVE 0 TO I
- Lines-Left
-GC0710 MOVE 'N' TO F-Duplicate
- .
-
- 402-Process-Sorted-Recs.
- PERFORM FOREVER
- RETURN Sort-File AT END
- EXIT PERFORM
- END-RETURN
- IF SR-Prog-ID NOT = GI-Prog-ID
- OR SR-Token-UC NOT = GI-Token
-GC0710 MOVE 'N' TO F-Duplicate
- IF Detail-Line-X NOT = SPACES
- PERFORM 410-Generate-Report-Line
- END-IF
- IF SR-Prog-ID NOT = GI-Prog-ID
- MOVE 0 TO Lines-Left
- END-IF
- MOVE SR-Prog-ID TO GI-Prog-ID
- MOVE SR-Token-UC TO GI-Token
- END-IF
-GC0710 IF SR-Token-UC = GI-Token
-GC0710 AND SR-Line-No-Def NOT = SPACES
-GC0710 AND Detail-Line-X NOT = SPACES
-GC0710 MOVE 'Y' TO F-Duplicate
-GC0710 PERFORM 410-Generate-Report-Line
-GC0710 MOVE 0 TO I
-GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID
-GC0710 MOVE ' (Duplicate Definition)' TO DLX-Token
-GC0710 MOVE SR-Section TO DLX-Section
-GC0710 MOVE SR-Line-No-Def TO DLX-Line-No-Def
-GC0710 EXIT PERFORM CYCLE
-GC0710 END-IF
-GC0710 IF SR-Token-UC = GI-Token
-GC0710 AND SR-Line-No-Def = SPACES
-GC0710 AND F-Duplicate = 'Y'
-GC0710 MOVE 'N' TO F-Duplicate
-GC0710 PERFORM 410-Generate-Report-Line
-GC0710 MOVE 0 TO I
-GC0710 MOVE SR-Prog-ID TO DLX-Prog-ID
-GC0710 MOVE ' (Duplicate References)' TO DLX-Token
-GC0710 END-IF
- IF Detail-Line-X = SPACES
- MOVE SR-Prog-ID TO DLX-Prog-ID
- MOVE SR-Token TO DLX-Token
- MOVE SR-Section TO DLX-Section
- IF SR-Line-No-Def NOT = SPACES
- MOVE SR-Line-No-Def TO DLX-Line-No-Def
- END-IF
- END-IF
- IF SR-Reference > '000000'
- ADD 1 TO I
- IF I > Line-Nos-Per-Rec
- PERFORM 410-Generate-Report-Line
- MOVE 1 TO I
- END-IF
- MOVE SR-Line-No-Ref TO DLX-Line-No-Ref (I)
- MOVE SR-Ref-Flag TO DLX-Ref-Flag (I)
- END-IF
- END-PERFORM
- IF Detail-Line-X NOT = SPACES
- PERFORM 410-Generate-Report-Line
- END-IF
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 410-Generate-Report-Line.
- IF Lines-Left < 1
- IF F-First-Record = "Y"
- MOVE "N" TO F-First-Record
- WRITE Report-Rec FROM Heading-1X BEFORE 1
- ELSE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE PAGE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE 1
- WRITE Report-Rec FROM Heading-1X BEFORE 1
- END-IF
- WRITE Report-Rec FROM Heading-2 BEFORE 1
- WRITE Report-Rec FROM Heading-4X BEFORE 1
- WRITE Report-Rec FROM Heading-5X BEFORE 1
- COMPUTE
- Lines-Left = Lines-Per-Page - 4
- END-COMPUTE
- END-IF
- WRITE Report-Rec FROM Detail-Line-X BEFORE 1
- MOVE SPACES TO Detail-Line-X
- MOVE 0 TO I
- SUBTRACT 1 FROM Lines-Left
- .
- /
- 500-Produce-Source-Listing SECTION.
- 501-Generate-Source-Listing.
- OPEN INPUT Source-Code
- Expand-Code
- MOVE 0 TO Source-Line-No
- PERFORM FOREVER
- READ Expand-Code AT END
- EXIT PERFORM
- END-READ
- IF ECR-1 = "#"
- PERFORM 510-Control-Record
- ELSE
- PERFORM 520-Expand-Code-Record
- END-IF
- END-PERFORM
- CLOSE Source-Code
- Expand-Code
- EXIT SECTION
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 510-Control-Record.
- UNSTRING ECR-2-256
- DELIMITED BY '"'
- INTO PIC-X10, PIC-X256, Dummy
- END-UNSTRING
- IF TRIM(PIC-X256,Trailing) = TRIM(Program-Path,Trailing) *> Main Pgm
- SET In-Main-Module TO TRUE
- IF Source-Line-No > 0
- READ Expand-Code END-READ
- END-IF
- ELSE *> COPY
- SET In-Copybook TO TRUE
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 520-Expand-Code-Record.
- IF In-Main-Module
- ADD 1 To SPI-Current-Line-No
- READ Source-Code AT END NEXT SENTENCE END-READ
- ADD 1 TO Source-Line-No
- MOVE SPACES TO Detail-Line-S
- MOVE Source-Line-No TO DLS-Line-No
- MOVE SCR-1-128 TO DLS-Statement
-GC0410 IF SCR-7 = "/"
-GC0410 MOVE 0 TO Lines-Left
-GC0410 END-IF
- PERFORM 530-Generate-Source-Line
- IF SCR-129-256 NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE SCR-129-256 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- END-IF
- ELSE
- IF Expand-Code-Rec NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE ECR-1-128 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- IF ECR-129-256 NOT = SPACES
- MOVE SPACES TO Detail-Line-S
- MOVE ECR-129-256 TO DLS-Statement
- PERFORM 530-Generate-Source-Line
- END-IF
- END-IF
- END-IF
- .
- *>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
- 530-Generate-Source-Line.
- IF Lines-Left < 1
- IF F-First-Record = "Y"
- MOVE "N" TO F-First-Record
- WRITE Report-Rec FROM Heading-1S BEFORE 1
- ELSE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE PAGE
- MOVE SPACES TO Report-Rec
- WRITE Report-Rec BEFORE 1
- WRITE Report-Rec FROM Heading-1S BEFORE 1
- END-IF
- WRITE Report-Rec FROM Heading-2 BEFORE 1
- WRITE Report-Rec FROM Heading-4S BEFORE 1
- WRITE Report-Rec FROM Heading-5S BEFORE 1
- COMPUTE
- Lines-Left = Lines-Per-Page - 4
- END-COMPUTE
- END-IF
- WRITE Report-Rec FROM Detail-Line-S BEFORE 1
- MOVE SPACES TO Detail-Line-S
- SUBTRACT 1 FROM Lines-Left
- .
-
END PROGRAM LISTING.
diff --git a/tests/examplefiles/example.golo b/tests/examplefiles/example.golo
new file mode 100644
index 00000000..92ff78b5
--- /dev/null
+++ b/tests/examplefiles/example.golo
@@ -0,0 +1,113 @@
+#
+# Comments
+#
+
+module pygments.Example
+
+import some.Module
+
+local function foo = |a, b| -> a + b
+
+----
+golodoc string
+----
+augment java.util.Collection {
+
+ ----
+ sub doc
+ ----
+ function plop = |this, v| {
+ return this: length() + v
+ }
+}
+
+function bar = |a, b| {
+ let msg = "a string"
+ var tmp = ""
+ tmp = tmp + a: toString()
+ println(tmp + b)
+}
+
+function baz = {
+ foreach i in range(0, 5) {
+ if i % 2 == 0 and true or false {
+ print("e")
+ } else {
+ print("o")
+ }
+ }
+}
+
+function userMatch = |v| ->
+ match {
+ when v % 2 == 0 then "e"
+ otherwise "o"
+ }
+
+
+function add = |x| -> |y| -> x + y
+
+let aChar = 'a'
+
+let multiline =
+"""
+foo
+bar
+baz
+"""
+
+local function myObj = -> DynamicObject():
+ name("foo"):
+ age(25):
+    define("meth", |this| -> this: name() + this: age())
+
+----
+Golo doc string
+----
+function nullTest = {
+ let m = map[
+ ["a", 1],
+ ["b", 2]
+ ]
+
+  println(m: get("a") orIfNull 0)
+  println(m: get("b")?: toString() orIfNull "0")
+
+}
+
+struct Point = { x, y }
+
+function deco1 = |fun| {
+ return |args...| {
+ return "deco1 + " + fun: invokeWithArguments(args)
+ }
+}
+
+@deco1
+function decofoo = |a| {
+ return "foo: " + a
+}
+
+@deco1
+function decobar = |a| -> "bar: " + a
+
+function deco2 = |fun| {
+ return |args...| {
+ return "deco2 + " + fun: invokeWithArguments(args)
+ }
+}
+
+@deco2
+@deco1
+function decobaz = |a| -> "baz: " + a
+
+let deco3 = ^deco1: andThen(^deco2)
+
+@deco3
+function decospam = |a| -> "spam: " + a
+
+@another.Module.deco
+function ping = -> "pong"
+
+@deco("with", params)
+function gnop = -> "gnip"
diff --git a/tests/examplefiles/example.hs b/tests/examplefiles/example.hs
index 9efd3364..f5e2b555 100644
--- a/tests/examplefiles/example.hs
+++ b/tests/examplefiles/example.hs
@@ -25,3 +25,7 @@ data ĈrazyThings =
House |
Peár
deriving (Show, Eq)
+
+-- some char literals:
+
+charl = ['"', 'a', '\ESC', '\'', ' ']
diff --git a/tests/examplefiles/example.hx b/tests/examplefiles/example.hx
index 381cf825..7584fc81 100644
--- a/tests/examplefiles/example.hx
+++ b/tests/examplefiles/example.hx
@@ -181,5 +181,12 @@ var c = macro class MyClass {
var c = macro interface IClass {};
+//a macro class may have no name...
+var def = macro class {
+ private inline function new(loader) this = loader;
+ private var loader(get,never) : $loaderType;
+ inline private function get_loader() : $loaderType return this;
+};
+
//ECheckType
var f = (123:Float); \ No newline at end of file
diff --git a/tests/examplefiles/example.stan b/tests/examplefiles/example.stan
index 7eb6fdfc..716b4d12 100644
--- a/tests/examplefiles/example.stan
+++ b/tests/examplefiles/example.stan
@@ -5,6 +5,14 @@ It is not a real model and will not compile
*/
# also a comment
// also a comment
+functions {
+ void f1(void a, real b) {
+ return 1 / a;
+ }
+ real f2(int a, vector b, real c) {
+ return a + b + c;
+ }
+}
data {
// valid name
int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc;
@@ -20,6 +28,7 @@ data {
corr_matrix[3] grault;
cov_matrix[3] garply;
cholesky_factor_cov[3] waldo;
+ cholesky_factor_corr[3] waldo2;
real<lower=-1,upper=1> foo1;
real<lower=0> foo2;
@@ -87,6 +96,7 @@ model {
tmp / tmp;
tmp .* tmp;
tmp ./ tmp;
+ tmp ^ tmp;
! tmp;
- tmp;
+ tmp;
@@ -107,4 +117,3 @@ generated quantities {
real bar1;
bar1 <- foo + 1;
}
-
diff --git a/tests/examplefiles/example.thy b/tests/examplefiles/example.thy
new file mode 100644
index 00000000..abaa1af8
--- /dev/null
+++ b/tests/examplefiles/example.thy
@@ -0,0 +1,751 @@
+(* from Isabelle2013-2 src/HOL/Power.thy; BSD license *)
+
+(* Title: HOL/Power.thy
+ Author: Lawrence C Paulson, Cambridge University Computer Laboratory
+ Copyright 1997 University of Cambridge
+*)
+
+header {* Exponentiation *}
+
+theory Power
+imports Num
+begin
+
+subsection {* Powers for Arbitrary Monoids *}
+
+class power = one + times
+begin
+
+primrec power :: "'a \<Rightarrow> nat \<Rightarrow> 'a" (infixr "^" 80) where
+ power_0: "a ^ 0 = 1"
+ | power_Suc: "a ^ Suc n = a * a ^ n"
+
+notation (latex output)
+ power ("(_\<^bsup>_\<^esup>)" [1000] 1000)
+
+notation (HTML output)
+ power ("(_\<^bsup>_\<^esup>)" [1000] 1000)
+
+text {* Special syntax for squares. *}
+
+abbreviation (xsymbols)
+ power2 :: "'a \<Rightarrow> 'a" ("(_\<^sup>2)" [1000] 999) where
+ "x\<^sup>2 \<equiv> x ^ 2"
+
+notation (latex output)
+ power2 ("(_\<^sup>2)" [1000] 999)
+
+notation (HTML output)
+ power2 ("(_\<^sup>2)" [1000] 999)
+
+end
+
+context monoid_mult
+begin
+
+subclass power .
+
+lemma power_one [simp]:
+ "1 ^ n = 1"
+ by (induct n) simp_all
+
+lemma power_one_right [simp]:
+ "a ^ 1 = a"
+ by simp
+
+lemma power_commutes:
+ "a ^ n * a = a * a ^ n"
+ by (induct n) (simp_all add: mult_assoc)
+
+lemma power_Suc2:
+ "a ^ Suc n = a ^ n * a"
+ by (simp add: power_commutes)
+
+lemma power_add:
+ "a ^ (m + n) = a ^ m * a ^ n"
+ by (induct m) (simp_all add: algebra_simps)
+
+lemma power_mult:
+ "a ^ (m * n) = (a ^ m) ^ n"
+ by (induct n) (simp_all add: power_add)
+
+lemma power2_eq_square: "a\<^sup>2 = a * a"
+ by (simp add: numeral_2_eq_2)
+
+lemma power3_eq_cube: "a ^ 3 = a * a * a"
+ by (simp add: numeral_3_eq_3 mult_assoc)
+
+lemma power_even_eq:
+ "a ^ (2 * n) = (a ^ n)\<^sup>2"
+ by (subst mult_commute) (simp add: power_mult)
+
+lemma power_odd_eq:
+ "a ^ Suc (2*n) = a * (a ^ n)\<^sup>2"
+ by (simp add: power_even_eq)
+
+lemma power_numeral_even:
+ "z ^ numeral (Num.Bit0 w) = (let w = z ^ (numeral w) in w * w)"
+ unfolding numeral_Bit0 power_add Let_def ..
+
+lemma power_numeral_odd:
+ "z ^ numeral (Num.Bit1 w) = (let w = z ^ (numeral w) in z * w * w)"
+ unfolding numeral_Bit1 One_nat_def add_Suc_right add_0_right
+ unfolding power_Suc power_add Let_def mult_assoc ..
+
+lemma funpow_times_power:
+ "(times x ^^ f x) = times (x ^ f x)"
+proof (induct "f x" arbitrary: f)
+ case 0 then show ?case by (simp add: fun_eq_iff)
+next
+ case (Suc n)
+ def g \<equiv> "\<lambda>x. f x - 1"
+ with Suc have "n = g x" by simp
+ with Suc have "times x ^^ g x = times (x ^ g x)" by simp
+ moreover from Suc g_def have "f x = g x + 1" by simp
+ ultimately show ?case by (simp add: power_add funpow_add fun_eq_iff mult_assoc)
+qed
+
+end
+
+context comm_monoid_mult
+begin
+
+lemma power_mult_distrib:
+ "(a * b) ^ n = (a ^ n) * (b ^ n)"
+ by (induct n) (simp_all add: mult_ac)
+
+end
+
+context semiring_numeral
+begin
+
+lemma numeral_sqr: "numeral (Num.sqr k) = numeral k * numeral k"
+ by (simp only: sqr_conv_mult numeral_mult)
+
+lemma numeral_pow: "numeral (Num.pow k l) = numeral k ^ numeral l"
+ by (induct l, simp_all only: numeral_class.numeral.simps pow.simps
+ numeral_sqr numeral_mult power_add power_one_right)
+
+lemma power_numeral [simp]: "numeral k ^ numeral l = numeral (Num.pow k l)"
+ by (rule numeral_pow [symmetric])
+
+end
+
+context semiring_1
+begin
+
+lemma of_nat_power:
+ "of_nat (m ^ n) = of_nat m ^ n"
+ by (induct n) (simp_all add: of_nat_mult)
+
+lemma power_zero_numeral [simp]: "(0::'a) ^ numeral k = 0"
+ by (simp add: numeral_eq_Suc)
+
+lemma zero_power2: "0\<^sup>2 = 0" (* delete? *)
+ by (rule power_zero_numeral)
+
+lemma one_power2: "1\<^sup>2 = 1" (* delete? *)
+ by (rule power_one)
+
+end
+
+context comm_semiring_1
+begin
+
+text {* The divides relation *}
+
+lemma le_imp_power_dvd:
+ assumes "m \<le> n" shows "a ^ m dvd a ^ n"
+proof
+ have "a ^ n = a ^ (m + (n - m))"
+ using `m \<le> n` by simp
+ also have "\<dots> = a ^ m * a ^ (n - m)"
+ by (rule power_add)
+ finally show "a ^ n = a ^ m * a ^ (n - m)" .
+qed
+
+lemma power_le_dvd:
+ "a ^ n dvd b \<Longrightarrow> m \<le> n \<Longrightarrow> a ^ m dvd b"
+ by (rule dvd_trans [OF le_imp_power_dvd])
+
+lemma dvd_power_same:
+ "x dvd y \<Longrightarrow> x ^ n dvd y ^ n"
+ by (induct n) (auto simp add: mult_dvd_mono)
+
+lemma dvd_power_le:
+ "x dvd y \<Longrightarrow> m \<ge> n \<Longrightarrow> x ^ n dvd y ^ m"
+ by (rule power_le_dvd [OF dvd_power_same])
+
+lemma dvd_power [simp]:
+ assumes "n > (0::nat) \<or> x = 1"
+ shows "x dvd (x ^ n)"
+using assms proof
+ assume "0 < n"
+ then have "x ^ n = x ^ Suc (n - 1)" by simp
+ then show "x dvd (x ^ n)" by simp
+next
+ assume "x = 1"
+ then show "x dvd (x ^ n)" by simp
+qed
+
+end
+
+context ring_1
+begin
+
+lemma power_minus:
+ "(- a) ^ n = (- 1) ^ n * a ^ n"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) then show ?case
+ by (simp del: power_Suc add: power_Suc2 mult_assoc)
+qed
+
+lemma power_minus_Bit0:
+ "(- x) ^ numeral (Num.Bit0 k) = x ^ numeral (Num.Bit0 k)"
+ by (induct k, simp_all only: numeral_class.numeral.simps power_add
+ power_one_right mult_minus_left mult_minus_right minus_minus)
+
+lemma power_minus_Bit1:
+ "(- x) ^ numeral (Num.Bit1 k) = - (x ^ numeral (Num.Bit1 k))"
+ by (simp only: eval_nat_numeral(3) power_Suc power_minus_Bit0 mult_minus_left)
+
+lemma power_neg_numeral_Bit0 [simp]:
+ "neg_numeral k ^ numeral (Num.Bit0 l) = numeral (Num.pow k (Num.Bit0 l))"
+ by (simp only: neg_numeral_def power_minus_Bit0 power_numeral)
+
+lemma power_neg_numeral_Bit1 [simp]:
+ "neg_numeral k ^ numeral (Num.Bit1 l) = neg_numeral (Num.pow k (Num.Bit1 l))"
+ by (simp only: neg_numeral_def power_minus_Bit1 power_numeral pow.simps)
+
+lemma power2_minus [simp]:
+ "(- a)\<^sup>2 = a\<^sup>2"
+ by (rule power_minus_Bit0)
+
+lemma power_minus1_even [simp]:
+ "-1 ^ (2*n) = 1"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) then show ?case by (simp add: power_add power2_eq_square)
+qed
+
+lemma power_minus1_odd:
+ "-1 ^ Suc (2*n) = -1"
+ by simp
+
+lemma power_minus_even [simp]:
+ "(-a) ^ (2*n) = a ^ (2*n)"
+ by (simp add: power_minus [of a])
+
+end
+
+context ring_1_no_zero_divisors
+begin
+
+lemma field_power_not_zero:
+ "a \<noteq> 0 \<Longrightarrow> a ^ n \<noteq> 0"
+ by (induct n) auto
+
+lemma zero_eq_power2 [simp]:
+ "a\<^sup>2 = 0 \<longleftrightarrow> a = 0"
+ unfolding power2_eq_square by simp
+
+lemma power2_eq_1_iff:
+ "a\<^sup>2 = 1 \<longleftrightarrow> a = 1 \<or> a = - 1"
+ unfolding power2_eq_square by (rule square_eq_1_iff)
+
+end
+
+context idom
+begin
+
+lemma power2_eq_iff: "x\<^sup>2 = y\<^sup>2 \<longleftrightarrow> x = y \<or> x = - y"
+ unfolding power2_eq_square by (rule square_eq_iff)
+
+end
+
+context division_ring
+begin
+
+text {* FIXME reorient or rename to @{text nonzero_inverse_power} *}
+lemma nonzero_power_inverse:
+ "a \<noteq> 0 \<Longrightarrow> inverse (a ^ n) = (inverse a) ^ n"
+ by (induct n)
+ (simp_all add: nonzero_inverse_mult_distrib power_commutes field_power_not_zero)
+
+end
+
+context field
+begin
+
+lemma nonzero_power_divide:
+ "b \<noteq> 0 \<Longrightarrow> (a / b) ^ n = a ^ n / b ^ n"
+ by (simp add: divide_inverse power_mult_distrib nonzero_power_inverse)
+
+end
+
+
+subsection {* Exponentiation on ordered types *}
+
+context linordered_ring (* TODO: move *)
+begin
+
+lemma sum_squares_ge_zero:
+ "0 \<le> x * x + y * y"
+ by (intro add_nonneg_nonneg zero_le_square)
+
+lemma not_sum_squares_lt_zero:
+ "\<not> x * x + y * y < 0"
+ by (simp add: not_less sum_squares_ge_zero)
+
+end
+
+context linordered_semidom
+begin
+
+lemma zero_less_power [simp]:
+ "0 < a \<Longrightarrow> 0 < a ^ n"
+ by (induct n) (simp_all add: mult_pos_pos)
+
+lemma zero_le_power [simp]:
+ "0 \<le> a \<Longrightarrow> 0 \<le> a ^ n"
+ by (induct n) (simp_all add: mult_nonneg_nonneg)
+
+lemma power_mono:
+ "a \<le> b \<Longrightarrow> 0 \<le> a \<Longrightarrow> a ^ n \<le> b ^ n"
+ by (induct n) (auto intro: mult_mono order_trans [of 0 a b])
+
+lemma one_le_power [simp]: "1 \<le> a \<Longrightarrow> 1 \<le> a ^ n"
+ using power_mono [of 1 a n] by simp
+
+lemma power_le_one: "\<lbrakk>0 \<le> a; a \<le> 1\<rbrakk> \<Longrightarrow> a ^ n \<le> 1"
+ using power_mono [of a 1 n] by simp
+
+lemma power_gt1_lemma:
+ assumes gt1: "1 < a"
+ shows "1 < a * a ^ n"
+proof -
+ from gt1 have "0 \<le> a"
+ by (fact order_trans [OF zero_le_one less_imp_le])
+ have "1 * 1 < a * 1" using gt1 by simp
+ also have "\<dots> \<le> a * a ^ n" using gt1
+ by (simp only: mult_mono `0 \<le> a` one_le_power order_less_imp_le
+ zero_le_one order_refl)
+ finally show ?thesis by simp
+qed
+
+lemma power_gt1:
+ "1 < a \<Longrightarrow> 1 < a ^ Suc n"
+ by (simp add: power_gt1_lemma)
+
+lemma one_less_power [simp]:
+ "1 < a \<Longrightarrow> 0 < n \<Longrightarrow> 1 < a ^ n"
+ by (cases n) (simp_all add: power_gt1_lemma)
+
+lemma power_le_imp_le_exp:
+ assumes gt1: "1 < a"
+ shows "a ^ m \<le> a ^ n \<Longrightarrow> m \<le> n"
+proof (induct m arbitrary: n)
+ case 0
+ show ?case by simp
+next
+ case (Suc m)
+ show ?case
+ proof (cases n)
+ case 0
+ with Suc.prems Suc.hyps have "a * a ^ m \<le> 1" by simp
+ with gt1 show ?thesis
+ by (force simp only: power_gt1_lemma
+ not_less [symmetric])
+ next
+ case (Suc n)
+ with Suc.prems Suc.hyps show ?thesis
+ by (force dest: mult_left_le_imp_le
+ simp add: less_trans [OF zero_less_one gt1])
+ qed
+qed
+
+text{*Surely we can strengthen this? It holds for @{text "0<a<1"} too.*}
+lemma power_inject_exp [simp]:
+ "1 < a \<Longrightarrow> a ^ m = a ^ n \<longleftrightarrow> m = n"
+ by (force simp add: order_antisym power_le_imp_le_exp)
+
+text{*Can relax the first premise to @{term "0<a"} in the case of the
+natural numbers.*}
+lemma power_less_imp_less_exp:
+ "1 < a \<Longrightarrow> a ^ m < a ^ n \<Longrightarrow> m < n"
+ by (simp add: order_less_le [of m n] less_le [of "a^m" "a^n"]
+ power_le_imp_le_exp)
+
+lemma power_strict_mono [rule_format]:
+ "a < b \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 < n \<longrightarrow> a ^ n < b ^ n"
+ by (induct n)
+ (auto simp add: mult_strict_mono le_less_trans [of 0 a b])
+
+text{*Lemma for @{text power_strict_decreasing}*}
+lemma power_Suc_less:
+ "0 < a \<Longrightarrow> a < 1 \<Longrightarrow> a * a ^ n < a ^ n"
+ by (induct n)
+ (auto simp add: mult_strict_left_mono)
+
+lemma power_strict_decreasing [rule_format]:
+ "n < N \<Longrightarrow> 0 < a \<Longrightarrow> a < 1 \<longrightarrow> a ^ N < a ^ n"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: power_Suc_less less_Suc_eq)
+ apply (subgoal_tac "a * a^N < 1 * a^n")
+ apply simp
+ apply (rule mult_strict_mono) apply auto
+ done
+qed
+
+text{*Proof resembles that of @{text power_strict_decreasing}*}
+lemma power_decreasing [rule_format]:
+ "n \<le> N \<Longrightarrow> 0 \<le> a \<Longrightarrow> a \<le> 1 \<longrightarrow> a ^ N \<le> a ^ n"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: le_Suc_eq)
+ apply (subgoal_tac "a * a^N \<le> 1 * a^n", simp)
+ apply (rule mult_mono) apply auto
+ done
+qed
+
+lemma power_Suc_less_one:
+ "0 < a \<Longrightarrow> a < 1 \<Longrightarrow> a ^ Suc n < 1"
+ using power_strict_decreasing [of 0 "Suc n" a] by simp
+
+text{*Proof again resembles that of @{text power_strict_decreasing}*}
+lemma power_increasing [rule_format]:
+ "n \<le> N \<Longrightarrow> 1 \<le> a \<Longrightarrow> a ^ n \<le> a ^ N"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: le_Suc_eq)
+ apply (subgoal_tac "1 * a^n \<le> a * a^N", simp)
+ apply (rule mult_mono) apply (auto simp add: order_trans [OF zero_le_one])
+ done
+qed
+
+text{*Lemma for @{text power_strict_increasing}*}
+lemma power_less_power_Suc:
+ "1 < a \<Longrightarrow> a ^ n < a * a ^ n"
+ by (induct n) (auto simp add: mult_strict_left_mono less_trans [OF zero_less_one])
+
+lemma power_strict_increasing [rule_format]:
+ "n < N \<Longrightarrow> 1 < a \<longrightarrow> a ^ n < a ^ N"
+proof (induct N)
+ case 0 then show ?case by simp
+next
+ case (Suc N) then show ?case
+ apply (auto simp add: power_less_power_Suc less_Suc_eq)
+ apply (subgoal_tac "1 * a^n < a * a^N", simp)
+ apply (rule mult_strict_mono) apply (auto simp add: less_trans [OF zero_less_one] less_imp_le)
+ done
+qed
+
+lemma power_increasing_iff [simp]:
+ "1 < b \<Longrightarrow> b ^ x \<le> b ^ y \<longleftrightarrow> x \<le> y"
+ by (blast intro: power_le_imp_le_exp power_increasing less_imp_le)
+
+lemma power_strict_increasing_iff [simp]:
+ "1 < b \<Longrightarrow> b ^ x < b ^ y \<longleftrightarrow> x < y"
+by (blast intro: power_less_imp_less_exp power_strict_increasing)
+
+lemma power_le_imp_le_base:
+ assumes le: "a ^ Suc n \<le> b ^ Suc n"
+ and ynonneg: "0 \<le> b"
+ shows "a \<le> b"
+proof (rule ccontr)
+ assume "~ a \<le> b"
+ then have "b < a" by (simp only: linorder_not_le)
+ then have "b ^ Suc n < a ^ Suc n"
+ by (simp only: assms power_strict_mono)
+ from le and this show False
+ by (simp add: linorder_not_less [symmetric])
+qed
+
+lemma power_less_imp_less_base:
+ assumes less: "a ^ n < b ^ n"
+ assumes nonneg: "0 \<le> b"
+ shows "a < b"
+proof (rule contrapos_pp [OF less])
+ assume "~ a < b"
+ hence "b \<le> a" by (simp only: linorder_not_less)
+ hence "b ^ n \<le> a ^ n" using nonneg by (rule power_mono)
+ thus "\<not> a ^ n < b ^ n" by (simp only: linorder_not_less)
+qed
+
+lemma power_inject_base:
+ "a ^ Suc n = b ^ Suc n \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> a = b"
+by (blast intro: power_le_imp_le_base antisym eq_refl sym)
+
+lemma power_eq_imp_eq_base:
+ "a ^ n = b ^ n \<Longrightarrow> 0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> 0 < n \<Longrightarrow> a = b"
+ by (cases n) (simp_all del: power_Suc, rule power_inject_base)
+
+lemma power2_le_imp_le:
+ "x\<^sup>2 \<le> y\<^sup>2 \<Longrightarrow> 0 \<le> y \<Longrightarrow> x \<le> y"
+ unfolding numeral_2_eq_2 by (rule power_le_imp_le_base)
+
+lemma power2_less_imp_less:
+ "x\<^sup>2 < y\<^sup>2 \<Longrightarrow> 0 \<le> y \<Longrightarrow> x < y"
+ by (rule power_less_imp_less_base)
+
+lemma power2_eq_imp_eq:
+ "x\<^sup>2 = y\<^sup>2 \<Longrightarrow> 0 \<le> x \<Longrightarrow> 0 \<le> y \<Longrightarrow> x = y"
+ unfolding numeral_2_eq_2 by (erule (2) power_eq_imp_eq_base) simp
+
+end
+
+context linordered_ring_strict
+begin
+
+lemma sum_squares_eq_zero_iff:
+ "x * x + y * y = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: add_nonneg_eq_0_iff)
+
+lemma sum_squares_le_zero_iff:
+ "x * x + y * y \<le> 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: le_less not_sum_squares_lt_zero sum_squares_eq_zero_iff)
+
+lemma sum_squares_gt_zero_iff:
+ "0 < x * x + y * y \<longleftrightarrow> x \<noteq> 0 \<or> y \<noteq> 0"
+ by (simp add: not_le [symmetric] sum_squares_le_zero_iff)
+
+end
+
+context linordered_idom
+begin
+
+lemma power_abs:
+ "abs (a ^ n) = abs a ^ n"
+ by (induct n) (auto simp add: abs_mult)
+
+lemma abs_power_minus [simp]:
+ "abs ((-a) ^ n) = abs (a ^ n)"
+ by (simp add: power_abs)
+
+lemma zero_less_power_abs_iff [simp, no_atp]:
+ "0 < abs a ^ n \<longleftrightarrow> a \<noteq> 0 \<or> n = 0"
+proof (induct n)
+ case 0 show ?case by simp
+next
+ case (Suc n) show ?case by (auto simp add: Suc zero_less_mult_iff)
+qed
+
+lemma zero_le_power_abs [simp]:
+ "0 \<le> abs a ^ n"
+ by (rule zero_le_power [OF abs_ge_zero])
+
+lemma zero_le_power2 [simp]:
+ "0 \<le> a\<^sup>2"
+ by (simp add: power2_eq_square)
+
+lemma zero_less_power2 [simp]:
+ "0 < a\<^sup>2 \<longleftrightarrow> a \<noteq> 0"
+ by (force simp add: power2_eq_square zero_less_mult_iff linorder_neq_iff)
+
+lemma power2_less_0 [simp]:
+ "\<not> a\<^sup>2 < 0"
+ by (force simp add: power2_eq_square mult_less_0_iff)
+
+lemma abs_power2 [simp]:
+ "abs (a\<^sup>2) = a\<^sup>2"
+ by (simp add: power2_eq_square abs_mult abs_mult_self)
+
+lemma power2_abs [simp]:
+ "(abs a)\<^sup>2 = a\<^sup>2"
+ by (simp add: power2_eq_square abs_mult_self)
+
+lemma odd_power_less_zero:
+ "a < 0 \<Longrightarrow> a ^ Suc (2*n) < 0"
+proof (induct n)
+ case 0
+ then show ?case by simp
+next
+ case (Suc n)
+ have "a ^ Suc (2 * Suc n) = (a*a) * a ^ Suc(2*n)"
+ by (simp add: mult_ac power_add power2_eq_square)
+ thus ?case
+ by (simp del: power_Suc add: Suc mult_less_0_iff mult_neg_neg)
+qed
+
+lemma odd_0_le_power_imp_0_le:
+ "0 \<le> a ^ Suc (2*n) \<Longrightarrow> 0 \<le> a"
+ using odd_power_less_zero [of a n]
+ by (force simp add: linorder_not_less [symmetric])
+
+lemma zero_le_even_power'[simp]:
+ "0 \<le> a ^ (2*n)"
+proof (induct n)
+ case 0
+ show ?case by simp
+next
+ case (Suc n)
+ have "a ^ (2 * Suc n) = (a*a) * a ^ (2*n)"
+ by (simp add: mult_ac power_add power2_eq_square)
+ thus ?case
+ by (simp add: Suc zero_le_mult_iff)
+qed
+
+lemma sum_power2_ge_zero:
+ "0 \<le> x\<^sup>2 + y\<^sup>2"
+ by (intro add_nonneg_nonneg zero_le_power2)
+
+lemma not_sum_power2_lt_zero:
+ "\<not> x\<^sup>2 + y\<^sup>2 < 0"
+ unfolding not_less by (rule sum_power2_ge_zero)
+
+lemma sum_power2_eq_zero_iff:
+ "x\<^sup>2 + y\<^sup>2 = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ unfolding power2_eq_square by (simp add: add_nonneg_eq_0_iff)
+
+lemma sum_power2_le_zero_iff:
+ "x\<^sup>2 + y\<^sup>2 \<le> 0 \<longleftrightarrow> x = 0 \<and> y = 0"
+ by (simp add: le_less sum_power2_eq_zero_iff not_sum_power2_lt_zero)
+
+lemma sum_power2_gt_zero_iff:
+ "0 < x\<^sup>2 + y\<^sup>2 \<longleftrightarrow> x \<noteq> 0 \<or> y \<noteq> 0"
+ unfolding not_le [symmetric] by (simp add: sum_power2_le_zero_iff)
+
+end
+
+
+subsection {* Miscellaneous rules *}
+
+lemma power_eq_if: "p ^ m = (if m=0 then 1 else p * (p ^ (m - 1)))"
+ unfolding One_nat_def by (cases m) simp_all
+
+lemma power2_sum:
+ fixes x y :: "'a::comm_semiring_1"
+ shows "(x + y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 + 2 * x * y"
+ by (simp add: algebra_simps power2_eq_square mult_2_right)
+
+lemma power2_diff:
+ fixes x y :: "'a::comm_ring_1"
+ shows "(x - y)\<^sup>2 = x\<^sup>2 + y\<^sup>2 - 2 * x * y"
+ by (simp add: ring_distribs power2_eq_square mult_2) (rule mult_commute)
+
+lemma power_0_Suc [simp]:
+ "(0::'a::{power, semiring_0}) ^ Suc n = 0"
+ by simp
+
+text{*It looks plausible as a simprule, but its effect can be strange.*}
+lemma power_0_left:
+ "0 ^ n = (if n = 0 then 1 else (0::'a::{power, semiring_0}))"
+ by (induct n) simp_all
+
+lemma power_eq_0_iff [simp]:
+ "a ^ n = 0 \<longleftrightarrow>
+ a = (0::'a::{mult_zero,zero_neq_one,no_zero_divisors,power}) \<and> n \<noteq> 0"
+ by (induct n)
+ (auto simp add: no_zero_divisors elim: contrapos_pp)
+
+lemma (in field) power_diff:
+ assumes nz: "a \<noteq> 0"
+ shows "n \<le> m \<Longrightarrow> a ^ (m - n) = a ^ m / a ^ n"
+ by (induct m n rule: diff_induct) (simp_all add: nz field_power_not_zero)
+
+text{*Perhaps these should be simprules.*}
+lemma power_inverse:
+ fixes a :: "'a::division_ring_inverse_zero"
+ shows "inverse (a ^ n) = inverse a ^ n"
+apply (cases "a = 0")
+apply (simp add: power_0_left)
+apply (simp add: nonzero_power_inverse)
+done (* TODO: reorient or rename to inverse_power *)
+
+lemma power_one_over:
+ "1 / (a::'a::{field_inverse_zero, power}) ^ n = (1 / a) ^ n"
+ by (simp add: divide_inverse) (rule power_inverse)
+
+lemma power_divide:
+ "(a / b) ^ n = (a::'a::field_inverse_zero) ^ n / b ^ n"
+apply (cases "b = 0")
+apply (simp add: power_0_left)
+apply (rule nonzero_power_divide)
+apply assumption
+done
+
+text {* Simprules for comparisons where common factors can be cancelled. *}
+
+lemmas zero_compare_simps =
+ add_strict_increasing add_strict_increasing2 add_increasing
+ zero_le_mult_iff zero_le_divide_iff
+ zero_less_mult_iff zero_less_divide_iff
+ mult_le_0_iff divide_le_0_iff
+ mult_less_0_iff divide_less_0_iff
+ zero_le_power2 power2_less_0
+
+
+subsection {* Exponentiation for the Natural Numbers *}
+
+lemma nat_one_le_power [simp]:
+ "Suc 0 \<le> i \<Longrightarrow> Suc 0 \<le> i ^ n"
+ by (rule one_le_power [of i n, unfolded One_nat_def])
+
+lemma nat_zero_less_power_iff [simp]:
+ "x ^ n > 0 \<longleftrightarrow> x > (0::nat) \<or> n = 0"
+ by (induct n) auto
+
+lemma nat_power_eq_Suc_0_iff [simp]:
+ "x ^ m = Suc 0 \<longleftrightarrow> m = 0 \<or> x = Suc 0"
+ by (induct m) auto
+
+lemma power_Suc_0 [simp]:
+ "Suc 0 ^ n = Suc 0"
+ by simp
+
+text{*Valid for the naturals, but what if @{text"0<i<1"}?
+Premises cannot be weakened: consider the case where @{term "i=0"},
+@{term "m=1"} and @{term "n=0"}.*}
+lemma nat_power_less_imp_less:
+ assumes nonneg: "0 < (i\<Colon>nat)"
+ assumes less: "i ^ m < i ^ n"
+ shows "m < n"
+proof (cases "i = 1")
+ case True with less power_one [where 'a = nat] show ?thesis by simp
+next
+ case False with nonneg have "1 < i" by auto
+ from power_strict_increasing_iff [OF this] less show ?thesis ..
+qed
+
+lemma power_dvd_imp_le:
+ "i ^ m dvd i ^ n \<Longrightarrow> (1::nat) < i \<Longrightarrow> m \<le> n"
+ apply (rule power_le_imp_le_exp, assumption)
+ apply (erule dvd_imp_le, simp)
+ done
+
+lemma power2_nat_le_eq_le:
+ fixes m n :: nat
+ shows "m\<^sup>2 \<le> n\<^sup>2 \<longleftrightarrow> m \<le> n"
+ by (auto intro: power2_le_imp_le power_mono)
+
+lemma power2_nat_le_imp_le:
+ fixes m n :: nat
+ assumes "m\<^sup>2 \<le> n"
+ shows "m \<le> n"
+ using assms by (cases m) (simp_all add: power2_eq_square)
+
+
+
+subsection {* Code generator tweak *}
+
+lemma power_power_power [code]:
+ "power = power.power (1::'a::{power}) (op *)"
+ unfolding power_def power.power_def ..
+
+declare power.power.simps [code]
+
+code_identifier
+ code_module Power \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
+
+end
+
diff --git a/tests/examplefiles/example.weechatlog b/tests/examplefiles/example.weechatlog
index 9f036166..15e8130f 100644
--- a/tests/examplefiles/example.weechatlog
+++ b/tests/examplefiles/example.weechatlog
@@ -6,4 +6,6 @@
2007 Sep 01 00:23:55 -=- Das Topic von &bitlbee lautet: "Welcome to the control channel. Type help for help information."
2007 Sep 01 00:23:55 <root> Welcome to the BitlBee gateway!
2007 Sep 01 00:23:55 <root>
-2007 Sep 01 00:23:55 <root> If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there. \ No newline at end of file
+2007 Sep 01 00:23:55 <root> If you've never used BitlBee before, please do read the help information using the help command. Lots of FAQ's are answered there.
+# check for fixed pathological matching behavior
+1111111111111111111111111111111
diff --git a/tests/examplefiles/example_coq.v b/tests/examplefiles/example_coq.v
new file mode 100644
index 00000000..fd1a7bc8
--- /dev/null
+++ b/tests/examplefiles/example_coq.v
@@ -0,0 +1,4 @@
+Lemma FalseLemma : False <-> False.
+tauto.
+Qed.
+Check FalseLemma.
diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex
index 0912d099..ddca7f60 100644
--- a/tests/examplefiles/example_elixir.ex
+++ b/tests/examplefiles/example_elixir.ex
@@ -1,13 +1,19 @@
# Numbers
0b0101011
-1234 ; 0x1A ; 0xbeef ; 0763
+1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123
3.14 ; 5.0e21 ; 0.5e-12
100_000_000
+# these are not valid numbers
+0b012 ; 0xboar ; 0o888
+0B01 ; 0XAF ; 0O123
+
# Characters
?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?,
?\x{12} ; ?\x{abcd}
-?\x34 ; ?\xf
+?\x34 ; ?\xF
+
+# these show that only the first digit is part of the character
?\123 ; ?\12 ; ?\7
# Atoms
@@ -18,12 +24,13 @@
line ' \s \123 \xff
atom"
:... ; :<<>> ; :%{} ; :% ; :{}
-:++; :--; :*; :~~~
+:++; :--; :*; :~~~; :::
+:% ; :. ; :<-
# Strings
"Hello world"
-"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\z\+ \\ escapes"
-"Quotes ' inside \" \123 the \"\" \xF string \\\" end"
+"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes"
+"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end"
"Multiline
string"
@@ -45,18 +52,28 @@ atom"
~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s
~W(hello #{no "123" \c\d \123 interpol} world)s
+~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here }
+
~S"No escapes \s\t\n and no #{interpolation}"
:"atoms work #{"to" <> "o"}"
# Operators
x = 1 + 2.0 * 3
-y = true and false; z = false xor true
+y = true and false; z = false or true
... = 144
... == !x && y || z
"hello" |> String.upcase |> String.downcase()
{^z, a} = {true, x}
+# Free operators (added in 1.0.0)
+p ~>> f = bind(p, f)
+p1 ~> p2 = pair_right(p1, p2)
+p1 <~ p2 = pair_left(p1, p2)
+p1 <~> p2 = pair_both(p1, p2)
+p |~> f = map(p, f)
+p1 <|> p2 = either(p1, p2)
+
# Lists, tuples, maps, keywords
[1, :a, 'hello'] ++ [2, 3]
[:head | [?t, ?a, ?i, ?l]]
@@ -75,13 +92,18 @@ map = %{shortcut: "syntax"}
# Comprehensions
for x <- 1..10, x < 5, do: {x, x}
pixels = "12345678"
-for << <<r::4, g::4, b::4, a::4>> <- pixels >> do
+for << <<r::4, g::4, b::4, a::size(4)>> <- pixels >> do
[r, {g, %{"b" => a}}]
end
# String interpolation
"String #{inspect "interpolation"} is quite #{1+4+7} difficult"
+# Identifiers
+abc_123 = 1
+_018OP = 2
+A__0 == 3
+
# Modules
defmodule Long.Module.Name do
@moduledoc "Simple module docstring"
@@ -89,7 +111,12 @@ defmodule Long.Module.Name do
@doc """
Multiline docstring
"with quotes"
- and #{ %{"interpolation" => "in" <> "action"} }
+ and #{ inspect %{"interpolation" => "in" <> "action"} }
+ now with #{ {:a, 'tuple'} }
+ and #{ inspect {
+ :tuple,
+ %{ with: "nested #{ inspect %{ :interpolation => %{} } }" }
+ } }
"""
defstruct [:a, :name, :height]
@@ -110,7 +137,8 @@ end
# Structs
defmodule Second.Module do
s = %Long.Module.Name{name: "Silly"}
- %{s | height: {192, :cm}}
+ %Long.Module.Name{s | height: {192, :cm}}
+ ".. #{%Long.Module.Name{s | height: {192, :cm}}} .."
end
# Types, pseudo-vars, attributes
@@ -182,7 +210,7 @@ end
# Lexical scope modifiers
import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2]
-alias Long.Module.Name, as: Namen
+alias Long.Module.Name, as: N0men123_and4
use Bitwise
4 &&& 5
diff --git a/tests/examplefiles/interp.scala b/tests/examplefiles/interp.scala
new file mode 100644
index 00000000..4131b75e
--- /dev/null
+++ b/tests/examplefiles/interp.scala
@@ -0,0 +1,10 @@
+val n = 123;
+val a = s"n=$n";
+val a2 = s"n=$n''";
+val b = s"""n=$n""";
+val c = f"n=$n%f";
+val d = f"""n=$n%f""";
+val d2 = s"""a"""";
+val e = s"abc\u00e9";
+val f = s"a${n}b";
+val g = s"a${n + 1}b";
diff --git a/tests/examplefiles/matlab_sample b/tests/examplefiles/matlab_sample
index 4f61afe8..bb00b517 100644
--- a/tests/examplefiles/matlab_sample
+++ b/tests/examplefiles/matlab_sample
@@ -28,3 +28,7 @@ y = exp(x);
{%
a block comment
%}
+
+function no_arg_func
+fprintf('%s\n', 'function with no args')
+end
diff --git a/tests/examplefiles/pycon_test.pycon b/tests/examplefiles/pycon_test.pycon
index ff702864..9c4fc3d3 100644
--- a/tests/examplefiles/pycon_test.pycon
+++ b/tests/examplefiles/pycon_test.pycon
@@ -9,6 +9,9 @@ KeyboardInterrupt
>>> 1/0
Traceback (most recent call last):
-...
+ ...
ZeroDivisionError
+>>> 1/0 # this used to swallow the traceback
+Traceback (most recent call last):
+ ...
diff --git a/tests/examplefiles/rust_example.rs b/tests/examplefiles/rust_example.rs
index 1c0a70c3..8c44af1d 100644
--- a/tests/examplefiles/rust_example.rs
+++ b/tests/examplefiles/rust_example.rs
@@ -11,6 +11,8 @@
// based on:
// http://shootout.alioth.debian.org/u32/benchmark.php?test=nbody&lang=java
+/* nest some /* comments */ */
+
extern mod std;
use core::os;
@@ -22,7 +24,7 @@ use core::os;
// an llvm intrinsic.
#[nolink]
extern mod libc {
- #[legacy_exports];
+ #![legacy_exports];
fn sqrt(n: float) -> float;
}
diff --git a/tests/examplefiles/test.agda b/tests/examplefiles/test.agda
index d930a77b..f6cea91c 100644
--- a/tests/examplefiles/test.agda
+++ b/tests/examplefiles/test.agda
@@ -12,11 +12,18 @@ open import Data.List hiding ([_])
open import Data.Vec hiding ([_])
open import Relation.Nullary.Core
open import Relation.Binary.PropositionalEquality using (_≡_; refl; cong; trans; inspect; [_])
+ renaming (setoid to setiod)
open SemiringSolver
{- this is a {- nested -} comment -}
+postulate pierce : {A B : Set} → ((A → B) → A) → A
+
+instance
+ someBool : Bool
+ someBool = true
+
-- Factorial
_! : ℕ → ℕ
0 ! = 1
diff --git a/tests/examplefiles/test.idr b/tests/examplefiles/test.idr
index f0e96d88..fd008d31 100644
--- a/tests/examplefiles/test.idr
+++ b/tests/examplefiles/test.idr
@@ -60,6 +60,7 @@ using (G : Vect n Ty)
fromInteger = Val . fromInteger
+ ||| Evaluates an expression in the given context.
interp : Env G -> {static} Expr G t -> interpTy t
interp env (Var i) = lookup i env
interp env (Val x) = x
@@ -87,6 +88,13 @@ using (G : Vect n Ty)
testFac : Int
testFac = interp [] eFac 4
+--testFacTooBig : Int
+--testFacTooBig = interp [] eFac 100000
+
+ {-testFacTooBig2 : Int
+testFacTooBig2 = interp [] eFac 1000
+-}
+
main : IO ()
main = print testFac
diff --git a/tests/examplefiles/test.php b/tests/examplefiles/test.php
index 218892fe..2ce4023e 100644
--- a/tests/examplefiles/test.php
+++ b/tests/examplefiles/test.php
@@ -504,4 +504,12 @@ function &byref() {
$x = array();
return $x;
}
+
+ echo <<<EOF
+
+ Test the heredocs...
+
+ EOF;
+
?>
+
diff --git a/tests/examplefiles/test.pypylog b/tests/examplefiles/test.pypylog
index f85030cb..1a6aa5ed 100644
--- a/tests/examplefiles/test.pypylog
+++ b/tests/examplefiles/test.pypylog
@@ -998,842 +998,3 @@ setfield_gc(p73, i14, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntO
setarrayitem_gc(p60, 9, p73, descr=<GcPtrArrayDescr>)
p76 = new_with_vtable(19800744)
setfield_gc(p76, f15, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p60, 10, p76, descr=<GcPtrArrayDescr>)
-setfield_gc(p1, 1, descr=<BoolFieldDescr pypy.interpreter.pyframe.PyFrame.inst_frame_finished_execution 148>)
-setfield_gc(p1, ConstPtr(ptr79), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-setfield_gc(p1, ConstPtr(ptr55), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-setfield_gc(p1, 0, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-setfield_gc(p1, p4, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-setfield_gc(p1, 0, descr=<BoolFieldDescr pypy.interpreter.pyframe.PyFrame.inst_is_being_profiled 149>)
-setfield_gc(p1, 307, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p84 = new_with_vtable(19800744)
-setfield_gc(p84, f36, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-finish(p84, descr=<DoneWithThisFrameDescrRef object at 0x140bcc0>)
-[5ed6619e9448] jit-log-opt-bridge}
-[5ed74f2eef6e] {jit-log-opt-loop
-# Loop 2 : loop with 394 ops
-[p0, p1, p2, p3, p4, p5, p6, p7, i8, f9, i10, i11, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-guard_nonnull_class(p7, 19800744, descr=<Guard180>) [p1, p0, p7, p2, p3, p4, p5, p6, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #24 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #27 COMPARE_OP', 0)
-f15 = getfield_gc_pure(p7, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-i16 = float_gt(f15, f9)
-guard_true(i16, descr=<Guard181>) [p1, p0, p6, p7, p2, p3, p4, p5, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #30 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #33 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #36 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #39 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #42 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #45 COMPARE_OP', 0)
-i17 = int_ge(i8, i10)
-guard_false(i17, descr=<Guard182>) [p1, p0, p5, p2, p3, p4, p6, p7, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #48 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #55 LOAD_GLOBAL', 0)
-p18 = getfield_gc(p0, descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-guard_value(p18, ConstPtr(ptr19), descr=<Guard183>) [p1, p0, p18, p2, p3, p4, p5, p6, p7, i8]
-p20 = getfield_gc(p18, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p20, descr=<Guard184>) [p1, p0, p20, p18, p2, p3, p4, p5, p6, p7, i8]
-p22 = getfield_gc(ConstPtr(ptr21), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p22, ConstClass(Function), descr=<Guard185>) [p1, p0, p22, p2, p3, p4, p5, p6, p7, i8]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #58 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #61 CALL_FUNCTION', 0)
-p24 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p24, ConstPtr(ptr25), descr=<Guard186>) [p1, p0, p24, p22, p2, p3, p4, p5, p6, p7, i8]
-p26 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p27 = getfield_gc(p22, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i28 = force_token()
-i29 = int_is_zero(i11)
-guard_true(i29, descr=<Guard187>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #0 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #3 LOAD_ATTR', 1)
-p30 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst_map 48>)
-guard_value(p30, ConstPtr(ptr31), descr=<Guard188>) [p1, p0, p12, p4, p30, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-p33 = getfield_gc(ConstPtr(ptr32), descr=<GcPtrFieldDescr pypy.objspace.std.typeobject.W_TypeObject.inst__version_tag 16>)
-guard_value(p33, ConstPtr(ptr34), descr=<Guard189>) [p1, p0, p12, p4, p33, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-p35 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value2 24>)
-guard_nonnull_class(p35, 19800744, descr=<Guard190>) [p1, p0, p12, p35, p4, p2, p3, p22, p5, p6, p7, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #6 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #9 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #12 BINARY_MULTIPLY', 1)
-f37 = getfield_gc_pure(p35, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f38 = float_mul(f37, f37)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #13 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #16 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #19 LOAD_ATTR', 1)
-p39 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value3 32>)
-guard_nonnull_class(p39, 19800744, descr=<Guard191>) [p1, p0, p12, p39, p4, p2, p3, p22, p5, p6, p7, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #22 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #25 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #28 BINARY_MULTIPLY', 1)
-f41 = getfield_gc_pure(p39, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f42 = float_mul(f41, f41)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #29 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #32 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #35 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #38 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #41 BINARY_ADD', 1)
-f43 = float_add(f38, f42)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #42 BINARY_DIVIDE', 1)
-i45 = float_eq(f43, 0.000000)
-guard_false(i45, descr=<Guard192>) [p1, p0, p12, f43, p2, p3, p22, p4, p5, p6, p7, f42, f38, p26, p13, i28, i8]
-f47 = float_truediv(0.500000, f43)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #43 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #46 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #49 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #52 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #55 LOAD_ATTR', 1)
-p48 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value0 8>)
-guard_nonnull_class(p48, ConstClass(W_IntObject), descr=<Guard193>) [p1, p0, p12, p48, p4, p2, p3, p22, p5, p6, p7, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #58 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #61 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #64 LOAD_ATTR', 1)
-p50 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value1 16>)
-guard_nonnull_class(p50, ConstClass(W_IntObject), descr=<Guard194>) [p1, p0, p12, p50, p4, p2, p3, p22, p5, p6, p7, p48, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #67 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #70 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #73 LOAD_ATTR', 1)
-p52 = getfield_gc(p4, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value4 40>)
-guard_nonnull_class(p52, 19886912, descr=<Guard195>) [p1, p0, p12, p52, p4, p2, p3, p22, p5, p6, p7, p50, p48, f47, f42, f38, p26, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #76 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #79 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #82 LOAD_GLOBAL', 1)
-guard_value(p26, ConstPtr(ptr54), descr=<Guard196>) [p1, p0, p12, p26, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p56 = getfield_gc(p26, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p56, descr=<Guard197>) [p1, p0, p12, p56, p26, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p58 = getfield_gc(ConstPtr(ptr57), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_isnull(p58, descr=<Guard198>) [p1, p0, p12, p58, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p60 = getfield_gc(ConstPtr(ptr59), descr=<GcPtrFieldDescr pypy.interpreter.module.Module.inst_w_dict 8>)
-guard_value(p60, ConstPtr(ptr61), descr=<Guard199>) [p1, p0, p12, p60, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p62 = getfield_gc(p60, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p62, descr=<Guard200>) [p1, p0, p12, p62, p60, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p64 = getfield_gc(ConstPtr(ptr63), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_value(p64, ConstPtr(ptr65), descr=<Guard201>) [p1, p0, p12, p64, p2, p3, p22, p4, p5, p6, p7, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #85 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #88 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #91 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #94 BINARY_SUBTRACT', 1)
-i66 = getfield_gc_pure(p48, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i68 = int_sub_ovf(i66, 1)
-guard_no_overflow(, descr=<Guard202>) [p1, p0, p12, p48, i68, p2, p3, p22, p4, p5, p6, p7, p64, p52, p50, None, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #95 CALL_FUNCTION', 1)
-p70 = getfield_gc(ConstPtr(ptr69), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_name 40>)
-p71 = getfield_gc(ConstPtr(ptr69), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_defs 32>)
-i72 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i72, descr=<Guard203>) [p1, p0, p12, p70, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p73 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i74 = arraylen_gc(p73, descr=<GcPtrArrayDescr>)
-i76 = int_sub(4, i74)
-i78 = int_ge(3, i76)
-guard_true(i78, descr=<Guard204>) [p1, p0, p12, p70, i76, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i79 = int_sub(3, i76)
-i80 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i80, descr=<Guard205>) [p1, p0, p12, p70, i79, i76, p71, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p81 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p82 = getarrayitem_gc(p81, i79, descr=<GcPtrArrayDescr>)
-guard_class(p82, ConstClass(W_IntObject), descr=<Guard206>) [p1, p0, p12, p82, p2, p3, p22, p4, p5, p6, p7, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i84 = getfield_gc_pure(p82, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i85 = int_is_zero(i84)
-guard_false(i85, descr=<Guard207>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i87 = int_lt(i84, 0)
-guard_false(i87, descr=<Guard208>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i89 = int_lt(1, i68)
-guard_true(i89, descr=<Guard209>) [p1, p0, p12, i84, i68, p2, p3, p22, p4, p5, p6, p7, p82, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i90 = int_sub(i68, 1)
-i92 = int_sub(i90, 1)
-i93 = uint_floordiv(i92, i84)
-i95 = int_add(i93, 1)
-i97 = int_lt(i95, 0)
-guard_false(i97, descr=<Guard210>) [p1, p0, p12, i84, i95, p2, p3, p22, p4, p5, p6, p7, p82, i68, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #98 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #99 FOR_ITER', 1)
-i99 = int_gt(i95, 0)
-guard_true(i99, descr=<Guard211>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, i84, i95, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i100 = int_add(1, i84)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #102 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #105 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #108 LOAD_GLOBAL', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #111 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #114 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #117 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #120 BINARY_SUBTRACT', 1)
-i101 = getfield_gc_pure(p50, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i103 = int_sub_ovf(i101, 1)
-guard_no_overflow(, descr=<Guard212>) [p1, p0, p12, p50, i103, p2, p3, p22, p4, p5, p6, p7, i100, i93, i84, None, None, None, None, p52, None, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #121 CALL_FUNCTION', 1)
-i104 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i104, descr=<Guard213>) [p1, p0, p12, p70, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p105 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i106 = arraylen_gc(p105, descr=<GcPtrArrayDescr>)
-i108 = int_sub(4, i106)
-i110 = int_ge(3, i108)
-guard_true(i110, descr=<Guard214>) [p1, p0, p12, p70, i108, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i111 = int_sub(3, i108)
-i112 = getfield_gc_pure(p71, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i112, descr=<Guard215>) [p1, p0, p12, p70, i111, i108, p71, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p113 = getfield_gc_pure(p71, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p114 = getarrayitem_gc(p113, i111, descr=<GcPtrArrayDescr>)
-guard_class(p114, ConstClass(W_IntObject), descr=<Guard216>) [p1, p0, p12, p114, p2, p3, p22, p4, p5, p6, p7, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i116 = getfield_gc_pure(p114, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i117 = int_is_zero(i116)
-guard_false(i117, descr=<Guard217>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i119 = int_lt(i116, 0)
-guard_false(i119, descr=<Guard218>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i121 = int_lt(1, i103)
-guard_true(i121, descr=<Guard219>) [p1, p0, p12, i116, i103, p2, p3, p22, p4, p5, p6, p7, p114, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i122 = int_sub(i103, 1)
-i124 = int_sub(i122, 1)
-i125 = uint_floordiv(i124, i116)
-i127 = int_add(i125, 1)
-i129 = int_lt(i127, 0)
-guard_false(i129, descr=<Guard220>) [p1, p0, p12, i116, i127, p2, p3, p22, p4, p5, p6, p7, p114, i103, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #124 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i131 = int_gt(i127, 0)
-guard_true(i131, descr=<Guard221>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, i116, i127, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i132 = int_add(1, i116)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #128 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #131 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #134 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #137 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #140 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #141 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #144 BINARY_ADD', 1)
-i133 = int_add_ovf(i66, 1)
-guard_no_overflow(, descr=<Guard222>) [p1, p0, p12, i133, p2, p3, p22, p4, p5, p6, p7, i132, i125, i66, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #145 BINARY_SUBSCR', 1)
-i134 = getfield_gc(p52, descr=<SignedFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_len 32>)
-i135 = int_lt(i133, i134)
-guard_true(i135, descr=<Guard223>) [p1, p0, p12, p52, i133, p2, p3, p22, p4, p5, p6, p7, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i136 = getfield_gc(p52, descr=<NonGcPtrFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_buffer 24>)
-f137 = getarrayitem_raw(i136, i133, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #146 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #149 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #152 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #155 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #158 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #159 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #162 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #163 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #166 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #167 BINARY_SUBSCR', 1)
-f138 = getarrayitem_raw(i136, 1, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #168 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #171 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #174 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #177 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #178 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #181 BINARY_MULTIPLY', 1)
-i140 = int_mul_ovf(2, i66)
-guard_no_overflow(, descr=<Guard224>) [p1, p0, p12, p48, i140, p2, p3, p22, p4, p5, p6, p7, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, None, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #182 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #185 BINARY_ADD', 1)
-i141 = int_add_ovf(i140, 1)
-guard_no_overflow(, descr=<Guard225>) [p1, p0, p12, i141, p2, p3, p22, p4, p5, p6, p7, i140, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #186 BINARY_SUBSCR', 1)
-i143 = int_lt(i141, 0)
-guard_false(i143, descr=<Guard226>) [p1, p0, p12, p52, i141, i134, p2, p3, p22, p4, p5, p6, p7, None, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-i144 = int_lt(i141, i134)
-guard_true(i144, descr=<Guard227>) [p1, p0, p12, p52, i141, p2, p3, p22, p4, p5, p6, p7, None, f138, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f145 = getarrayitem_raw(i136, i141, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #187 BINARY_ADD', 1)
-f146 = float_add(f138, f145)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #188 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #191 BINARY_MULTIPLY', 1)
-f147 = float_mul(f146, f42)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #192 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #195 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #198 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #201 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #202 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #205 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #206 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #209 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #210 BINARY_SUBSCR', 1)
-i148 = int_lt(i66, i134)
-guard_true(i148, descr=<Guard228>) [p1, p0, p12, p52, i66, p2, p3, p22, p4, p5, p6, p7, f147, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f149 = getarrayitem_raw(i136, i66, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #211 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #214 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #217 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #220 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #221 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #224 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #225 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #228 BINARY_ADD', 1)
-i151 = int_add(i133, 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #229 BINARY_SUBSCR', 1)
-i152 = int_lt(i151, i134)
-guard_true(i152, descr=<Guard229>) [p1, p0, p12, p52, i151, p2, p3, p22, p4, p5, p6, p7, f149, f147, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, None, p50, p48, f47, f42, f38, None, p13, i28, i8]
-f153 = getarrayitem_raw(i136, i151, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #230 BINARY_ADD', 1)
-f154 = float_add(f149, f153)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #231 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #234 BINARY_MULTIPLY', 1)
-f155 = float_mul(f154, f38)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #235 BINARY_ADD', 1)
-f156 = float_add(f147, f155)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #236 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #239 BINARY_MULTIPLY', 1)
-f157 = float_mul(f156, f47)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #240 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #243 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #246 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #249 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #250 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #253 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #254 STORE_SUBSCR', 1)
-setarrayitem_raw(i136, i133, f157, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #255 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #258 LOAD_GLOBAL', 1)
-p159 = getfield_gc(ConstPtr(ptr158), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p159, ConstClass(Function), descr=<Guard230>) [p1, p0, p12, p159, p2, p3, p22, p4, p5, p6, p7, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #261 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #264 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #267 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #270 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #271 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #274 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #275 BINARY_SUBSCR', 1)
-f161 = getarrayitem_raw(i136, i133, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #276 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #279 BINARY_SUBTRACT', 1)
-f162 = float_sub(f161, f137)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #280 CALL_FUNCTION', 1)
-p163 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p163, ConstPtr(ptr164), descr=<Guard231>) [p1, p0, p12, p163, p159, p2, p3, p22, p4, p5, p6, p7, f162, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-p165 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p166 = getfield_gc(p159, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i167 = force_token()
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #0 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #3 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #6 BINARY_MULTIPLY', 2)
-f168 = float_mul(f162, f162)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #7 RETURN_VALUE', 2)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #283 INPLACE_ADD', 1)
-f170 = float_add(0.000000, f168)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #284 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #287 JUMP_ABSOLUTE', 1)
-i172 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i174 = int_sub(i172, 100)
-setfield_raw(38968960, i174, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i176 = int_lt(i174, 0)
-guard_false(i176, descr=<Guard232>) [p1, p0, p12, p2, p3, p22, p4, p5, p6, p7, f170, None, None, None, None, None, f137, i132, i125, None, i116, None, None, None, i100, i93, i84, None, None, None, None, p52, p50, p48, f47, f42, f38, None, p13, i28, i8]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i177 = force_token()
-p179 = new_with_vtable(19809200)
-setfield_gc(p179, i28, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-setfield_gc(p12, p179, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p0, i177, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.vable_token 24>)
-p181 = new_with_vtable(19863424)
-setfield_gc(p181, p13, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-setfield_gc(p181, ConstPtr(ptr54), descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-setfield_gc(p181, 34, descr=<INTFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_lineno 144>)
-setfield_gc(p181, ConstPtr(ptr25), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-p184 = new_array(8, descr=<GcPtrArrayDescr>)
-p186 = new_with_vtable(19861240)
-setfield_gc(p186, i100, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p186, i93, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p186, i84, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p184, 0, p186, descr=<GcPtrArrayDescr>)
-p189 = new_with_vtable(19861240)
-setfield_gc(p189, i132, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p189, i125, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p189, i116, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p184, 1, p189, descr=<GcPtrArrayDescr>)
-setfield_gc(p181, p184, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestack_w 120>)
-setfield_gc(p181, 125, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p193 = new_with_vtable(19865144)
-setfield_gc(p193, 291, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p193, 1, descr=<SignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_valuestackdepth 24>)
-p197 = new_with_vtable(19865144)
-setfield_gc(p197, 295, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p193, p197, descr=<GcPtrFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_previous 16>)
-setfield_gc(p181, p193, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-p200 = new_array(11, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 0, p4, descr=<GcPtrArrayDescr>)
-p203 = new_with_vtable(19800744)
-setfield_gc(p203, f38, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 1, p203, descr=<GcPtrArrayDescr>)
-p206 = new_with_vtable(19800744)
-setfield_gc(p206, f42, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 2, p206, descr=<GcPtrArrayDescr>)
-p209 = new_with_vtable(19800744)
-setfield_gc(p209, f47, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 3, p209, descr=<GcPtrArrayDescr>)
-p212 = new_with_vtable(19800744)
-setfield_gc(p212, f170, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 4, p212, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 5, p48, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 6, p50, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p200, 7, p52, descr=<GcPtrArrayDescr>)
-p218 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p218, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p200, 8, p218, descr=<GcPtrArrayDescr>)
-p221 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p221, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p200, 9, p221, descr=<GcPtrArrayDescr>)
-p224 = new_with_vtable(19800744)
-setfield_gc(p224, f137, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p200, 10, p224, descr=<GcPtrArrayDescr>)
-setfield_gc(p181, p200, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_fastlocals_w 56>)
-setfield_gc(p181, 2, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-p235 = call_assembler(p181, p12, ConstPtr(ptr25), p193, 2, ConstPtr(ptr227), 0, 125, p186, p189, ConstPtr(ptr229), ConstPtr(ptr230), ConstPtr(ptr231), ConstPtr(ptr232), ConstPtr(ptr233), ConstPtr(ptr234), p4, p203, p206, p209, p212, p48, p50, p52, p218, p221, p224, descr=<Loop1>)
-guard_not_forced(, descr=<Guard233>) [p1, p0, p12, p181, p235, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-guard_no_exception(, descr=<Guard234>) [p1, p0, p12, p181, p235, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-p236 = getfield_gc(p12, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p236, descr=<Guard235>) [p1, p0, p12, p235, p181, p236, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-i237 = ptr_eq(p181, p0)
-guard_false(i237, descr=<Guard236>) [p1, p0, p12, p235, p181, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-i238 = getfield_gc(p12, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-setfield_gc(p181, ConstPtr(ptr239), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-i240 = int_is_true(i238)
-guard_false(i240, descr=<Guard237>) [p1, p0, p235, p181, p12, p179, p2, p3, p22, p4, p5, p6, p7, i8]
-p241 = getfield_gc(p181, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #64 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #67 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #70 LOAD_CONST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #73 INPLACE_ADD', 0)
-i243 = int_add(i8, 1)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #74 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #77 JUMP_ABSOLUTE', 0)
-i245 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i247 = int_sub(i245, 100)
-setfield_raw(38968960, i247, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-setfield_gc(p12, p241, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p179, p181, descr=<GcPtrFieldDescr JitVirtualRef.forced 16>)
-setfield_gc(p179, -3, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-i250 = int_lt(i247, 0)
-guard_false(i250, descr=<Guard238>) [p1, p0, p2, p3, p4, p5, p6, p235, i243, None]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-jump(p0, p1, p2, p3, p4, p5, p6, p235, i243, f9, i10, i238, p12, p241, descr=<Loop2>)
-[5ed74fc965fa] jit-log-opt-loop}
-[5ed74fe43ee0] {jit-log-opt-loop
-# Loop 3 : entry bridge with 413 ops
-[p0, p1, p2, p3, i4, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-guard_value(i4, 0, descr=<Guard239>) [i4, p1, p0, p2, p3, p5, i6, i7, p8, p9, p10, p11, p12, p13, p14]
-guard_nonnull_class(p13, 19800744, descr=<Guard240>) [p1, p0, p13, p2, p3, p5, i6, p8, p9, p10, p11, p12, p14]
-guard_value(i6, 0, descr=<Guard241>) [i6, p1, p0, p2, p3, p5, p13, p9, p10, p11, p12, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #24 LOAD_FAST', 0)
-guard_nonnull_class(p12, 19800744, descr=<Guard242>) [p1, p0, p12, p2, p3, p5, p13, p9, p10, p11, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #27 COMPARE_OP', 0)
-f19 = getfield_gc_pure(p13, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f20 = getfield_gc_pure(p12, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-i21 = float_gt(f19, f20)
-guard_true(i21, descr=<Guard243>) [p1, p0, p12, p13, p2, p3, p5, p10, p11, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #30 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #33 LOAD_FAST', 0)
-guard_nonnull_class(p11, ConstClass(W_IntObject), descr=<Guard244>) [p1, p0, p11, p2, p3, p5, p10, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #36 POP_JUMP_IF_FALSE', 0)
-i23 = getfield_gc_pure(p11, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i24 = int_is_true(i23)
-guard_true(i24, descr=<Guard245>) [p1, p0, p11, p2, p3, p5, p10, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #39 LOAD_FAST', 0)
-guard_nonnull_class(p14, ConstClass(W_IntObject), descr=<Guard246>) [p1, p0, p14, p2, p3, p5, p10, p11, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #42 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #45 COMPARE_OP', 0)
-i26 = getfield_gc_pure(p14, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i27 = int_ge(i26, i23)
-guard_false(i27, descr=<Guard247>) [p1, p0, p11, p14, p2, p3, p5, p10, p12, p13]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #48 POP_JUMP_IF_FALSE', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #55 LOAD_GLOBAL', 0)
-guard_value(p2, ConstPtr(ptr28), descr=<Guard248>) [p1, p0, p2, p3, p5, p10, p11, p12, p13, p14]
-p29 = getfield_gc(p0, descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-guard_value(p29, ConstPtr(ptr30), descr=<Guard249>) [p1, p0, p29, p3, p5, p10, p11, p12, p13, p14]
-p31 = getfield_gc(p29, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p31, descr=<Guard250>) [p1, p0, p31, p29, p3, p5, p10, p11, p12, p13, p14]
-p33 = getfield_gc(ConstPtr(ptr32), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p33, ConstClass(Function), descr=<Guard251>) [p1, p0, p33, p3, p5, p10, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #58 LOAD_FAST', 0)
-guard_nonnull_class(p10, 19852624, descr=<Guard252>) [p1, p0, p10, p3, p5, p33, p11, p12, p13, p14]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #61 CALL_FUNCTION', 0)
-p36 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p36, ConstPtr(ptr37), descr=<Guard253>) [p1, p0, p36, p33, p3, p5, p10, p11, p12, p13, p14]
-p38 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p39 = getfield_gc(p33, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-p41 = call(ConstClass(getexecutioncontext), descr=<GcPtrCallDescr>)
-p42 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-i43 = force_token()
-p44 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p44, descr=<Guard254>) [p1, p0, p41, p44, p3, p5, p33, p10, p11, p12, p13, p14, i43, p42, p38]
-i45 = getfield_gc(p41, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-i46 = int_is_zero(i45)
-guard_true(i46, descr=<Guard255>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #0 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #3 LOAD_ATTR', 1)
-p47 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst_map 48>)
-guard_value(p47, ConstPtr(ptr48), descr=<Guard256>) [p1, p0, p41, p10, p47, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-p50 = getfield_gc(ConstPtr(ptr49), descr=<GcPtrFieldDescr pypy.objspace.std.typeobject.W_TypeObject.inst__version_tag 16>)
-guard_value(p50, ConstPtr(ptr51), descr=<Guard257>) [p1, p0, p41, p10, p50, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-p52 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value2 24>)
-guard_nonnull_class(p52, 19800744, descr=<Guard258>) [p1, p0, p41, p52, p10, p3, p5, p33, p11, p12, p13, p14, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #6 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #9 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #12 BINARY_MULTIPLY', 1)
-f54 = getfield_gc_pure(p52, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f55 = float_mul(f54, f54)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #13 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #16 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #19 LOAD_ATTR', 1)
-p56 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value3 32>)
-guard_nonnull_class(p56, 19800744, descr=<Guard259>) [p1, p0, p41, p56, p10, p3, p5, p33, p11, p12, p13, p14, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #22 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #25 LOAD_ATTR', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #28 BINARY_MULTIPLY', 1)
-f58 = getfield_gc_pure(p56, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-f59 = float_mul(f58, f58)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #29 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #32 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #35 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #38 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #41 BINARY_ADD', 1)
-f60 = float_add(f55, f59)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #42 BINARY_DIVIDE', 1)
-i62 = float_eq(f60, 0.000000)
-guard_false(i62, descr=<Guard260>) [p1, p0, p41, f60, p3, p5, p33, p10, p11, p12, p13, p14, f59, f55, i43, p42, p38]
-f64 = float_truediv(0.500000, f60)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #43 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #46 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #49 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #52 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #55 LOAD_ATTR', 1)
-p65 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value0 8>)
-guard_nonnull_class(p65, ConstClass(W_IntObject), descr=<Guard261>) [p1, p0, p41, p65, p10, p3, p5, p33, p11, p12, p13, p14, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #58 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #61 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #64 LOAD_ATTR', 1)
-p67 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value1 16>)
-guard_nonnull_class(p67, ConstClass(W_IntObject), descr=<Guard262>) [p1, p0, p41, p67, p10, p3, p5, p33, p11, p12, p13, p14, p65, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #67 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #70 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #73 LOAD_ATTR', 1)
-p69 = getfield_gc(p10, descr=<GcPtrFieldDescr pypy.objspace.std.mapdict.W_ObjectObjectSize5.inst__value4 40>)
-guard_nonnull_class(p69, 19886912, descr=<Guard263>) [p1, p0, p41, p69, p10, p3, p5, p33, p11, p12, p13, p14, p67, p65, f64, f59, f55, i43, p42, p38]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #76 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #79 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #82 LOAD_GLOBAL', 1)
-guard_value(p38, ConstPtr(ptr71), descr=<Guard264>) [p1, p0, p41, p38, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p73 = getfield_gc(p38, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p73, descr=<Guard265>) [p1, p0, p41, p73, p38, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p75 = getfield_gc(ConstPtr(ptr74), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_isnull(p75, descr=<Guard266>) [p1, p0, p41, p75, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p77 = getfield_gc(ConstPtr(ptr76), descr=<GcPtrFieldDescr pypy.interpreter.module.Module.inst_w_dict 8>)
-guard_value(p77, ConstPtr(ptr78), descr=<Guard267>) [p1, p0, p41, p77, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p79 = getfield_gc(p77, descr=<GcPtrFieldDescr pypy.objspace.std.dictmultiobject.W_DictMultiObject.inst_r_dict_content 8>)
-guard_isnull(p79, descr=<Guard268>) [p1, p0, p41, p79, p77, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p81 = getfield_gc(ConstPtr(ptr80), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_value(p81, ConstPtr(ptr82), descr=<Guard269>) [p1, p0, p41, p81, p3, p5, p33, p10, p11, p12, p13, p14, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #85 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #88 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #91 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #94 BINARY_SUBTRACT', 1)
-i83 = getfield_gc_pure(p65, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i85 = int_sub_ovf(i83, 1)
-guard_no_overflow(, descr=<Guard270>) [p1, p0, p41, p65, i85, p3, p5, p33, p10, p11, p12, p13, p14, p81, p69, p67, None, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #95 CALL_FUNCTION', 1)
-p87 = getfield_gc(ConstPtr(ptr86), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_name 40>)
-p88 = getfield_gc(ConstPtr(ptr86), descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_defs 32>)
-i89 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i89, descr=<Guard271>) [p1, p0, p41, p87, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p90 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i91 = arraylen_gc(p90, descr=<GcPtrArrayDescr>)
-i93 = int_sub(4, i91)
-i95 = int_ge(3, i93)
-guard_true(i95, descr=<Guard272>) [p1, p0, p41, p87, i93, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i96 = int_sub(3, i93)
-i97 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i97, descr=<Guard273>) [p1, p0, p41, p87, i96, i93, p88, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p98 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p99 = getarrayitem_gc(p98, i96, descr=<GcPtrArrayDescr>)
-guard_class(p99, ConstClass(W_IntObject), descr=<Guard274>) [p1, p0, p41, p99, p3, p5, p33, p10, p11, p12, p13, p14, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i101 = getfield_gc_pure(p99, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i102 = int_is_zero(i101)
-guard_false(i102, descr=<Guard275>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i104 = int_lt(i101, 0)
-guard_false(i104, descr=<Guard276>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i106 = int_lt(1, i85)
-guard_true(i106, descr=<Guard277>) [p1, p0, p41, i101, i85, p3, p5, p33, p10, p11, p12, p13, p14, p99, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i107 = int_sub(i85, 1)
-i109 = int_sub(i107, 1)
-i110 = uint_floordiv(i109, i101)
-i112 = int_add(i110, 1)
-i114 = int_lt(i112, 0)
-guard_false(i114, descr=<Guard278>) [p1, p0, p41, i101, i112, p3, p5, p33, p10, p11, p12, p13, p14, p99, i85, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #98 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #99 FOR_ITER', 1)
-i116 = int_gt(i112, 0)
-guard_true(i116, descr=<Guard279>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i112, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i117 = int_add(1, i101)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #102 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #105 SETUP_LOOP', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #108 LOAD_GLOBAL', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #111 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #114 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #117 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #120 BINARY_SUBTRACT', 1)
-i118 = getfield_gc_pure(p67, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i120 = int_sub_ovf(i118, 1)
-guard_no_overflow(, descr=<Guard280>) [p1, p0, p41, p67, i120, p3, p5, p33, p10, p11, p12, p13, p14, i110, i117, None, i101, None, None, None, p69, None, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #121 CALL_FUNCTION', 1)
-i121 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i121, descr=<Guard281>) [p1, p0, p41, p87, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p122 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-i123 = arraylen_gc(p122, descr=<GcPtrArrayDescr>)
-i125 = int_sub(4, i123)
-i127 = int_ge(3, i125)
-guard_true(i127, descr=<Guard282>) [p1, p0, p41, p87, i125, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i128 = int_sub(3, i125)
-i129 = getfield_gc_pure(p88, descr=<BoolFieldDescr pypy.interpreter.function.Defaults.inst_promote 16>)
-guard_false(i129, descr=<Guard283>) [p1, p0, p41, p87, i128, i125, p88, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p130 = getfield_gc_pure(p88, descr=<GcPtrFieldDescr pypy.interpreter.function.Defaults.inst_items 8>)
-p131 = getarrayitem_gc(p130, i128, descr=<GcPtrArrayDescr>)
-guard_class(p131, ConstClass(W_IntObject), descr=<Guard284>) [p1, p0, p41, p131, p3, p5, p33, p10, p11, p12, p13, p14, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i133 = getfield_gc_pure(p131, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-i134 = int_is_zero(i133)
-guard_false(i134, descr=<Guard285>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i136 = int_lt(i133, 0)
-guard_false(i136, descr=<Guard286>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i138 = int_lt(1, i120)
-guard_true(i138, descr=<Guard287>) [p1, p0, p41, i133, i120, p3, p5, p33, p10, p11, p12, p13, p14, p131, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i139 = int_sub(i120, 1)
-i141 = int_sub(i139, 1)
-i142 = uint_floordiv(i141, i133)
-i144 = int_add(i142, 1)
-i146 = int_lt(i144, 0)
-guard_false(i146, descr=<Guard288>) [p1, p0, p41, i133, i144, p3, p5, p33, p10, p11, p12, p13, p14, p131, i120, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #124 GET_ITER', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i148 = int_gt(i144, 0)
-guard_true(i148, descr=<Guard289>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, i144, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-i149 = int_add(1, i133)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #128 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #131 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #134 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #137 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #140 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #141 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #144 BINARY_ADD', 1)
-i150 = int_add_ovf(i83, 1)
-guard_no_overflow(, descr=<Guard290>) [p1, p0, p41, i150, p3, p5, p33, p10, p11, p12, p13, p14, i83, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #145 BINARY_SUBSCR', 1)
-i151 = getfield_gc(p69, descr=<SignedFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_len 32>)
-i152 = int_lt(i150, i151)
-guard_true(i152, descr=<Guard291>) [p1, p0, p41, p69, i150, p3, p5, p33, p10, p11, p12, p13, p14, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-i153 = getfield_gc(p69, descr=<NonGcPtrFieldDescr pypy.module.array.interp_array.W_ArrayTyped.inst_buffer 24>)
-f154 = getarrayitem_raw(i153, i150, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #146 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #149 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #152 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #155 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #158 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #159 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #162 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #163 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #166 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #167 BINARY_SUBSCR', 1)
-f155 = getarrayitem_raw(i153, 1, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #168 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #171 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #174 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #177 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #178 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #181 BINARY_MULTIPLY', 1)
-i157 = int_mul_ovf(2, i83)
-guard_no_overflow(, descr=<Guard292>) [p1, p0, p41, p65, i157, p3, p5, p33, p10, p11, p12, p13, p14, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, None, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #182 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #185 BINARY_ADD', 1)
-i158 = int_add_ovf(i157, 1)
-guard_no_overflow(, descr=<Guard293>) [p1, p0, p41, i158, p3, p5, p33, p10, p11, p12, p13, p14, i157, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #186 BINARY_SUBSCR', 1)
-i160 = int_lt(i158, 0)
-guard_false(i160, descr=<Guard294>) [p1, p0, p41, p69, i158, i151, p3, p5, p33, p10, p11, p12, p13, p14, None, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-i161 = int_lt(i158, i151)
-guard_true(i161, descr=<Guard295>) [p1, p0, p41, p69, i158, p3, p5, p33, p10, p11, p12, p13, p14, None, f154, f155, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f162 = getarrayitem_raw(i153, i158, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #187 BINARY_ADD', 1)
-f163 = float_add(f155, f162)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #188 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #191 BINARY_MULTIPLY', 1)
-f164 = float_mul(f163, f59)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #192 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #195 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #198 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #201 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #202 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #205 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #206 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #209 BINARY_SUBTRACT', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #210 BINARY_SUBSCR', 1)
-i165 = int_lt(i83, i151)
-guard_true(i165, descr=<Guard296>) [p1, p0, p41, p69, i83, p3, p5, p33, p10, p11, p12, p13, p14, f164, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f166 = getarrayitem_raw(i153, i83, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #211 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #214 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #217 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #220 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #221 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #224 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #225 LOAD_CONST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #228 BINARY_ADD', 1)
-i168 = int_add(i150, 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #229 BINARY_SUBSCR', 1)
-i169 = int_lt(i168, i151)
-guard_true(i169, descr=<Guard297>) [p1, p0, p41, p69, i168, p3, p5, p33, p10, p11, p12, p13, p14, f166, f164, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, None, p67, p65, f64, f59, f55, i43, p42, None]
-f170 = getarrayitem_raw(i153, i168, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #230 BINARY_ADD', 1)
-f171 = float_add(f166, f170)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #231 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #234 BINARY_MULTIPLY', 1)
-f172 = float_mul(f171, f55)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #235 BINARY_ADD', 1)
-f173 = float_add(f164, f172)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #236 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #239 BINARY_MULTIPLY', 1)
-f174 = float_mul(f173, f64)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #240 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #243 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #246 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #249 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #250 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #253 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #254 STORE_SUBSCR', 1)
-setarrayitem_raw(i153, i150, f174, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #255 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #258 LOAD_GLOBAL', 1)
-p176 = getfield_gc(ConstPtr(ptr175), descr=<GcPtrFieldDescr pypy.objspace.std.celldict.ModuleCell.inst_w_value 8>)
-guard_nonnull_class(p176, ConstClass(Function), descr=<Guard298>) [p1, p0, p41, p176, p3, p5, p33, p10, p11, p12, p13, p14, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #261 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #264 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #267 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #270 BINARY_MULTIPLY', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #271 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #274 BINARY_ADD', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #275 BINARY_SUBSCR', 1)
-f178 = getarrayitem_raw(i153, i150, descr=<FloatArrayNoLengthDescr>)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #276 LOAD_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #279 BINARY_SUBTRACT', 1)
-f179 = float_sub(f178, f154)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #280 CALL_FUNCTION', 1)
-p180 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_code 24>)
-guard_value(p180, ConstPtr(ptr181), descr=<Guard299>) [p1, p0, p41, p180, p176, p3, p5, p33, p10, p11, p12, p13, p14, f179, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-p182 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_w_func_globals 64>)
-p183 = getfield_gc(p176, descr=<GcPtrFieldDescr pypy.interpreter.function.Function.inst_closure 16>)
-i184 = force_token()
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #0 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #3 LOAD_FAST', 2)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #6 BINARY_MULTIPLY', 2)
-f185 = float_mul(f179, f179)
-debug_merge_point('<code object sqr, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 7> #7 RETURN_VALUE', 2)
-i186 = int_is_true(i45)
-guard_false(i186, descr=<Guard300>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, p182, i184, p176, f185, f179, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #283 INPLACE_ADD', 1)
-f188 = float_add(0.000000, f185)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #284 STORE_FAST', 1)
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #287 JUMP_ABSOLUTE', 1)
-i190 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i192 = int_sub(i190, 100)
-setfield_raw(38968960, i192, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i194 = int_lt(i192, 0)
-guard_false(i194, descr=<Guard301>) [p1, p0, p41, p3, p5, p33, p10, p11, p12, p13, p14, f188, None, None, None, None, None, None, None, None, f154, None, None, i149, i142, None, i133, None, None, i110, i117, None, i101, None, None, None, p69, p67, p65, f64, f59, f55, i43, p42, None]
-debug_merge_point('<code object time_step, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 34> #125 FOR_ITER', 1)
-i195 = force_token()
-p197 = new_with_vtable(19809200)
-setfield_gc(p197, i43, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-setfield_gc(p41, p197, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p0, i195, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.vable_token 24>)
-p199 = new_with_vtable(19863424)
-setfield_gc(p199, p42, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-setfield_gc(p199, ConstPtr(ptr71), descr=<GcPtrFieldDescr pypy.interpreter.eval.Frame.inst_w_globals 8>)
-setfield_gc(p199, 34, descr=<INTFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_lineno 144>)
-setfield_gc(p199, ConstPtr(ptr37), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_pycode 112>)
-p202 = new_array(8, descr=<GcPtrArrayDescr>)
-p204 = new_with_vtable(19861240)
-setfield_gc(p204, i117, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p204, i110, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p204, i101, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p202, 0, p204, descr=<GcPtrArrayDescr>)
-p207 = new_with_vtable(19861240)
-setfield_gc(p207, i149, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_current 8>)
-setfield_gc(p207, i142, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_remaining 16>)
-setfield_gc(p207, i133, descr=<SignedFieldDescr pypy.module.__builtin__.functional.W_XRangeIterator.inst_step 24>)
-setarrayitem_gc(p202, 1, p207, descr=<GcPtrArrayDescr>)
-setfield_gc(p199, p202, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestack_w 120>)
-setfield_gc(p199, 125, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_instr 96>)
-p211 = new_with_vtable(19865144)
-setfield_gc(p211, 291, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p211, 1, descr=<SignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_valuestackdepth 24>)
-p215 = new_with_vtable(19865144)
-setfield_gc(p215, 295, descr=<UnsignedFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_handlerposition 8>)
-setfield_gc(p211, p215, descr=<GcPtrFieldDescr pypy.interpreter.pyopcode.FrameBlock.inst_previous 16>)
-setfield_gc(p199, p211, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_lastblock 104>)
-p218 = new_array(11, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 0, p10, descr=<GcPtrArrayDescr>)
-p221 = new_with_vtable(19800744)
-setfield_gc(p221, f55, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 1, p221, descr=<GcPtrArrayDescr>)
-p224 = new_with_vtable(19800744)
-setfield_gc(p224, f59, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 2, p224, descr=<GcPtrArrayDescr>)
-p227 = new_with_vtable(19800744)
-setfield_gc(p227, f64, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 3, p227, descr=<GcPtrArrayDescr>)
-p230 = new_with_vtable(19800744)
-setfield_gc(p230, f188, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 4, p230, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 5, p65, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 6, p67, descr=<GcPtrArrayDescr>)
-setarrayitem_gc(p218, 7, p69, descr=<GcPtrArrayDescr>)
-p236 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p236, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p218, 8, p236, descr=<GcPtrArrayDescr>)
-p239 = new_with_vtable(ConstClass(W_IntObject))
-setfield_gc(p239, 1, descr=<SignedFieldDescr pypy.objspace.std.intobject.W_IntObject.inst_intval 8>)
-setarrayitem_gc(p218, 9, p239, descr=<GcPtrArrayDescr>)
-p242 = new_with_vtable(19800744)
-setfield_gc(p242, f154, descr=<FloatFieldDescr pypy.objspace.std.floatobject.W_FloatObject.inst_floatval 8>)
-setarrayitem_gc(p218, 10, p242, descr=<GcPtrArrayDescr>)
-setfield_gc(p199, p218, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_fastlocals_w 56>)
-setfield_gc(p199, 2, descr=<SignedFieldDescr pypy.interpreter.pyframe.PyFrame.inst_valuestackdepth 128>)
-p253 = call_assembler(p199, p41, ConstPtr(ptr37), p211, 2, ConstPtr(ptr245), 0, 125, p204, p207, ConstPtr(ptr247), ConstPtr(ptr248), ConstPtr(ptr249), ConstPtr(ptr250), ConstPtr(ptr251), ConstPtr(ptr252), p10, p221, p224, p227, p230, p65, p67, p69, p236, p239, p242, descr=<Loop1>)
-guard_not_forced(, descr=<Guard302>) [p1, p0, p41, p199, p253, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-guard_no_exception(, descr=<Guard303>) [p1, p0, p41, p199, p253, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-p254 = getfield_gc(p41, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_w_tracefunc 72>)
-guard_isnull(p254, descr=<Guard304>) [p1, p0, p41, p253, p199, p254, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-i255 = ptr_eq(p199, p0)
-guard_false(i255, descr=<Guard305>) [p1, p0, p41, p253, p199, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-i256 = getfield_gc(p41, descr=<NonGcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_profilefunc 40>)
-setfield_gc(p199, ConstPtr(ptr257), descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_last_exception 88>)
-i258 = int_is_true(i256)
-guard_false(i258, descr=<Guard306>) [p1, p0, p253, p199, p41, p197, p3, p5, p33, p10, p11, p12, p13, p14]
-p259 = getfield_gc(p199, descr=<GcPtrFieldDescr pypy.interpreter.pyframe.PyFrame.inst_f_backref 48>)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #64 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #67 LOAD_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #70 LOAD_CONST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #73 INPLACE_ADD', 0)
-i261 = int_add(i26, 1)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #74 STORE_FAST', 0)
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #77 JUMP_ABSOLUTE', 0)
-i263 = getfield_raw(38968960, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-i265 = int_sub(i263, 100)
-setfield_raw(38968960, i265, descr=<SignedFieldDescr pypysig_long_struct.c_value 0>)
-setfield_gc(p41, p259, descr=<GcPtrFieldDescr pypy.interpreter.executioncontext.ExecutionContext.inst_topframeref 56>)
-setfield_gc(p197, p199, descr=<GcPtrFieldDescr JitVirtualRef.forced 16>)
-setfield_gc(p197, -3, descr=<SignedFieldDescr JitVirtualRef.virtual_token 8>)
-i268 = int_lt(i265, 0)
-guard_false(i268, descr=<Guard307>) [p1, p0, p3, p5, p10, p11, p12, p253, i261]
-debug_merge_point('<code object laplace_solve, file '/home/alex/projects/hack/benchmarks/laplace/laplace.py', line 52> #21 LOAD_FAST', 0)
-jump(p0, p1, p3, p5, p10, p11, p12, p253, i261, f20, i23, i256, p41, p259, descr=<Loop2>)
-[5ed74ff695c8] jit-log-opt-loop}
-[5ed8737e9776] {jit-backend-counts
-0:493724565
-1:2281802
-2:1283242
-3:993105
-4:2933
-5:2163
-6:2492
-7:1799
-8:963
-9:36
-[5ed8737ee19c] jit-backend-counts}
diff --git a/tests/examplefiles/unicode.js b/tests/examplefiles/unicode.js
new file mode 100644
index 00000000..e77bfb80
--- /dev/null
+++ b/tests/examplefiles/unicode.js
@@ -0,0 +1,5 @@
+var école;
+var sinθ;
+var เมือง;
+var a\u1234b;
+
diff --git a/tests/examplefiles/vpath.mk b/tests/examplefiles/vpath.mk
new file mode 100644
index 00000000..a7f18fc3
--- /dev/null
+++ b/tests/examplefiles/vpath.mk
@@ -0,0 +1,16 @@
+vpath %.c src
+vpath %.h header
+EXEC=hello
+SRC= hello.c main.c
+OBJ= $(SRC:.c=.o)
+
+all: $(EXEC)
+
+hello: $(OBJ)
+ $(CC) -o $@ $^ $(LDFLAGS)
+
+main.o: hello.h
+
+%.o: %.c
+ $(CC) -I header -o $@ \
+ -c $< $(CFLAGS)
diff --git a/tests/run.py b/tests/run.py
index e87837e5..94d629e8 100644
--- a/tests/run.py
+++ b/tests/run.py
@@ -14,7 +14,8 @@
from __future__ import print_function
-import sys, os
+import os
+import sys
# only find tests in this directory
if os.path.dirname(__file__):
diff --git a/tests/string_asserts.py b/tests/string_asserts.py
index 025a5281..3aa50420 100644
--- a/tests/string_asserts.py
+++ b/tests/string_asserts.py
@@ -11,12 +11,12 @@ class StringTests(object):
def assertStartsWith(self, haystack, needle, msg=None):
if msg is None:
- msg = "'{}' does not start with '{}'".format(haystack, needle)
+ msg = "'{0}' does not start with '{1}'".format(haystack, needle)
if not haystack.startswith(needle):
raise(AssertionError(msg))
def assertEndsWith(self, haystack, needle, msg=None):
if msg is None:
- msg = "'{}' does not end with '{}'".format(haystack, needle)
+ msg = "'{0}' does not end with '{1}'".format(haystack, needle)
if not haystack.endswith(needle):
raise(AssertionError(msg))
diff --git a/tests/support.py b/tests/support.py
index 505c17da..c66ac663 100644
--- a/tests/support.py
+++ b/tests/support.py
@@ -5,6 +5,8 @@ Support for Pygments tests
import os
+from nose import SkipTest
+
def location(mod_name):
"""
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 893fa90c..7485df1a 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -16,7 +16,7 @@ from pygments import lexers, formatters, filters, format
from pygments.token import _TokenType, Text
from pygments.lexer import RegexLexer
from pygments.formatters.img import FontNotFound
-from pygments.util import text_type, StringIO, xrange, ClassNotFound
+from pygments.util import text_type, StringIO, BytesIO, xrange, ClassNotFound
import support
@@ -27,10 +27,12 @@ random.shuffle(test_content)
test_content = ''.join(test_content) + '\n'
-def test_lexer_import_all():
+def test_lexer_instantiate_all():
# instantiate every lexer, to see if the token type defs are correct
+ def verify(name):
+ getattr(lexers, name)
for x in lexers.LEXERS:
- c = getattr(lexers, x)()
+ yield verify, x
def test_lexer_classes():
@@ -40,7 +42,7 @@ def test_lexer_classes():
for attr in 'aliases', 'filenames', 'alias_filenames', 'mimetypes':
assert hasattr(cls, attr)
assert type(getattr(cls, attr)) is list, \
- "%s: %s attribute wrong" % (cls, attr)
+ "%s: %s attribute wrong" % (cls, attr)
result = cls.analyse_text("abc")
assert isinstance(result, float) and 0.0 <= result <= 1.0
result = cls.analyse_text(".abc")
@@ -66,19 +68,19 @@ def test_lexer_classes():
try:
tokens = list(inst.get_tokens(test_content))
except KeyboardInterrupt:
- raise KeyboardInterrupt('interrupted %s.get_tokens(): test_content=%r' % (cls.__name__, test_content))
+ raise KeyboardInterrupt(
+ 'interrupted %s.get_tokens(): test_content=%r' %
+ (cls.__name__, test_content))
txt = ""
for token in tokens:
assert isinstance(token, tuple)
assert isinstance(token[0], _TokenType)
- if isinstance(token[1], str):
- print(repr(token[1]))
assert isinstance(token[1], text_type)
txt += token[1]
assert txt == test_content, "%s lexer roundtrip failed: %r != %r" % \
- (cls.name, test_content, txt)
+ (cls.name, test_content, txt)
- for lexer in lexers._iter_lexerclasses():
+ for lexer in lexers._iter_lexerclasses(plugins=False):
yield verify, lexer
@@ -87,7 +89,8 @@ def test_lexer_options():
def ensure(tokens, output):
concatenated = ''.join(token[1] for token in tokens)
assert concatenated == output, \
- '%s: %r != %r' % (lexer, concatenated, output)
+ '%s: %r != %r' % (lexer, concatenated, output)
+
def verify(cls):
inst = cls(stripnl=False)
ensure(inst.get_tokens('a\nb'), 'a\nb\n')
@@ -96,18 +99,18 @@ def test_lexer_options():
ensure(inst.get_tokens(' \n b\n\n\n'), 'b\n')
# some lexers require full lines in input
if cls.__name__ not in (
- 'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer',
- 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
- 'BashSessionLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
- 'PostgresConsoleLexer', 'ElixirConsoleLexer', 'JuliaConsoleLexer',
- 'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer',
- 'LiterateIdrisLexer', 'LiterateCryptolLexer'):
+ 'PythonConsoleLexer', 'RConsoleLexer', 'RubyConsoleLexer',
+ 'SqliteConsoleLexer', 'MatlabSessionLexer', 'ErlangShellLexer',
+ 'BashSessionLexer', 'LiterateHaskellLexer', 'LiterateAgdaLexer',
+ 'PostgresConsoleLexer', 'ElixirConsoleLexer', 'JuliaConsoleLexer',
+ 'RobotFrameworkLexer', 'DylanConsoleLexer', 'ShellSessionLexer',
+ 'LiterateIdrisLexer', 'LiterateCryptolLexer'):
inst = cls(ensurenl=False)
ensure(inst.get_tokens('a\nb'), 'a\nb')
inst = cls(ensurenl=False, stripall=True)
ensure(inst.get_tokens('a\nb\n\n'), 'a\nb')
- for lexer in lexers._iter_lexerclasses():
+ for lexer in lexers._iter_lexerclasses(plugins=False):
if lexer.__name__ == 'RawTokenLexer':
# this one is special
continue
@@ -147,32 +150,38 @@ def test_get_lexers():
def test_formatter_public_api():
- ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
- out = StringIO()
# test that every formatter class has the correct public API
- def verify(formatter, info):
- assert len(info) == 4
- assert info[0], "missing formatter name"
- assert info[1], "missing formatter aliases"
- assert info[3], "missing formatter docstring"
-
- if formatter.name == 'Raw tokens':
- # will not work with Unicode output file
- return
+ ts = list(lexers.PythonLexer().get_tokens("def f(): pass"))
+ string_out = StringIO()
+ bytes_out = BytesIO()
+
+ def verify(formatter):
+ info = formatters.FORMATTERS[formatter.__name__]
+ assert len(info) == 5
+ assert info[1], "missing formatter name"
+ assert info[2], "missing formatter aliases"
+ assert info[4], "missing formatter docstring"
try:
inst = formatter(opt1="val1")
except (ImportError, FontNotFound):
- return
+ raise support.SkipTest
+
try:
inst.get_style_defs()
except NotImplementedError:
# may be raised by formatters for which it doesn't make sense
pass
- inst.format(ts, out)
- for formatter, info in formatters.FORMATTERS.items():
- yield verify, formatter, info
+ if formatter.unicodeoutput:
+ inst.format(ts, string_out)
+ else:
+ inst.format(ts, bytes_out)
+
+ for name in formatters.FORMATTERS:
+ formatter = getattr(formatters, name)
+ yield verify, formatter
+
def test_formatter_encodings():
from pygments.formatters import HtmlFormatter
@@ -205,12 +214,12 @@ def test_formatter_unicode_handling():
inst = formatter(encoding=None)
except (ImportError, FontNotFound):
# some dependency or font not installed
- return
+ raise support.SkipTest
if formatter.name != 'Raw tokens':
out = format(tokens, inst)
if formatter.unicodeoutput:
- assert type(out) is text_type
+ assert type(out) is text_type, '%s: %r' % (formatter, out)
inst = formatter(encoding='utf-8')
out = format(tokens, inst)
@@ -223,7 +232,9 @@ def test_formatter_unicode_handling():
assert type(out) is bytes, '%s: %r' % (formatter, out)
for formatter, info in formatters.FORMATTERS.items():
- yield verify, formatter
+ # this tests the automatic importing as well
+ fmter = getattr(formatters, formatter)
+ yield verify, fmter
def test_get_formatters():
@@ -240,27 +251,33 @@ def test_get_formatters():
def test_styles():
# minimal style test
from pygments.formatters import HtmlFormatter
- fmt = HtmlFormatter(style="pastie")
+ HtmlFormatter(style="pastie")
class FiltersTest(unittest.TestCase):
def test_basic(self):
- filter_args = {
- 'whitespace': {'spaces': True, 'tabs': True, 'newlines': True},
- 'highlight': {'names': ['isinstance', 'lexers', 'x']},
- }
- for x in filters.FILTERS:
+ filters_args = [
+ ('whitespace', {'spaces': True, 'tabs': True, 'newlines': True}),
+ ('whitespace', {'wstokentype': False, 'spaces': True}),
+ ('highlight', {'names': ['isinstance', 'lexers', 'x']}),
+ ('codetagify', {'codetags': 'API'}),
+ ('keywordcase', {'case': 'capitalize'}),
+ ('raiseonerror', {}),
+ ('gobble', {'n': 4}),
+ ('tokenmerge', {}),
+ ]
+ for x, args in filters_args:
lx = lexers.PythonLexer()
- lx.add_filter(x, **filter_args.get(x, {}))
- fp = open(TESTFILE, 'rb')
- try:
+ lx.add_filter(x, **args)
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
tokens = list(lx.get_tokens(text))
+ self.assertTrue(all(isinstance(t[1], text_type)
+ for t in tokens),
+ '%s filter did not return Unicode' % x)
roundtext = ''.join([t[1] for t in tokens])
- if x not in ('whitespace', 'keywordcase'):
+ if x not in ('whitespace', 'keywordcase', 'gobble'):
# these filters change the text
self.assertEqual(roundtext, text,
"lexer roundtrip with %s filter failed" % x)
@@ -273,22 +290,16 @@ class FiltersTest(unittest.TestCase):
def test_whitespace(self):
lx = lexers.PythonLexer()
lx.add_filter('whitespace', spaces='%')
- fp = open(TESTFILE, 'rb')
- try:
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
self.assertFalse(' ' in lxtext)
def test_keywordcase(self):
lx = lexers.PythonLexer()
lx.add_filter('keywordcase', case='capitalize')
- fp = open(TESTFILE, 'rb')
- try:
+ with open(TESTFILE, 'rb') as fp:
text = fp.read().decode('utf-8')
- finally:
- fp.close()
lxtext = ''.join([t[1] for t in list(lx.get_tokens(text))])
self.assertTrue('Def' in lxtext and 'Class' in lxtext)
diff --git a/tests/test_cmdline.py b/tests/test_cmdline.py
index ef14661c..9e26ce17 100644
--- a/tests/test_cmdline.py
+++ b/tests/test_cmdline.py
@@ -7,14 +7,14 @@
:license: BSD, see LICENSE for details.
"""
-# Test the command line interface
+from __future__ import print_function
import io
import sys
import unittest
from pygments import highlight
-from pygments.util import StringIO
+from pygments.util import StringIO, BytesIO
from pygments.cmdline import main as cmdline_main
import support
@@ -25,14 +25,24 @@ TESTFILE, TESTDIR = support.location(__file__)
def run_cmdline(*args):
saved_stdout = sys.stdout
saved_stderr = sys.stderr
- new_stdout = sys.stdout = StringIO()
- new_stderr = sys.stderr = StringIO()
+ if sys.version_info > (3,):
+ stdout_buffer = BytesIO()
+ stderr_buffer = BytesIO()
+ new_stdout = sys.stdout = io.TextIOWrapper(stdout_buffer)
+ new_stderr = sys.stderr = io.TextIOWrapper(stderr_buffer)
+ else:
+ stdout_buffer = new_stdout = sys.stdout = StringIO()
+ stderr_buffer = new_stderr = sys.stderr = StringIO()
try:
ret = cmdline_main(["pygmentize"] + list(args))
finally:
sys.stdout = saved_stdout
sys.stderr = saved_stderr
- return (ret, new_stdout.getvalue(), new_stderr.getvalue())
+ new_stdout.flush()
+ new_stderr.flush()
+ out, err = stdout_buffer.getvalue().decode('utf-8'), \
+ stderr_buffer.getvalue().decode('utf-8')
+ return (ret, out, err)
class CmdLineTest(unittest.TestCase):
@@ -83,7 +93,7 @@ class CmdLineTest(unittest.TestCase):
def test_invalid_opts(self):
for opts in [("-L", "-lpy"), ("-L", "-fhtml"), ("-L", "-Ox"),
("-a",), ("-Sst", "-lpy"), ("-H",),
- ("-H", "formatter"),]:
+ ("-H", "formatter")]:
self.assertTrue(run_cmdline(*opts)[0] == 2)
def test_normal(self):
@@ -91,11 +101,8 @@ class CmdLineTest(unittest.TestCase):
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
filename = TESTFILE
- fp = open(filename, 'rb')
- try:
+ with open(filename, 'rb') as fp:
code = fp.read()
- finally:
- fp.close()
output = highlight(code, PythonLexer(), HtmlFormatter())
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 0547ffd3..ac5b4244 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -20,29 +20,34 @@ from pygments.util import ClassNotFound
STORE_OUTPUT = False
+STATS = {}
+
+TESTDIR = os.path.dirname(__file__)
+
+
# generate methods
def test_example_files():
- testdir = os.path.dirname(__file__)
- outdir = os.path.join(testdir, 'examplefiles', 'output')
+ global STATS
+ STATS = {}
+ outdir = os.path.join(TESTDIR, 'examplefiles', 'output')
if STORE_OUTPUT and not os.path.isdir(outdir):
os.makedirs(outdir)
- for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
+ for fn in os.listdir(os.path.join(TESTDIR, 'examplefiles')):
if fn.startswith('.') or fn.endswith('#'):
continue
- absfn = os.path.join(testdir, 'examplefiles', fn)
+ absfn = os.path.join(TESTDIR, 'examplefiles', fn)
if not os.path.isfile(absfn):
continue
print(absfn)
- code = open(absfn, 'rb').read()
+ with open(absfn, 'rb') as f:
+ code = f.read()
try:
code = code.decode('utf-8')
except UnicodeError:
code = code.decode('latin1')
- outfn = os.path.join(outdir, fn)
-
lx = None
if '_' in fn:
try:
@@ -57,14 +62,25 @@ def test_example_files():
'nor is of the form <lexer>_filename '
'for overriding, thus no lexer found.'
% fn)
- yield check_lexer, lx, absfn, outfn
+ yield check_lexer, lx, fn
-def check_lexer(lx, absfn, outfn):
- fp = open(absfn, 'rb')
- try:
+ N = 7
+ stats = list(STATS.items())
+ stats.sort(key=lambda x: x[1][1])
+ print('\nExample files that took longest absolute time:')
+ for fn, t in stats[-N:]:
+ print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t))
+ print()
+ stats.sort(key=lambda x: x[1][2])
+ print('\nExample files that took longest relative time:')
+ for fn, t in stats[-N:]:
+ print('%-30s %6d chars %8.2f ms %7.3f ms/char' % ((fn,) + t))
+
+
+def check_lexer(lx, fn):
+ absfn = os.path.join(TESTDIR, 'examplefiles', fn)
+ with open(absfn, 'rb') as fp:
text = fp.read()
- finally:
- fp.close()
text = text.replace(b'\r\n', b'\n')
text = text.strip(b'\n') + b'\n'
try:
@@ -75,12 +91,17 @@ def check_lexer(lx, absfn, outfn):
text = text.decode('latin1')
ntext = []
tokens = []
+ import time
+ t1 = time.time()
for type, val in lx.get_tokens(text):
ntext.append(val)
assert type != Error, \
'lexer %s generated error token for %s: %r at position %d' % \
(lx, absfn, val, len(u''.join(ntext)))
tokens.append((type, val))
+ t2 = time.time()
+ STATS[os.path.basename(absfn)] = (len(text),
+ 1000 * (t2 - t1), 1000 * (t2 - t1) / len(text))
if u''.join(ntext) != text:
print('\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(),
text.splitlines())))
@@ -89,19 +110,14 @@ def check_lexer(lx, absfn, outfn):
# check output against previous run if enabled
if STORE_OUTPUT:
# no previous output -- store it
+ outfn = os.path.join(TESTDIR, 'examplefiles', 'output', fn)
if not os.path.isfile(outfn):
- fp = open(outfn, 'wb')
- try:
+ with open(outfn, 'wb') as fp:
pickle.dump(tokens, fp)
- finally:
- fp.close()
return
# otherwise load it and compare
- fp = open(outfn, 'rb')
- try:
+ with open(outfn, 'rb') as fp:
stored_tokens = pickle.load(fp)
- finally:
- fp.close()
if stored_tokens != tokens:
f1 = pprint.pformat(stored_tokens)
f2 = pprint.pformat(tokens)
diff --git a/tests/test_html_formatter.py b/tests/test_html_formatter.py
index 91225cd3..5b1b2576 100644
--- a/tests/test_html_formatter.py
+++ b/tests/test_html_formatter.py
@@ -25,11 +25,8 @@ import support
TESTFILE, TESTDIR = support.location(__file__)
-fp = io.open(TESTFILE, encoding='utf-8')
-try:
+with io.open(TESTFILE, encoding='utf-8') as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
-finally:
- fp.close()
class HtmlFormatterTest(unittest.TestCase):
diff --git a/tests/test_java.py b/tests/test_java.py
new file mode 100644
index 00000000..9cf96373
--- /dev/null
+++ b/tests/test_java.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic JavaLexer Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Text, Name, Operator, Keyword
+from pygments.lexers import JavaLexer
+
+
+class JavaTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = JavaLexer()
+ self.maxDiff = None
+
+ def testEnhancedFor(self):
+ fragment = u'label:\nfor(String var2: var1) {}\n'
+ tokens = [
+ (Name.Label, u'label:'),
+ (Text, u'\n'),
+ (Keyword, u'for'),
+ (Operator, u'('),
+ (Name, u'String'),
+ (Text, u' '),
+ (Name, u'var2'),
+ (Operator, u':'),
+ (Text, u' '),
+ (Name, u'var1'),
+ (Operator, u')'),
+ (Text, u' '),
+ (Operator, u'{'),
+ (Operator, u'}'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_latex_formatter.py b/tests/test_latex_formatter.py
index 13ae87cd..0a433c85 100644
--- a/tests/test_latex_formatter.py
+++ b/tests/test_latex_formatter.py
@@ -24,11 +24,8 @@ TESTFILE, TESTDIR = support.location(__file__)
class LatexFormatterTest(unittest.TestCase):
def test_valid_output(self):
- fp = open(TESTFILE)
- try:
+ with open(TESTFILE) as fp:
tokensource = list(PythonLexer().get_tokens(fp.read()))
- finally:
- fp.close()
fmt = LatexFormatter(full=True, encoding='latin1')
handle, pathname = tempfile.mkstemp('.tex')
diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py
index 91b0dc70..e3625a2b 100644
--- a/tests/test_lexers_other.py
+++ b/tests/test_lexers_other.py
@@ -12,7 +12,7 @@ import os
import unittest
from pygments.lexers import guess_lexer
-from pygments.lexers.other import RexxLexer
+from pygments.lexers.scripting import RexxLexer
def _exampleFilePath(filename):
@@ -26,17 +26,14 @@ class AnalyseTextTest(unittest.TestCase):
for pattern in lexer.filenames:
exampleFilesPattern = _exampleFilePath(pattern)
for exampleFilePath in glob.glob(exampleFilesPattern):
- exampleFile = open(exampleFilePath, 'rb')
- try:
- text = exampleFile.read().decode('utf-8')
- probability = lexer.analyse_text(text)
- self.assertTrue(probability > 0,
- '%s must recognize %r' % (
- lexer.name, exampleFilePath))
- guessedLexer = guess_lexer(text)
- self.assertEqual(guessedLexer.name, lexer.name)
- finally:
- exampleFile.close()
+ with open(exampleFilePath, 'rb') as fp:
+ text = fp.read().decode('utf-8')
+ probability = lexer.analyse_text(text)
+ self.assertTrue(probability > 0,
+ '%s must recognize %r' % (
+ lexer.name, exampleFilePath))
+ guessedLexer = guess_lexer(text)
+ self.assertEqual(guessedLexer.name, lexer.name)
def testCanRecognizeAndGuessExampleFiles(self):
self._testCanRecognizeAndGuessExampleFiles(RexxLexer)
diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py
index bfa3aeb8..e37539f2 100644
--- a/tests/test_perllexer.py
+++ b/tests/test_perllexer.py
@@ -11,7 +11,7 @@ import time
import unittest
from pygments.token import String
-from pygments.lexers.agile import PerlLexer
+from pygments.lexers.perl import PerlLexer
class RunawayRegexTest(unittest.TestCase):
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
index 1b81b643..0290b7a1 100644
--- a/tests/test_qbasiclexer.py
+++ b/tests/test_qbasiclexer.py
@@ -12,7 +12,8 @@ import os
import unittest
from pygments.token import Token
-from pygments.lexers.qbasic import QBasicLexer
+from pygments.lexers.basic import QBasicLexer
+
class QBasicTest(unittest.TestCase):
def setUp(self):
@@ -40,4 +41,3 @@ class QBasicTest(unittest.TestCase):
(Token.Text, u'\n'),
]
self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py
new file mode 100644
index 00000000..f62a57ef
--- /dev/null
+++ b/tests/test_regexopt.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+"""
+ Tests for pygments.regexopt
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+import random
+import unittest
+import itertools
+
+from pygments.regexopt import regex_opt
+
+ALPHABET = ['a', 'b', 'c', 'd', 'e']
+
+try:
+ from itertools import combinations_with_replacement
+ N_TRIES = 15
+except ImportError:
+ # Python 2.6
+ def combinations_with_replacement(iterable, r):
+ pool = tuple(iterable)
+ n = len(pool)
+ for indices in itertools.product(range(n), repeat=r):
+ if sorted(indices) == list(indices):
+ yield tuple(pool[i] for i in indices)
+ N_TRIES = 9
+
+
+class RegexOptTestCase(unittest.TestCase):
+
+ def generate_keywordlist(self, length):
+ return [''.join(p) for p in
+ combinations_with_replacement(ALPHABET, length)]
+
+ def test_randomly(self):
+ # generate a list of all possible keywords of a certain length using
+ # a restricted alphabet, then choose some to match and make sure only
+ # those do
+ for n in range(3, N_TRIES):
+ kwlist = self.generate_keywordlist(n)
+ to_match = random.sample(kwlist,
+ random.randint(1, len(kwlist) - 1))
+ no_match = set(kwlist) - set(to_match)
+ rex = re.compile(regex_opt(to_match))
+ for w in to_match:
+ self.assertTrue(rex.match(w))
+ for w in no_match:
+ self.assertFalse(rex.match(w))
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
new file mode 100644
index 00000000..89991f74
--- /dev/null
+++ b/tests/test_ruby.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic RubyLexer Test
+ ~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Operator, Number, Text, Token
+from pygments.lexers import RubyLexer
+
+
+class RubyTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = RubyLexer()
+ self.maxDiff = None
+
+ def testRangeSyntax1(self):
+ fragment = u'1..3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Operator, u'..'),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testRangeSyntax2(self):
+ fragment = u'1...3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Operator, u'...'),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testRangeSyntax3(self):
+ fragment = u'1 .. 3\n'
+ tokens = [
+ (Number.Integer, u'1'),
+ (Text, u' '),
+ (Operator, u'..'),
+ (Text, u' '),
+ (Number.Integer, u'3'),
+ (Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testInterpolationNestedCurly(self):
+ fragment = (
+ u'"A#{ (3..5).group_by { |x| x/2}.map '
+ u'do |k,v| "#{k}" end.join }" + "Z"\n')
+
+ tokens = [
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'A'),
+ (Token.Literal.String.Interpol, u'#{'),
+ (Token.Text, u' '),
+ (Token.Punctuation, u'('),
+ (Token.Literal.Number.Integer, u'3'),
+ (Token.Operator, u'..'),
+ (Token.Literal.Number.Integer, u'5'),
+ (Token.Punctuation, u')'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'group_by'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Interpol, u'{'),
+ (Token.Text, u' '),
+ (Token.Operator, u'|'),
+ (Token.Name, u'x'),
+ (Token.Operator, u'|'),
+ (Token.Text, u' '),
+ (Token.Name, u'x'),
+ (Token.Operator, u'/'),
+ (Token.Literal.Number.Integer, u'2'),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'map'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'do'),
+ (Token.Text, u' '),
+ (Token.Operator, u'|'),
+ (Token.Name, u'k'),
+ (Token.Punctuation, u','),
+ (Token.Name, u'v'),
+ (Token.Operator, u'|'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Interpol, u'#{'),
+ (Token.Name, u'k'),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u' '),
+ (Token.Keyword, u'end'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'join'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u' '),
+ (Token.Operator, u'+'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'Z'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testOperatorMethods(self):
+ fragment = u'x.==4\n'
+ tokens = [
+ (Token.Name, u'x'),
+ (Token.Operator, u'.'),
+ (Token.Name.Operator, u'=='),
+ (Token.Literal.Number.Integer, u'4'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testEscapedBracestring(self):
+ fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
+ tokens = [
+ (Token.Name, u'str'),
+ (Token.Operator, u'.'),
+ (Token.Name, u'gsub'),
+ (Token.Punctuation, u'('),
+ (Token.Literal.String.Regex, u'%r{'),
+ (Token.Literal.String.Regex, u'\\\\'),
+ (Token.Literal.String.Regex, u'\\\\'),
+ (Token.Literal.String.Regex, u'}'),
+ (Token.Punctuation, u','),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Literal.String.Double, u'/'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Punctuation, u')'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
diff --git a/tests/test_shell.py b/tests/test_shell.py
new file mode 100644
index 00000000..eb09e8d1
--- /dev/null
+++ b/tests/test_shell.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+ Basic Shell Tests
+ ~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Token
+from pygments.lexers import BashLexer
+
+
+class BashTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = BashLexer()
+ self.maxDiff = None
+
+ def testCurlyNoEscapeAndQuotes(self):
+ fragment = u'echo "${a//["b"]/}"\n'
+ tokens = [
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.Literal.String.Double, u'"'),
+ (Token.String.Interpol, u'${'),
+ (Token.Name.Variable, u'a'),
+ (Token.Punctuation, u'//['),
+ (Token.Literal.String.Double, u'"b"'),
+ (Token.Punctuation, u']/'),
+ (Token.String.Interpol, u'}'),
+ (Token.Literal.String.Double, u'"'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testCurlyWithEscape(self):
+ fragment = u'echo ${a//[\\"]/}\n'
+ tokens = [
+ (Token.Name.Builtin, u'echo'),
+ (Token.Text, u' '),
+ (Token.String.Interpol, u'${'),
+ (Token.Name.Variable, u'a'),
+ (Token.Punctuation, u'//['),
+ (Token.Literal.String.Escape, u'\\"'),
+ (Token.Punctuation, u']/'),
+ (Token.String.Interpol, u'}'),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+ def testParsedSingle(self):
+ fragment = u"a=$'abc\\''\n"
+ tokens = [
+ (Token.Name.Variable, u'a'),
+ (Token.Operator, u'='),
+ (Token.Literal.String.Single, u"$'abc\\''"),
+ (Token.Text, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_smarty.py b/tests/test_smarty.py
new file mode 100644
index 00000000..20346afd
--- /dev/null
+++ b/tests/test_smarty.py
@@ -0,0 +1,40 @@
+# -*- coding: utf-8 -*-
+"""
+    Basic SmartyLexer Test
+    ~~~~~~~~~~~~~~~~~~~~~~
+
+ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.token import Operator, Number, Text, Token
+from pygments.lexers import SmartyLexer
+
+
+class SmartyTest(unittest.TestCase):
+
+ def setUp(self):
+ self.lexer = SmartyLexer()
+
+ def testNestedCurly(self):
+ fragment = u'{templateFunction param={anotherFunction} param2=$something}\n'
+ tokens = [
+ (Token.Comment.Preproc, u'{'),
+ (Token.Name.Function, u'templateFunction'),
+ (Token.Text, u' '),
+ (Token.Name.Attribute, u'param'),
+ (Token.Operator, u'='),
+ (Token.Comment.Preproc, u'{'),
+ (Token.Name.Attribute, u'anotherFunction'),
+ (Token.Comment.Preproc, u'}'),
+ (Token.Text, u' '),
+ (Token.Name.Attribute, u'param2'),
+ (Token.Operator, u'='),
+ (Token.Name.Variable, u'$something'),
+ (Token.Comment.Preproc, u'}'),
+ (Token.Other, u'\n'),
+ ]
+ self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
diff --git a/tests/test_string_asserts.py b/tests/test_string_asserts.py
index 0beed15c..90d81d67 100644
--- a/tests/test_string_asserts.py
+++ b/tests/test_string_asserts.py
@@ -17,23 +17,19 @@ class TestStringTests(StringTests, unittest.TestCase):
# @unittest.expectedFailure not supported by nose
def test_startswith_incorrect(self):
- with self.assertRaises(AssertionError):
- self.assertStartsWith("AAA", "B")
+ self.assertRaises(AssertionError, self.assertStartsWith, "AAA", "B")
# @unittest.expectedFailure not supported by nose
def test_startswith_short(self):
- with self.assertRaises(AssertionError):
- self.assertStartsWith("A", "AA")
+ self.assertRaises(AssertionError, self.assertStartsWith, "A", "AA")
def test_endswith_correct(self):
self.assertEndsWith("AAA", "A")
# @unittest.expectedFailure not supported by nose
def test_endswith_incorrect(self):
- with self.assertRaises(AssertionError):
- self.assertEndsWith("AAA", "B")
+ self.assertRaises(AssertionError, self.assertEndsWith, "AAA", "B")
# @unittest.expectedFailure not supported by nose
def test_endswith_short(self):
- with self.assertRaises(AssertionError):
- self.assertEndsWith("A", "AA")
+ self.assertRaises(AssertionError, self.assertEndsWith, "A", "AA")
diff --git a/tests/test_util.py b/tests/test_util.py
index 59ecf14f..7bf03409 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -123,7 +123,7 @@ class UtilTest(unittest.TestCase):
r = re.compile(util.unirange(0x10000, 0x20000))
m = r.match(first_non_bmp)
self.assertTrue(m)
- self.assertEquals(m.end(), len(first_non_bmp))
+ self.assertEqual(m.end(), len(first_non_bmp))
self.assertFalse(r.match(u'\uffff'))
self.assertFalse(r.match(u'xxx'))
# Tests that end is inclusive
@@ -132,4 +132,12 @@ class UtilTest(unittest.TestCase):
# build
m = r.match(first_non_bmp * 2)
self.assertTrue(m)
- self.assertEquals(m.end(), len(first_non_bmp) * 2)
+ self.assertEqual(m.end(), len(first_non_bmp) * 2)
+
+ def test_format_lines(self):
+ lst = ['cat', 'dog']
+ output = util.format_lines('var', lst)
+ d = {}
+ exec(output, d)
+ self.assertTrue(isinstance(d['var'], tuple))
+ self.assertEqual(('cat', 'dog'), d['var'])