summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authortk0miya <i.tkomiya@gmail.com>2014-09-23 17:58:02 +0900
committertk0miya <i.tkomiya@gmail.com>2014-09-23 17:58:02 +0900
commit3006fb5c08b0a0c7de932937a6a2b9de3be1d071 (patch)
tree20408eac5357958513d6ec28552b600c4bec0f6f
parent212852abbae31fc11d50b91a6e7503ae62fbc31f (diff)
parentbd577dae35fb9208ecf10a1b0244ca085ca9b3ad (diff)
downloadsphinx-3006fb5c08b0a0c7de932937a6a2b9de3be1d071.tar.gz
Merge with default
-rw-r--r--.hgignore4
-rw-r--r--CHANGES303
-rw-r--r--Makefile4
-rw-r--r--README.rst10
-rw-r--r--doc/_templates/index.html3
-rw-r--r--doc/_templates/indexsidebar.html2
-rw-r--r--doc/authors.rst18
-rw-r--r--doc/changes.rst2
-rw-r--r--doc/conf.py2
-rw-r--r--doc/config.rst2
-rw-r--r--doc/devguide.rst5
-rw-r--r--doc/extdev/appapi.rst33
-rw-r--r--doc/extdev/index.rst27
-rw-r--r--doc/extdev/tutorial.rst2
-rw-r--r--doc/markup/code.rst22
-rw-r--r--doc/markup/inline.rst51
-rw-r--r--sphinx/addnodes.py28
-rw-r--r--sphinx/apidoc.py8
-rw-r--r--sphinx/application.py88
-rw-r--r--sphinx/builders/__init__.py176
-rw-r--r--sphinx/builders/changes.py3
-rw-r--r--sphinx/builders/epub.py4
-rw-r--r--sphinx/builders/gettext.py6
-rw-r--r--sphinx/builders/html.py63
-rw-r--r--sphinx/cmdline.py313
-rw-r--r--sphinx/directives/__init__.py32
-rw-r--r--sphinx/directives/code.py3
-rw-r--r--sphinx/domains/__init__.py27
-rw-r--r--sphinx/domains/c.py23
-rw-r--r--sphinx/domains/cpp.py120
-rw-r--r--sphinx/domains/javascript.py24
-rw-r--r--sphinx/domains/python.py86
-rw-r--r--sphinx/domains/rst.py19
-rw-r--r--sphinx/domains/std.py115
-rw-r--r--sphinx/environment.py471
-rw-r--r--sphinx/errors.py13
-rw-r--r--sphinx/ext/autodoc.py69
-rw-r--r--sphinx/ext/autosummary/__init__.py6
-rw-r--r--sphinx/ext/coverage.py2
-rw-r--r--sphinx/ext/doctest.py32
-rw-r--r--sphinx/ext/extlinks.py2
-rw-r--r--sphinx/ext/graphviz.py2
-rw-r--r--sphinx/ext/ifconfig.py2
-rw-r--r--sphinx/ext/inheritance_diagram.py15
-rw-r--r--sphinx/ext/intersphinx.py26
-rw-r--r--sphinx/ext/jsmath.py2
-rw-r--r--sphinx/ext/linkcode.py5
-rw-r--r--sphinx/ext/mathjax.py2
-rw-r--r--sphinx/ext/napoleon/__init__.py2
-rw-r--r--sphinx/ext/pngmath.py2
-rw-r--r--sphinx/ext/todo.py11
-rw-r--r--sphinx/ext/viewcode.py37
-rw-r--r--sphinx/highlighting.py63
-rw-r--r--sphinx/quickstart.py23
-rw-r--r--sphinx/roles.py45
-rw-r--r--sphinx/themes/basic/static/jquery.js2
-rw-r--r--sphinx/themes/bizstyle/static/css3-mediaqueries_src.js2208
-rw-r--r--sphinx/theming.py4
-rw-r--r--sphinx/transforms.py26
-rw-r--r--sphinx/util/__init__.py28
-rw-r--r--sphinx/util/i18n.py178
-rw-r--r--sphinx/util/osutil.py6
-rw-r--r--sphinx/util/parallel.py131
-rwxr-xr-xtests/path.py6
-rw-r--r--tests/root/conf.py48
-rw-r--r--tests/root/contents.txt15
-rw-r--r--tests/root/markup.txt23
-rw-r--r--tests/root/objects.txt4
-rw-r--r--tests/root/undecodable.txt3
-rw-r--r--tests/roots/test-api-set-translator/conf.py160
-rw-r--r--tests/roots/test-api-set-translator/index.rst4
-rw-r--r--tests/roots/test-api-set-translator/nonext/conf.py18
-rw-r--r--tests/roots/test-api-set-translator/translator.py12
-rw-r--r--tests/roots/test-autosummary/conf.py4
-rw-r--r--tests/roots/test-autosummary/contents.rst13
-rw-r--r--tests/roots/test-autosummary/sphinx.rst (renamed from tests/root/autosummary.txt)0
-rw-r--r--tests/roots/test-build-text/conf.py2
-rw-r--r--tests/roots/test-build-text/contents.txt8
-rw-r--r--tests/roots/test-build-text/lineblock.txt6
-rw-r--r--tests/roots/test-build-text/maxwidth.txt6
-rw-r--r--tests/roots/test-build-text/nonascii_maxwidth.txt5
-rw-r--r--tests/roots/test-build-text/nonascii_table.txt7
-rw-r--r--tests/roots/test-build-text/nonascii_title.txt2
-rw-r--r--tests/roots/test-build-text/table.txt7
-rw-r--r--tests/roots/test-circular/conf.py0
-rw-r--r--tests/roots/test-circular/contents.rst4
-rw-r--r--tests/roots/test-circular/sub.rst3
-rw-r--r--tests/roots/test-directive-code/dedent.rst33
-rw-r--r--tests/roots/test-directive-code/dedent_code.rst53
-rw-r--r--tests/roots/test-doctest/conf.py5
-rw-r--r--tests/roots/test-doctest/doctest.txt (renamed from tests/root/doctest.txt)4
-rw-r--r--tests/roots/test-docutilsconf/contents.txt30
-rw-r--r--tests/roots/test-ext-viewcode/conf.py32
-rw-r--r--tests/roots/test-ext-viewcode/index.rst63
-rw-r--r--tests/roots/test-ext-viewcode/objects.rst169
-rw-r--r--tests/roots/test-ext-viewcode/spam/__init__.py14
-rw-r--r--tests/roots/test-ext-viewcode/spam/mod1.py30
-rw-r--r--tests/roots/test-ext-viewcode/spam/mod2.py30
-rw-r--r--tests/roots/test-intl/refs_python_domain.txt30
-rw-r--r--tests/roots/test-intl/subdir/contents.txt4
-rw-r--r--tests/roots/test-numbered-circular/conf.py0
-rw-r--r--tests/roots/test-numbered-circular/contents.rst5
-rw-r--r--tests/roots/test-numbered-circular/sub.rst3
-rw-r--r--tests/roots/test-setup/doc/contents.txt10
-rw-r--r--tests/roots/test-templating/autosummary_templating.txt8
-rw-r--r--tests/roots/test-versioning/added.txt (renamed from tests/root/versioning/added.txt)0
-rw-r--r--tests/roots/test-versioning/conf.py3
-rw-r--r--tests/roots/test-versioning/deleted.txt (renamed from tests/root/versioning/deleted.txt)0
-rw-r--r--tests/roots/test-versioning/deleted_end.txt (renamed from tests/root/versioning/deleted_end.txt)0
-rw-r--r--tests/roots/test-versioning/index.txt (renamed from tests/root/versioning/index.txt)0
-rw-r--r--tests/roots/test-versioning/insert.txt (renamed from tests/root/versioning/insert.txt)0
-rw-r--r--tests/roots/test-versioning/insert_beginning.txt (renamed from tests/root/versioning/insert_beginning.txt)0
-rw-r--r--tests/roots/test-versioning/insert_similar.txt (renamed from tests/root/versioning/insert_similar.txt)0
-rw-r--r--tests/roots/test-versioning/modified.txt (renamed from tests/root/versioning/modified.txt)0
-rw-r--r--tests/roots/test-versioning/original.txt (renamed from tests/root/versioning/original.txt)0
-rwxr-xr-xtests/run.py74
-rw-r--r--tests/test_api_translator.py135
-rw-r--r--tests/test_application.py89
-rw-r--r--tests/test_autodoc.py28
-rw-r--r--tests/test_build.py136
-rw-r--r--tests/test_build_gettext.py63
-rw-r--r--tests/test_build_html.py159
-rw-r--r--tests/test_build_latex.py24
-rw-r--r--tests/test_build_texinfo.py24
-rw-r--r--tests/test_build_text.py120
-rw-r--r--tests/test_catalogs.py (renamed from tests/test_build_base.py)155
-rw-r--r--tests/test_config.py6
-rw-r--r--tests/test_directive_code.py282
-rw-r--r--tests/test_directive_only.py10
-rw-r--r--tests/test_docutilsconf.py85
-rw-r--r--tests/test_domain_py.py (renamed from tests/test_py_domain.py)2
-rw-r--r--tests/test_domain_rst.py (renamed from tests/test_rst_domain.py)0
-rw-r--r--tests/test_domain_std.py160
-rw-r--r--tests/test_environment.py (renamed from tests/test_env.py)56
-rw-r--r--tests/test_ext_autosummary.py (renamed from tests/test_autosummary.py)63
-rw-r--r--tests/test_ext_coverage.py (renamed from tests/test_coverage.py)2
-rw-r--r--tests/test_ext_doctest.py (renamed from tests/test_doctest.py)16
-rw-r--r--tests/test_ext_intersphinx.py (renamed from tests/test_intersphinx.py)22
-rw-r--r--tests/test_ext_napoleon.py (renamed from tests/test_napoleon.py)0
-rw-r--r--tests/test_ext_napoleon_docstring.py (renamed from tests/test_napoleon_docstring.py)0
-rw-r--r--tests/test_ext_napoleon_iterators.py (renamed from tests/test_napoleon_iterators.py)0
-rw-r--r--tests/test_ext_viewcode.py87
-rw-r--r--tests/test_footnote.py37
-rw-r--r--tests/test_highlighting.py14
-rw-r--r--tests/test_i18n.py2
-rw-r--r--tests/test_intl.py798
-rw-r--r--tests/test_linkcode.py25
-rw-r--r--tests/test_markup.py12
-rw-r--r--tests/test_metadata.py22
-rw-r--r--tests/test_quickstart.py26
-rw-r--r--tests/test_searchadapters.py23
-rw-r--r--tests/test_setup_command.py9
-rw-r--r--tests/test_templating.py23
-rw-r--r--tests/test_theming.py15
-rw-r--r--tests/test_util_i18n.py326
-rw-r--r--tests/test_util_nodes.py242
-rw-r--r--tests/test_versioning.py36
-rw-r--r--tests/test_websupport.py27
-rw-r--r--tests/util.py170
-rw-r--r--tox.ini4
-rwxr-xr-xutils/convert.py43
161 files changed, 5048 insertions, 4533 deletions
diff --git a/.hgignore b/.hgignore
index 45ecebc3..16d29fcf 100644
--- a/.hgignore
+++ b/.hgignore
@@ -7,15 +7,15 @@
^build/
^dist/
^tests/.coverage
+^tests/build/
^sphinx/pycode/Grammar.*pickle
^Sphinx.egg-info/
^doc/_build/
^TAGS
+^\.tags
^\.ropeproject/
^env/
\.DS_Store$
~$
^utils/.*3\.py$
^distribute-
-^tests/root/_build/*
-^tests/root/generated/*
diff --git a/CHANGES b/CHANGES
index 6279c446..51b3b5db 100644
--- a/CHANGES
+++ b/CHANGES
@@ -12,12 +12,14 @@ Incompatible changes
* A new node, ``sphinx.addnodes.literal_strong``, has been added, for text that
should appear literally (i.e. no smart quotes) in strong font. Custom writers
will have to be adapted to handle this node.
-* PR#269, #1476: replace `<tt>` tag by `<code>`. User customized stylesheets
- should be updated If the css contain some styles for `<tt>` tag.
+* PR#269, #1476: replace ``<tt>`` tag by ``<code>``. User customized stylesheets
+  should be updated If the css contain some styles for ``<tt>`` tag.
Thanks to Takeshi Komiya.
-* #1543: :confval:`templates_path` is automatically added to
- :confval:`exclude_patterns` to avoid reading autosummary rst templates in the
+* #1543: `templates_path` is automatically added to
+ `exclude_patterns` to avoid reading autosummary rst templates in the
templates directory.
+* Custom domains should implement the new `Domain.resolve_any_xref`
+ method to make the `any` role work properly.
Features added
--------------
@@ -26,22 +28,31 @@ Features added
* Add support for docutils 0.12
* Added ``sphinx.ext.napoleon`` extension for NumPy and Google style docstring
support.
+* Added support for parallel reading (parsing) of source files with the
+ `sphinx-build -j` option. Third-party extensions will need to be checked for
+ compatibility and may need to be adapted if they store information in the
+ build environment object. See `env-merge-info`.
+* Added the `any` role that can be used to find a cross-reference of
+ *any* type in *any* domain. Custom domains should implement the new
+ `Domain.resolve_any_xref` method to make this work properly.
* Exception logs now contain the last 10 messages emitted by Sphinx.
* Added support for extension versions (a string returned by ``setup()``, these
can be shown in the traceback log files). Version requirements for extensions
- can be specified in projects using the new :confval:`needs_extensions` config
+ can be specified in projects using the new `needs_extensions` config
value.
+* Changing the default role within a document with the :dudir:`default-role`
+ directive is now supported.
* PR#214: Added stemming support for 14 languages, so that the built-in document
search can now handle these. Thanks to Shibukawa Yoshiki.
* PR#202: Allow "." and "~" prefixed references in ``:param:`` doc fields
for Python.
-* PR#184: Add :confval:`autodoc_mock_imports`, allowing to mock imports of
+* PR#184: Add `autodoc_mock_imports`, allowing to mock imports of
external modules that need not be present when autodocumenting.
* #925: Allow list-typed config values to be provided on the command line,
like ``-D key=val1,val2``.
-* #668: Allow line numbering of ``code-block`` and ``literalinclude`` directives
+* #668: Allow line numbering of `code-block` and `literalinclude` directives
to start at an arbitrary line number, with a new ``lineno-start`` option.
-* PR#172, PR#266: The :rst:dir:`code-block` and :rst:dir:`literalinclude`
+* PR#172, PR#266: The `code-block` and `literalinclude`
directives now can have a ``caption`` option that shows a filename before the
code in the output. Thanks to Nasimul Haque, Takeshi Komiya.
* Prompt for the document language in sphinx-quickstart.
@@ -56,135 +67,43 @@ Features added
for the ids defined on the node. Thanks to Olivier Heurtier.
* PR#229: Allow registration of other translators. Thanks to Russell Sim.
* Add app.set_translator() API to register or override a Docutils translator
- class like :confval:`html_translator_class`.
+ class like `html_translator_class`.
* PR#267, #1134: add 'diff' parameter to literalinclude. Thanks to Richard Wall
and WAKAYAMA shirou.
* PR#272: Added 'bizstyle' theme. Thanks to Shoji KUMAGAI.
* Automatically compile ``*.mo`` files from ``*.po`` files when
- :confval:`gettext_auto_build` is True (default) and ``*.po`` is newer than
+ `gettext_auto_build` is True (default) and ``*.po`` is newer than
``*.mo`` file.
-* #623: :mod:`~sphinx.ext.viewcode` supports imported function/class aliases.
-* PR#275: :mod:`~sphinx.ext.intersphinx` supports multiple target for the
+* #623: `sphinx.ext.viewcode` supports imported function/class aliases.
+* PR#275: `sphinx.ext.intersphinx` supports multiple target for the
inventory. Thanks to Brigitta Sipocz.
+* PR#261: Added the `env-before-read-docs` event that can be connected to modify
+ the order of documents before they are read by the environment.
+* #1284: Program options documented with :rst:dir:`option` can now start with
+ ``+``.
+* PR#291: The caption of :rst:dir:`code-block` is recognised as a title of ref
+ target. Thanks to Takeshi Komiya.
Bugs fixed
----------
-* #1568: fix a crash when a "centered" directive contains a reference.
+* #1568: Fix a crash when a "centered" directive contains a reference.
* #1563: :meth:`~sphinx.application.Sphinx.add_search_language` raises
AssertionError for correct type of argument. Thanks to rikoman.
* #1174: Fix smart quotes being applied inside roles like :rst:role:`program` or
- :rst:role:`makevar`.
-* #1335: Fix autosummary template overloading with exclamation prefix like
- ``{% extends "!autosummary/class.rst" %}`` cause infinite recursive function
- call. This was caused by PR#181.
-* #1337: Fix autodoc with ``autoclass_content="both"`` uses useless
- ``object.__init__`` docstring when class does not have ``__init__``.
- This was caused by a change for #1138.
-* #1340: Can't search alphabetical words on the HTML quick search generated
- with language='ja'.
-* #1319: Do not crash if the :confval:`html_logo` file does not exist.
-* #603: Do not use the HTML-ized title for building the search index (that
- resulted in "literal" being found on every page with a literal in the
- title).
-* #751: Allow production lists longer than a page in LaTeX by using longtable.
-* #764: Always look for stopwords lowercased in JS search.
-* #814: autodoc: Guard against strange type objects that don't have
- ``__bases__``.
-* #932: autodoc: Do not crash if ``__doc__`` is not a string.
-* #933: Do not crash if an :rst:role:`option` value is malformed (contains
- spaces but no option name).
-* #908: On Python 3, handle error messages from LaTeX correctly in the pngmath
- extension.
-* #943: In autosummary, recognize "first sentences" to pull from the docstring
- if they contain uppercase letters.
-* #923: Take the entire LaTeX document into account when caching
- pngmath-generated images. This rebuilds them correctly when
- :confval:`pngmath_latex_preamble` changes.
-* #901: Emit a warning when using docutils' new "math" markup without a Sphinx
- math extension active.
-* #845: In code blocks, when the selected lexer fails, display line numbers
- nevertheless if configured.
-* #929: Support parsed-literal blocks in LaTeX output correctly.
-* #949: Update the tabulary.sty packed with Sphinx.
-* #1050: Add anonymous labels into ``objects.inv`` to be referenced via
- :mod:`~sphinx.ext.intersphinx`.
-* #1095: Fix print-media stylesheet being included always in the "scrolls"
- theme.
-* #1085: Fix current classname not getting set if class description has
- ``:noindex:`` set.
-* #1181: Report option errors in autodoc directives more gracefully.
-* #1155: Fix autodocumenting C-defined methods as attributes in Python 3.
-* #1233: Allow finding both Python classes and exceptions with the "class" and
- "exc" roles in intersphinx.
-* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure`
- directive as documented by docutils.
-* #1152: Fix pycode parsing errors of Python 3 code by including two grammar
- versions for Python 2 and 3, and loading the appropriate version for the
- running Python version.
-* #1017: Be helpful and tell the user when the argument to :rst:dir:`option`
- does not match the required format.
-* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to
- remove the store environment for changes to have effect.
-* #1072: In the JS search, fix issues searching for upper-cased words by
- lowercasing words before stemming.
-* #1299: Make behavior of the :rst:dir:`math` directive more consistent and
- avoid producing empty environments in LaTeX output.
-* #1308: Strip HTML tags from the content of "raw" nodes before feeding it
- to the search indexer.
-* #1249: Fix duplicate LaTeX page numbering for manual documents.
-* #1292: In the linkchecker, retry HEAD requests when denied by HTTP 405.
- Also make the redirect code apparent and tweak the output a bit to be
- more obvious.
-* #1285: Avoid name clashes between C domain objects and section titles.
-* #848: Always take the newest code in incremental rebuilds with the
- :mod:`sphinx.ext.viewcode` extension.
-* #979, #1266: Fix exclude handling in ``sphinx-apidoc``.
-* #1302: Fix regression in :mod:`sphinx.ext.inheritance_diagram` when
- documenting classes that can't be pickled.
-* #1316: Remove hard-coded ``font-face`` resources from epub theme.
-* #1329: Fix traceback with empty translation msgstr in .po files.
-* #1300: Fix references not working in translated documents in some instances.
-* #1283: Fix a bug in the detection of changed files that would try to access
- doctrees of deleted documents.
-* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the
- :confval:`html_static_path`.
-* #1323: Fix emitting empty ``<ul>`` tags in the HTML writer, which is not
- valid HTML.
-* #1147: Don't emit a sidebar search box in the "singlehtml" builder.
-* PR#211: When checking for existence of the :confval:`html_logo` file, check
- the full relative path and not the basename.
-* #1357: Option names documented by :rst:dir:`option` are now again allowed to
- not start with a dash or slash, and referencing them will work correctly.
-* #1358: Fix handling of image paths outside of the source directory when using
- the "wildcard" style reference.
-* #1374: Fix for autosummary generating overly-long summaries if first line
- doesn't end with a period.
-* #1391: Actually prevent using "pngmath" and "mathjax" extensions at the same
- time in sphinx-quickstart.
-* #1386: Fix bug preventing more than one theme being added by the entry point
- mechanism.
-* #1370: Ignore "toctree" nodes in text writer, instead of raising.
-* #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present.
-* #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field
- interface/behavior for `item` argument usage.
-* #1363: Fix i18n: missing python domain's cross-references with currentmodule
- directive or currentclass directive.
-* #1419: Generated i18n sphinx.js files are missing message catalog entries
- from '.js_t' and '.html'. The issue was introduced in Sphinx 1.1.
-* #636: Keep straight single quotes in literal blocks in the LaTeX build.
+ `makevar`.
* PR#235: comment db schema of websupport lacked a length of the node_id field.
Thanks to solos.
* #1466,PR#241: Fix failure of the cpp domain parser to parse C++11
"variadic templates" declarations. Thanks to Victor Zverovich.
-* #1459,PR#244: Fix default mathjax js path point to `http://` that cause
+* #1459,PR#244: Fix default mathjax js path point to ``http://`` that cause
mixed-content error on HTTPS server. Thanks to sbrandtb and robo9k.
* PR#157: autodoc remove spurious signatures from @property decorated
attributes. Thanks to David Ham.
* PR#159: Add coverage targets to quickstart generated Makefile and make.bat.
Thanks to Matthias Troffaes.
* #1251: When specifying toctree :numbered: option and :tocdepth: metadata,
- sub section number that is larger depth than `:tocdepth:` is shrinked.
+ sub section number that is larger depth than ``:tocdepth:`` is shrunk.
* PR#260: Encode underscore in citation labels for latex export. Thanks to
Lennart Fricke.
* PR#264: Fix could not resolve xref for figure node with :name: option.
@@ -208,8 +127,8 @@ Bugs fixed
qualified name. It should be rather easy to change this behaviour and
potentially index by namespaces/classes as well.
-* PR#258, #939: Add dedent option for :rst:dir:`code-block` and
- :rst:dir:`literal-include`. Thanks to Zafar Siddiqui.
+* PR#258, #939: Add dedent option for `code-block` and
+ `literalinclude`. Thanks to Zafar Siddiqui.
* PR#268: Fix numbering section does not work at singlehtml mode. It still
ad-hoc fix because there is a issue that section IDs are conflicted.
Thanks to Takeshi Komiya.
@@ -217,20 +136,18 @@ Bugs fixed
Takeshi Komiya.
* PR#274: Set its URL as a default title value if URL appears in toctree.
Thanks to Takeshi Komiya.
-* PR#276, #1381: :rst:role:`rfc` and :rst:role:`pep` roles support custom link
+* PR#276, #1381: `rfc` and `pep` roles support custom link
text. Thanks to Takeshi Komiya.
* PR#277, #1513: highlights for function pointers in argument list of
- :rst:dir:`c:function`. Thanks to Takeshi Komiya.
+ `c:function`. Thanks to Takeshi Komiya.
* PR#278: Fix section entries were shown twice if toctree has been put under
only directive. Thanks to Takeshi Komiya.
-* #1547: pgen2 tokenizer doesn't recognize `...` literal (Ellipsis for py3).
+* #1547: pgen2 tokenizer doesn't recognize ``...`` literal (Ellipsis for py3).
Documentation
-------------
* Add clarification about the syntax of tags. (:file:`doc/markup/misc.rst`)
-* #1325: Added a "Intersphinx" tutorial section. (:file:`doc/tutorial.rst`)
-* Extended the :ref:`documentation about building extensions <dev-extensions>`.
Release 1.2.3 (released Sep 1, 2014)
@@ -239,7 +156,7 @@ Release 1.2.3 (released Sep 1, 2014)
Features added
--------------
-* #1518: `sphinx-apidoc` command now have a `--version` option to show version
+* #1518: ``sphinx-apidoc`` command now has a ``--version`` option to show version
information and exit
* New locales: Hebrew, European Portuguese, Vietnamese.
@@ -257,14 +174,14 @@ Bugs fixed
Thanks to Jorge_C.
* #1467: Exception on Python3 if nonexistent method is specified by automethod
* #1441: autosummary can't handle nested classes correctly.
-* #1499: With non-callable `setup` in a conf.py, now sphinx-build emits
- user-friendly error message.
+* #1499: With non-callable ``setup`` in a conf.py, now sphinx-build emits
+ a user-friendly error message.
* #1502: In autodoc, fix display of parameter defaults containing backslashes.
* #1226: autodoc, autosummary: importing setup.py by automodule will invoke
- setup process and execute `sys.exit()`. Now sphinx avoids SystemExit
+ setup process and execute ``sys.exit()``. Now sphinx avoids SystemExit
exception and emits warnings without unexpected termination.
* #1503: py:function directive generate incorrectly signature when specifying
- a default parameter with an empty list `[]`. Thanks to Geert Jansen.
+ a default parameter with an empty list ``[]``. Thanks to Geert Jansen.
* #1508: Non-ASCII filename raise exception on make singlehtml, latex, man,
texinfo and changes.
* #1531: On Python3 environment, docutils.conf with 'source_link=true' in the
@@ -274,11 +191,11 @@ Bugs fixed
* PR#281, PR#282, #1509: TODO extension not compatible with websupport. Thanks
to Takeshi Komiya.
* #1477: gettext does not extract nodes.line in a table or list.
-* #1544: `make text` generate wrong table when it has empty table cells.
+* #1544: ``make text`` generates wrong table when it has empty table cells.
* #1522: Footnotes from table get displayed twice in LaTeX. This problem has
been appeared from Sphinx-1.2.1 by #949.
* #508: Sphinx every time exit with zero when is invoked from setup.py command.
- ex. `python setup.py build_sphinx -b doctest` return zero even if doctest
+ ex. ``python setup.py build_sphinx -b doctest`` return zero even if doctest
failed.
Release 1.2.2 (released Mar 2, 2014)
@@ -287,7 +204,7 @@ Release 1.2.2 (released Mar 2, 2014)
Bugs fixed
----------
-* PR#211: When checking for existence of the :confval:`html_logo` file, check
+* PR#211: When checking for existence of the `html_logo` file, check
the full relative path and not the basename.
* PR#212: Fix traceback with autodoc and ``__init__`` methods without docstring.
* PR#213: Fix a missing import in the setup command.
@@ -305,7 +222,7 @@ Bugs fixed
* #1370: Ignore "toctree" nodes in text writer, instead of raising.
* #1364: Fix 'make gettext' fails when the '.. todolist::' directive is present.
* #1367: Fix a change of PR#96 that break sphinx.util.docfields.Field.make_field
- interface/behavior for `item` argument usage.
+ interface/behavior for ``item`` argument usage.
Documentation
-------------
@@ -327,7 +244,7 @@ Bugs fixed
This was caused by a change for #1138.
* #1340: Can't search alphabetical words on the HTML quick search generated
with language='ja'.
-* #1319: Do not crash if the :confval:`html_logo` file does not exist.
+* #1319: Do not crash if the `html_logo` file does not exist.
* #603: Do not use the HTML-ized title for building the search index (that
resulted in "literal" being found on every page with a literal in the
title).
@@ -344,7 +261,7 @@ Bugs fixed
if they contain uppercase letters.
* #923: Take the entire LaTeX document into account when caching
pngmath-generated images. This rebuilds them correctly when
- :confval:`pngmath_latex_preamble` changes.
+ `pngmath_latex_preamble` changes.
* #901: Emit a warning when using docutils' new "math" markup without a Sphinx
math extension active.
* #845: In code blocks, when the selected lexer fails, display line numbers
@@ -361,14 +278,14 @@ Bugs fixed
* #1155: Fix autodocumenting C-defined methods as attributes in Python 3.
* #1233: Allow finding both Python classes and exceptions with the "class" and
"exc" roles in intersphinx.
-* #1198: Allow "image" for the "figwidth" option of the :rst:dir:`figure`
+* #1198: Allow "image" for the "figwidth" option of the :dudir:`figure`
directive as documented by docutils.
* #1152: Fix pycode parsing errors of Python 3 code by including two grammar
versions for Python 2 and 3, and loading the appropriate version for the
running Python version.
* #1017: Be helpful and tell the user when the argument to :rst:dir:`option`
does not match the required format.
-* #1345: Fix two bugs with :confval:`nitpick_ignore`; now you don't have to
+* #1345: Fix two bugs with `nitpick_ignore`; now you don't have to
remove the store environment for changes to have effect.
* #1072: In the JS search, fix issues searching for upper-cased words by
lowercasing words before stemming.
@@ -391,8 +308,8 @@ Bugs fixed
* #1300: Fix references not working in translated documents in some instances.
* #1283: Fix a bug in the detection of changed files that would try to access
doctrees of deleted documents.
-* #1330: Fix :confval:`exclude_patterns` behavior with subdirectories in the
- :confval:`html_static_path`.
+* #1330: Fix `exclude_patterns` behavior with subdirectories in the
+ `html_static_path`.
* #1323: Fix emitting empty ``<ul>`` tags in the HTML writer, which is not
valid HTML.
* #1147: Don't emit a sidebar search box in the "singlehtml" builder.
@@ -424,7 +341,7 @@ Bugs fixed
* Restore ``versionmodified`` CSS class for versionadded/changed and deprecated
directives.
-* PR#181: Fix `html_theme_path=['.']` is a trigger of rebuild all documents
+* PR#181: Fix ``html_theme_path = ['.']`` is a trigger of rebuild all documents
always (This change keeps the current "theme changes cause a rebuild"
feature).
@@ -491,7 +408,7 @@ Features added
* Support docutils.conf 'writers' and 'html4css1 writer' section in the HTML
writer. The latex, manpage and texinfo writers also support their respective
'writers' sections.
-* The new :confval:`html_extra_path` config value allows to specify directories
+* The new `html_extra_path` config value allows to specify directories
with files that should be copied directly to the HTML output directory.
* Autodoc directives for module data and attributes now support an
``annotation`` option, so that the default display of the data/attribute
@@ -562,10 +479,10 @@ Incompatible changes
* Removed ``sphinx.util.compat.directive_dwim()`` and
``sphinx.roles.xfileref_role()`` which were deprecated since version 1.0.
-* PR#122: the files given in :confval:`latex_additional_files` now override TeX
+* PR#122: the files given in `latex_additional_files` now override TeX
files included by Sphinx, such as ``sphinx.sty``.
-* PR#124: the node generated by :rst:dir:`versionadded`,
- :rst:dir:`versionchanged` and :rst:dir:`deprecated` directives now includes
+* PR#124: the node generated by `versionadded`,
+ `versionchanged` and `deprecated` directives now includes
all added markup (such as "New in version X") as child nodes, and no
additional text must be generated by writers.
* PR#99: the :rst:dir:`seealso` directive now generates admonition nodes instead
@@ -619,7 +536,7 @@ Features added
asterisks ("*").
- The default value for the ``paragraphindent`` has been changed from 2 to 0
meaning that paragraphs are no longer indented by default.
- - #1110: A new configuration value :confval:`texinfo_no_detailmenu` has been
+ - #1110: A new configuration value `texinfo_no_detailmenu` has been
added for controlling whether a ``@detailmenu`` is added in the "Top"
node's menu.
- Detailed menus are no longer created except for the "Top" node.
@@ -628,16 +545,16 @@ Features added
* LaTeX builder:
- - PR#115: Add ``'transition'`` item in :confval:`latex_elements` for
+ - PR#115: Add ``'transition'`` item in `latex_elements` for
customizing how transitions are displayed. Thanks to Jeff Klukas.
- PR#114: The LaTeX writer now includes the "cmap" package by default. The
- ``'cmappkg'`` item in :confval:`latex_elements` can be used to control this.
+ ``'cmappkg'`` item in `latex_elements` can be used to control this.
Thanks to Dmitry Shachnev.
- - The ``'fontpkg'`` item in :confval:`latex_elements` now defaults to ``''``
- when the :confval:`language` uses the Cyrillic script. Suggested by Dmitry
+ - The ``'fontpkg'`` item in `latex_elements` now defaults to ``''``
+ when the `language` uses the Cyrillic script. Suggested by Dmitry
Shachnev.
- - The :confval:`latex_documents`, :confval:`texinfo_documents`, and
- :confval:`man_pages` configuration values will be set to default values based
+ - The `latex_documents`, `texinfo_documents`, and
+ `man_pages` configuration values will be set to default values based
on the :confval:`master_doc` if not explicitly set in :file:`conf.py`.
Previously, if these values were not set, no output would be generated by
their respective builders.
@@ -655,13 +572,13 @@ Features added
- Added the Docutils-native XML and pseudo-XML builders. See
:class:`XMLBuilder` and :class:`PseudoXMLBuilder`.
- PR#45: The linkcheck builder now checks ``#anchor``\ s for existence.
- - PR#123, #1106: Add :confval:`epub_use_index` configuration value. If
- provided, it will be used instead of :confval:`html_use_index` for epub
+ - PR#123, #1106: Add `epub_use_index` configuration value. If
+ provided, it will be used instead of `html_use_index` for epub
builder.
- - PR#126: Add :confval:`epub_tocscope` configuration value. The setting
+ - PR#126: Add `epub_tocscope` configuration value. The setting
controls the generation of the epub toc. The user can now also include
hidden toc entries.
- - PR#112: Add :confval:`epub_show_urls` configuration value.
+ - PR#112: Add `epub_show_urls` configuration value.
* Extensions:
@@ -729,7 +646,7 @@ Bugs fixed
* #1127: Fix traceback when autodoc tries to tokenize a non-Python file.
* #1126: Fix double-hyphen to en-dash conversion in wrong places such as
command-line option names in LaTeX.
-* #1123: Allow whitespaces in filenames given to :rst:dir:`literalinclude`.
+* #1123: Allow whitespaces in filenames given to `literalinclude`.
* #1120: Added improvements about i18n for themes "basic", "haiku" and
"scrolls" that Sphinx built-in. Thanks to Leonardo J. Caballero G.
* #1118: Updated Spanish translation. Thanks to Leonardo J. Caballero G.
@@ -737,7 +654,7 @@ Bugs fixed
* #1112: Avoid duplicate download files when referenced from documents in
different ways (absolute/relative).
* #1111: Fix failure to find uppercase words in search when
- :confval:`html_search_language` is 'ja'. Thanks to Tomo Saito.
+ `html_search_language` is 'ja'. Thanks to Tomo Saito.
* #1108: The text writer now correctly numbers enumerated lists with
non-default start values (based on patch by Ewan Edwards).
* #1102: Support multi-context "with" statements in autodoc.
@@ -802,7 +719,7 @@ Release 1.1.3 (Mar 10, 2012)
* #860: Do not crash when encountering invalid doctest examples, just
emit a warning.
-* #864: Fix crash with some settings of :confval:`modindex_common_prefix`.
+* #864: Fix crash with some settings of `modindex_common_prefix`.
* #862: Fix handling of ``-D`` and ``-A`` options on Python 3.
@@ -866,7 +783,7 @@ Release 1.1 (Oct 9, 2011)
Incompatible changes
--------------------
-* The :rst:dir:`py:module` directive doesn't output its ``platform`` option
+* The `py:module` directive doesn't output its ``platform`` option
value anymore. (It was the only thing that the directive did output, and
therefore quite inconsistent.)
@@ -902,7 +819,7 @@ Features added
:rst:dir:`toctree`\'s ``numbered`` option.
- #586: Implemented improved :rst:dir:`glossary` markup which allows
multiple terms per definition.
- - #478: Added :rst:dir:`py:decorator` directive to describe decorators.
+ - #478: Added `py:decorator` directive to describe decorators.
- C++ domain now supports array definitions.
- C++ domain now supports doc fields (``:param x:`` inside directives).
- Section headings in :rst:dir:`only` directives are now correctly
@@ -913,7 +830,7 @@ Features added
* HTML builder:
- Added ``pyramid`` theme.
- - #559: :confval:`html_add_permalinks` is now a string giving the
+ - #559: `html_add_permalinks` is now a string giving the
text to display in permalinks.
- #259: HTML table rows now have even/odd CSS classes to enable
"Zebra styling".
@@ -921,26 +838,26 @@ Features added
* Other builders:
- - #516: Added new value of the :confval:`latex_show_urls` option to
+ - #516: Added new value of the `latex_show_urls` option to
show the URLs in footnotes.
- - #209: Added :confval:`text_newlines` and :confval:`text_sectionchars`
+ - #209: Added `text_newlines` and `text_sectionchars`
config values.
- - Added :confval:`man_show_urls` config value.
+ - Added `man_show_urls` config value.
- #472: linkcheck builder: Check links in parallel, use HTTP HEAD
requests and allow configuring the timeout. New config values:
- :confval:`linkcheck_timeout` and :confval:`linkcheck_workers`.
- - #521: Added :confval:`linkcheck_ignore` config value.
+ `linkcheck_timeout` and `linkcheck_workers`.
+ - #521: Added `linkcheck_ignore` config value.
- #28: Support row/colspans in tables in the LaTeX builder.
* Configuration and extensibility:
- - #537: Added :confval:`nitpick_ignore`.
+ - #537: Added `nitpick_ignore`.
- #306: Added :event:`env-get-outdated` event.
- :meth:`.Application.add_stylesheet` now accepts full URIs.
* Autodoc:
- - #564: Add :confval:`autodoc_docstring_signature`. When enabled (the
+ - #564: Add `autodoc_docstring_signature`. When enabled (the
default), autodoc retrieves the signature from the first line of the
docstring, if it is found there.
- #176: Provide ``private-members`` option for autodoc directives.
@@ -958,12 +875,12 @@ Features added
- Added ``inline`` option to graphviz directives, and fixed the
default (block-style) in LaTeX output.
- #590: Added ``caption`` option to graphviz directives.
- - #553: Added :rst:dir:`testcleanup` blocks in the doctest extension.
- - #594: :confval:`trim_doctest_flags` now also removes ``<BLANKLINE>``
+ - #553: Added `testcleanup` blocks in the doctest extension.
+ - #594: `trim_doctest_flags` now also removes ``<BLANKLINE>``
indicators.
- #367: Added automatic exclusion of hidden members in inheritance
diagrams, and an option to selectively enable it.
- - Added :confval:`pngmath_add_tooltips`.
+ - Added `pngmath_add_tooltips`.
- The math extension displaymath directives now support ``name`` in
addition to ``label`` for giving the equation label, for compatibility
with Docutils.
@@ -1036,7 +953,7 @@ Release 1.0.8 (Sep 23, 2011)
* #669: Respect the ``noindex`` flag option in py:module directives.
* #675: Fix IndexErrors when including nonexisting lines with
- :rst:dir:`literalinclude`.
+ `literalinclude`.
* #676: Respect custom function/method parameter separator strings.
@@ -1119,7 +1036,7 @@ Release 1.0.6 (Jan 04, 2011)
* #570: Try decoding ``-D`` and ``-A`` command-line arguments with
the locale's preferred encoding.
-* #528: Observe :confval:`locale_dirs` when looking for the JS
+* #528: Observe `locale_dirs` when looking for the JS
translations file.
* #574: Add special code for better support of Japanese documents
@@ -1292,51 +1209,51 @@ Features added
- Added a "nitpicky" mode that emits warnings for all missing
references. It is activated by the :option:`-n` command-line switch
- or the :confval:`nitpicky` config value.
+ or the `nitpicky` config value.
- Added ``latexpdf`` target in quickstart Makefile.
* Markup:
- - The :rst:role:`menuselection` and :rst:role:`guilabel` roles now
+ - The `menuselection` and `guilabel` roles now
support ampersand accelerators.
- New more compact doc field syntax is now recognized: ``:param type
name: description``.
- - Added ``tab-width`` option to :rst:dir:`literalinclude` directive.
+ - Added ``tab-width`` option to `literalinclude` directive.
- Added ``titlesonly`` option to :rst:dir:`toctree` directive.
- Added the ``prepend`` and ``append`` options to the
- :rst:dir:`literalinclude` directive.
+ `literalinclude` directive.
- #284: All docinfo metadata is now put into the document metadata, not
just the author.
- - The :rst:role:`ref` role can now also reference tables by caption.
- - The :rst:dir:`include` directive now supports absolute paths, which
+ - The `ref` role can now also reference tables by caption.
+ - The :dudir:`include` directive now supports absolute paths, which
are interpreted as relative to the source directory.
- In the Python domain, references like ``:func:`.name``` now look for
matching names with any prefix if no direct match is found.
* Configuration:
- - Added :confval:`rst_prolog` config value.
- - Added :confval:`html_secnumber_suffix` config value to control
+ - Added `rst_prolog` config value.
+ - Added `html_secnumber_suffix` config value to control
section numbering format.
- - Added :confval:`html_compact_lists` config value to control
+ - Added `html_compact_lists` config value to control
docutils' compact lists feature.
- - The :confval:`html_sidebars` config value can now contain patterns
+ - The `html_sidebars` config value can now contain patterns
as keys, and the values can be lists that explicitly select which
sidebar templates should be rendered. That means that the builtin
sidebar contents can be included only selectively.
- - :confval:`html_static_path` can now contain single file entries.
- - The new universal config value :confval:`exclude_patterns` makes the
- old :confval:`unused_docs`, :confval:`exclude_trees` and
- :confval:`exclude_dirnames` obsolete.
- - Added :confval:`html_output_encoding` config value.
- - Added the :confval:`latex_docclass` config value and made the
+ - `html_static_path` can now contain single file entries.
+ - The new universal config value `exclude_patterns` makes the
+ old ``unused_docs``, ``exclude_trees`` and
+ ``exclude_dirnames`` obsolete.
+ - Added `html_output_encoding` config value.
+ - Added the `latex_docclass` config value and made the
"twoside" documentclass option overridable by "oneside".
- - Added the :confval:`trim_doctest_flags` config value, which is true
+ - Added the `trim_doctest_flags` config value, which is true
by default.
- - Added :confval:`html_show_copyright` config value.
- - Added :confval:`latex_show_pagerefs` and :confval:`latex_show_urls`
+ - Added `html_show_copyright` config value.
+ - Added `latex_show_pagerefs` and `latex_show_urls`
config values.
- - The behavior of :confval:`html_file_suffix` changed slightly: the
+ - The behavior of `html_file_suffix` changed slightly: the
empty string now means "no suffix" instead of "default suffix", use
``None`` for "default suffix".
@@ -1378,7 +1295,7 @@ Features added
* Extension API:
- Added :event:`html-collect-pages`.
- - Added :confval:`needs_sphinx` config value and
+ - Added `needs_sphinx` config value and
:meth:`~sphinx.application.Sphinx.require_sphinx` application API
method.
- #200: Added :meth:`~sphinx.application.Sphinx.add_stylesheet`
@@ -1390,7 +1307,7 @@ Features added
- Added the :mod:`~sphinx.ext.extlinks` extension.
- Added support for source ordering of members in autodoc, with
``autodoc_member_order = 'bysource'``.
- - Added :confval:`autodoc_default_flags` config value, which can be
+ - Added `autodoc_default_flags` config value, which can be
used to select default flags for all autodoc directives.
- Added a way for intersphinx to refer to named labels in other
projects, and to specify the project you want to link to.
@@ -1400,7 +1317,7 @@ Features added
extension, thanks to Pauli Virtanen.
- #309: The :mod:`~sphinx.ext.graphviz` extension can now output SVG
instead of PNG images, controlled by the
- :confval:`graphviz_output_format` config value.
+ `graphviz_output_format` config value.
- Added ``alt`` option to :rst:dir:`graphviz` extension directives.
- Added ``exclude`` argument to :func:`.autodoc.between`.
diff --git a/Makefile b/Makefile
index 128b2c80..0e4a9ade 100644
--- a/Makefile
+++ b/Makefile
@@ -48,10 +48,10 @@ reindent:
@$(PYTHON) utils/reindent.py -r -n .
endif
-test: build
+test:
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' $(TEST)
-covertest: build
+covertest:
@cd tests; $(PYTHON) run.py -d -m '^[tT]est' --with-coverage \
--cover-package=sphinx $(TEST)
diff --git a/README.rst b/README.rst
index 9b22008b..ae92a2ce 100644
--- a/README.rst
+++ b/README.rst
@@ -2,6 +2,9 @@
README for Sphinx
=================
+This is the Sphinx documentation generator, see http://sphinx-doc.org/.
+
+
Installing
==========
@@ -17,7 +20,7 @@ Reading the docs
After installing::
cd doc
- sphinx-build . _build/html
+ make html
Then, direct your browser to ``_build/html/index.html``.
@@ -35,6 +38,11 @@ If you want to use a different interpreter, e.g. ``python3``, use::
PYTHON=python3 make test
+Continuous testing runs on drone.io:
+
+.. image:: https://drone.io/bitbucket.org/birkenfeld/sphinx/status.png
+ :target: https://drone.io/bitbucket.org/birkenfeld/sphinx/
+
Contributing
============
diff --git a/doc/_templates/index.html b/doc/_templates/index.html
index 45581e0f..2016ea9f 100644
--- a/doc/_templates/index.html
+++ b/doc/_templates/index.html
@@ -34,6 +34,9 @@
<li>{%trans path=pathto('extensions')%}<b>Extensions:</b> automatic testing of code snippets, inclusion of
docstrings from Python modules (API docs), and
<a href="{{ path }}#builtin-sphinx-extensions">more</a>{%endtrans%}</li>
+ <li>{%trans path=pathto('develop')%}<b>Contributed extensions:</b> more than
+ 50 extensions <a href="{{ path }}#extensions">contributed by users</a>
+ in a second repository; most of them installable from PyPI{%endtrans%}</li>
</ul>
<p>{%trans%}
Sphinx uses <a href="http://docutils.sf.net/rst.html">reStructuredText</a>
diff --git a/doc/_templates/indexsidebar.html b/doc/_templates/indexsidebar.html
index 019b20fc..db925c88 100644
--- a/doc/_templates/indexsidebar.html
+++ b/doc/_templates/indexsidebar.html
@@ -3,7 +3,7 @@
{%trans%}project{%endtrans%}</p>
<h3>Download</h3>
-{% if version.endswith('(hg)') %}
+{% if version.endswith('a0') %}
<p>{%trans%}This documentation is for version <b>{{ version }}</b>, which is
not released yet.{%endtrans%}</p>
<p>{%trans%}You can use it from the
diff --git a/doc/authors.rst b/doc/authors.rst
index 04c8b2b4..980b33e8 100644
--- a/doc/authors.rst
+++ b/doc/authors.rst
@@ -1,9 +1,9 @@
-:tocdepth: 2
-
-.. _authors:
-
-Sphinx authors
-==============
-
-.. include:: ../AUTHORS
-
+:tocdepth: 2
+
+.. _authors:
+
+Sphinx authors
+==============
+
+.. include:: ../AUTHORS
+
diff --git a/doc/changes.rst b/doc/changes.rst
index d5927a72..e4263687 100644
--- a/doc/changes.rst
+++ b/doc/changes.rst
@@ -1,5 +1,7 @@
:tocdepth: 2
+.. default-role:: any
+
.. _changes:
Changes in Sphinx
diff --git a/doc/conf.py b/doc/conf.py
index 3ae94821..4a6f8f58 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -83,7 +83,7 @@ texinfo_documents = [
# We're not using intersphinx right now, but if we did, this would be part of
# the mapping:
-intersphinx_mapping = {'python': ('http://docs.python.org/dev', None)}
+intersphinx_mapping = {'python': ('http://docs.python.org/2/', None)}
# Sphinx document translation with sphinx gettext feature uses these settings:
locale_dirs = ['locale/']
diff --git a/doc/config.rst b/doc/config.rst
index cc35a757..a11254ea 100644
--- a/doc/config.rst
+++ b/doc/config.rst
@@ -707,7 +707,7 @@ that use Sphinx's HTMLWriter class.
.. confval:: html_use_opensearch
- If nonempty, an `OpenSearch <http://opensearch.org>` description file will be
+ If nonempty, an `OpenSearch <http://opensearch.org>`_ description file will be
output, and all pages will contain a ``<link>`` tag referring to it. Since
OpenSearch doesn't support relative URLs for its search page location, the
value of this option must be the base URL from which these documents are
diff --git a/doc/devguide.rst b/doc/devguide.rst
index 885d52b0..9d85ec0b 100644
--- a/doc/devguide.rst
+++ b/doc/devguide.rst
@@ -130,6 +130,11 @@ These are the basic steps needed to start developing on Sphinx.
* For bug fixes, first add a test that fails without your changes and passes
after they are applied.
+ * Tests that need a sphinx-build run should be integrated in one of the
+ existing test modules if possible. New tests that use ``@with_app`` and
+ then ``build_all`` for a few assertions are not good since *the test suite
+ should not take more than a minute to run*.
+
#. Please add a bullet point to :file:`CHANGES` if the fix or feature is not
trivial (small doc updates, typo fixes). Then commit::
diff --git a/doc/extdev/appapi.rst b/doc/extdev/appapi.rst
index 8df81943..4fed158c 100644
--- a/doc/extdev/appapi.rst
+++ b/doc/extdev/appapi.rst
@@ -437,6 +437,19 @@ handlers to the events. Example:
.. versionadded:: 0.5
+.. event:: env-before-read-docs (app, env, docnames)
+
+ Emitted after the environment has determined the list of all added and
+ changed files and just before it reads them. It allows extension authors to
+ reorder the list of docnames (*inplace*) before processing, or add more
+ docnames that Sphinx did not consider changed (but never add any docnames
+ that are not in ``env.found_docs``).
+
+ You can also remove document names; do this with caution since it will make
+ Sphinx treat changed files as unchanged.
+
+ .. versionadded:: 1.3
+
.. event:: source-read (app, docname, source)
Emitted when a source file has been read. The *source* argument is a list
@@ -480,6 +493,26 @@ handlers to the events. Example:
Here is the place to replace custom nodes that don't have visitor methods in
the writers, so that they don't cause errors when the writers encounter them.
+.. event:: env-merge-info (env, docnames, other)
+
+ This event is only emitted when parallel reading of documents is enabled. It
+ is emitted once for every subprocess that has read some documents.
+
+ You must handle this event in an extension that stores data in the
+ environment in a custom location. Otherwise the environment in the main
+ process will not be aware of the information stored in the subprocess.
+
+ *other* is the environment object from the subprocess, *env* is the
+ environment from the main process. *docnames* is a set of document names
+ that have been read in the subprocess.
+
+ For a sample of how to deal with this event, look at the standard
+ ``sphinx.ext.todo`` extension. The implementation is often similar to that
+ of :event:`env-purge-doc`, only that information is not removed, but added to
+ the main environment from the other environment.
+
+ .. versionadded:: 1.3
+
.. event:: env-updated (app, env)
Emitted when the :meth:`update` method of the build environment has
diff --git a/doc/extdev/index.rst b/doc/extdev/index.rst
index a82f33a8..5144c5f8 100644
--- a/doc/extdev/index.rst
+++ b/doc/extdev/index.rst
@@ -18,15 +18,32 @@ imports this module and executes its ``setup()`` function, which in turn
notifies Sphinx of everything the extension offers -- see the extension tutorial
for examples.
-.. versionadded:: 1.3
- The ``setup()`` function can return a string, this is treated by Sphinx as
- the version of the extension and used for informational purposes such as the
- traceback file when an exception occurs.
-
The configuration file itself can be treated as an extension if it contains a
``setup()`` function. All other extensions to load must be listed in the
:confval:`extensions` configuration value.
+Extension metadata
+------------------
+
+.. versionadded:: 1.3
+
+The ``setup()`` function can return a dictionary. This is treated by Sphinx
+as metadata of the extension. Metadata keys currently recognized are:
+
+* ``'version'``: a string that identifies the extension version. It is used for
+ extension version requirement checking (see :confval:`needs_extensions`) and
+ informational purposes. If not given, ``"unknown version"`` is substituted.
+* ``'parallel_read_safe'``: a boolean that specifies if parallel reading of
+ source files can be used when the extension is loaded. It defaults to
+ ``False``, i.e. you have to explicitly specify your extension to be
+ parallel-read-safe after checking that it is.
+* ``'parallel_write_safe'``: a boolean that specifies if parallel writing of
+ output files can be used when the extension is loaded. Since extensions
+ usually don't negatively influence the process, this defaults to ``True``.
+
+APIs used for writing extensions
+--------------------------------
+
.. toctree::
tutorial
diff --git a/doc/extdev/tutorial.rst b/doc/extdev/tutorial.rst
index 8f1773cd..e7912406 100644
--- a/doc/extdev/tutorial.rst
+++ b/doc/extdev/tutorial.rst
@@ -162,7 +162,7 @@ new Python module called :file:`todo.py` and add the setup function::
app.connect('doctree-resolved', process_todo_nodes)
app.connect('env-purge-doc', purge_todos)
- return '0.1' # identifies the version of our extension
+ return {'version': '0.1'} # identifies the version of our extension
The calls in this function refer to classes and functions not yet written. What
the individual calls do is the following:
diff --git a/doc/markup/code.rst b/doc/markup/code.rst
index f69bb161..b948dc38 100644
--- a/doc/markup/code.rst
+++ b/doc/markup/code.rst
@@ -36,21 +36,29 @@ installed) and handled in a smart way:
highlighted as Python).
* The highlighting language can be changed using the ``highlight`` directive,
- used as follows::
+ used as follows:
- .. highlight:: c
+ .. rst:directive:: .. highlight:: language
- This language is used until the next ``highlight`` directive is encountered.
+ Example::
+
+ .. highlight:: c
+
+ This language is used until the next ``highlight`` directive is encountered.
* For documents that have to show snippets in different languages, there's also
a :rst:dir:`code-block` directive that is given the highlighting language
- directly::
+ directly:
+
+ .. rst:directive:: .. code-block:: language
+
+ Use it like this::
- .. code-block:: ruby
+ .. code-block:: ruby
- Some Ruby code.
+ Some Ruby code.
- The directive's alias name :rst:dir:`sourcecode` works as well.
+ The directive's alias name :rst:dir:`sourcecode` works as well.
* The valid values for the highlighting language are:
diff --git a/doc/markup/inline.rst b/doc/markup/inline.rst
index 0cc97f43..b5bb8d0c 100644
--- a/doc/markup/inline.rst
+++ b/doc/markup/inline.rst
@@ -12,7 +12,9 @@ They are written as ``:rolename:`content```.
The default role (```content```) has no special meaning by default. You are
free to use it for anything you like, e.g. variable names; use the
- :confval:`default_role` config value to set it to a known role.
+ :confval:`default_role` config value to set it to a known role -- the
+ :rst:role:`any` role to find anything or the :rst:role:`py:obj` role to find
+ Python objects are very useful for this.
See :ref:`domains` for roles added by domains.
@@ -38,12 +40,57 @@ more versatile:
* If you prefix the content with ``~``, the link text will only be the last
component of the target. For example, ``:py:meth:`~Queue.Queue.get``` will
- refer to ``Queue.Queue.get`` but only display ``get`` as the link text.
+ refer to ``Queue.Queue.get`` but only display ``get`` as the link text. This
+ does not work with all cross-reference roles, but is domain specific.
In HTML output, the link's ``title`` attribute (that is e.g. shown as a
tool-tip on mouse-hover) will always be the full target name.
+.. _any-role:
+
+Cross-referencing anything
+--------------------------
+
+.. rst:role:: any
+
+ .. versionadded:: 1.3
+
+ This convenience role tries to do its best to find a valid target for its
+ reference text.
+
+ * First, it tries standard cross-reference targets that would be referenced
+ by :rst:role:`doc`, :rst:role:`ref` or :rst:role:`option`.
+
+ Custom objects added to the standard domain by extensions (see
+ :meth:`.add_object_type`) are also searched.
+
+ * Then, it looks for objects (targets) in all loaded domains. It is up to
+ the domains how specific a match must be. For example, in the Python
+ domain a reference of ``:any:`Builder``` would match the
+ ``sphinx.builders.Builder`` class.
+
+ If none or multiple targets are found, a warning will be emitted. In the
+ case of multiple targets, you can change "any" to a specific role.
+
+ This role is a good candidate for setting :confval:`default_role`. If you
+ do, you can write cross-references without a lot of markup overhead. For
+ example, in this Python function documentation ::
+
+ .. function:: install()
+
+ This function installs a `handler` for every signal known by the
+ `signal` module. See the section `about-signals` for more information.
+
+ there could be references to a glossary term (usually ``:term:`handler```), a
+ Python module (usually ``:py:mod:`signal``` or ``:mod:`signal```) and a
+ section (usually ``:ref:`about-signals```).
+
+ The :rst:role:`any` role also works together with the
+ :mod:`~sphinx.ext.intersphinx` extension: when no local cross-reference is
+ found, all object types of intersphinx inventories are also searched.
+
+
Cross-referencing objects
-------------------------
diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py
index 55abdb01..9d8c4690 100644
--- a/sphinx/addnodes.py
+++ b/sphinx/addnodes.py
@@ -25,6 +25,7 @@ class desc(nodes.Admonition, nodes.Element):
contains one or more ``desc_signature`` and a ``desc_content``.
"""
+
class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for object signatures.
@@ -39,33 +40,42 @@ class desc_addname(nodes.Part, nodes.Inline, nodes.TextElement):
# compatibility alias
desc_classname = desc_addname
+
class desc_type(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for return types or object type names."""
+
class desc_returns(desc_type):
"""Node for a "returns" annotation (a la -> in Python)."""
def astext(self):
return ' -> ' + nodes.TextElement.astext(self)
+
class desc_name(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for the main object name."""
+
class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for a general parameter list."""
child_text_separator = ', '
+
class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for a single parameter."""
+
class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for marking optional parts of the parameter list."""
child_text_separator = ', '
+
def astext(self):
return '[' + nodes.TextElement.astext(self) + ']'
+
class desc_annotation(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for signature annotations (not Python 3-style annotations)."""
+
class desc_content(nodes.General, nodes.Element):
"""Node for object description content.
@@ -82,15 +92,18 @@ class versionmodified(nodes.Admonition, nodes.TextElement):
directives.
"""
+
class seealso(nodes.Admonition, nodes.Element):
"""Custom "see also" admonition."""
+
class productionlist(nodes.Admonition, nodes.Element):
"""Node for grammar production lists.
Contains ``production`` nodes.
"""
+
class production(nodes.Part, nodes.Inline, nodes.TextElement):
"""Node for a single grammar production rule."""
@@ -107,26 +120,33 @@ class index(nodes.Invisible, nodes.Inline, nodes.TextElement):
*entrytype* is one of "single", "pair", "double", "triple".
"""
+
class centered(nodes.Part, nodes.TextElement):
"""Deprecated."""
+
class acks(nodes.Element):
"""Special node for "acks" lists."""
+
class hlist(nodes.Element):
"""Node for "horizontal lists", i.e. lists that should be compressed to
take up less vertical space.
"""
+
class hlistcol(nodes.Element):
"""Node for one column in a horizontal list."""
+
class compact_paragraph(nodes.paragraph):
"""Node for a compact paragraph (which never makes a <p> node)."""
+
class glossary(nodes.Element):
"""Node to insert a glossary."""
+
class only(nodes.Element):
"""Node for "only" directives (conditional inclusion based on tags)."""
@@ -136,14 +156,17 @@ class only(nodes.Element):
class start_of_file(nodes.Element):
"""Node to mark start of a new file, used in the LaTeX builder only."""
+
class highlightlang(nodes.Element):
"""Inserted to set the highlight language and line number options for
subsequent code blocks.
"""
+
class tabular_col_spec(nodes.Element):
"""Node for specifying tabular columns, used for LaTeX output."""
+
class meta(nodes.Special, nodes.PreBibliographic, nodes.Element):
"""Node for meta directive -- same as docutils' standard meta node,
but pickleable.
@@ -160,22 +183,27 @@ class pending_xref(nodes.Inline, nodes.Element):
BuildEnvironment.resolve_references.
"""
+
class download_reference(nodes.reference):
"""Node for download references, similar to pending_xref."""
+
class literal_emphasis(nodes.emphasis):
"""Node that behaves like `emphasis`, but further text processors are not
applied (e.g. smartypants for HTML output).
"""
+
class literal_strong(nodes.strong):
"""Node that behaves like `strong`, but further text processors are not
applied (e.g. smartypants for HTML output).
"""
+
class abbreviation(nodes.Inline, nodes.TextElement):
"""Node for abbreviations with explanations."""
+
class termsep(nodes.Structural, nodes.Element):
"""Separates two terms within a <term> node."""
diff --git a/sphinx/apidoc.py b/sphinx/apidoc.py
index f716286c..7b1a96d2 100644
--- a/sphinx/apidoc.py
+++ b/sphinx/apidoc.py
@@ -88,7 +88,7 @@ def create_module_file(package, module, opts):
text = format_heading(1, '%s module' % module)
else:
text = ''
- #text += format_heading(2, ':mod:`%s` Module' % module)
+ # text += format_heading(2, ':mod:`%s` Module' % module)
text += format_directive(module, package)
write_file(makename(package, module), text, opts)
@@ -173,7 +173,7 @@ def shall_skip(module, opts):
# skip if it has a "private" name and this is selected
filename = path.basename(module)
if filename != '__init__.py' and filename.startswith('_') and \
- not opts.includeprivate:
+ not opts.includeprivate:
return True
return False
@@ -218,7 +218,7 @@ def recurse_tree(rootpath, excludes, opts):
if is_pkg:
# we are in a package with something to document
if subs or len(py_files) > 1 or not \
- shall_skip(path.join(root, INITPY), opts):
+ shall_skip(path.join(root, INITPY), opts):
subpackage = root[len(rootpath):].lstrip(path.sep).\
replace(path.sep, '.')
create_package_file(root, root_package, subpackage,
@@ -318,7 +318,7 @@ Note: By default this script will not overwrite already created files.""")
(opts, args) = parser.parse_args(argv[1:])
if opts.show_version:
- print('Sphinx (sphinx-apidoc) %s' % __version__)
+ print('Sphinx (sphinx-apidoc) %s' % __version__)
return 0
if not args:
diff --git a/sphinx/application.py b/sphinx/application.py
index fe870401..13a2d272 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -20,7 +20,7 @@ import traceback
from os import path
from collections import deque
-from six import iteritems, itervalues
+from six import iteritems, itervalues, text_type
from six.moves import cStringIO
from docutils import nodes
from docutils.parsers.rst import convert_directive_function, \
@@ -39,7 +39,8 @@ from sphinx.environment import BuildEnvironment, SphinxStandaloneReader
from sphinx.util import pycompat # imported for side-effects
from sphinx.util.tags import Tags
from sphinx.util.osutil import ENOENT
-from sphinx.util.console import bold, lightgray, darkgray
+from sphinx.util.console import bold, lightgray, darkgray, darkgreen, \
+ term_width_line
if hasattr(sys, 'intern'):
intern = sys.intern
@@ -49,8 +50,10 @@ events = {
'builder-inited': '',
'env-get-outdated': 'env, added, changed, removed',
'env-purge-doc': 'env, docname',
+ 'env-before-read-docs': 'env, docnames',
'source-read': 'docname, source text',
'doctree-read': 'the doctree before being pickled',
+ 'env-merge-info': 'env, read docnames, other env instance',
'missing-reference': 'env, node, contnode',
'doctree-resolved': 'doctree, docname',
'env-updated': 'env',
@@ -72,7 +75,7 @@ class Sphinx(object):
self.verbosity = verbosity
self.next_listener_id = 0
self._extensions = {}
- self._extension_versions = {}
+ self._extension_metadata = {}
self._listeners = {}
self.domains = BUILTIN_DOMAINS.copy()
self.builderclasses = BUILTIN_BUILDERS.copy()
@@ -112,6 +115,10 @@ class Sphinx(object):
# status code for command-line application
self.statuscode = 0
+ if not path.isdir(outdir):
+ self.info('making output directory...')
+ os.makedirs(outdir)
+
# read config
self.tags = Tags(tags)
self.config = Config(confdir, CONFIG_FILENAME,
@@ -128,7 +135,7 @@ class Sphinx(object):
self.setup_extension(extension)
# the config file itself can be an extension
if self.config.setup:
- # py31 doesn't have 'callable' function for bellow check
+ # py31 doesn't have 'callable' function for below check
if hasattr(self.config.setup, '__call__'):
self.config.setup(self)
else:
@@ -156,7 +163,7 @@ class Sphinx(object):
'version requirement for extension %s, but it is '
'not loaded' % extname)
continue
- has_ver = self._extension_versions[extname]
+ has_ver = self._extension_metadata[extname]['version']
if has_ver == 'unknown version' or needs_ver > has_ver:
raise VersionRequirementError(
'This project needs the extension %s at least in '
@@ -200,8 +207,8 @@ class Sphinx(object):
else:
try:
self.info(bold('loading pickled environment... '), nonl=True)
- self.env = BuildEnvironment.frompickle(self.config,
- path.join(self.doctreedir, ENV_PICKLE_FILENAME))
+ self.env = BuildEnvironment.frompickle(
+ self.config, path.join(self.doctreedir, ENV_PICKLE_FILENAME))
self.env.domains = {}
for domain in self.domains.keys():
# this can raise if the data version doesn't fit
@@ -245,6 +252,15 @@ class Sphinx(object):
else:
self.builder.compile_update_catalogs()
self.builder.build_update()
+
+ status = (self.statuscode == 0
+ and 'succeeded' or 'finished with problems')
+ if self._warncount:
+ self.info(bold('build %s, %s warning%s.' %
+ (status, self._warncount,
+ self._warncount != 1 and 's' or '')))
+ else:
+ self.info(bold('build %s.' % status))
except Exception as err:
# delete the saved env to force a fresh build next time
envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
@@ -291,7 +307,7 @@ class Sphinx(object):
else:
location = None
warntext = location and '%s: %s%s\n' % (location, prefix, message) or \
- '%s%s\n' % (prefix, message)
+ '%s%s\n' % (prefix, message)
if self.warningiserror:
raise SphinxWarning(warntext)
self._warncount += 1
@@ -350,6 +366,48 @@ class Sphinx(object):
message = message % (args or kwargs)
self._log(lightgray(message), self._status)
+ def _display_chunk(chunk):
+ if isinstance(chunk, (list, tuple)):
+ if len(chunk) == 1:
+ return text_type(chunk[0])
+ return '%s .. %s' % (chunk[0], chunk[-1])
+ return text_type(chunk)
+
+ def old_status_iterator(self, iterable, summary, colorfunc=darkgreen,
+ stringify_func=_display_chunk):
+ l = 0
+ for item in iterable:
+ if l == 0:
+ self.info(bold(summary), nonl=1)
+ l = 1
+ self.info(colorfunc(stringify_func(item)) + ' ', nonl=1)
+ yield item
+ if l == 1:
+ self.info()
+
+ # new version with progress info
+ def status_iterator(self, iterable, summary, colorfunc=darkgreen, length=0,
+ stringify_func=_display_chunk):
+ if length == 0:
+ for item in self.old_status_iterator(iterable, summary, colorfunc,
+ stringify_func):
+ yield item
+ return
+ l = 0
+ summary = bold(summary)
+ for item in iterable:
+ l += 1
+ s = '%s[%3d%%] %s' % (summary, 100*l/length,
+ colorfunc(stringify_func(item)))
+ if self.verbosity:
+ s += '\n'
+ else:
+ s = term_width_line(s)
+ self.info(s, nonl=1)
+ yield item
+ if l > 0:
+ self.info()
+
# ---- general extensibility interface -------------------------------------
def setup_extension(self, extension):
@@ -366,20 +424,22 @@ class Sphinx(object):
if not hasattr(mod, 'setup'):
self.warn('extension %r has no setup() function; is it really '
'a Sphinx extension module?' % extension)
- version = None
+ ext_meta = None
else:
try:
- version = mod.setup(self)
+ ext_meta = mod.setup(self)
except VersionRequirementError as err:
# add the extension name to the version required
raise VersionRequirementError(
'The %s extension used by this project needs at least '
'Sphinx v%s; it therefore cannot be built with this '
'version.' % (extension, err))
- if version is None:
- version = 'unknown version'
+ if ext_meta is None:
+ ext_meta = {}
+ if not ext_meta.get('version'):
+ ext_meta['version'] = 'unknown version'
self._extensions[extension] = mod
- self._extension_versions[extension] = version
+ self._extension_metadata[extension] = ext_meta
def require_sphinx(self, version):
# check the Sphinx version if requested
@@ -461,7 +521,7 @@ class Sphinx(object):
else:
raise ExtensionError(
'Builder %r already exists (in module %s)' % (
- builder.name, self.builderclasses[builder.name].__module__))
+ builder.name, self.builderclasses[builder.name].__module__))
self.builderclasses[builder.name] = builder
def add_config_value(self, name, default, rebuild):
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 833269c6..d52a7983 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -22,7 +22,9 @@ from docutils import nodes
from sphinx.util import i18n, path_stabilize
from sphinx.util.osutil import SEP, relative_uri, find_catalog
-from sphinx.util.console import bold, purple, darkgreen, term_width_line
+from sphinx.util.console import bold, darkgreen
+from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, \
+ parallel_available
# side effect: registers roles and directives
from sphinx import roles
@@ -62,10 +64,17 @@ class Builder(object):
self.tags.add(self.name)
self.tags.add("format_%s" % self.format)
self.tags.add("builder_%s" % self.name)
+ # compatibility aliases
+ self.status_iterator = app.status_iterator
+ self.old_status_iterator = app.old_status_iterator
# images that need to be copied over (source -> dest)
self.images = {}
+ # these get set later
+ self.parallel_ok = False
+ self.finish_tasks = None
+
# load default translator class
self.translator_class = app._translators.get(self.name)
@@ -113,41 +122,6 @@ class Builder(object):
"""
raise NotImplementedError
- def old_status_iterator(self, iterable, summary, colorfunc=darkgreen,
- stringify_func=lambda x: x):
- l = 0
- for item in iterable:
- if l == 0:
- self.info(bold(summary), nonl=1)
- l = 1
- self.info(colorfunc(stringify_func(item)) + ' ', nonl=1)
- yield item
- if l == 1:
- self.info()
-
- # new version with progress info
- def status_iterator(self, iterable, summary, colorfunc=darkgreen, length=0,
- stringify_func=lambda x: x):
- if length == 0:
- for item in self.old_status_iterator(iterable, summary, colorfunc,
- stringify_func):
- yield item
- return
- l = 0
- summary = bold(summary)
- for item in iterable:
- l += 1
- s = '%s[%3d%%] %s' % (summary, 100*l/length,
- colorfunc(stringify_func(item)))
- if self.app.verbosity:
- s += '\n'
- else:
- s = term_width_line(s)
- self.info(s, nonl=1)
- yield item
- if l > 0:
- self.info()
-
supported_image_types = []
def post_process_images(self, doctree):
@@ -179,9 +153,8 @@ class Builder(object):
def compile_catalogs(self, catalogs, message):
if not self.config.gettext_auto_build:
return
- self.info(bold('building [mo]: '), nonl=1)
- self.info(message)
- for catalog in self.status_iterator(
+ self.info(bold('building [mo]: ') + message)
+ for catalog in self.app.status_iterator(
catalogs, 'writing output... ', darkgreen, len(catalogs),
lambda c: c.mo_path):
catalog.write_mo(self.config.language)
@@ -263,25 +236,17 @@ class Builder(object):
First updates the environment, and then calls :meth:`write`.
"""
if summary:
- self.info(bold('building [%s]: ' % self.name), nonl=1)
- self.info(summary)
+ self.info(bold('building [%s]' % self.name) + ': ' + summary)
updated_docnames = set()
# while reading, collect all warnings from docutils
warnings = []
self.env.set_warnfunc(lambda *args: warnings.append(args))
- self.info(bold('updating environment: '), nonl=1)
- msg, length, iterator = self.env.update(self.config, self.srcdir,
- self.doctreedir, self.app)
- self.info(msg)
- for docname in self.status_iterator(iterator, 'reading sources... ',
- purple, length):
- updated_docnames.add(docname)
- # nothing further to do, the environment has already
- # done the reading
+ updated_docnames = self.env.update(self.config, self.srcdir,
+ self.doctreedir, self.app)
+ self.env.set_warnfunc(self.warn)
for warning in warnings:
self.warn(*warning)
- self.env.set_warnfunc(self.warn)
doccount = len(updated_docnames)
self.info(bold('looking for now-outdated files... '), nonl=1)
@@ -315,20 +280,33 @@ class Builder(object):
if docnames and docnames != ['__all__']:
docnames = set(docnames) & self.env.found_docs
- # another indirection to support builders that don't build
- # files individually
+ # determine if we can write in parallel
+ self.parallel_ok = False
+ if parallel_available and self.app.parallel > 1 and self.allow_parallel:
+ self.parallel_ok = True
+ for extname, md in self.app._extension_metadata.items():
+ par_ok = md.get('parallel_write_safe', True)
+ if not par_ok:
+ self.app.warn('the %s extension is not safe for parallel '
+ 'writing, doing serial read' % extname)
+ self.parallel_ok = False
+ break
+
+ # create a task executor to use for misc. "finish-up" tasks
+ # if self.parallel_ok:
+ # self.finish_tasks = ParallelTasks(self.app.parallel)
+ # else:
+ # for now, just execute them serially
+ self.finish_tasks = SerialTasks()
+
+ # write all "normal" documents (or everything for some builders)
self.write(docnames, list(updated_docnames), method)
# finish (write static files etc.)
self.finish()
- status = (self.app.statuscode == 0
- and 'succeeded' or 'finished with problems')
- if self.app._warncount:
- self.info(bold('build %s, %s warning%s.' %
- (status, self.app._warncount,
- self.app._warncount != 1 and 's' or '')))
- else:
- self.info(bold('build %s.' % status))
+
+ # wait for all tasks
+ self.finish_tasks.join()
def write(self, build_docnames, updated_docnames, method='update'):
if build_docnames is None or build_docnames == ['__all__']:
@@ -354,23 +332,17 @@ class Builder(object):
warnings = []
self.env.set_warnfunc(lambda *args: warnings.append(args))
- # check for prerequisites to parallel build
- # (parallel only works on POSIX, because the forking impl of
- # multiprocessing is required)
- if not (multiprocessing and
- self.app.parallel > 1 and
- self.allow_parallel and
- os.name == 'posix'):
- self._write_serial(sorted(docnames), warnings)
- else:
+ if self.parallel_ok:
# number of subprocesses is parallel-1 because the main process
# is busy loading doctrees and doing write_doc_serialized()
self._write_parallel(sorted(docnames), warnings,
nproc=self.app.parallel - 1)
+ else:
+ self._write_serial(sorted(docnames), warnings)
self.env.set_warnfunc(self.warn)
def _write_serial(self, docnames, warnings):
- for docname in self.status_iterator(
+ for docname in self.app.status_iterator(
docnames, 'writing output... ', darkgreen, len(docnames)):
doctree = self.env.get_and_resolve_doctree(docname, self)
self.write_doc_serialized(docname, doctree)
@@ -380,60 +352,34 @@ class Builder(object):
def _write_parallel(self, docnames, warnings, nproc):
def write_process(docs):
- try:
- for docname, doctree in docs:
- self.write_doc(docname, doctree)
- except KeyboardInterrupt:
- pass # do not print a traceback on Ctrl-C
- finally:
- for warning in warnings:
- self.warn(*warning)
-
- def process_thread(docs):
- p = multiprocessing.Process(target=write_process, args=(docs,))
- p.start()
- p.join()
- semaphore.release()
-
- # allow only "nproc" worker processes at once
- semaphore = threading.Semaphore(nproc)
- # list of threads to join when waiting for completion
- threads = []
+ for docname, doctree in docs:
+ self.write_doc(docname, doctree)
+ return warnings
+
+ def add_warnings(docs, wlist):
+ warnings.extend(wlist)
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
doctree = self.env.get_and_resolve_doctree(firstname, self)
self.write_doc_serialized(firstname, doctree)
self.write_doc(firstname, doctree)
- # for the rest, determine how many documents to write in one go
- ndocs = len(docnames)
- chunksize = min(ndocs // nproc, 10)
- if chunksize == 0:
- chunksize = 1
- nchunks, rest = divmod(ndocs, chunksize)
- if rest:
- nchunks += 1
- # partition documents in "chunks" that will be written by one Process
- chunks = [docnames[i*chunksize:(i+1)*chunksize] for i in range(nchunks)]
- for docnames in self.status_iterator(
- chunks, 'writing output... ', darkgreen, len(chunks),
- lambda chk: '%s .. %s' % (chk[0], chk[-1])):
- docs = []
- for docname in docnames:
+
+ tasks = ParallelTasks(nproc)
+ chunks = make_chunks(docnames, nproc)
+
+ for chunk in self.app.status_iterator(
+ chunks, 'writing output... ', darkgreen, len(chunks)):
+ arg = []
+ for i, docname in enumerate(chunk):
doctree = self.env.get_and_resolve_doctree(docname, self)
self.write_doc_serialized(docname, doctree)
- docs.append((docname, doctree))
- # start a new thread to oversee the completion of this chunk
- semaphore.acquire()
- t = threading.Thread(target=process_thread, args=(docs,))
- t.setDaemon(True)
- t.start()
- threads.append(t)
+ arg.append((docname, doctree))
+ tasks.add_task(write_process, arg, add_warnings)
# make sure all threads have finished
- self.info(bold('waiting for workers... '))
- for t in threads:
- t.join()
+ self.info(bold('waiting for workers...'))
+ tasks.join()
def prepare_writing(self, docnames):
"""A place where you can add logic before :meth:`write_doc` is run"""
diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py
index aa947c96..069d0ce6 100644
--- a/sphinx/builders/changes.py
+++ b/sphinx/builders/changes.py
@@ -130,6 +130,9 @@ class ChangesBuilder(Builder):
self.env.config.source_encoding)
try:
lines = f.readlines()
+ except UnicodeDecodeError:
+ self.warn('could not read %r for changelog creation' % docname)
+ continue
finally:
f.close()
targetfn = path.join(self.outdir, 'rst', os_path(docname)) + '.html'
diff --git a/sphinx/builders/epub.py b/sphinx/builders/epub.py
index 95a0ef0a..5f9f6643 100644
--- a/sphinx/builders/epub.py
+++ b/sphinx/builders/epub.py
@@ -405,8 +405,8 @@ class EpubBuilder(StandaloneHTMLBuilder):
converting the format and resizing the image if necessary/possible.
"""
ensuredir(path.join(self.outdir, '_images'))
- for src in self.status_iterator(self.images, 'copying images... ',
- brown, len(self.images)):
+ for src in self.app.status_iterator(self.images, 'copying images... ',
+ brown, len(self.images)):
dest = self.images[src]
try:
img = Image.open(path.join(self.srcdir, src))
diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py
index 657ce924..d21c79fc 100644
--- a/sphinx/builders/gettext.py
+++ b/sphinx/builders/gettext.py
@@ -170,8 +170,8 @@ class MessageCatalogBuilder(I18nBuilder):
extract_translations = self.templates.environment.extract_translations
- for template in self.status_iterator(files,
- 'reading templates... ', purple, len(files)):
+ for template in self.app.status_iterator(
+ files, 'reading templates... ', purple, len(files)):
with open(template, 'r', encoding='utf-8') as f:
context = f.read()
for line, meth, msg in extract_translations(context):
@@ -191,7 +191,7 @@ class MessageCatalogBuilder(I18nBuilder):
ctime = datetime.fromtimestamp(
timestamp, ltz).strftime('%Y-%m-%d %H:%M%z'),
)
- for textdomain, catalog in self.status_iterator(
+ for textdomain, catalog in self.app.status_iterator(
iteritems(self.catalogs), "writing message catalogs... ",
darkgreen, len(self.catalogs),
lambda textdomain__: textdomain__[0]):
diff --git a/sphinx/builders/html.py b/sphinx/builders/html.py
index ec3a8186..c2c30893 100644
--- a/sphinx/builders/html.py
+++ b/sphinx/builders/html.py
@@ -29,7 +29,7 @@ from docutils.readers.doctree import Reader as DoctreeReader
from sphinx import package_dir, __version__
from sphinx.util import jsonimpl, copy_static_entry
from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, \
- movefile, ustrftime, copyfile
+ movefile, ustrftime, copyfile
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.matching import patmatch, compile_matchers
from sphinx.locale import _
@@ -40,7 +40,7 @@ from sphinx.application import ENV_PICKLE_FILENAME
from sphinx.highlighting import PygmentsBridge
from sphinx.util.console import bold, darkgreen, brown
from sphinx.writers.html import HTMLWriter, HTMLTranslator, \
- SmartyPantsHTMLTranslator
+ SmartyPantsHTMLTranslator
#: the filename for the inventory of objects
INVENTORY_FILENAME = 'objects.inv'
@@ -443,12 +443,19 @@ class StandaloneHTMLBuilder(Builder):
self.index_page(docname, doctree, title)
def finish(self):
- self.info(bold('writing additional files...'), nonl=1)
+ self.finish_tasks.add_task(self.gen_indices)
+ self.finish_tasks.add_task(self.gen_additional_pages)
+ self.finish_tasks.add_task(self.copy_image_files)
+ self.finish_tasks.add_task(self.copy_download_files)
+ self.finish_tasks.add_task(self.copy_static_files)
+ self.finish_tasks.add_task(self.copy_extra_files)
+ self.finish_tasks.add_task(self.write_buildinfo)
- # pages from extensions
- for pagelist in self.app.emit('html-collect-pages'):
- for pagename, context, template in pagelist:
- self.handle_page(pagename, context, template)
+ # dump the search index
+ self.handle_finish()
+
+ def gen_indices(self):
+ self.info(bold('generating indices...'), nonl=1)
# the global general index
if self.get_builder_config('use_index', 'html'):
@@ -457,16 +464,27 @@ class StandaloneHTMLBuilder(Builder):
# the global domain-specific indices
self.write_domain_indices()
- # the search page
- if self.name != 'htmlhelp':
- self.info(' search', nonl=1)
- self.handle_page('search', {}, 'search.html')
+ self.info()
+
+ def gen_additional_pages(self):
+ # pages from extensions
+ for pagelist in self.app.emit('html-collect-pages'):
+ for pagename, context, template in pagelist:
+ self.handle_page(pagename, context, template)
+
+ self.info(bold('writing additional pages...'), nonl=1)
# additional pages from conf.py
for pagename, template in self.config.html_additional_pages.items():
self.info(' '+pagename, nonl=1)
self.handle_page(pagename, {}, template)
+ # the search page
+ if self.name != 'htmlhelp':
+ self.info(' search', nonl=1)
+ self.handle_page('search', {}, 'search.html')
+
+ # the opensearch xml file
if self.config.html_use_opensearch and self.name != 'htmlhelp':
self.info(' opensearch', nonl=1)
fn = path.join(self.outdir, '_static', 'opensearch.xml')
@@ -474,15 +492,6 @@ class StandaloneHTMLBuilder(Builder):
self.info()
- self.copy_image_files()
- self.copy_download_files()
- self.copy_static_files()
- self.copy_extra_files()
- self.write_buildinfo()
-
- # dump the search index
- self.handle_finish()
-
def write_genindex(self):
# the total count of lines for each index letter, used to distribute
# the entries into two columns
@@ -526,8 +535,8 @@ class StandaloneHTMLBuilder(Builder):
# copy image files
if self.images:
ensuredir(path.join(self.outdir, '_images'))
- for src in self.status_iterator(self.images, 'copying images... ',
- brown, len(self.images)):
+ for src in self.app.status_iterator(self.images, 'copying images... ',
+ brown, len(self.images)):
dest = self.images[src]
try:
copyfile(path.join(self.srcdir, src),
@@ -540,9 +549,9 @@ class StandaloneHTMLBuilder(Builder):
# copy downloadable files
if self.env.dlfiles:
ensuredir(path.join(self.outdir, '_downloads'))
- for src in self.status_iterator(self.env.dlfiles,
- 'copying downloadable files... ',
- brown, len(self.env.dlfiles)):
+ for src in self.app.status_iterator(self.env.dlfiles,
+ 'copying downloadable files... ',
+ brown, len(self.env.dlfiles)):
dest = self.env.dlfiles[src][1]
try:
copyfile(path.join(self.srcdir, src),
@@ -786,8 +795,8 @@ class StandaloneHTMLBuilder(Builder):
copyfile(self.env.doc2path(pagename), source_name)
def handle_finish(self):
- self.dump_search_index()
- self.dump_inventory()
+ self.finish_tasks.add_task(self.dump_search_index)
+ self.finish_tasks.add_task(self.dump_inventory)
def dump_inventory(self):
self.info(bold('dumping object inventory... '), nonl=True)
diff --git a/sphinx/cmdline.py b/sphinx/cmdline.py
index 6e7ab326..18a17ab3 100644
--- a/sphinx/cmdline.py
+++ b/sphinx/cmdline.py
@@ -12,7 +12,7 @@ from __future__ import print_function
import os
import sys
-import getopt
+import optparse
import traceback
from os import path
@@ -32,89 +32,121 @@ def usage(argv, msg=None):
if msg:
print(msg, file=sys.stderr)
print(file=sys.stderr)
- print("""\
+
+USAGE = """\
Sphinx v%s
-Usage: %s [options] sourcedir outdir [filenames...]
-
-General options
-^^^^^^^^^^^^^^^
--b <builder> builder to use; default is html
--a write all files; default is to only write new and changed files
--E don't use a saved environment, always read all files
--d <path> path for the cached environment and doctree files
- (default: outdir/.doctrees)
--j <N> build in parallel with N processes where possible
--M <builder> "make" mode -- used by Makefile, like "sphinx-build -M html"
-
-Build configuration options
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
--c <path> path where configuration file (conf.py) is located
- (default: same as sourcedir)
--C use no config file at all, only -D options
--D <setting=value> override a setting in configuration file
--t <tag> define tag: include "only" blocks with <tag>
--A <name=value> pass a value into the templates, for HTML builder
--n nit-picky mode, warn about all missing references
-
-Console output options
-^^^^^^^^^^^^^^^^^^^^^^
--v increase verbosity (can be repeated)
--q no output on stdout, just warnings on stderr
--Q no output at all, not even warnings
--w <file> write warnings (and errors) to given file
--W turn warnings into errors
--T show full traceback on exception
--N do not emit colored output
--P run Pdb on exception
-
-Filename arguments
-^^^^^^^^^^^^^^^^^^
-* without -a and without filenames, write new and changed files.
-* with -a, write all files.
-* with filenames, write these.
-
-Standard options
-^^^^^^^^^^^^^^^^
--h, --help show this help and exit
---version show version information and exit
-""" % (__version__, argv[0]), file=sys.stderr)
+Usage: %%prog [options] sourcedir outdir [filenames...]
+
+Filename arguments:
+ without -a and without filenames, write new and changed files.
+ with -a, write all files.
+ with filenames, write these.
+""" % __version__
+
+EPILOG = """\
+For more information, visit <http://sphinx-doc.org/>.
+"""
+
+
+class MyFormatter(optparse.IndentedHelpFormatter):
+ def format_usage(self, usage):
+ return usage
+
+ def format_help(self, formatter):
+ result = []
+ if self.description:
+ result.append(self.format_description(formatter))
+ if self.option_list:
+ result.append(self.format_option_help(formatter))
+ return "\n".join(result)
def main(argv):
if not color_terminal():
nocolor()
+ parser = optparse.OptionParser(USAGE, epilog=EPILOG, formatter=MyFormatter())
+ parser.add_option('--version', action='store_true', dest='version',
+ help='show version information and exit')
+
+ group = parser.add_option_group('General options')
+ group.add_option('-b', metavar='BUILDER', dest='builder', default='html',
+ help='builder to use; default is html')
+ group.add_option('-a', action='store_true', dest='force_all',
+ help='write all files; default is to only write new and '
+ 'changed files')
+ group.add_option('-E', action='store_true', dest='freshenv',
+ help='don\'t use a saved environment, always read '
+ 'all files')
+ group.add_option('-d', metavar='PATH', default=None, dest='doctreedir',
+ help='path for the cached environment and doctree files '
+ '(default: outdir/.doctrees)')
+ group.add_option('-j', metavar='N', default=1, type='int', dest='jobs',
+ help='build in parallel with N processes where possible')
+ # this option never gets through to this point (it is intercepted earlier)
+ # group.add_option('-M', metavar='BUILDER', dest='make_mode',
+ # help='"make" mode -- as used by Makefile, like '
+ # '"sphinx-build -M html"')
+
+ group = parser.add_option_group('Build configuration options')
+ group.add_option('-c', metavar='PATH', dest='confdir',
+ help='path where configuration file (conf.py) is located '
+ '(default: same as sourcedir)')
+ group.add_option('-C', action='store_true', dest='noconfig',
+ help='use no config file at all, only -D options')
+ group.add_option('-D', metavar='setting=value', action='append',
+ dest='define', default=[],
+ help='override a setting in configuration file')
+ group.add_option('-A', metavar='name=value', action='append',
+ dest='htmldefine', default=[],
+ help='pass a value into HTML templates')
+ group.add_option('-t', metavar='TAG', action='append',
+ dest='tags', default=[],
+ help='define tag: include "only" blocks with TAG')
+ group.add_option('-n', action='store_true', dest='nitpicky',
+ help='nit-picky mode, warn about all missing references')
+
+ group = parser.add_option_group('Console output options')
+ group.add_option('-v', action='count', dest='verbosity', default=0,
+ help='increase verbosity (can be repeated)')
+ group.add_option('-q', action='store_true', dest='quiet',
+ help='no output on stdout, just warnings on stderr')
+ group.add_option('-Q', action='store_true', dest='really_quiet',
+ help='no output at all, not even warnings')
+ group.add_option('-N', action='store_true', dest='nocolor',
+ help='do not emit colored output')
+ group.add_option('-w', metavar='FILE', dest='warnfile',
+ help='write warnings (and errors) to given file')
+ group.add_option('-W', action='store_true', dest='warningiserror',
+ help='turn warnings into errors')
+ group.add_option('-T', action='store_true', dest='traceback',
+ help='show full traceback on exception')
+ group.add_option('-P', action='store_true', dest='pdb',
+ help='run Pdb on exception')
+
# parse options
try:
- opts, args = getopt.getopt(argv[1:], 'ab:t:d:c:CD:A:nNEqQWw:PThvj:',
- ['help', 'version'])
- except getopt.error as err:
- usage(argv, 'Error: %s' % err)
- return 1
+ opts, args = parser.parse_args()
+ except SystemExit as err:
+ return err.code
# handle basic options
- allopts = set(opt[0] for opt in opts)
- # help and version options
- if '-h' in allopts or '--help' in allopts:
- usage(argv)
- print(file=sys.stderr)
- print('For more information, see <http://sphinx-doc.org/>.',
- file=sys.stderr)
- return 0
- if '--version' in allopts:
- print('Sphinx (sphinx-build) %s' % __version__)
+ if opts.version:
+ print('Sphinx (sphinx-build) %s' % __version__)
return 0
# get paths (first and second positional argument)
try:
- srcdir = confdir = abspath(args[0])
+ srcdir = abspath(args[0])
+ confdir = abspath(opts.confdir or srcdir)
+ if opts.noconfig:
+ confdir = None
if not path.isdir(srcdir):
print('Error: Cannot find source directory `%s\'.' % srcdir,
file=sys.stderr)
return 1
- if not path.isfile(path.join(srcdir, 'conf.py')) and \
- '-c' not in allopts and '-C' not in allopts:
- print('Error: Source directory doesn\'t contain a conf.py file.',
+ if not opts.noconfig and not path.isfile(path.join(confdir, 'conf.py')):
+ print('Error: Config directory doesn\'t contain a conf.py file.',
file=sys.stderr)
return 1
outdir = abspath(args[1])
@@ -144,116 +176,77 @@ def main(argv):
except Exception:
likely_encoding = None
- buildername = None
- force_all = freshenv = warningiserror = use_pdb = False
- show_traceback = False
- verbosity = 0
- parallel = 0
+ if opts.force_all and filenames:
+ print('Error: Cannot combine -a option and filenames.', file=sys.stderr)
+ return 1
+
+ if opts.nocolor:
+ nocolor()
+
+ doctreedir = abspath(opts.doctreedir or path.join(outdir, '.doctrees'))
+
status = sys.stdout
warning = sys.stderr
error = sys.stderr
- warnfile = None
+
+ if opts.quiet:
+ status = None
+ if opts.really_quiet:
+ status = warning = None
+ if warning and opts.warnfile:
+ try:
+ warnfp = open(opts.warnfile, 'w')
+ except Exception as exc:
+ print('Error: Cannot open warning file %r: %s' %
+ (opts.warnfile, exc), file=sys.stderr)
+ sys.exit(1)
+ warning = Tee(warning, warnfp)
+ error = warning
+
confoverrides = {}
- tags = []
- doctreedir = path.join(outdir, '.doctrees')
- for opt, val in opts:
- if opt == '-b':
- buildername = val
- elif opt == '-a':
- if filenames:
- usage(argv, 'Error: Cannot combine -a option and filenames.')
- return 1
- force_all = True
- elif opt == '-t':
- tags.append(val)
- elif opt == '-d':
- doctreedir = abspath(val)
- elif opt == '-c':
- confdir = abspath(val)
- if not path.isfile(path.join(confdir, 'conf.py')):
- print('Error: Configuration directory doesn\'t contain conf.py file.',
- file=sys.stderr)
- return 1
- elif opt == '-C':
- confdir = None
- elif opt == '-D':
+ for val in opts.define:
+ try:
+ key, val = val.split('=')
+ except ValueError:
+ print('Error: -D option argument must be in the form name=value.',
+ file=sys.stderr)
+ return 1
+ if likely_encoding and isinstance(val, binary_type):
try:
- key, val = val.split('=')
- except ValueError:
- print('Error: -D option argument must be in the form name=value.',
- file=sys.stderr)
- return 1
+ val = val.decode(likely_encoding)
+ except UnicodeError:
+ pass
+ confoverrides[key] = val
+
+ for val in opts.htmldefine:
+ try:
+ key, val = val.split('=')
+ except ValueError:
+ print('Error: -A option argument must be in the form name=value.',
+ file=sys.stderr)
+ return 1
+ try:
+ val = int(val)
+ except ValueError:
if likely_encoding and isinstance(val, binary_type):
try:
val = val.decode(likely_encoding)
except UnicodeError:
pass
- confoverrides[key] = val
- elif opt == '-A':
- try:
- key, val = val.split('=')
- except ValueError:
- print('Error: -A option argument must be in the form name=value.',
- file=sys.stderr)
- return 1
- try:
- val = int(val)
- except ValueError:
- if likely_encoding and isinstance(val, binary_type):
- try:
- val = val.decode(likely_encoding)
- except UnicodeError:
- pass
- confoverrides['html_context.%s' % key] = val
- elif opt == '-n':
- confoverrides['nitpicky'] = True
- elif opt == '-N':
- nocolor()
- elif opt == '-E':
- freshenv = True
- elif opt == '-q':
- status = None
- elif opt == '-Q':
- status = None
- warning = None
- elif opt == '-W':
- warningiserror = True
- elif opt == '-w':
- warnfile = val
- elif opt == '-P':
- use_pdb = True
- elif opt == '-T':
- show_traceback = True
- elif opt == '-v':
- verbosity += 1
- show_traceback = True
- elif opt == '-j':
- try:
- parallel = int(val)
- except ValueError:
- print('Error: -j option argument must be an integer.',
- file=sys.stderr)
- return 1
-
- if warning and warnfile:
- warnfp = open(warnfile, 'w')
- warning = Tee(warning, warnfp)
- error = warning
+ confoverrides['html_context.%s' % key] = val
- if not path.isdir(outdir):
- if status:
- print('Making output directory...', file=status)
- os.makedirs(outdir)
+ if opts.nitpicky:
+ confoverrides['nitpicky'] = True
app = None
try:
- app = Sphinx(srcdir, confdir, outdir, doctreedir, buildername,
- confoverrides, status, warning, freshenv,
- warningiserror, tags, verbosity, parallel)
- app.build(force_all, filenames)
+ app = Sphinx(srcdir, confdir, outdir, doctreedir, opts.builder,
+ confoverrides, status, warning, opts.freshenv,
+ opts.warningiserror, opts.tags, opts.verbosity, opts.jobs)
+ app.build(opts.force_all, filenames)
return app.statuscode
except (Exception, KeyboardInterrupt) as err:
- if use_pdb:
+ if opts.pdb:
import pdb
print(red('Exception occurred while building, starting debugger:'),
file=error)
@@ -261,7 +254,7 @@ def main(argv):
pdb.post_mortem(sys.exc_info()[2])
else:
print(file=error)
- if show_traceback:
+ if opts.verbosity or opts.traceback:
traceback.print_exc(None, error)
print(file=error)
if isinstance(err, KeyboardInterrupt):
diff --git a/sphinx/directives/__init__.py b/sphinx/directives/__init__.py
index 52b638fe..969426bc 100644
--- a/sphinx/directives/__init__.py
+++ b/sphinx/directives/__init__.py
@@ -11,7 +11,8 @@
import re
-from docutils.parsers.rst import Directive, directives
+from docutils import nodes
+from docutils.parsers.rst import Directive, directives, roles
from sphinx import addnodes
from sphinx.util.docfields import DocFieldTransformer
@@ -162,6 +163,34 @@ class ObjectDescription(Directive):
DescDirective = ObjectDescription
+class DefaultRole(Directive):
+ """
+ Set the default interpreted text role. Overridden from docutils.
+ """
+
+ optional_arguments = 1
+ final_argument_whitespace = False
+
+ def run(self):
+ if not self.arguments:
+ if '' in roles._roles:
+ # restore the "default" default role
+ del roles._roles['']
+ return []
+ role_name = self.arguments[0]
+ role, messages = roles.role(role_name, self.state_machine.language,
+ self.lineno, self.state.reporter)
+ if role is None:
+ error = self.state.reporter.error(
+ 'Unknown interpreted text role "%s".' % role_name,
+ nodes.literal_block(self.block_text, self.block_text),
+ line=self.lineno)
+ return messages + [error]
+ roles._roles[''] = role
+ self.state.document.settings.env.temp_data['default_role'] = role_name
+ return messages
+
+
class DefaultDomain(Directive):
"""
Directive to (re-)set the default domain for this source file.
@@ -186,6 +215,7 @@ class DefaultDomain(Directive):
return []
+directives.register_directive('default-role', DefaultRole)
directives.register_directive('default-domain', DefaultDomain)
directives.register_directive('describe', ObjectDescription)
# new, more consistent, name
diff --git a/sphinx/directives/code.py b/sphinx/directives/code.py
index 6ea525b0..543383da 100644
--- a/sphinx/directives/code.py
+++ b/sphinx/directives/code.py
@@ -47,7 +47,6 @@ class Highlight(Directive):
linenothreshold=linenothreshold)]
-
def dedent_lines(lines, dedent):
if not dedent:
return lines
@@ -93,7 +92,7 @@ class CodeBlock(Directive):
return [document.reporter.warning(str(err), line=self.lineno)]
else:
hl_lines = None
-
+
if 'dedent' in self.options:
lines = code.split('\n')
lines = dedent_lines(lines, self.options['dedent'])
diff --git a/sphinx/domains/__init__.py b/sphinx/domains/__init__.py
index 51b886fd..66d4c677 100644
--- a/sphinx/domains/__init__.py
+++ b/sphinx/domains/__init__.py
@@ -155,10 +155,13 @@ class Domain(object):
self._role_cache = {}
self._directive_cache = {}
self._role2type = {}
+ self._type2role = {}
for name, obj in iteritems(self.object_types):
for rolename in obj.roles:
self._role2type.setdefault(rolename, []).append(name)
+ self._type2role[name] = obj.roles[0] if obj.roles else ''
self.objtypes_for_role = self._role2type.get
+ self.role_for_objtype = self._type2role.get
def role(self, name):
"""Return a role adapter function that always gives the registered
@@ -199,6 +202,14 @@ class Domain(object):
"""Remove traces of a document in the domain-specific inventories."""
pass
+ def merge_domaindata(self, docnames, otherdata):
+ """Merge in data regarding *docnames* from a different domaindata
+ inventory (coming from a subprocess in parallel builds).
+ """
+ raise NotImplementedError('merge_domaindata must be implemented in %s '
+ 'to be able to do parallel builds!' %
+ self.__class__)
+
def process_doc(self, env, docname, document):
"""Process a document after it is read by the environment."""
pass
@@ -220,6 +231,22 @@ class Domain(object):
"""
pass
+ def resolve_any_xref(self, env, fromdocname, builder, target, node, contnode):
+ """Resolve the pending_xref *node* with the given *target*.
+
+ The reference comes from an "any" or similar role, which means that we
+ don't know the type. Otherwise, the arguments are the same as for
+ :meth:`resolve_xref`.
+
+ The method must return a list (potentially empty) of tuples
+ ``('domain:role', newnode)``, where ``'domain:role'`` is the name of a
+ role that could have created the same reference, e.g. ``'py:func'``.
+ ``newnode`` is what :meth:`resolve_xref` would return.
+
+ .. versionadded:: 1.3
+ """
+ raise NotImplementedError
+
def get_objects(self):
"""Return an iterable of "object descriptions", which are tuples with
five items:
diff --git a/sphinx/domains/c.py b/sphinx/domains/c.py
index 4d12c141..0754e317 100644
--- a/sphinx/domains/c.py
+++ b/sphinx/domains/c.py
@@ -130,7 +130,7 @@ class CObject(ObjectDescription):
if m:
name = m.group(1)
- typename = self.env.temp_data.get('c:type')
+ typename = self.env.ref_context.get('c:type')
if self.name == 'c:member' and typename:
fullname = typename + '.' + name
else:
@@ -212,12 +212,12 @@ class CObject(ObjectDescription):
self.typename_set = False
if self.name == 'c:type':
if self.names:
- self.env.temp_data['c:type'] = self.names[0]
+ self.env.ref_context['c:type'] = self.names[0]
self.typename_set = True
def after_content(self):
if self.typename_set:
- self.env.temp_data['c:type'] = None
+ self.env.ref_context.pop('c:type', None)
class CXRefRole(XRefRole):
@@ -269,6 +269,12 @@ class CDomain(Domain):
if fn == docname:
del self.data['objects'][fullname]
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX check duplicates
+ for fullname, (fn, objtype) in otherdata['objects'].items():
+ if fn in docnames:
+ self.data['objects'][fullname] = (fn, objtype)
+
def resolve_xref(self, env, fromdocname, builder,
typ, target, node, contnode):
# strip pointer asterisk
@@ -279,6 +285,17 @@ class CDomain(Domain):
return make_refnode(builder, fromdocname, obj[0], 'c.' + target,
contnode, target)
+ def resolve_any_xref(self, env, fromdocname, builder, target,
+ node, contnode):
+ # strip pointer asterisk
+ target = target.rstrip(' *')
+ if target not in self.data['objects']:
+ return []
+ obj = self.data['objects'][target]
+ return [('c:' + self.role_for_objtype(obj[1]),
+ make_refnode(builder, fromdocname, obj[0], 'c.' + target,
+ contnode, target))]
+
def get_objects(self):
for refname, (docname, type) in list(self.data['objects'].items()):
yield (refname, refname, type, docname, 'c.' + refname, 1)
diff --git a/sphinx/domains/cpp.py b/sphinx/domains/cpp.py
index 23bd469f..d4455a22 100644
--- a/sphinx/domains/cpp.py
+++ b/sphinx/domains/cpp.py
@@ -7,11 +7,11 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
-
+
See http://www.nongnu.org/hcb/ for the grammar.
See http://mentorembedded.github.io/cxx-abi/abi.html#mangling for the
inspiration for the id generation.
-
+
common grammar things:
simple-declaration
-> attribute-specifier-seq[opt] decl-specifier-seq[opt]
@@ -20,7 +20,7 @@
# Use at most 1 init-declerator.
-> decl-specifier-seq init-declerator
-> decl-specifier-seq declerator initializer
-
+
decl-specifier ->
storage-class-specifier -> "static" (only for member_object and
function_object)
@@ -76,8 +76,8 @@
constant-expression
| type-specifier-seq abstract-declerator
| id-expression
-
-
+
+
declerator ->
ptr-declerator
| noptr-declarator parameters-and-qualifiers trailing-return-type
@@ -108,11 +108,11 @@
memberFunctionInit -> "=" "0"
# (note: only "0" is allowed as the value, according to the standard,
# right?)
-
-
+
+
We additionally add the possibility for specifying the visibility as the
first thing.
-
+
type_object:
goal:
either a single type (e.g., "MyClass:Something_T" or a typedef-like
@@ -126,14 +126,14 @@
-> decl-specifier-seq abstract-declarator[opt]
grammar, typedef-like: no initilizer
decl-specifier-seq declerator
-
-
+
+
member_object:
goal: as a type_object which must have a declerator, and optionally
with a initializer
grammar:
decl-specifier-seq declerator initializer
-
+
function_object:
goal: a function declaration, TODO: what about templates? for now: skip
grammar: no initializer
@@ -141,7 +141,6 @@
"""
import re
-import traceback
from copy import deepcopy
from six import iteritems, text_type
@@ -222,9 +221,9 @@ _id_operator = {
'delete[]': 'da',
# the arguments will make the difference between unary and binary
# '+(unary)' : 'ps',
- #'-(unary)' : 'ng',
- #'&(unary)' : 'ad',
- #'*(unary)' : 'de',
+ # '-(unary)' : 'ng',
+ # '&(unary)' : 'ad',
+ # '*(unary)' : 'de',
'~': 'co',
'+': 'pl',
'-': 'mi',
@@ -319,7 +318,7 @@ class ASTBase(UnicodeMixin):
def _verify_description_mode(mode):
- if not mode in ('lastIsName', 'noneIsName', 'markType', 'param'):
+ if mode not in ('lastIsName', 'noneIsName', 'markType', 'param'):
raise Exception("Description mode '%s' is invalid." % mode)
@@ -328,7 +327,7 @@ class ASTOperatorBuildIn(ASTBase):
self.op = op
def get_id(self):
- if not self.op in _id_operator:
+ if self.op not in _id_operator:
raise Exception('Internal error: Build-in operator "%s" can not '
'be mapped to an id.' % self.op)
return _id_operator[self.op]
@@ -434,7 +433,7 @@ class ASTNestedNameElement(ASTBase):
'', refdomain='cpp', reftype='type',
reftarget=targetText, modname=None, classname=None)
if env: # during testing we don't have an env, do we?
- pnode['cpp:parent'] = env.temp_data.get('cpp:parent')
+ pnode['cpp:parent'] = env.ref_context.get('cpp:parent')
pnode += nodes.Text(text_type(self.identifier))
signode += pnode
elif mode == 'lastIsName':
@@ -532,7 +531,7 @@ class ASTTrailingTypeSpecFundamental(ASTBase):
return self.name
def get_id(self):
- if not self.name in _id_fundamental:
+ if self.name not in _id_fundamental:
raise Exception(
'Semi-internal error: Fundamental type "%s" can not be mapped '
'to an id. Is it a true fundamental type? If not so, the '
@@ -866,7 +865,7 @@ class ASTDeclerator(ASTBase):
isinstance(self.ptrOps[-1], ASTPtrOpParamPack)):
return False
else:
- return self.declId != None
+ return self.declId is not None
def __unicode__(self):
res = []
@@ -949,7 +948,7 @@ class ASTType(ASTBase):
_verify_description_mode(mode)
self.declSpecs.describe_signature(signode, 'markType', env)
if (self.decl.require_start_space() and
- len(text_type(self.declSpecs)) > 0):
+ len(text_type(self.declSpecs)) > 0):
signode += nodes.Text(' ')
self.decl.describe_signature(signode, mode, env)
@@ -1178,7 +1177,7 @@ class DefinitionParser(object):
else:
while not self.eof:
if (len(symbols) == 0 and
- self.current_char in (
+ self.current_char in (
',', '>')):
break
# TODO: actually implement nice handling
@@ -1190,8 +1189,7 @@ class DefinitionParser(object):
self.fail(
'Could not find end of constant '
'template argument.')
- value = self.definition[
- startPos:self.pos].strip()
+ value = self.definition[startPos:self.pos].strip()
templateArgs.append(ASTTemplateArgConstant(value))
self.skip_ws()
if self.skip_string('>'):
@@ -1422,7 +1420,7 @@ class DefinitionParser(object):
def _parse_declerator(self, named, paramMode=None, typed=True):
if paramMode:
- if not paramMode in ('type', 'function'):
+ if paramMode not in ('type', 'function'):
raise Exception(
"Internal error, unknown paramMode '%s'." % paramMode)
ptrOps = []
@@ -1493,7 +1491,7 @@ class DefinitionParser(object):
if outer == 'member':
value = self.read_rest().strip()
return ASTInitializer(value)
- elif outer == None: # function parameter
+ elif outer is None: # function parameter
symbols = []
startPos = self.pos
self.skip_ws()
@@ -1528,7 +1526,7 @@ class DefinitionParser(object):
doesn't need to name the arguments
"""
if outer: # always named
- if not outer in ('type', 'member', 'function'):
+ if outer not in ('type', 'member', 'function'):
raise Exception('Internal error, unknown outer "%s".' % outer)
assert not named
@@ -1652,12 +1650,12 @@ class CPPObject(ObjectDescription):
if theid not in self.state.document.ids:
# the name is not unique, the first one will win
objects = self.env.domaindata['cpp']['objects']
- if not name in objects:
+ if name not in objects:
signode['names'].append(name)
signode['ids'].append(theid)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
- if not name in objects:
+ if name not in objects:
objects.setdefault(name,
(self.env.docname, ast.objectType, theid))
# add the uninstantiated template if it doesn't exist
@@ -1665,8 +1663,8 @@ class CPPObject(ObjectDescription):
if uninstantiated != name and uninstantiated not in objects:
signode['names'].append(uninstantiated)
objects.setdefault(uninstantiated, (
- self.env.docname, ast.objectType, theid))
- self.env.temp_data['cpp:lastname'] = ast.prefixedName
+ self.env.docname, ast.objectType, theid))
+ self.env.ref_context['cpp:lastname'] = ast.prefixedName
indextext = self.get_index_text(name)
if not re.compile(r'^[a-zA-Z0-9_]*$').match(theid):
@@ -1693,7 +1691,7 @@ class CPPObject(ObjectDescription):
raise ValueError
self.describe_signature(signode, ast)
- parent = self.env.temp_data.get('cpp:parent')
+ parent = self.env.ref_context.get('cpp:parent')
if parent and len(parent) > 0:
ast = ast.clone()
ast.prefixedName = ast.name.prefix_nested_name(parent[-1])
@@ -1741,15 +1739,15 @@ class CPPClassObject(CPPObject):
return _('%s (C++ class)') % name
def before_content(self):
- lastname = self.env.temp_data['cpp:lastname']
+ lastname = self.env.ref_context['cpp:lastname']
assert lastname
- if 'cpp:parent' in self.env.temp_data:
- self.env.temp_data['cpp:parent'].append(lastname)
+ if 'cpp:parent' in self.env.ref_context:
+ self.env.ref_context['cpp:parent'].append(lastname)
else:
- self.env.temp_data['cpp:parent'] = [lastname]
+ self.env.ref_context['cpp:parent'] = [lastname]
def after_content(self):
- self.env.temp_data['cpp:parent'].pop()
+ self.env.ref_context['cpp:parent'].pop()
def parse_definition(self, parser):
return parser.parse_class_object()
@@ -1774,7 +1772,7 @@ class CPPNamespaceObject(Directive):
def run(self):
env = self.state.document.settings.env
if self.arguments[0].strip() in ('NULL', '0', 'nullptr'):
- env.temp_data['cpp:parent'] = []
+ env.ref_context['cpp:parent'] = []
else:
parser = DefinitionParser(self.arguments[0])
try:
@@ -1784,13 +1782,13 @@ class CPPNamespaceObject(Directive):
self.state_machine.reporter.warning(e.description,
line=self.lineno)
else:
- env.temp_data['cpp:parent'] = [prefix]
+ env.ref_context['cpp:parent'] = [prefix]
return []
class CPPXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
- parent = env.temp_data.get('cpp:parent')
+ parent = env.ref_context.get('cpp:parent')
if parent:
refnode['cpp:parent'] = parent[:]
if not has_explicit_title:
@@ -1838,18 +1836,24 @@ class CPPDomain(Domain):
if data[0] == docname:
del self.data['objects'][fullname]
- def resolve_xref(self, env, fromdocname, builder,
- typ, target, node, contnode):
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX check duplicates
+ for fullname, data in otherdata['objects'].items():
+ if data[0] in docnames:
+ self.data['objects'][fullname] = data
+
+ def _resolve_xref_inner(self, env, fromdocname, builder,
+ target, node, contnode, warn=True):
def _create_refnode(nameAst):
name = text_type(nameAst)
if name not in self.data['objects']:
# try dropping the last template
name = nameAst.get_name_no_last_template()
if name not in self.data['objects']:
- return None
+ return None, None
docname, objectType, id = self.data['objects'][name]
return make_refnode(builder, fromdocname, docname, id, contnode,
- name)
+ name), objectType
parser = DefinitionParser(target)
try:
@@ -1858,21 +1862,35 @@ class CPPDomain(Domain):
if not parser.eof:
raise DefinitionError('')
except DefinitionError:
- env.warn_node('unparseable C++ definition: %r' % target, node)
- return None
+ if warn:
+ env.warn_node('unparseable C++ definition: %r' % target, node)
+ return None, None
# try as is the name is fully qualified
- refNode = _create_refnode(nameAst)
- if refNode:
- return refNode
+ res = _create_refnode(nameAst)
+ if res[0]:
+ return res
# try qualifying it with the parent
parent = node.get('cpp:parent', None)
if parent and len(parent) > 0:
return _create_refnode(nameAst.prefix_nested_name(parent[-1]))
else:
- return None
+ return None, None
+
+ def resolve_xref(self, env, fromdocname, builder,
+ typ, target, node, contnode):
+ return self._resolve_xref_inner(env, fromdocname, builder, target, node,
+ contnode)[0]
+
+ def resolve_any_xref(self, env, fromdocname, builder, target,
+ node, contnode):
+ node, objtype = self._resolve_xref_inner(env, fromdocname, builder,
+ target, node, contnode, warn=False)
+ if node:
+ return [('cpp:' + self.role_for_objtype(objtype), node)]
+ return []
def get_objects(self):
for refname, (docname, type, theid) in iteritems(self.data['objects']):
- yield (refname, refname, type, docname, refname, 1) \ No newline at end of file
+ yield (refname, refname, type, docname, refname, 1)
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index 2718b872..af215fd6 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -45,7 +45,7 @@ class JSObject(ObjectDescription):
nameprefix = None
name = prefix
- objectname = self.env.temp_data.get('js:object')
+ objectname = self.env.ref_context.get('js:object')
if nameprefix:
if objectname:
# someone documenting the method of an attribute of the current
@@ -77,7 +77,7 @@ class JSObject(ObjectDescription):
def add_target_and_index(self, name_obj, sig, signode):
objectname = self.options.get(
- 'object', self.env.temp_data.get('js:object'))
+ 'object', self.env.ref_context.get('js:object'))
fullname = name_obj[0]
if fullname not in self.state.document.ids:
signode['names'].append(fullname)
@@ -140,7 +140,7 @@ class JSConstructor(JSCallable):
class JSXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
# basically what sphinx.domains.python.PyXRefRole does
- refnode['js:object'] = env.temp_data.get('js:object')
+ refnode['js:object'] = env.ref_context.get('js:object')
if not has_explicit_title:
title = title.lstrip('.')
target = target.lstrip('~')
@@ -179,7 +179,7 @@ class JavaScriptDomain(Domain):
'attr': JSXRefRole(),
}
initial_data = {
- 'objects': {}, # fullname -> docname, objtype
+ 'objects': {}, # fullname -> docname, objtype
}
def clear_doc(self, docname):
@@ -187,6 +187,12 @@ class JavaScriptDomain(Domain):
if fn == docname:
del self.data['objects'][fullname]
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX check duplicates
+ for fullname, (fn, objtype) in otherdata['objects'].items():
+ if fn in docnames:
+ self.data['objects'][fullname] = (fn, objtype)
+
def find_obj(self, env, obj, name, typ, searchorder=0):
if name[-2:] == '()':
name = name[:-2]
@@ -214,6 +220,16 @@ class JavaScriptDomain(Domain):
return make_refnode(builder, fromdocname, obj[0],
name.replace('$', '_S_'), contnode, name)
+ def resolve_any_xref(self, env, fromdocname, builder, target, node,
+ contnode):
+ objectname = node.get('js:object')
+ name, obj = self.find_obj(env, objectname, target, None, 1)
+ if not obj:
+ return []
+ return [('js:' + self.role_for_objtype(obj[1]),
+ make_refnode(builder, fromdocname, obj[0],
+ name.replace('$', '_S_'), contnode, name))]
+
def get_objects(self):
for refname, (docname, type) in list(self.data['objects'].items()):
yield refname, refname, type, docname, \
diff --git a/sphinx/domains/python.py b/sphinx/domains/python.py
index a7a93cb1..4e08eba9 100644
--- a/sphinx/domains/python.py
+++ b/sphinx/domains/python.py
@@ -156,8 +156,8 @@ class PyObject(ObjectDescription):
# determine module and class name (if applicable), as well as full name
modname = self.options.get(
- 'module', self.env.temp_data.get('py:module'))
- classname = self.env.temp_data.get('py:class')
+ 'module', self.env.ref_context.get('py:module'))
+ classname = self.env.ref_context.get('py:class')
if classname:
add_module = False
if name_prefix and name_prefix.startswith(classname):
@@ -194,7 +194,7 @@ class PyObject(ObjectDescription):
# 'exceptions' module.
elif add_module and self.env.config.add_module_names:
modname = self.options.get(
- 'module', self.env.temp_data.get('py:module'))
+ 'module', self.env.ref_context.get('py:module'))
if modname and modname != 'exceptions':
nodetext = modname + '.'
signode += addnodes.desc_addname(nodetext, nodetext)
@@ -225,7 +225,7 @@ class PyObject(ObjectDescription):
def add_target_and_index(self, name_cls, sig, signode):
modname = self.options.get(
- 'module', self.env.temp_data.get('py:module'))
+ 'module', self.env.ref_context.get('py:module'))
fullname = (modname and modname + '.' or '') + name_cls[0]
# note target
if fullname not in self.state.document.ids:
@@ -254,7 +254,7 @@ class PyObject(ObjectDescription):
def after_content(self):
if self.clsname_set:
- self.env.temp_data['py:class'] = None
+ self.env.ref_context.pop('py:class', None)
class PyModulelevel(PyObject):
@@ -299,7 +299,7 @@ class PyClasslike(PyObject):
def before_content(self):
PyObject.before_content(self)
if self.names:
- self.env.temp_data['py:class'] = self.names[0][0]
+ self.env.ref_context['py:class'] = self.names[0][0]
self.clsname_set = True
@@ -377,8 +377,8 @@ class PyClassmember(PyObject):
def before_content(self):
PyObject.before_content(self)
lastname = self.names and self.names[-1][1]
- if lastname and not self.env.temp_data.get('py:class'):
- self.env.temp_data['py:class'] = lastname.strip('.')
+ if lastname and not self.env.ref_context.get('py:class'):
+ self.env.ref_context['py:class'] = lastname.strip('.')
self.clsname_set = True
@@ -434,7 +434,7 @@ class PyModule(Directive):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
noindex = 'noindex' in self.options
- env.temp_data['py:module'] = modname
+ env.ref_context['py:module'] = modname
ret = []
if not noindex:
env.domaindata['py']['modules'][modname] = \
@@ -472,16 +472,16 @@ class PyCurrentModule(Directive):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
if modname == 'None':
- env.temp_data['py:module'] = None
+ env.ref_context.pop('py:module', None)
else:
- env.temp_data['py:module'] = modname
+ env.ref_context['py:module'] = modname
return []
class PyXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
- refnode['py:module'] = env.temp_data.get('py:module')
- refnode['py:class'] = env.temp_data.get('py:class')
+ refnode['py:module'] = env.ref_context.get('py:module')
+ refnode['py:class'] = env.ref_context.get('py:class')
if not has_explicit_title:
title = title.lstrip('.') # only has a meaning for the target
target = target.lstrip('~') # only has a meaning for the title
@@ -627,6 +627,15 @@ class PythonDomain(Domain):
if fn == docname:
del self.data['modules'][modname]
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX check duplicates?
+ for fullname, (fn, objtype) in otherdata['objects'].items():
+ if fn in docnames:
+ self.data['objects'][fullname] = (fn, objtype)
+ for modname, data in otherdata['modules'].items():
+ if data[0] in docnames:
+ self.data['modules'][modname] = data
+
def find_obj(self, env, modname, classname, name, type, searchmode=0):
"""Find a Python object for "name", perhaps using the given module
and/or classname. Returns a list of (name, object entry) tuples.
@@ -643,7 +652,10 @@ class PythonDomain(Domain):
newname = None
if searchmode == 1:
- objtypes = self.objtypes_for_role(type)
+ if type is None:
+ objtypes = list(self.object_types)
+ else:
+ objtypes = self.objtypes_for_role(type)
if objtypes is not None:
if modname and classname:
fullname = modname + '.' + classname + '.' + name
@@ -704,22 +716,44 @@ class PythonDomain(Domain):
name, obj = matches[0]
if obj[1] == 'module':
- # get additional info for modules
- docname, synopsis, platform, deprecated = self.data['modules'][name]
- assert docname == obj[0]
- title = name
- if synopsis:
- title += ': ' + synopsis
- if deprecated:
- title += _(' (deprecated)')
- if platform:
- title += ' (' + platform + ')'
- return make_refnode(builder, fromdocname, docname,
- 'module-' + name, contnode, title)
+ return self._make_module_refnode(builder, fromdocname, name,
+ contnode)
else:
return make_refnode(builder, fromdocname, obj[0], name,
contnode, name)
+ def resolve_any_xref(self, env, fromdocname, builder, target,
+ node, contnode):
+ modname = node.get('py:module')
+ clsname = node.get('py:class')
+ results = []
+
+ # always search in "refspecific" mode with the :any: role
+ matches = self.find_obj(env, modname, clsname, target, None, 1)
+ for name, obj in matches:
+ if obj[1] == 'module':
+ results.append(('py:mod',
+ self._make_module_refnode(builder, fromdocname,
+ name, contnode)))
+ else:
+ results.append(('py:' + self.role_for_objtype(obj[1]),
+ make_refnode(builder, fromdocname, obj[0], name,
+ contnode, name)))
+ return results
+
+ def _make_module_refnode(self, builder, fromdocname, name, contnode):
+ # get additional info for modules
+ docname, synopsis, platform, deprecated = self.data['modules'][name]
+ title = name
+ if synopsis:
+ title += ': ' + synopsis
+ if deprecated:
+ title += _(' (deprecated)')
+ if platform:
+ title += ' (' + platform + ')'
+ return make_refnode(builder, fromdocname, docname,
+ 'module-' + name, contnode, title)
+
def get_objects(self):
for modname, info in iteritems(self.data['modules']):
yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py
index e213211a..2c304d0c 100644
--- a/sphinx/domains/rst.py
+++ b/sphinx/domains/rst.py
@@ -123,6 +123,12 @@ class ReSTDomain(Domain):
if doc == docname:
del self.data['objects'][typ, name]
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX check duplicates
+ for (typ, name), doc in otherdata['objects'].items():
+ if doc in docnames:
+ self.data['objects'][typ, name] = doc
+
def resolve_xref(self, env, fromdocname, builder, typ, target, node,
contnode):
objects = self.data['objects']
@@ -134,6 +140,19 @@ class ReSTDomain(Domain):
objtype + '-' + target,
contnode, target + ' ' + objtype)
+ def resolve_any_xref(self, env, fromdocname, builder, target,
+ node, contnode):
+ objects = self.data['objects']
+ results = []
+ for objtype in self.object_types:
+ if (objtype, target) in self.data['objects']:
+ results.append(('rst:' + self.role_for_objtype(objtype),
+ make_refnode(builder, fromdocname,
+ objects[objtype, target],
+ objtype + '-' + target,
+ contnode, target + ' ' + objtype)))
+ return results
+
def get_objects(self):
for (typ, name), docname in iteritems(self.data['objects']):
yield name, name, typ, docname, typ + '-' + name, 1
diff --git a/sphinx/domains/std.py b/sphinx/domains/std.py
index bb044e30..a636299e 100644
--- a/sphinx/domains/std.py
+++ b/sphinx/domains/std.py
@@ -28,7 +28,9 @@ from sphinx.util.compat import Directive
# RE for option descriptions
-option_desc_re = re.compile(r'((?:/|-|--)?[-_a-zA-Z0-9]+)(\s*.*)')
+option_desc_re = re.compile(r'((?:/|--|-|\+)?[-?@#_a-zA-Z0-9]+)(=?\s*.*)')
+# RE for grammar tokens
+token_re = re.compile('`(\w+)`', re.U)
class GenericObject(ObjectDescription):
@@ -144,8 +146,9 @@ class Cmdoption(ObjectDescription):
self.env.warn(
self.env.docname,
'Malformed option description %r, should '
- 'look like "opt", "-opt args", "--opt args" or '
- '"/opt args"' % potential_option, self.lineno)
+ 'look like "opt", "-opt args", "--opt args", '
+ '"/opt args" or "+opt args"' % potential_option,
+ self.lineno)
continue
optname, args = m.groups()
if count:
@@ -163,7 +166,7 @@ class Cmdoption(ObjectDescription):
return firstname
def add_target_and_index(self, firstname, sig, signode):
- currprogram = self.env.temp_data.get('std:program')
+ currprogram = self.env.ref_context.get('std:program')
for optname in signode.get('allnames', []):
targetname = optname.replace('/', '-')
if not targetname.startswith('-'):
@@ -198,36 +201,19 @@ class Program(Directive):
env = self.state.document.settings.env
program = ws_re.sub('-', self.arguments[0].strip())
if program == 'None':
- env.temp_data['std:program'] = None
+ env.ref_context.pop('std:program', None)
else:
- env.temp_data['std:program'] = program
+ env.ref_context['std:program'] = program
return []
class OptionXRefRole(XRefRole):
- innernodeclass = addnodes.literal_emphasis
-
- def _split(self, text, refnode, env):
- try:
- program, target = re.split(' (?=-|--|/)', text, 1)
- except ValueError:
- env.warn_node('Malformed :option: %r, does not contain option '
- 'marker - or -- or /' % text, refnode)
- return None, text
- else:
- program = ws_re.sub('-', program)
- return program, target
-
def process_link(self, env, refnode, has_explicit_title, title, target):
- program = env.temp_data.get('std:program')
- if not has_explicit_title:
- if ' ' in title and not (title.startswith('/') or
- title.startswith('-')):
- program, target = self._split(title, refnode, env)
- target = target.strip()
- elif ' ' in target:
- program, target = self._split(target, refnode, env)
- refnode['refprogram'] = program
+ # validate content
+ if not re.match('(.+ )?[-/+]', target):
+ env.warn_node('Malformed :option: %r, does not contain option '
+ 'marker - or -- or / or +' % target, refnode)
+ refnode['std:program'] = env.ref_context.get('std:program')
return title, target
@@ -327,7 +313,7 @@ class Glossary(Directive):
else:
messages.append(self.state.reporter.system_message(
2, 'glossary seems to be misformatted, check '
- 'indentation', source=source, line=lineno))
+ 'indentation', source=source, line=lineno))
else:
if not in_definition:
# first line of definition, determines indentation
@@ -338,7 +324,7 @@ class Glossary(Directive):
else:
messages.append(self.state.reporter.system_message(
2, 'glossary seems to be misformatted, check '
- 'indentation', source=source, line=lineno))
+ 'indentation', source=source, line=lineno))
was_empty = False
# now, parse all the entries into a big definition list
@@ -359,7 +345,7 @@ class Glossary(Directive):
tmp.source = source
tmp.line = lineno
new_id, termtext, new_termnodes = \
- make_termnodes_from_paragraph_node(env, tmp)
+ make_termnodes_from_paragraph_node(env, tmp)
ids.append(new_id)
termtexts.append(termtext)
termnodes.extend(new_termnodes)
@@ -386,8 +372,6 @@ class Glossary(Directive):
return messages + [node]
-token_re = re.compile('`(\w+)`', re.U)
-
def token_xrefs(text):
retnodes = []
pos = 0
@@ -472,7 +456,7 @@ class StandardDomain(Domain):
'productionlist': ProductionList,
}
roles = {
- 'option': OptionXRefRole(innernodeclass=addnodes.literal_emphasis),
+ 'option': OptionXRefRole(),
'envvar': EnvVarXRefRole(),
# links to tokens in grammar productions
'token': XRefRole(),
@@ -522,6 +506,21 @@ class StandardDomain(Domain):
if fn == docname:
del self.data['anonlabels'][key]
+ def merge_domaindata(self, docnames, otherdata):
+ # XXX duplicates?
+ for key, data in otherdata['progoptions'].items():
+ if data[0] in docnames:
+ self.data['progoptions'][key] = data
+ for key, data in otherdata['objects'].items():
+ if data[0] in docnames:
+ self.data['objects'][key] = data
+ for key, data in otherdata['labels'].items():
+ if data[0] in docnames:
+ self.data['labels'][key] = data
+ for key, data in otherdata['anonlabels'].items():
+ if data[0] in docnames:
+ self.data['anonlabels'][key] = data
+
def process_doc(self, env, docname, document):
labels, anonlabels = self.data['labels'], self.data['anonlabels']
for name, explicit in iteritems(document.nametypes):
@@ -532,7 +531,7 @@ class StandardDomain(Domain):
continue
node = document.ids[labelid]
if name.isdigit() or 'refuri' in node or \
- node.tagname.startswith('desc_'):
+ node.tagname.startswith('desc_'):
# ignore footnote labels, labels automatically generated from a
# link and object descriptions
continue
@@ -541,7 +540,7 @@ class StandardDomain(Domain):
'in ' + env.doc2path(labels[name][0]), node)
anonlabels[name] = docname, labelid
if node.tagname == 'section':
- sectname = clean_astext(node[0]) # node[0] == title node
+ sectname = clean_astext(node[0]) # node[0] == title node
elif node.tagname == 'figure':
for n in node:
if n.tagname == 'caption':
@@ -563,6 +562,11 @@ class StandardDomain(Domain):
break
else:
continue
+ elif node.tagname == 'literal_block':
+ if 'caption' in node:
+ sectname = node['caption']
+ else:
+ continue
else:
# anonymous-only labels
continue
@@ -574,13 +578,13 @@ class StandardDomain(Domain):
if node['refexplicit']:
# reference to anonymous label; the reference uses
# the supplied link caption
- docname, labelid = self.data['anonlabels'].get(target, ('',''))
+ docname, labelid = self.data['anonlabels'].get(target, ('', ''))
sectname = node.astext()
else:
# reference to named label; the final node will
# contain the section name after the label
docname, labelid, sectname = self.data['labels'].get(target,
- ('','',''))
+ ('', '', ''))
if not docname:
return None
newnode = nodes.reference('', '', internal=True)
@@ -602,13 +606,22 @@ class StandardDomain(Domain):
return newnode
elif typ == 'keyword':
# keywords are oddballs: they are referenced by named labels
- docname, labelid, _ = self.data['labels'].get(target, ('','',''))
+ docname, labelid, _ = self.data['labels'].get(target, ('', '', ''))
if not docname:
return None
return make_refnode(builder, fromdocname, docname,
labelid, contnode)
elif typ == 'option':
- progname = node['refprogram']
+ target = target.strip()
+ # most obvious thing: we are a flag option without program
+ if target.startswith(('-', '/', '+')):
+ progname = node.get('std:program')
+ else:
+ try:
+ progname, target = re.split(r' (?=-|--|/|\+)', target, 1)
+ except ValueError:
+ return None
+ progname = ws_re.sub('-', progname.strip())
docname, labelid = self.data['progoptions'].get((progname, target),
('', ''))
if not docname:
@@ -628,6 +641,28 @@ class StandardDomain(Domain):
return make_refnode(builder, fromdocname, docname,
labelid, contnode)
+ def resolve_any_xref(self, env, fromdocname, builder, target,
+ node, contnode):
+ results = []
+ ltarget = target.lower() # :ref: lowercases its target automatically
+ for role in ('ref', 'option'): # do not try "keyword"
+ res = self.resolve_xref(env, fromdocname, builder, role,
+ ltarget if role == 'ref' else target,
+ node, contnode)
+ if res:
+ results.append(('std:' + role, res))
+ # all others
+ for objtype in self.object_types:
+ key = (objtype, target)
+ if objtype == 'term':
+ key = (objtype, ltarget)
+ if key in self.data['objects']:
+ docname, labelid = self.data['objects'][key]
+ results.append(('std:' + self.role_for_objtype(objtype),
+ make_refnode(builder, fromdocname, docname,
+ labelid, contnode)))
+ return results
+
def get_objects(self):
for (prog, option), info in iteritems(self.data['progoptions']):
yield (option, option, 'option', info[0], info[1], 1)
diff --git a/sphinx/environment.py b/sphinx/environment.py
index d51e7a16..d183710a 100644
--- a/sphinx/environment.py
+++ b/sphinx/environment.py
@@ -33,26 +33,31 @@ from docutils.parsers.rst import roles, directives
from docutils.parsers.rst.languages import en as english
from docutils.parsers.rst.directives.html import MetaBody
from docutils.writers import UnfilteredWriter
+from docutils.frontend import OptionParser
from sphinx import addnodes
from sphinx.util import url_re, get_matching_docs, docname_join, split_into, \
- FilenameUniqDict
+ FilenameUniqDict
from sphinx.util.nodes import clean_astext, make_refnode, WarningStream
-from sphinx.util.osutil import SEP, fs_encoding, find_catalog_files
+from sphinx.util.osutil import SEP, find_catalog_files, getcwd, fs_encoding
+from sphinx.util.console import bold, purple
from sphinx.util.matching import compile_matchers
+from sphinx.util.parallel import ParallelTasks, parallel_available, make_chunks
from sphinx.util.websupport import is_commentable
from sphinx.errors import SphinxError, ExtensionError
from sphinx.locale import _
from sphinx.versioning import add_uids, merge_doctrees
from sphinx.transforms import DefaultSubstitutions, MoveModuleTargets, \
- HandleCodeBlocks, SortIds, CitationReferences, Locale, \
- RemoveTranslatableInline, SphinxContentsFilter
+ HandleCodeBlocks, SortIds, CitationReferences, Locale, \
+ RemoveTranslatableInline, SphinxContentsFilter
orig_role_function = roles.role
orig_directive_function = directives.directive
-class ElementLookupError(Exception): pass
+
+class ElementLookupError(Exception):
+ pass
default_settings = {
@@ -69,7 +74,9 @@ default_settings = {
# This is increased every time an environment attribute is added
# or changed to properly invalidate pickle files.
-ENV_VERSION = 42 + (sys.version_info[0] - 2)
+#
+# NOTE: increase base version by 2 to have distinct numbers for Py2 and 3
+ENV_VERSION = 44 + (sys.version_info[0] - 2)
dummy_reporter = Reporter('', 4, 4)
@@ -105,6 +112,33 @@ class SphinxDummyWriter(UnfilteredWriter):
pass
+class SphinxFileInput(FileInput):
+ def __init__(self, app, env, *args, **kwds):
+ self.app = app
+ self.env = env
+ # don't call sys.exit() on IOErrors
+ kwds['handle_io_errors'] = False
+ kwds['error_handler'] = 'sphinx' # py3: handle error on open.
+ FileInput.__init__(self, *args, **kwds)
+
+ def decode(self, data):
+ if isinstance(data, text_type): # py3: `data` already decoded.
+ return data
+ return data.decode(self.encoding, 'sphinx') # py2: decoding
+
+ def read(self):
+ data = FileInput.read(self)
+ if self.app:
+ arg = [data]
+ self.app.emit('source-read', self.env.docname, arg)
+ data = arg[0]
+ if self.env.config.rst_epilog:
+ data = data + '\n' + self.env.config.rst_epilog + '\n'
+ if self.env.config.rst_prolog:
+ data = self.env.config.rst_prolog + '\n' + data
+ return data
+
+
class BuildEnvironment:
"""
The environment in which the ReST files are translated.
@@ -122,7 +156,7 @@ class BuildEnvironment:
finally:
picklefile.close()
if env.version != ENV_VERSION:
- raise IOError('env version not current')
+ raise IOError('build environment version not current')
env.config.values = config.values
return env
@@ -138,9 +172,9 @@ class BuildEnvironment:
# remove potentially pickling-problematic values from config
for key, val in list(vars(self.config).items()):
if key.startswith('_') or \
- isinstance(val, types.ModuleType) or \
- isinstance(val, types.FunctionType) or \
- isinstance(val, class_types):
+ isinstance(val, types.ModuleType) or \
+ isinstance(val, types.FunctionType) or \
+ isinstance(val, class_types):
del self.config[key]
try:
pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL)
@@ -181,8 +215,8 @@ class BuildEnvironment:
# the source suffix.
self.found_docs = set() # contains all existing docnames
- self.all_docs = {} # docname -> mtime at the time of build
- # contains all built docnames
+ self.all_docs = {} # docname -> mtime at the time of reading
+ # contains all read docnames
self.dependencies = {} # docname -> set of dependent file
# names, relative to documentation root
self.reread_always = set() # docnames to re-read unconditionally on
@@ -223,6 +257,10 @@ class BuildEnvironment:
# temporary data storage while reading a document
self.temp_data = {}
+ # context for cross-references (e.g. current module or class)
+ # this is similar to temp_data, but will for example be copied to
+ # attributes of "any" cross references
+ self.ref_context = {}
def set_warnfunc(self, func):
self._warnfunc = func
@@ -292,6 +330,50 @@ class BuildEnvironment:
for domain in self.domains.values():
domain.clear_doc(docname)
+ def merge_info_from(self, docnames, other, app):
+ """Merge global information gathered about *docnames* while reading them
+ from the *other* environment.
+
+ This possibly comes from a parallel build process.
+ """
+ docnames = set(docnames)
+ for docname in docnames:
+ self.all_docs[docname] = other.all_docs[docname]
+ if docname in other.reread_always:
+ self.reread_always.add(docname)
+ self.metadata[docname] = other.metadata[docname]
+ if docname in other.dependencies:
+ self.dependencies[docname] = other.dependencies[docname]
+ self.titles[docname] = other.titles[docname]
+ self.longtitles[docname] = other.longtitles[docname]
+ self.tocs[docname] = other.tocs[docname]
+ self.toc_num_entries[docname] = other.toc_num_entries[docname]
+ # toc_secnumbers is not assigned during read
+ if docname in other.toctree_includes:
+ self.toctree_includes[docname] = other.toctree_includes[docname]
+ self.indexentries[docname] = other.indexentries[docname]
+ if docname in other.glob_toctrees:
+ self.glob_toctrees.add(docname)
+ if docname in other.numbered_toctrees:
+ self.numbered_toctrees.add(docname)
+
+ self.images.merge_other(docnames, other.images)
+ self.dlfiles.merge_other(docnames, other.dlfiles)
+
+ for subfn, fnset in other.files_to_rebuild.items():
+ self.files_to_rebuild.setdefault(subfn, set()).update(fnset & docnames)
+ for key, data in other.citations.items():
+ # XXX duplicates?
+ if data[0] in docnames:
+ self.citations[key] = data
+ for version, changes in other.versionchanges.items():
+ self.versionchanges.setdefault(version, []).extend(
+ change for change in changes if change[1] in docnames)
+
+ for domainname, domain in self.domains.items():
+ domain.merge_domaindata(docnames, other.domaindata[domainname])
+ app.emit('env-merge-info', self, docnames, other)
+
def doc2path(self, docname, base=True, suffix=None):
"""Return the filename for the document name.
@@ -407,13 +489,11 @@ class BuildEnvironment:
return added, changed, removed
- def update(self, config, srcdir, doctreedir, app=None):
+ def update(self, config, srcdir, doctreedir, app):
"""(Re-)read all files new or changed since last update.
- Returns a summary, the total count of documents to reread and an
- iterator that yields docnames as it processes them. Store all
- environment docnames in the canonical format (ie using SEP as a
- separator in place of os.path.sep).
+ Store all environment docnames in the canonical format (i.e. using SEP as
+ a separator in place of os.path.sep).
"""
config_changed = False
if self.config is None:
@@ -445,6 +525,8 @@ class BuildEnvironment:
# this cache also needs to be updated every time
self._nitpick_ignore = set(self.config.nitpick_ignore)
+ app.info(bold('updating environment: '), nonl=1)
+
added, changed, removed = self.get_outdated_files(config_changed)
# allow user intervention as well
@@ -459,30 +541,98 @@ class BuildEnvironment:
msg += '%s added, %s changed, %s removed' % (len(added), len(changed),
len(removed))
+ app.info(msg)
- def update_generator():
- self.app = app
+ self.app = app
+
+ # clear all files no longer present
+ for docname in removed:
+ app.emit('env-purge-doc', self, docname)
+ self.clear_doc(docname)
+
+ # read all new and changed files
+ docnames = sorted(added | changed)
+ # allow changing and reordering the list of docs to read
+ app.emit('env-before-read-docs', self, docnames)
+
+ # check if we should do parallel or serial read
+ par_ok = False
+ if parallel_available and len(docnames) > 5 and app.parallel > 1:
+ par_ok = True
+ for extname, md in app._extension_metadata.items():
+ ext_ok = md.get('parallel_read_safe')
+ if ext_ok:
+ continue
+ if ext_ok is None:
+ app.warn('the %s extension does not declare if it '
+ 'is safe for parallel reading, assuming it '
+ 'isn\'t - please ask the extension author to '
+ 'check and make it explicit' % extname)
+ app.warn('doing serial read')
+ else:
+ app.warn('the %s extension is not safe for parallel '
+ 'reading, doing serial read' % extname)
+ par_ok = False
+ break
+ if par_ok:
+ self._read_parallel(docnames, app, nproc=app.parallel)
+ else:
+ self._read_serial(docnames, app)
- # clear all files no longer present
- for docname in removed:
- if app:
- app.emit('env-purge-doc', self, docname)
- self.clear_doc(docname)
+ if config.master_doc not in self.all_docs:
+ self.warn(None, 'master file %s not found' %
+ self.doc2path(config.master_doc))
- # read all new and changed files
- for docname in sorted(added | changed):
- yield docname
- self.read_doc(docname, app=app)
+ self.app = None
+ app.emit('env-updated', self)
+ return docnames
- if config.master_doc not in self.all_docs:
- self.warn(None, 'master file %s not found' %
- self.doc2path(config.master_doc))
+ def _read_serial(self, docnames, app):
+ for docname in app.status_iterator(docnames, 'reading sources... ',
+ purple, len(docnames)):
+ # remove all inventory entries for that file
+ app.emit('env-purge-doc', self, docname)
+ self.clear_doc(docname)
+ self.read_doc(docname, app)
- self.app = None
- if app:
- app.emit('env-updated', self)
+ def _read_parallel(self, docnames, app, nproc):
+ # clear all outdated docs at once
+ for docname in docnames:
+ app.emit('env-purge-doc', self, docname)
+ self.clear_doc(docname)
- return msg, len(added | changed), update_generator()
+ def read_process(docs):
+ self.app = app
+ self.warnings = []
+ self.set_warnfunc(lambda *args: self.warnings.append(args))
+ for docname in docs:
+ self.read_doc(docname, app)
+ # allow pickling self to send it back
+ self.set_warnfunc(None)
+ del self.app
+ del self.domains
+ del self.config.values
+ del self.config
+ return self
+
+ def merge(docs, otherenv):
+ warnings.extend(otherenv.warnings)
+ self.merge_info_from(docs, otherenv, app)
+
+ tasks = ParallelTasks(nproc)
+ chunks = make_chunks(docnames, nproc)
+
+ warnings = []
+ for chunk in app.status_iterator(
+ chunks, 'reading sources... ', purple, len(chunks)):
+ tasks.add_task(read_process, chunk, merge)
+
+ # make sure all threads have finished
+ app.info(bold('waiting for workers...'))
+ tasks.join()
+
+ for warning in warnings:
+ self._warnfunc(*warning)
def check_dependents(self, already):
to_rewrite = self.assign_section_numbers()
@@ -496,7 +646,8 @@ class BuildEnvironment:
"""Custom decoding error handler that warns and replaces."""
linestart = error.object.rfind(b'\n', 0, error.start)
lineend = error.object.find(b'\n', error.start)
- if lineend == -1: lineend = len(error.object)
+ if lineend == -1:
+ lineend = len(error.object)
lineno = error.object.count(b'\n', 0, error.start) + 1
self.warn(self.docname, 'undecodable source characters, '
'replacing with "?": %r' %
@@ -550,19 +701,8 @@ class BuildEnvironment:
directives.directive = directive
roles.role = role
- def read_doc(self, docname, src_path=None, save_parsed=True, app=None):
- """Parse a file and add/update inventory entries for the doctree.
-
- If srcpath is given, read from a different source file.
- """
- # remove all inventory entries for that file
- if app:
- app.emit('env-purge-doc', self, docname)
-
- self.clear_doc(docname)
-
- if src_path is None:
- src_path = self.doc2path(docname)
+ def read_doc(self, docname, app=None):
+ """Parse a file and add/update inventory entries for the doctree."""
self.temp_data['docname'] = docname
# defaults to the global default, but can be re-set in a document
@@ -576,6 +716,12 @@ class BuildEnvironment:
self.patch_lookup_functions()
+ docutilsconf = path.join(self.srcdir, 'docutils.conf')
+ # read docutils.conf from source dir, not from current dir
+ OptionParser.standard_config_files[1] = docutilsconf
+ if path.isfile(docutilsconf):
+ self.note_dependency(docutilsconf)
+
if self.config.default_role:
role_fn, messages = roles.role(self.config.default_role, english,
0, dummy_reporter)
@@ -587,38 +733,17 @@ class BuildEnvironment:
codecs.register_error('sphinx', self.warn_and_replace)
- class SphinxSourceClass(FileInput):
- def __init__(self_, *args, **kwds):
- # don't call sys.exit() on IOErrors
- kwds['handle_io_errors'] = False
- kwds['error_handler'] = 'sphinx' # py3: handle error on open.
- FileInput.__init__(self_, *args, **kwds)
-
- def decode(self_, data):
- if isinstance(data, text_type): # py3: `data` already decoded.
- return data
- return data.decode(self_.encoding, 'sphinx') # py2: decoding
-
- def read(self_):
- data = FileInput.read(self_)
- if app:
- arg = [data]
- app.emit('source-read', docname, arg)
- data = arg[0]
- if self.config.rst_epilog:
- data = data + '\n' + self.config.rst_epilog + '\n'
- if self.config.rst_prolog:
- data = self.config.rst_prolog + '\n' + data
- return data
-
# publish manually
pub = Publisher(reader=SphinxStandaloneReader(),
writer=SphinxDummyWriter(),
- source_class=SphinxSourceClass,
destination_class=NullOutput)
pub.set_components(None, 'restructuredtext', None)
pub.process_programmatic_settings(None, self.settings, None)
- pub.set_source(None, src_path)
+ src_path = self.doc2path(docname)
+ source = SphinxFileInput(app, self, source=None, source_path=src_path,
+ encoding=self.config.source_encoding)
+ pub.source = source
+ pub.settings._source = src_path
pub.set_destination(None, None)
pub.publish()
doctree = pub.document
@@ -641,12 +766,12 @@ class BuildEnvironment:
if app:
app.emit('doctree-read', doctree)
- # store time of build, for outdated files detection
+ # store time of reading, for outdated files detection
# (Some filesystems have coarse timestamp resolution;
# therefore time.time() can be older than filesystem's timestamp.
# For example, FAT32 has 2sec timestamp resolution.)
self.all_docs[docname] = max(
- time.time(), path.getmtime(self.doc2path(docname)))
+ time.time(), path.getmtime(self.doc2path(docname)))
if self.versioning_condition:
# get old doctree
@@ -679,21 +804,20 @@ class BuildEnvironment:
# cleanup
self.temp_data.clear()
-
- if save_parsed:
- # save the parsed doctree
- doctree_filename = self.doc2path(docname, self.doctreedir,
- '.doctree')
- dirname = path.dirname(doctree_filename)
- if not path.isdir(dirname):
- os.makedirs(dirname)
- f = open(doctree_filename, 'wb')
- try:
- pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
- finally:
- f.close()
- else:
- return doctree
+ self.ref_context.clear()
+ roles._roles.pop('', None) # if a document has set a local default role
+
+ # save the parsed doctree
+ doctree_filename = self.doc2path(docname, self.doctreedir,
+ '.doctree')
+ dirname = path.dirname(doctree_filename)
+ if not path.isdir(dirname):
+ os.makedirs(dirname)
+ f = open(doctree_filename, 'wb')
+ try:
+ pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
+ finally:
+ f.close()
# utilities to use while reading a document
@@ -704,13 +828,17 @@ class BuildEnvironment:
@property
def currmodule(self):
- """Backwards compatible alias."""
- return self.temp_data.get('py:module')
+ """Backwards compatible alias. Will be removed."""
+ self.warn(self.docname, 'env.currmodule is being referenced by an '
+ 'extension; this API will be removed in the future')
+ return self.ref_context.get('py:module')
@property
def currclass(self):
- """Backwards compatible alias."""
- return self.temp_data.get('py:class')
+ """Backwards compatible alias. Will be removed."""
+ self.warn(self.docname, 'env.currclass is being referenced by an '
+ 'extension; this API will be removed in the future')
+ return self.ref_context.get('py:class')
def new_serialno(self, category=''):
"""Return a serial number, e.g. for index entry targets.
@@ -740,7 +868,7 @@ class BuildEnvironment:
def note_versionchange(self, type, version, node, lineno):
self.versionchanges.setdefault(version, []).append(
(type, self.temp_data['docname'], lineno,
- self.temp_data.get('py:module'),
+ self.ref_context.get('py:module'),
self.temp_data.get('object'), node.astext()))
# post-processing of read doctrees
@@ -755,7 +883,7 @@ class BuildEnvironment:
def process_dependencies(self, docname, doctree):
"""Process docutils-generated dependency info."""
- cwd = os.getcwd()
+ cwd = getcwd()
frompath = path.join(path.normpath(self.srcdir), 'dummy')
deps = doctree.settings.record_dependencies
if not deps:
@@ -763,6 +891,8 @@ class BuildEnvironment:
for dep in deps.list:
# the dependency path is relative to the working dir, so get
# one relative to the srcdir
+ if isinstance(dep, bytes):
+ dep = dep.decode(fs_encoding)
relpath = relative_path(frompath,
path.normpath(path.join(cwd, dep)))
self.dependencies.setdefault(docname, set()).add(relpath)
@@ -846,7 +976,7 @@ class BuildEnvironment:
# nodes are multiply inherited...
if isinstance(node, nodes.authors):
md['authors'] = [author.astext() for author in node]
- elif isinstance(node, nodes.TextElement): # e.g. author
+ elif isinstance(node, nodes.TextElement): # e.g. author
md[node.__class__.__name__] = node.astext()
else:
name, body = node
@@ -976,7 +1106,7 @@ class BuildEnvironment:
def build_toc_from(self, docname, document):
"""Build a TOC from the doctree and store it in the inventory."""
- numentries = [0] # nonlocal again...
+ numentries = [0] # nonlocal again...
def traverse_in_section(node, cls):
"""Like traverse(), but stay within the same section."""
@@ -1102,7 +1232,6 @@ class BuildEnvironment:
stream=WarningStream(self._warnfunc))
return doctree
-
def get_and_resolve_doctree(self, docname, builder, doctree=None,
prune_toctrees=True, includehidden=False):
"""Read the doctree from the pickle, resolve cross-references and
@@ -1117,7 +1246,8 @@ class BuildEnvironment:
# now, resolve all toctree nodes
for toctreenode in doctree.traverse(addnodes.toctree):
result = self.resolve_toctree(docname, builder, toctreenode,
- prune=prune_toctrees, includehidden=includehidden)
+ prune=prune_toctrees,
+ includehidden=includehidden)
if result is None:
toctreenode.replace_self([])
else:
@@ -1174,7 +1304,7 @@ class BuildEnvironment:
else:
# cull sub-entries whose parents aren't 'current'
if (collapse and depth > 1 and
- 'iscurrent' not in subnode.parent):
+ 'iscurrent' not in subnode.parent):
subnode.parent.remove(subnode)
else:
# recurse on visible children
@@ -1256,7 +1386,7 @@ class BuildEnvironment:
child = toc.children[0]
for refnode in child.traverse(nodes.reference):
if refnode['refuri'] == ref and \
- not refnode['anchorname']:
+ not refnode['anchorname']:
refnode.children = [nodes.Text(title)]
if not toc.children:
# empty toc means: no titles will show up in the toctree
@@ -1346,49 +1476,23 @@ class BuildEnvironment:
domain = self.domains[node['refdomain']]
except KeyError:
raise NoUri
- newnode = domain.resolve_xref(self, fromdocname, builder,
+ newnode = domain.resolve_xref(self, refdoc, builder,
typ, target, node, contnode)
# really hardwired reference types
+ elif typ == 'any':
+ newnode = self._resolve_any_reference(builder, node, contnode)
elif typ == 'doc':
- # directly reference to document by source name;
- # can be absolute or relative
- docname = docname_join(refdoc, target)
- if docname in self.all_docs:
- if node['refexplicit']:
- # reference with explicit title
- caption = node.astext()
- else:
- caption = clean_astext(self.titles[docname])
- innernode = nodes.emphasis(caption, caption)
- newnode = nodes.reference('', '', internal=True)
- newnode['refuri'] = builder.get_relative_uri(
- fromdocname, docname)
- newnode.append(innernode)
+ newnode = self._resolve_doc_reference(builder, node, contnode)
elif typ == 'citation':
- docname, labelid = self.citations.get(target, ('', ''))
- if docname:
- try:
- newnode = make_refnode(builder, fromdocname,
- docname, labelid, contnode)
- except NoUri:
- # remove the ids we added in the CitationReferences
- # transform since they can't be transfered to
- # the contnode (if it's a Text node)
- if not isinstance(contnode, nodes.Element):
- del node['ids'][:]
- raise
- elif 'ids' in node:
- # remove ids attribute that annotated at
- # transforms.CitationReference.apply.
- del node['ids'][:]
+ newnode = self._resolve_citation(builder, refdoc, node, contnode)
# no new node found? try the missing-reference event
if newnode is None:
newnode = builder.app.emit_firstresult(
'missing-reference', self, node, contnode)
- # still not found? warn if in nit-picky mode
+ # still not found? warn if node wishes to be warned about or
+ # we are in nit-picky mode
if newnode is None:
- self._warn_missing_reference(
- fromdocname, typ, target, node, domain)
+ self._warn_missing_reference(refdoc, typ, target, node, domain)
except NoUri:
newnode = contnode
node.replace_self(newnode or contnode)
@@ -1399,7 +1503,7 @@ class BuildEnvironment:
# allow custom references to be resolved
builder.app.emit('doctree-resolved', doctree, fromdocname)
- def _warn_missing_reference(self, fromdoc, typ, target, node, domain):
+ def _warn_missing_reference(self, refdoc, typ, target, node, domain):
warn = node.get('refwarn')
if self.config.nitpicky:
warn = True
@@ -1418,13 +1522,91 @@ class BuildEnvironment:
msg = 'unknown document: %(target)s'
elif typ == 'citation':
msg = 'citation not found: %(target)s'
- elif node.get('refdomain', 'std') != 'std':
+ elif node.get('refdomain', 'std') not in ('', 'std'):
msg = '%s:%s reference target not found: %%(target)s' % \
(node['refdomain'], typ)
else:
- msg = '%s reference target not found: %%(target)s' % typ
+ msg = '%r reference target not found: %%(target)s' % typ
self.warn_node(msg % {'target': target}, node)
+ def _resolve_doc_reference(self, builder, node, contnode):
+ # directly reference to document by source name;
+ # can be absolute or relative
+ docname = docname_join(node['refdoc'], node['reftarget'])
+ if docname in self.all_docs:
+ if node['refexplicit']:
+ # reference with explicit title
+ caption = node.astext()
+ else:
+ caption = clean_astext(self.titles[docname])
+ innernode = nodes.emphasis(caption, caption)
+ newnode = nodes.reference('', '', internal=True)
+ newnode['refuri'] = builder.get_relative_uri(node['refdoc'], docname)
+ newnode.append(innernode)
+ return newnode
+
+ def _resolve_citation(self, builder, fromdocname, node, contnode):
+ docname, labelid = self.citations.get(node['reftarget'], ('', ''))
+ if docname:
+ try:
+ newnode = make_refnode(builder, fromdocname,
+ docname, labelid, contnode)
+ return newnode
+ except NoUri:
+ # remove the ids we added in the CitationReferences
+ # transform since they can't be transferred to
+ # the contnode (if it's a Text node)
+ if not isinstance(contnode, nodes.Element):
+ del node['ids'][:]
+ raise
+ elif 'ids' in node:
+ # remove the ids attribute that was annotated at
+ # transforms.CitationReference.apply.
+ del node['ids'][:]
+
+ def _resolve_any_reference(self, builder, node, contnode):
+ """Resolve reference generated by the "any" role."""
+ refdoc = node['refdoc']
+ target = node['reftarget']
+ results = []
+ # first, try resolving as :doc:
+ doc_ref = self._resolve_doc_reference(builder, node, contnode)
+ if doc_ref:
+ results.append(('doc', doc_ref))
+ # next, do the standard domain (makes this a priority)
+ results.extend(self.domains['std'].resolve_any_xref(
+ self, refdoc, builder, target, node, contnode))
+ for domain in self.domains.values():
+ if domain.name == 'std':
+ continue # we did this one already
+ try:
+ results.extend(domain.resolve_any_xref(self, refdoc, builder,
+ target, node, contnode))
+ except NotImplementedError:
+ # the domain doesn't yet support the new interface
+ # we have to manually collect possible references (SLOW)
+ for role in domain.roles:
+ res = domain.resolve_xref(self, refdoc, builder, role, target,
+ node, contnode)
+ if res:
+ results.append(('%s:%s' % (domain.name, role), res))
+ # now, see how many matches we got...
+ if not results:
+ return None
+ if len(results) > 1:
+ nice_results = ' or '.join(':%s:' % r[0] for r in results)
+ self.warn_node('more than one target found for \'any\' cross-'
+ 'reference %r: could be %s' % (target, nice_results),
+ node)
+ res_role, newnode = results[0]
+ # Override "any" class with the actual role type to get the styling
+ # approximately correct.
+ res_domain = res_role.split(':')[0]
+ if newnode and newnode[0].get('classes'):
+ newnode[0]['classes'].append(res_domain)
+ newnode[0]['classes'].append(res_role.replace(':', '-'))
+ return newnode
+
def process_only_nodes(self, doctree, builder, fromdocname=None):
# A comment on the comment() nodes being inserted: replacing by [] would
# result in a "Losing ids" exception if there is a target node before
@@ -1595,7 +1777,7 @@ class BuildEnvironment:
# prefixes match: add entry as subitem of the
# previous entry
oldsubitems.setdefault(m.group(2), [[], {}])[0].\
- extend(targets)
+ extend(targets)
del newlist[i]
continue
oldkey = m.group(1)
@@ -1622,6 +1804,7 @@ class BuildEnvironment:
def collect_relations(self):
relations = {}
getinc = self.toctree_includes.get
+
def collect(parents, parents_set, docname, previous, next):
# circular relationship?
if docname in parents_set:
@@ -1661,8 +1844,8 @@ class BuildEnvironment:
# same for children
if includes:
for subindex, args in enumerate(zip(includes,
- [None] + includes,
- includes[1:] + [None])):
+ [None] + includes,
+ includes[1:] + [None])):
collect([(docname, subindex)] + parents,
parents_set.union([docname]), *args)
relations[docname] = [parents[0][0], previous, next]
diff --git a/sphinx/errors.py b/sphinx/errors.py
index 4d737e51..3d7a5eb4 100644
--- a/sphinx/errors.py
+++ b/sphinx/errors.py
@@ -10,6 +10,9 @@
:license: BSD, see LICENSE for details.
"""
+import traceback
+
+
class SphinxError(Exception):
"""
Base class for Sphinx errors that are shown to the user in a nicer
@@ -62,3 +65,13 @@ class PycodeError(Exception):
if len(self.args) > 1:
res += ' (exception was: %r)' % self.args[1]
return res
+
+
+class SphinxParallelError(Exception):
+ def __init__(self, orig_exc, traceback):
+ self.orig_exc = orig_exc
+ self.traceback = traceback
+
+ def __str__(self):
+ return traceback.format_exception_only(
+ self.orig_exc.__class__, self.orig_exc)[0].strip()
diff --git a/sphinx/ext/autodoc.py b/sphinx/ext/autodoc.py
index 5b014d0d..5b0bda17 100644
--- a/sphinx/ext/autodoc.py
+++ b/sphinx/ext/autodoc.py
@@ -30,7 +30,7 @@ from sphinx.application import ExtensionError
from sphinx.util.nodes import nested_parse_with_titles
from sphinx.util.compat import Directive
from sphinx.util.inspect import getargspec, isdescriptor, safe_getmembers, \
- safe_getattr, safe_repr, is_builtin_class_method
+ safe_getattr, safe_repr, is_builtin_class_method
from sphinx.util.docstrings import prepare_docstring
@@ -50,11 +50,13 @@ class DefDict(dict):
def __init__(self, default):
dict.__init__(self)
self.default = default
+
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return self.default
+
def __bool__(self):
# docutils check "if option_spec"
return True
@@ -92,6 +94,7 @@ class _MockModule(object):
else:
return _MockModule()
+
def mock_import(modname):
if '.' in modname:
pkg, _n, mods = modname.rpartition('.')
@@ -104,12 +107,14 @@ def mock_import(modname):
ALL = object()
INSTANCEATTR = object()
+
def members_option(arg):
"""Used to convert the :members: option to auto directives."""
if arg is None:
return ALL
return [x.strip() for x in arg.split(',')]
+
def members_set_option(arg):
"""Used to convert the :members: option to auto directives."""
if arg is None:
@@ -118,6 +123,7 @@ def members_set_option(arg):
SUPPRESS = object()
+
def annotation_option(arg):
if arg is None:
# suppress showing the representation of the object
@@ -125,6 +131,7 @@ def annotation_option(arg):
else:
return arg
+
def bool_option(arg):
"""Used to convert flag options to auto directives. (Instead of
directives.flag(), which returns None).
@@ -201,6 +208,7 @@ def cut_lines(pre, post=0, what=None):
lines.append('')
return process
+
def between(marker, what=None, keepempty=False, exclude=False):
"""Return a listener that either keeps, or if *exclude* is True excludes,
lines between lines that match the *marker* regular expression. If no line
@@ -211,6 +219,7 @@ def between(marker, what=None, keepempty=False, exclude=False):
be processed.
"""
marker_re = re.compile(marker)
+
def process(app, what_, name, obj, options, lines):
if what and what_ not in what:
return
@@ -325,7 +334,7 @@ class Documenter(object):
# an autogenerated one
try:
explicit_modname, path, base, args, retann = \
- py_ext_sig_re.match(self.name).groups()
+ py_ext_sig_re.match(self.name).groups()
except AttributeError:
self.directive.warn('invalid signature for auto%s (%r)' %
(self.objtype, self.name))
@@ -340,7 +349,7 @@ class Documenter(object):
parents = []
self.modname, self.objpath = \
- self.resolve_name(modname, parents, path, base)
+ self.resolve_name(modname, parents, path, base)
if not self.modname:
return False
@@ -637,19 +646,19 @@ class Documenter(object):
keep = False
if want_all and membername.startswith('__') and \
- membername.endswith('__') and len(membername) > 4:
+ membername.endswith('__') and len(membername) > 4:
# special __methods__
if self.options.special_members is ALL and \
membername != '__doc__':
keep = has_doc or self.options.undoc_members
elif self.options.special_members and \
- self.options.special_members is not ALL and \
+ self.options.special_members is not ALL and \
membername in self.options.special_members:
keep = has_doc or self.options.undoc_members
elif want_all and membername.startswith('_'):
# ignore members whose name starts with _ by default
keep = self.options.private_members and \
- (has_doc or self.options.undoc_members)
+ (has_doc or self.options.undoc_members)
elif (namespace, membername) in attr_docs:
# keep documented attributes
keep = True
@@ -685,7 +694,7 @@ class Documenter(object):
self.env.temp_data['autodoc:class'] = self.objpath[0]
want_all = all_members or self.options.inherited_members or \
- self.options.members is ALL
+ self.options.members is ALL
# find out which members are documentable
members_check_module, members = self.get_object_members(want_all)
@@ -707,11 +716,11 @@ class Documenter(object):
# give explicitly separated module name, so that members
# of inner classes can be documented
full_mname = self.modname + '::' + \
- '.'.join(self.objpath + [mname])
+ '.'.join(self.objpath + [mname])
documenter = classes[-1](self.directive, full_mname, self.indent)
memberdocumenters.append((documenter, isattr))
member_order = self.options.member_order or \
- self.env.config.autodoc_member_order
+ self.env.config.autodoc_member_order
if member_order == 'groupwise':
# sort by group; relies on stable sort to keep items in the
# same group sorted alphabetically
@@ -719,6 +728,7 @@ class Documenter(object):
elif member_order == 'bysource' and self.analyzer:
# sort by source order, by virtue of the module analyzer
tagorder = self.analyzer.tagorder
+
def keyfunc(entry):
fullname = entry[0].name.split('::')[1]
return tagorder.get(fullname, len(tagorder))
@@ -872,7 +882,7 @@ class ModuleDocumenter(Documenter):
self.directive.warn(
'missing attribute mentioned in :members: or __all__: '
'module %s, attribute %s' % (
- safe_getattr(self.object, '__name__', '???'), mname))
+ safe_getattr(self.object, '__name__', '???'), mname))
return False, ret
@@ -891,7 +901,7 @@ class ModuleLevelDocumenter(Documenter):
modname = self.env.temp_data.get('autodoc:module')
# ... or in the scope of a module directive
if not modname:
- modname = self.env.temp_data.get('py:module')
+ modname = self.env.ref_context.get('py:module')
# ... else, it stays None, which means invalid
return modname, parents + [base]
@@ -913,7 +923,7 @@ class ClassLevelDocumenter(Documenter):
mod_cls = self.env.temp_data.get('autodoc:class')
# ... or from a class directive
if mod_cls is None:
- mod_cls = self.env.temp_data.get('py:class')
+ mod_cls = self.env.ref_context.get('py:class')
# ... if still None, there's no way to know
if mod_cls is None:
return None, []
@@ -923,7 +933,7 @@ class ClassLevelDocumenter(Documenter):
if not modname:
modname = self.env.temp_data.get('autodoc:module')
if not modname:
- modname = self.env.temp_data.get('py:module')
+ modname = self.env.ref_context.get('py:module')
# ... else, it stays None, which means invalid
return modname, parents + [base]
@@ -976,6 +986,7 @@ class DocstringSignatureMixin(object):
self.args, self.retann = result
return Documenter.format_signature(self)
+
class DocstringStripSignatureMixin(DocstringSignatureMixin):
"""
Mixin for AttributeDocumenter to provide the
@@ -1007,7 +1018,7 @@ class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter):
def format_args(self):
if inspect.isbuiltin(self.object) or \
- inspect.ismethoddescriptor(self.object):
+ inspect.ismethoddescriptor(self.object):
# cannot introspect arguments of a C function or method
return None
try:
@@ -1070,8 +1081,8 @@ class ClassDocumenter(ModuleLevelDocumenter):
# classes without __init__ method, default __init__ or
# __init__ written in C?
if initmeth is None or \
- is_builtin_class_method(self.object, '__init__') or \
- not(inspect.ismethod(initmeth) or inspect.isfunction(initmeth)):
+ is_builtin_class_method(self.object, '__init__') or \
+ not(inspect.ismethod(initmeth) or inspect.isfunction(initmeth)):
return None
try:
argspec = getargspec(initmeth)
@@ -1109,7 +1120,7 @@ class ClassDocumenter(ModuleLevelDocumenter):
if not self.doc_as_attr and self.options.show_inheritance:
self.add_line(u'', '<autodoc>')
if hasattr(self.object, '__bases__') and len(self.object.__bases__):
- bases = [b.__module__ == '__builtin__' and
+ bases = [b.__module__ in ('__builtin__', 'builtins') and
u':class:`%s`' % b.__name__ or
u':class:`%s.%s`' % (b.__module__, b.__name__)
for b in self.object.__bases__]
@@ -1142,7 +1153,7 @@ class ClassDocumenter(ModuleLevelDocumenter):
# for new-style classes, no __init__ means default __init__
if (initdocstring is not None and
(initdocstring == object.__init__.__doc__ or # for pypy
- initdocstring.strip() == object.__init__.__doc__)): #for !pypy
+ initdocstring.strip() == object.__init__.__doc__)): # for !pypy
initdocstring = None
if initdocstring:
if content == 'init':
@@ -1186,7 +1197,7 @@ class ExceptionDocumenter(ClassDocumenter):
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
return isinstance(member, class_types) and \
- issubclass(member, BaseException)
+ issubclass(member, BaseException)
class DataDocumenter(ModuleLevelDocumenter):
@@ -1233,7 +1244,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
return inspect.isroutine(member) and \
- not isinstance(parent, ModuleDocumenter)
+ not isinstance(parent, ModuleDocumenter)
def import_object(self):
ret = ClassLevelDocumenter.import_object(self)
@@ -1257,7 +1268,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):
def format_args(self):
if inspect.isbuiltin(self.object) or \
- inspect.ismethoddescriptor(self.object):
+ inspect.ismethoddescriptor(self.object):
# can never get arguments of a C function or method
return None
argspec = getargspec(self.object)
@@ -1272,7 +1283,7 @@ class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter):
pass
-class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
+class AttributeDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter):
"""
Specialized Documenter subclass for attributes.
"""
@@ -1290,9 +1301,9 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
@classmethod
def can_document_member(cls, member, membername, isattr, parent):
isdatadesc = isdescriptor(member) and not \
- isinstance(member, cls.method_types) and not \
- type(member).__name__ in ("type", "method_descriptor",
- "instancemethod")
+ isinstance(member, cls.method_types) and not \
+ type(member).__name__ in ("type", "method_descriptor",
+ "instancemethod")
return isdatadesc or (not isinstance(parent, ModuleDocumenter)
and not inspect.isroutine(member)
and not isinstance(member, class_types))
@@ -1303,7 +1314,7 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
def import_object(self):
ret = ClassLevelDocumenter.import_object(self)
if isdescriptor(self.object) and \
- not isinstance(self.object, self.method_types):
+ not isinstance(self.object, self.method_types):
self._datadescriptor = True
else:
# if it's not a data descriptor
@@ -1312,7 +1323,7 @@ class AttributeDocumenter(DocstringStripSignatureMixin,ClassLevelDocumenter):
def get_real_modname(self):
return self.get_attr(self.parent or self.object, '__module__', None) \
- or self.modname
+ or self.modname
def add_directive_header(self, sig):
ClassLevelDocumenter.add_directive_header(self, sig)
@@ -1479,7 +1490,7 @@ def add_documenter(cls):
raise ExtensionError('autodoc documenter %r must be a subclass '
'of Documenter' % cls)
# actually, it should be possible to override Documenters
- #if cls.objtype in AutoDirective._registry:
+ # if cls.objtype in AutoDirective._registry:
# raise ExtensionError('autodoc documenter for %r is already '
# 'registered' % cls.objtype)
AutoDirective._registry[cls.objtype] = cls
@@ -1504,7 +1515,7 @@ def setup(app):
app.add_event('autodoc-process-signature')
app.add_event('autodoc-skip-member')
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
class testcls:
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 31bbfb8a..c40ba91d 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -432,11 +432,11 @@ def get_import_prefixes_from_env(env):
"""
prefixes = [None]
- currmodule = env.temp_data.get('py:module')
+ currmodule = env.ref_context.get('py:module')
if currmodule:
prefixes.insert(0, currmodule)
- currclass = env.temp_data.get('py:class')
+ currclass = env.ref_context.get('py:class')
if currclass:
if currmodule:
prefixes.insert(0, currmodule + "." + currclass)
@@ -570,4 +570,4 @@ def setup(app):
app.connect('doctree-read', process_autosummary_toc)
app.connect('builder-inited', process_generate_options)
app.add_config_value('autosummary_generate', [], True)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py
index 49dc02f4..b62806fa 100644
--- a/sphinx/ext/coverage.py
+++ b/sphinx/ext/coverage.py
@@ -265,4 +265,4 @@ def setup(app):
app.add_config_value('coverage_ignore_c_items', {}, False)
app.add_config_value('coverage_write_headline', True, False)
app.add_config_value('coverage_skip_undoc_in_source', False, False)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 20b8692f..216325cb 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -32,6 +32,7 @@ from sphinx.util.console import bold
blankline_re = re.compile(r'^\s*<BLANKLINE>', re.MULTILINE)
doctestopt_re = re.compile(r'#\s*doctest:.+$', re.MULTILINE)
+
# set up the necessary directives
class TestDirective(Directive):
@@ -79,30 +80,35 @@ class TestDirective(Directive):
option_strings = self.options['options'].replace(',', ' ').split()
for option in option_strings:
if (option[0] not in '+-' or option[1:] not in
- doctest.OPTIONFLAGS_BY_NAME):
+ doctest.OPTIONFLAGS_BY_NAME):
# XXX warn?
continue
flag = doctest.OPTIONFLAGS_BY_NAME[option[1:]]
node['options'][flag] = (option[0] == '+')
return [node]
+
class TestsetupDirective(TestDirective):
option_spec = {}
+
class TestcleanupDirective(TestDirective):
option_spec = {}
+
class DoctestDirective(TestDirective):
option_spec = {
'hide': directives.flag,
'options': directives.unchanged,
}
+
class TestcodeDirective(TestDirective):
option_spec = {
'hide': directives.flag,
}
+
class TestoutputDirective(TestDirective):
option_spec = {
'hide': directives.flag,
@@ -112,6 +118,7 @@ class TestoutputDirective(TestDirective):
parser = doctest.DocTestParser()
+
# helper classes
class TestGroup(object):
@@ -196,7 +203,7 @@ class DocTestBuilder(Builder):
def init(self):
# default options
self.opt = doctest.DONT_ACCEPT_TRUE_FOR_1 | doctest.ELLIPSIS | \
- doctest.IGNORE_EXCEPTION_DETAIL
+ doctest.IGNORE_EXCEPTION_DETAIL
# HACK HACK HACK
# doctest compiles its snippets with type 'single'. That is nice
@@ -247,6 +254,10 @@ Results of doctest builder run on %s
# write executive summary
def s(v):
return v != 1 and 's' or ''
+ repl = (self.total_tries, s(self.total_tries),
+ self.total_failures, s(self.total_failures),
+ self.setup_failures, s(self.setup_failures),
+ self.cleanup_failures, s(self.cleanup_failures))
self._out('''
Doctest summary
===============
@@ -254,10 +265,7 @@ Doctest summary
%5d failure%s in tests
%5d failure%s in setup code
%5d failure%s in cleanup code
-''' % (self.total_tries, s(self.total_tries),
- self.total_failures, s(self.total_failures),
- self.setup_failures, s(self.setup_failures),
- self.cleanup_failures, s(self.cleanup_failures)))
+''' % repl)
self.outfile.close()
if self.total_failures or self.setup_failures or self.cleanup_failures:
@@ -290,11 +298,11 @@ Doctest summary
def condition(node):
return (isinstance(node, (nodes.literal_block, nodes.comment))
and 'testnodetype' in node) or \
- isinstance(node, nodes.doctest_block)
+ isinstance(node, nodes.doctest_block)
else:
def condition(node):
return isinstance(node, (nodes.literal_block, nodes.comment)) \
- and 'testnodetype' in node
+ and 'testnodetype' in node
for node in doctree.traverse(condition):
source = 'test' in node and node['test'] or node.astext()
if not source:
@@ -364,7 +372,7 @@ Doctest summary
filename, 0, None)
sim_doctest.globs = ns
old_f = runner.failures
- self.type = 'exec' # the snippet may contain multiple statements
+ self.type = 'exec' # the snippet may contain multiple statements
runner.run(sim_doctest, out=self._warn_out, clear_globs=False)
if runner.failures > old_f:
return False
@@ -394,7 +402,7 @@ Doctest summary
new_opt = code[0].options.copy()
new_opt.update(example.options)
example.options = new_opt
- self.type = 'single' # as for ordinary doctests
+ self.type = 'single' # as for ordinary doctests
else:
# testcode and output separate
output = code[1] and code[1].code or ''
@@ -413,7 +421,7 @@ Doctest summary
options=options)
test = doctest.DocTest([example], {}, group.name,
filename, code[0].lineno, None)
- self.type = 'exec' # multiple statements again
+ self.type = 'exec' # multiple statements again
# DocTest.__init__ copies the globs namespace, which we don't want
test.globs = ns
# also don't clear the globs namespace after running the doctest
@@ -435,4 +443,4 @@ def setup(app):
app.add_config_value('doctest_test_doctest_blocks', 'default', False)
app.add_config_value('doctest_global_setup', '', False)
app.add_config_value('doctest_global_cleanup', '', False)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/extlinks.py b/sphinx/ext/extlinks.py
index c0cfbcd2..ae65dbb8 100644
--- a/sphinx/ext/extlinks.py
+++ b/sphinx/ext/extlinks.py
@@ -59,4 +59,4 @@ def setup_link_roles(app):
def setup(app):
app.add_config_value('extlinks', {}, 'env')
app.connect('builder-inited', setup_link_roles)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py
index b4b8bc27..56831c64 100644
--- a/sphinx/ext/graphviz.py
+++ b/sphinx/ext/graphviz.py
@@ -323,4 +323,4 @@ def setup(app):
app.add_config_value('graphviz_dot', 'dot', 'html')
app.add_config_value('graphviz_dot_args', [], 'html')
app.add_config_value('graphviz_output_format', 'png', 'html')
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/ifconfig.py b/sphinx/ext/ifconfig.py
index ab15e1e1..a4e4a02d 100644
--- a/sphinx/ext/ifconfig.py
+++ b/sphinx/ext/ifconfig.py
@@ -73,4 +73,4 @@ def setup(app):
app.add_node(ifconfig)
app.add_directive('ifconfig', IfConfig)
app.connect('doctree-resolved', process_ifconfig_nodes)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py
index bbae5c11..0b2e5ce3 100644
--- a/sphinx/ext/inheritance_diagram.py
+++ b/sphinx/ext/inheritance_diagram.py
@@ -39,13 +39,14 @@ r"""
import re
import sys
import inspect
-import __builtin__ as __builtin__ # as __builtin__ is for lib2to3 compatibility
try:
from hashlib import md5
except ImportError:
from md5 import md5
from six import text_type
+from six.moves import builtins
+
from docutils import nodes
from docutils.parsers.rst import directives
@@ -147,10 +148,10 @@ class InheritanceGraph(object):
displayed node names.
"""
all_classes = {}
- builtins = vars(__builtin__).values()
+ py_builtins = vars(builtins).values()
def recurse(cls):
- if not show_builtins and cls in builtins:
+ if not show_builtins and cls in py_builtins:
return
if not private_bases and cls.__name__.startswith('_'):
return
@@ -174,7 +175,7 @@ class InheritanceGraph(object):
baselist = []
all_classes[cls] = (nodename, fullname, baselist, tooltip)
for base in cls.__bases__:
- if not show_builtins and base in builtins:
+ if not show_builtins and base in py_builtins:
continue
if not private_bases and base.__name__.startswith('_'):
continue
@@ -194,7 +195,7 @@ class InheritanceGraph(object):
completely general.
"""
module = cls.__module__
- if module == '__builtin__':
+ if module in ('__builtin__', 'builtins'):
fullname = cls.__name__
else:
fullname = '%s.%s' % (module, cls.__name__)
@@ -310,7 +311,7 @@ class InheritanceDiagram(Directive):
# Create a graph starting with the list of classes
try:
graph = InheritanceGraph(
- class_names, env.temp_data.get('py:module'),
+ class_names, env.ref_context.get('py:module'),
parts=node['parts'],
private_bases='private-bases' in self.options)
except InheritanceException as err:
@@ -407,4 +408,4 @@ def setup(app):
app.add_config_value('inheritance_graph_attrs', {}, False),
app.add_config_value('inheritance_node_attrs', {}, False),
app.add_config_value('inheritance_edge_attrs', {}, False),
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/intersphinx.py b/sphinx/ext/intersphinx.py
index 43507a38..6f3d44eb 100644
--- a/sphinx/ext/intersphinx.py
+++ b/sphinx/ext/intersphinx.py
@@ -222,15 +222,21 @@ def load_mappings(app):
def missing_reference(app, env, node, contnode):
"""Attempt to resolve a missing reference via intersphinx references."""
- domain = node.get('refdomain')
- if not domain:
- # only objects in domains are in the inventory
- return
target = node['reftarget']
- objtypes = env.domains[domain].objtypes_for_role(node['reftype'])
- if not objtypes:
- return
- objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
+ if node['reftype'] == 'any':
+ # we search anything!
+ objtypes = ['%s:%s' % (domain.name, objtype)
+ for domain in env.domains.values()
+ for objtype in domain.object_types]
+ else:
+ domain = node.get('refdomain')
+ if not domain:
+ # only objects in domains are in the inventory
+ return
+ objtypes = env.domains[domain].objtypes_for_role(node['reftype'])
+ if not objtypes:
+ return
+ objtypes = ['%s:%s' % (domain, objtype) for objtype in objtypes]
to_try = [(env.intersphinx_inventory, target)]
in_set = None
if ':' in target:
@@ -248,7 +254,7 @@ def missing_reference(app, env, node, contnode):
# get correct path in case of subdirectories
uri = path.join(relative_path(node['refdoc'], env.srcdir), uri)
newnode = nodes.reference('', '', internal=False, refuri=uri,
- reftitle=_('(in %s v%s)') % (proj, version))
+ reftitle=_('(in %s v%s)') % (proj, version))
if node.get('refexplicit'):
# use whatever title was given
newnode.append(contnode)
@@ -276,4 +282,4 @@ def setup(app):
app.add_config_value('intersphinx_cache_limit', 5, False)
app.connect('missing-reference', missing_reference)
app.connect('builder-inited', load_mappings)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/jsmath.py b/sphinx/ext/jsmath.py
index 897d87ac..9bf38f62 100644
--- a/sphinx/ext/jsmath.py
+++ b/sphinx/ext/jsmath.py
@@ -57,4 +57,4 @@ def setup(app):
mathbase_setup(app, (html_visit_math, None), (html_visit_displaymath, None))
app.add_config_value('jsmath_path', '', False)
app.connect('builder-inited', builder_inited)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/linkcode.py b/sphinx/ext/linkcode.py
index bbb0698c..37e021e8 100644
--- a/sphinx/ext/linkcode.py
+++ b/sphinx/ext/linkcode.py
@@ -16,9 +16,11 @@ from sphinx import addnodes
from sphinx.locale import _
from sphinx.errors import SphinxError
+
class LinkcodeError(SphinxError):
category = "linkcode error"
+
def doctree_read(app, doctree):
env = app.builder.env
@@ -68,7 +70,8 @@ def doctree_read(app, doctree):
classes=['viewcode-link'])
signode += onlynode
+
def setup(app):
app.connect('doctree-read', doctree_read)
app.add_config_value('linkcode_resolve', None, '')
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/mathjax.py b/sphinx/ext/mathjax.py
index fd5c5f1d..f677ff48 100644
--- a/sphinx/ext/mathjax.py
+++ b/sphinx/ext/mathjax.py
@@ -69,4 +69,4 @@ def setup(app):
app.add_config_value('mathjax_inline', [r'\(', r'\)'], 'html')
app.add_config_value('mathjax_display', [r'\[', r'\]'], 'html')
app.connect('builder-inited', builder_inited)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/napoleon/__init__.py b/sphinx/ext/napoleon/__init__.py
index 554162ed..9b43d8fd 100644
--- a/sphinx/ext/napoleon/__init__.py
+++ b/sphinx/ext/napoleon/__init__.py
@@ -256,7 +256,7 @@ def setup(app):
for name, (default, rebuild) in iteritems(Config._config_values):
app.add_config_value(name, default, rebuild)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
def _process_docstring(app, what, name, obj, options, lines):
diff --git a/sphinx/ext/pngmath.py b/sphinx/ext/pngmath.py
index ee108d16..51c9d011 100644
--- a/sphinx/ext/pngmath.py
+++ b/sphinx/ext/pngmath.py
@@ -246,4 +246,4 @@ def setup(app):
app.add_config_value('pngmath_latex_preamble', '', 'html')
app.add_config_value('pngmath_add_tooltips', True, 'html')
app.connect('build-finished', cleanup_tempdir)
- return sphinx.__version__
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py
index d70617b9..ae434dd4 100644
--- a/sphinx/ext/todo.py
+++ b/sphinx/ext/todo.py
@@ -150,6 +150,14 @@ def purge_todos(app, env, docname):
if todo['docname'] != docname]
+def merge_info(app, env, docnames, other):
+ if not hasattr(other, 'todo_all_todos'):
+ return
+ if not hasattr(env, 'todo_all_todos'):
+ env.todo_all_todos = []
+ env.todo_all_todos.extend(other.todo_all_todos)
+
+
def visit_todo_node(self, node):
self.visit_admonition(node)
@@ -172,4 +180,5 @@ def setup(app):
app.connect('doctree-read', process_todos)
app.connect('doctree-resolved', process_todo_nodes)
app.connect('env-purge-doc', purge_todos)
- return sphinx.__version__
+ app.connect('env-merge-info', merge_info)
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 3653a2da..cd3f2ac7 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -20,6 +20,7 @@ from sphinx.locale import _
from sphinx.pycode import ModuleAnalyzer
from sphinx.util import get_full_modname
from sphinx.util.nodes import make_refnode
+from sphinx.util.console import blue
def _get_full_modname(app, modname, attribute):
@@ -37,7 +38,7 @@ def _get_full_modname(app, modname, attribute):
# It should be displayed only verbose mode.
app.verbose(traceback.format_exc().rstrip())
app.verbose('viewcode can\'t import %s, failed with error "%s"' %
- (modname, e))
+ (modname, e))
return None
@@ -100,6 +101,16 @@ def doctree_read(app, doctree):
signode += onlynode
+def env_merge_info(app, env, docnames, other):
+ if not hasattr(other, '_viewcode_modules'):
+ return
+ # create a _viewcode_modules dict on the main environment
+ if not hasattr(env, '_viewcode_modules'):
+ env._viewcode_modules = {}
+ # now merge in the information from the subprocess
+ env._viewcode_modules.update(other._viewcode_modules)
+
+
def missing_reference(app, env, node, contnode):
# resolve our "viewcode" reference nodes -- they need special treatment
if node['reftype'] == 'viewcode':
@@ -116,10 +127,12 @@ def collect_pages(app):
modnames = set(env._viewcode_modules)
- app.builder.info(' (%d module code pages)' %
- len(env._viewcode_modules), nonl=1)
+# app.builder.info(' (%d module code pages)' %
+# len(env._viewcode_modules), nonl=1)
- for modname, entry in iteritems(env._viewcode_modules):
+ for modname, entry in app.status_iterator(
+ iteritems(env._viewcode_modules), 'highlighting module code... ',
+ blue, len(env._viewcode_modules), lambda x: x[0]):
if not entry:
continue
code, tags, used, refname = entry
@@ -162,15 +175,14 @@ def collect_pages(app):
context = {
'parents': parents,
'title': modname,
- 'body': _('<h1>Source code for %s</h1>') % modname + \
- '\n'.join(lines)
+ 'body': (_('<h1>Source code for %s</h1>') % modname +
+ '\n'.join(lines)),
}
yield (pagename, context, 'page.html')
if not modnames:
return
- app.builder.info(' _modules/index', nonl=True)
html = ['\n']
# the stack logic is needed for using nested lists for submodules
stack = ['']
@@ -190,8 +202,8 @@ def collect_pages(app):
html.append('</ul>' * (len(stack) - 1))
context = {
'title': _('Overview: module code'),
- 'body': _('<h1>All modules for which code is available</h1>') + \
- ''.join(html),
+ 'body': (_('<h1>All modules for which code is available</h1>') +
+ ''.join(html)),
}
yield ('_modules/index', context, 'page.html')
@@ -200,8 +212,9 @@ def collect_pages(app):
def setup(app):
app.add_config_value('viewcode_import', True, False)
app.connect('doctree-read', doctree_read)
+ app.connect('env-merge-info', env_merge_info)
app.connect('html-collect-pages', collect_pages)
app.connect('missing-reference', missing_reference)
- #app.add_config_value('viewcode_include_modules', [], 'env')
- #app.add_config_value('viewcode_exclude_modules', [], 'env')
- return sphinx.__version__
+ # app.add_config_value('viewcode_include_modules', [], 'env')
+ # app.add_config_value('viewcode_exclude_modules', [], 'env')
+ return {'version': sphinx.__version__, 'parallel_read_safe': True}
diff --git a/sphinx/highlighting.py b/sphinx/highlighting.py
index 599a76a9..c2d2e89a 100644
--- a/sphinx/highlighting.py
+++ b/sphinx/highlighting.py
@@ -24,46 +24,32 @@ from sphinx.util.pycompat import htmlescape
from sphinx.util.texescape import tex_hl_escape_map_new
from sphinx.ext import doctest
-try:
- import pygments
- from pygments import highlight
- from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \
- TextLexer, RstLexer
- from pygments.lexers import get_lexer_by_name, guess_lexer
- from pygments.formatters import HtmlFormatter, LatexFormatter
- from pygments.filters import ErrorToken
- from pygments.styles import get_style_by_name
- from pygments.util import ClassNotFound
- from sphinx.pygments_styles import SphinxStyle, NoneStyle
-except ImportError:
- pygments = None
- lexers = None
- HtmlFormatter = LatexFormatter = None
-else:
-
- lexers = dict(
- none = TextLexer(),
- python = PythonLexer(),
- pycon = PythonConsoleLexer(),
- pycon3 = PythonConsoleLexer(python3=True),
- rest = RstLexer(),
- c = CLexer(),
- )
- for _lexer in lexers.values():
- _lexer.add_filter('raiseonerror')
+from pygments import highlight
+from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \
+ TextLexer, RstLexer
+from pygments.lexers import get_lexer_by_name, guess_lexer
+from pygments.formatters import HtmlFormatter, LatexFormatter
+from pygments.filters import ErrorToken
+from pygments.styles import get_style_by_name
+from pygments.util import ClassNotFound
+from sphinx.pygments_styles import SphinxStyle, NoneStyle
+
+lexers = dict(
+ none = TextLexer(),
+ python = PythonLexer(),
+ pycon = PythonConsoleLexer(),
+ pycon3 = PythonConsoleLexer(python3=True),
+ rest = RstLexer(),
+ c = CLexer(),
+)
+for _lexer in lexers.values():
+ _lexer.add_filter('raiseonerror')
escape_hl_chars = {ord(u'\\'): u'\\PYGZbs{}',
ord(u'{'): u'\\PYGZob{}',
ord(u'}'): u'\\PYGZcb{}'}
-# used if Pygments is not available
-_LATEX_STYLES = r'''
-\newcommand\PYGZbs{\char`\\}
-\newcommand\PYGZob{\char`\{}
-\newcommand\PYGZcb{\char`\}}
-'''
-
# used if Pygments is available
# use textcomp quote to get a true single quote
_LATEX_ADD_STYLES = r'''
@@ -80,8 +66,6 @@ class PygmentsBridge(object):
def __init__(self, dest='html', stylename='sphinx',
trim_doctest_flags=False):
self.dest = dest
- if not pygments:
- return
if stylename is None or stylename == 'sphinx':
style = SphinxStyle
elif stylename == 'none':
@@ -153,8 +137,6 @@ class PygmentsBridge(object):
def highlight_block(self, source, lang, warn=None, force=False, **kwargs):
if not isinstance(source, text_type):
source = source.decode()
- if not pygments:
- return self.unhighlighted(source)
# find out which lexer to use
if lang in ('py', 'python'):
@@ -213,11 +195,6 @@ class PygmentsBridge(object):
return hlsource.translate(tex_hl_escape_map_new)
def get_stylesheet(self):
- if not pygments:
- if self.dest == 'latex':
- return _LATEX_STYLES
- # no HTML styles needed
- return ''
formatter = self.get_formatter()
if self.dest == 'html':
return formatter.get_style_defs('.highlight')
diff --git a/sphinx/quickstart.py b/sphinx/quickstart.py
index fdfb8106..f81b38f0 100644
--- a/sphinx/quickstart.py
+++ b/sphinx/quickstart.py
@@ -10,13 +10,16 @@
"""
from __future__ import print_function
-import sys, os, time, re
+import re
+import os
+import sys
+import time
from os import path
from io import open
TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
-#try to import readline, unix specific enhancement
+# try to import readline, unix specific enhancement
try:
import readline
if readline.__doc__ and 'libedit' in readline.__doc__:
@@ -33,7 +36,7 @@ from docutils.utils import column_width
from sphinx import __version__
from sphinx.util.osutil import make_filename
from sphinx.util.console import purple, bold, red, turquoise, \
- nocolor, color_terminal
+ nocolor, color_terminal
from sphinx.util import texescape
# function to get input from terminal -- overridden by the test suite
@@ -972,17 +975,20 @@ def mkdir_p(dir):
class ValidationError(Exception):
"""Raised for validation errors."""
+
def is_path(x):
x = path.expanduser(x)
if path.exists(x) and not path.isdir(x):
raise ValidationError("Please enter a valid path name.")
return x
+
def nonempty(x):
if not x:
raise ValidationError("Please enter some text.")
return x
+
def choice(*l):
def val(x):
if x not in l:
@@ -990,17 +996,20 @@ def choice(*l):
return x
return val
+
def boolean(x):
if x.upper() not in ('Y', 'YES', 'N', 'NO'):
raise ValidationError("Please enter either 'y' or 'n'.")
return x.upper() in ('Y', 'YES')
+
def suffix(x):
if not (x[0:1] == '.' and len(x) > 1):
raise ValidationError("Please enter a file suffix, "
"e.g. '.rst' or '.txt'.")
return x
+
def ok(x):
return x
@@ -1097,7 +1106,7 @@ Enter the root path for documentation.''')
do_prompt(d, 'path', 'Root path for the documentation', '.', is_path)
while path.isfile(path.join(d['path'], 'conf.py')) or \
- path.isfile(path.join(d['path'], 'source', 'conf.py')):
+ path.isfile(path.join(d['path'], 'source', 'conf.py')):
print()
print(bold('Error: an existing conf.py has been found in the '
'selected root path.'))
@@ -1169,7 +1178,7 @@ document is a custom template, you can also set this to another filename.''')
'index')
while path.isfile(path.join(d['path'], d['master']+d['suffix'])) or \
- path.isfile(path.join(d['path'], 'source', d['master']+d['suffix'])):
+ path.isfile(path.join(d['path'], 'source', d['master']+d['suffix'])):
print()
print(bold('Error: the master file %s has already been found in the '
'selected root path.' % (d['master']+d['suffix'])))
@@ -1256,10 +1265,10 @@ def generate(d, overwrite=True, silent=False):
d['extensions'] = extensions
d['copyright'] = time.strftime('%Y') + ', ' + d['author']
d['author_texescaped'] = text_type(d['author']).\
- translate(texescape.tex_escape_map)
+ translate(texescape.tex_escape_map)
d['project_doc'] = d['project'] + ' Documentation'
d['project_doc_texescaped'] = text_type(d['project'] + ' Documentation').\
- translate(texescape.tex_escape_map)
+ translate(texescape.tex_escape_map)
# escape backslashes and single quotes in strings that are put into
# a Python string literal
diff --git a/sphinx/roles.py b/sphinx/roles.py
index aaf6272b..451cfe60 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -17,22 +17,23 @@ from docutils.parsers.rst import roles
from sphinx import addnodes
from sphinx.locale import _
+from sphinx.errors import SphinxError
from sphinx.util import ws_re
from sphinx.util.nodes import split_explicit_title, process_index_entry, \
- set_role_source_info
+ set_role_source_info
generic_docroles = {
- 'command' : addnodes.literal_strong,
- 'dfn' : nodes.emphasis,
- 'kbd' : nodes.literal,
- 'mailheader' : addnodes.literal_emphasis,
- 'makevar' : addnodes.literal_strong,
- 'manpage' : addnodes.literal_emphasis,
- 'mimetype' : addnodes.literal_emphasis,
- 'newsgroup' : addnodes.literal_emphasis,
- 'program' : addnodes.literal_strong, # XXX should be an x-ref
- 'regexp' : nodes.literal,
+ 'command': addnodes.literal_strong,
+ 'dfn': nodes.emphasis,
+ 'kbd': nodes.literal,
+ 'mailheader': addnodes.literal_emphasis,
+ 'makevar': addnodes.literal_strong,
+ 'manpage': addnodes.literal_emphasis,
+ 'mimetype': addnodes.literal_emphasis,
+ 'newsgroup': addnodes.literal_emphasis,
+ 'program': addnodes.literal_strong, # XXX should be an x-ref
+ 'regexp': nodes.literal,
}
for rolename, nodeclass in iteritems(generic_docroles):
@@ -40,6 +41,7 @@ for rolename, nodeclass in iteritems(generic_docroles):
role = roles.CustomRole(rolename, generic, {'classes': [rolename]})
roles.register_local_role(rolename, role)
+
# -- generic cross-reference role ----------------------------------------------
class XRefRole(object):
@@ -96,7 +98,11 @@ class XRefRole(object):
options={}, content=[]):
env = inliner.document.settings.env
if not typ:
- typ = env.config.default_role
+ typ = env.temp_data.get('default_role')
+ if not typ:
+ typ = env.config.default_role
+ if not typ:
+ raise SphinxError('cannot determine default role!')
else:
typ = typ.lower()
if ':' not in typ:
@@ -158,6 +164,15 @@ class XRefRole(object):
return [node], []
+class AnyXRefRole(XRefRole):
+ def process_link(self, env, refnode, has_explicit_title, title, target):
+ result = XRefRole.process_link(self, env, refnode, has_explicit_title,
+ title, target)
+ # add all possible context info (i.e. std:program, py:module etc.)
+ refnode.attributes.update(env.ref_context)
+ return result
+
+
def indexmarkup_role(typ, rawtext, text, lineno, inliner,
options={}, content=[]):
"""Role for PEP/RFC references that generate an index entry."""
@@ -221,6 +236,7 @@ def indexmarkup_role(typ, rawtext, text, lineno, inliner,
_amp_re = re.compile(r'(?<!&)&(?![&\s])')
+
def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
if typ == 'menuselection':
@@ -246,8 +262,10 @@ def menusel_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
node['classes'].append(typ)
return [node], []
+
_litvar_re = re.compile('{([^}]+)}')
+
def emph_literal_role(typ, rawtext, text, lineno, inliner,
options={}, content=[]):
text = utils.unescape(text)
@@ -266,6 +284,7 @@ def emph_literal_role(typ, rawtext, text, lineno, inliner,
_abbr_re = re.compile('\((.*)\)$', re.S)
+
def abbr_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
text = utils.unescape(text)
m = _abbr_re.search(text)
@@ -311,6 +330,8 @@ specific_docroles = {
'download': XRefRole(nodeclass=addnodes.download_reference),
# links to documents
'doc': XRefRole(warn_dangling=True),
+ # links to anything
+ 'any': AnyXRefRole(warn_dangling=True),
'pep': indexmarkup_role,
'rfc': indexmarkup_role,
diff --git a/sphinx/themes/basic/static/jquery.js b/sphinx/themes/basic/static/jquery.js
index 83589daa..38837795 100644
--- a/sphinx/themes/basic/static/jquery.js
+++ b/sphinx/themes/basic/static/jquery.js
@@ -1,2 +1,2 @@
-/*! jQuery v1.8.3 jquery.com | jquery.org/license */
+/*! jQuery v1.8.3 jquery.com | jquery.org/license */
(function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r<i;r++)v.event.add(t,n,u[n][r])}o.data&&(o.data=v.extend({},o.data))}function Ot(e,t){var 
n;if(t.nodeType!==1)return;t.clearAttributes&&t.clearAttributes(),t.mergeAttributes&&t.mergeAttributes(e),n=t.nodeName.toLowerCase(),n==="object"?(t.parentNode&&(t.outerHTML=e.outerHTML),v.support.html5Clone&&e.innerHTML&&!v.trim(t.innerHTML)&&(t.innerHTML=e.innerHTML)):n==="input"&&Et.test(e.type)?(t.defaultChecked=t.checked=e.checked,t.value!==e.value&&(t.value=e.value)):n==="option"?t.selected=e.defaultSelected:n==="input"||n==="textarea"?t.defaultValue=e.defaultValue:n==="script"&&t.text!==e.text&&(t.text=e.text),t.removeAttribute(v.expando)}function Mt(e){return typeof e.getElementsByTagName!="undefined"?e.getElementsByTagName("*"):typeof e.querySelectorAll!="undefined"?e.querySelectorAll("*"):[]}function _t(e){Et.test(e.type)&&(e.defaultChecked=e.checked)}function Qt(e,t){if(t in e)return t;var n=t.charAt(0).toUpperCase()+t.slice(1),r=t,i=Jt.length;while(i--){t=Jt[i]+n;if(t in e)return t}return r}function Gt(e,t){return e=t||e,v.css(e,"display")==="none"||!v.contains(e.ownerDocument,e)}function Yt(e,t){var n,r,i=[],s=0,o=e.length;for(;s<o;s++){n=e[s];if(!n.style)continue;i[s]=v._data(n,"olddisplay"),t?(!i[s]&&n.style.display==="none"&&(n.style.display=""),n.style.display===""&&Gt(n)&&(i[s]=v._data(n,"olddisplay",nn(n.nodeName)))):(r=Dt(n,"display"),!i[s]&&r!=="none"&&v._data(n,"olddisplay",r))}for(s=0;s<o;s++){n=e[s];if(!n.style)continue;if(!t||n.style.display==="none"||n.style.display==="")n.style.display=t?i[s]||"":"none"}return e}function Zt(e,t,n){var r=Rt.exec(t);return r?Math.max(0,r[1]-(n||0))+(r[2]||"px"):t}function en(e,t,n,r){var i=n===(r?"border":"content")?4:t==="width"?1:0,s=0;for(;i<4;i+=2)n==="margin"&&(s+=v.css(e,n+$t[i],!0)),r?(n==="content"&&(s-=parseFloat(Dt(e,"padding"+$t[i]))||0),n!=="margin"&&(s-=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0)):(s+=parseFloat(Dt(e,"padding"+$t[i]))||0,n!=="padding"&&(s+=parseFloat(Dt(e,"border"+$t[i]+"Width"))||0));return s}function tn(e,t,n){var 
r=t==="width"?e.offsetWidth:e.offsetHeight,i=!0,s=v.support.boxSizing&&v.css(e,"boxSizing")==="border-box";if(r<=0||r==null){r=Dt(e,t);if(r<0||r==null)r=e.style[t];if(Ut.test(r))return r;i=s&&(v.support.boxSizingReliable||r===e.style[t]),r=parseFloat(r)||0}return r+en(e,t,n||(s?"border":"content"),i)+"px"}function nn(e){if(Wt[e])return Wt[e];var t=v("<"+e+">").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write("<!doctype html><html><body>"),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u<a;u++)r=o[u],s=/^\+/.test(r),s&&(r=r.substr(1)||"*"),i=e[r]=e[r]||[],i[s?"unshift":"push"](n)}}function kn(e,n,r,i,s,o){s=s||n.dataTypes[0],o=o||{},o[s]=!0;var u,a=e[s],f=0,l=a?a.length:0,c=e===Sn;for(;f<l&&(c||!u);f++)u=a[f](n,r,i),typeof u=="string"&&(!c||o[u]?u=t:(n.dataTypes.unshift(u),u=kn(e,n,r,i,u,o)));return(c||!u)&&!o["*"]&&(u=kn(e,n,r,i,"*",o)),u}function Ln(e,n){var r,i,s=v.ajaxSettings.flatOptions||{};for(r in n)n[r]!==t&&((s[r]?e:i||(i={}))[r]=n[r]);i&&v.extend(!0,e,i)}function An(e,n,r){var i,s,o,u,a=e.contents,f=e.dataTypes,l=e.responseFields;for(s in l)s in r&&(n[l[s]]=r[s]);while(f[0]==="*")f.shift(),i===t&&(i=e.mimeType||n.getResponseHeader("content-type"));if(i)for(s in a)if(a[s]&&a[s].test(i)){f.unshift(s);break}if(f[0]in r)o=f[0];else{for(s in r){if(!f[0]||e.converters[s+" "+f[0]]){o=s;break}u||(u=s)}o=o||u}if(o)return 
o!==f[0]&&f.unshift(o),r[o]}function On(e,t){var n,r,i,s,o=e.dataTypes.slice(),u=o[0],a={},f=0;e.dataFilter&&(t=e.dataFilter(t,e.dataType));if(o[1])for(n in e.converters)a[n.toLowerCase()]=e.converters[n];for(;i=o[++f];)if(i!=="*"){if(u!=="*"&&u!==i){n=a[u+" "+i]||a["* "+i];if(!n)for(r in a){s=r.split(" ");if(s[1]===i){n=a[u+" "+s[0]]||a["* "+s[0]];if(n){n===!0?n=a[r]:a[r]!==!0&&(i=s[0],o.splice(f--,0,i));break}}}if(n!==!0)if(n&&e["throws"])t=n(t);else try{t=n(t)}catch(l){return{state:"parsererror",error:n?l:"No conversion from "+u+" to "+i}}}u=i}return{state:"success",data:t}}function Fn(){try{return new e.XMLHttpRequest}catch(t){}}function In(){try{return new e.ActiveXObject("Microsoft.XMLHTTP")}catch(t){}}function $n(){return setTimeout(function(){qn=t},0),qn=v.now()}function Jn(e,t){v.each(t,function(t,n){var r=(Vn[t]||[]).concat(Vn["*"]),i=0,s=r.length;for(;i<s;i++)if(r[i].call(e,t,n))return})}function Kn(e,t,n){var r,i=0,s=0,o=Xn.length,u=v.Deferred().always(function(){delete a.elem}),a=function(){var t=qn||$n(),n=Math.max(0,f.startTime+f.duration-t),r=n/f.duration||0,i=1-r,s=0,o=f.tweens.length;for(;s<o;s++)f.tweens[s].run(i);return u.notifyWith(e,[f,i,n]),i<1&&o?n:(u.resolveWith(e,[f]),!1)},f=u.promise({elem:e,props:v.extend({},t),opts:v.extend(!0,{specialEasing:{}},n),originalProperties:t,originalOptions:n,startTime:qn||$n(),duration:n.duration,tweens:[],createTween:function(t,n,r){var i=v.Tween(e,f.opts,t,n,f.opts.specialEasing[t]||f.opts.easing);return f.tweens.push(i),i},stop:function(t){var n=0,r=t?f.tweens.length:0;for(;n<r;n++)f.tweens[n].run(1);return t?u.resolveWith(e,[f,t]):u.rejectWith(e,[f,t]),this}}),l=f.props;Qn(l,f.opts.specialEasing);for(;i<o;i++){r=Xn[i].call(f,e,l,f.opts);if(r)return r}return Jn(f,l),v.isFunction(f.opts.start)&&f.opts.start.call(e,f),v.fx.timer(v.extend(a,{anim:f,queue:f.opts.queue,elem:e})),f.progress(f.opts.progress).done(f.opts.done,f.opts.complete).fail(f.opts.fail).always(f.opts.always)}function Qn(e,t){var 
n,r,i,s,o;for(n in e){r=v.camelCase(n),i=t[r],s=e[n],v.isArray(s)&&(i=s[1],s=e[n]=s[0]),n!==r&&(e[r]=s,delete e[n]),o=v.cssHooks[r];if(o&&"expand"in o){s=o.expand(s),delete e[r];for(n in s)n in e||(e[n]=s[n],t[n]=i)}else t[r]=i}}function Gn(e,t,n){var r,i,s,o,u,a,f,l,c,h=this,p=e.style,d={},m=[],g=e.nodeType&&Gt(e);n.queue||(l=v._queueHooks(e,"fx"),l.unqueued==null&&(l.unqueued=0,c=l.empty.fire,l.empty.fire=function(){l.unqueued||c()}),l.unqueued++,h.always(function(){h.always(function(){l.unqueued--,v.queue(e,"fx").length||l.empty.fire()})})),e.nodeType===1&&("height"in t||"width"in t)&&(n.overflow=[p.overflow,p.overflowX,p.overflowY],v.css(e,"display")==="inline"&&v.css(e,"float")==="none"&&(!v.support.inlineBlockNeedsLayout||nn(e.nodeName)==="inline"?p.display="inline-block":p.zoom=1)),n.overflow&&(p.overflow="hidden",v.support.shrinkWrapBlocks||h.done(function(){p.overflow=n.overflow[0],p.overflowX=n.overflow[1],p.overflowY=n.overflow[2]}));for(r in t){s=t[r];if(Un.exec(s)){delete t[r],a=a||s==="toggle";if(s===(g?"hide":"show"))continue;m.push(r)}}o=m.length;if(o){u=v._data(e,"fxshow")||v._data(e,"fxshow",{}),"hidden"in u&&(g=u.hidden),a&&(u.hidden=!g),g?v(e).show():h.done(function(){v(e).hide()}),h.done(function(){var t;v.removeData(e,"fxshow",!0);for(t in d)v.style(e,t,d[t])});for(r=0;r<o;r++)i=m[r],f=h.createTween(i,g?u[i]:0),d[i]=u[i]||v.style(e,i),i in u||(u[i]=f.start,g&&(f.end=f.start,f.start=i==="width"||i==="height"?1:0))}}function Yn(e,t,n,r,i){return new Yn.prototype.init(e,t,n,r,i)}function Zn(e,t){var n,r={height:e},i=0;t=t?1:0;for(;i<4;i+=2-t)n=$t[i],r["margin"+n]=r["padding"+n]=e;return t&&(r.opacity=r.width=e),r}function tr(e){return v.isWindow(e)?e:e.nodeType===9?e.defaultView||e.parentWindow:!1}var n,r,i=e.document,s=e.location,o=e.navigator,u=e.jQuery,a=e.$,f=Array.prototype.push,l=Array.prototype.slice,c=Array.prototype.indexOf,h=Object.prototype.toString,p=Object.prototype.hasOwnProperty,d=String.prototype.trim,v=function(e,t){return new 
v.fn.init(e,t,n)},m=/[\-+]?(?:\d*\.|)\d+(?:[eE][\-+]?\d+|)/.source,g=/\S/,y=/\s+/,b=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,w=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" 
":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a<f;a++)if((e=arguments[a])!=null)for(n in e){r=u[n],i=e[n];if(u===i)continue;l&&i&&(v.isPlainObject(i)||(s=v.isArray(i)))?(s?(s=!1,o=r&&v.isArray(r)?r:[]):o=r&&v.isPlainObject(r)?r:{},u[n]=v.extend(l,o,i)):i!==t&&(u[n]=i)}return u},v.extend({noConflict:function(t){return e.$===v&&(e.$=a),t&&e.jQuery===v&&(e.jQuery=u),v},isReady:!1,readyWait:1,holdReady:function(e){e?v.readyWait++:v.ready(!0)},ready:function(e){if(e===!0?--v.readyWait:v.isReady)return;if(!i.body)return setTimeout(v.ready,1);v.isReady=!0;if(e!==!0&&--v.readyWait>0)return;r.resolveWith(i,[v]),v.fn.trigger&&v(i).trigger("ready").off("ready")},isFunction:function(e){return v.type(e)==="function"},isArray:Array.isArray||function(e){return v.type(e)==="array"},isWindow:function(e){return e!=null&&e==e.window},isNumeric:function(e){return!isNaN(parseFloat(e))&&isFinite(e)},type:function(e){return e==null?String(e):O[h.call(e)]||"object"},isPlainObject:function(e){if(!e||v.type(e)!=="object"||e.nodeType||v.isWindow(e))return!1;try{if(e.constructor&&!p.call(e,"constructor")&&!p.call(e.constructor.prototype,"isPrototypeOf"))return!1}catch(n){return!1}var r;for(r in e);return 
r===t||p.call(e,r)},isEmptyObject:function(e){var t;for(t in e)return!1;return!0},error:function(e){throw new Error(e)},parseHTML:function(e,t,n){var r;return!e||typeof e!="string"?null:(typeof t=="boolean"&&(n=t,t=0),t=t||i,(r=E.exec(e))?[t.createElement(r[1])]:(r=v.buildFragment([e],t,n?null:[]),v.merge([],(r.cacheable?v.clone(r.fragment):r.fragment).childNodes)))},parseJSON:function(t){if(!t||typeof t!="string")return null;t=v.trim(t);if(e.JSON&&e.JSON.parse)return e.JSON.parse(t);if(S.test(t.replace(T,"@").replace(N,"]").replace(x,"")))return(new Function("return "+t))();v.error("Invalid JSON: "+t)},parseXML:function(n){var r,i;if(!n||typeof n!="string")return null;try{e.DOMParser?(i=new DOMParser,r=i.parseFromString(n,"text/xml")):(r=new ActiveXObject("Microsoft.XMLDOM"),r.async="false",r.loadXML(n))}catch(s){r=t}return(!r||!r.documentElement||r.getElementsByTagName("parsererror").length)&&v.error("Invalid XML: "+n),r},noop:function(){},globalEval:function(t){t&&g.test(t)&&(e.execScript||function(t){e.eval.call(e,t)})(t)},camelCase:function(e){return e.replace(C,"ms-").replace(k,L)},nodeName:function(e,t){return e.nodeName&&e.nodeName.toLowerCase()===t.toLowerCase()},each:function(e,n,r){var i,s=0,o=e.length,u=o===t||v.isFunction(e);if(r){if(u){for(i in e)if(n.apply(e[i],r)===!1)break}else for(;s<o;)if(n.apply(e[s++],r)===!1)break}else if(u){for(i in e)if(n.call(e[i],i,e[i])===!1)break}else for(;s<o;)if(n.call(e[s],s,e[s++])===!1)break;return e},trim:d&&!d.call("\ufeff\u00a0")?function(e){return e==null?"":d.call(e)}:function(e){return e==null?"":(e+"").replace(b,"")},makeArray:function(e,t){var n,r=t||[];return e!=null&&(n=v.type(e),e.length==null||n==="string"||n==="function"||n==="regexp"||v.isWindow(e)?f.call(r,e):v.merge(r,e)),r},inArray:function(e,t,n){var r;if(t){if(c)return c.call(t,e,n);r=t.length,n=n?n<0?Math.max(0,r+n):n:0;for(;n<r;n++)if(n in t&&t[n]===e)return n}return-1},merge:function(e,n){var r=n.length,i=e.length,s=0;if(typeof 
r=="number")for(;s<r;s++)e[i++]=n[s];else while(n[s]!==t)e[i++]=n[s++];return e.length=i,e},grep:function(e,t,n){var r,i=[],s=0,o=e.length;n=!!n;for(;s<o;s++)r=!!t(e[s],s),n!==r&&i.push(e[s]);return i},map:function(e,n,r){var i,s,o=[],u=0,a=e.length,f=e instanceof v||a!==t&&typeof a=="number"&&(a>0&&e[0]&&e[a-1]||a===0||v.isArray(e));if(f)for(;u<a;u++)i=n(e[u],u,r),i!=null&&(o[o.length]=i);else for(s in e)i=n(e[s],s,r),i!=null&&(o[o.length]=i);return o.concat.apply([],o)},guid:1,proxy:function(e,n){var r,i,s;return typeof n=="string"&&(r=e[n],n=e,e=r),v.isFunction(e)?(i=l.call(arguments,2),s=function(){return e.apply(n,i.concat(l.call(arguments)))},s.guid=e.guid=e.guid||v.guid++,s):t},access:function(e,n,r,i,s,o,u){var a,f=r==null,l=0,c=e.length;if(r&&typeof r=="object"){for(l in r)v.access(e,n,l,r[l],1,o,i);s=1}else if(i!==t){a=u===t&&v.isFunction(i),f&&(a?(a=n,n=function(e,t,n){return a.call(v(e),n)}):(n.call(e,i),n=null));if(n)for(;l<c;l++)n(e[l],r,a?i.call(e[l],l,n(e[l],r)):i,u);s=1}return s?e:f?n.call(e):c?n(e[0],r):o},now:function(){return(new Date).getTime()}}),v.ready.promise=function(t){if(!r){r=v.Deferred();if(i.readyState==="complete")setTimeout(v.ready,1);else if(i.addEventListener)i.addEventListener("DOMContentLoaded",A,!1),e.addEventListener("load",v.ready,!1);else{i.attachEvent("onreadystatechange",A),e.attachEvent("onload",v.ready);var n=!1;try{n=e.frameElement==null&&i.documentElement}catch(s){}n&&n.doScroll&&function o(){if(!v.isReady){try{n.doScroll("left")}catch(e){return setTimeout(o,50)}v.ready()}}()}}return r.promise(t)},v.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(e,t){O["[object "+t+"]"]=t.toLowerCase()}),n=v(i);var M={};v.Callbacks=function(e){e=typeof e=="string"?M[e]||_(e):v.extend({},e);var 
n,r,i,s,o,u,a=[],f=!e.once&&[],l=function(t){n=e.memory&&t,r=!0,u=s||0,s=0,o=a.length,i=!0;for(;a&&u<o;u++)if(a[u].apply(t[0],t[1])===!1&&e.stopOnFalse){n=!1;break}i=!1,a&&(f?f.length&&l(f.shift()):n?a=[]:c.disable())},c={add:function(){if(a){var t=a.length;(function r(t){v.each(t,function(t,n){var i=v.type(n);i==="function"?(!e.unique||!c.has(n))&&a.push(n):n&&n.length&&i!=="string"&&r(n)})})(arguments),i?o=a.length:n&&(s=t,l(n))}return this},remove:function(){return a&&v.each(arguments,function(e,t){var n;while((n=v.inArray(t,a,n))>-1)a.splice(n,1),i&&(n<=o&&o--,n<=u&&u--)}),this},has:function(e){return v.inArray(e,a)>-1},empty:function(){return a=[],this},disable:function(){return a=f=n=t,this},disabled:function(){return!a},lock:function(){return f=t,n||c.disable(),this},locked:function(){return!f},fireWith:function(e,t){return t=t||[],t=[e,t.slice?t.slice():t],a&&(!r||f)&&(i?f.push(t):l(t)),this},fire:function(){return c.fireWith(this,arguments),this},fired:function(){return!!r}};return c},v.extend({Deferred:function(e){var t=[["resolve","done",v.Callbacks("once memory"),"resolved"],["reject","fail",v.Callbacks("once memory"),"rejected"],["notify","progress",v.Callbacks("memory")]],n="pending",r={state:function(){return n},always:function(){return i.done(arguments).fail(arguments),this},then:function(){var e=arguments;return v.Deferred(function(n){v.each(t,function(t,r){var s=r[0],o=e[t];i[r[1]](v.isFunction(o)?function(){var e=o.apply(this,arguments);e&&v.isFunction(e.promise)?e.promise().done(n.resolve).fail(n.reject).progress(n.notify):n[s+"With"](this===i?n:this,[e])}:n[s])}),e=null}).promise()},promise:function(e){return e!=null?v.extend(e,r):r}},i={};return r.pipe=r.then,v.each(t,function(e,s){var o=s[2],u=s[3];r[s[1]]=o.add,u&&o.add(function(){n=u},t[e^1][2].disable,t[2][2].lock),i[s[0]]=o.fire,i[s[0]+"With"]=o.fireWith}),r.promise(i),e&&e.call(i,i),i},when:function(e){var 
t=0,n=l.call(arguments),r=n.length,i=r!==1||e&&v.isFunction(e.promise)?r:0,s=i===1?e:v.Deferred(),o=function(e,t,n){return function(r){t[e]=this,n[e]=arguments.length>1?l.call(arguments):r,n===u?s.notifyWith(t,n):--i||s.resolveWith(t,n)}},u,a,f;if(r>1){u=new Array(r),a=new Array(r),f=new Array(r);for(;t<r;t++)n[t]&&v.isFunction(n[t].promise)?n[t].promise().done(o(t,f,n)).fail(s.reject).progress(o(t,a,u)):--i}return i||s.resolveWith(f,n),s.promise()}}),v.support=function(){var t,n,r,s,o,u,a,f,l,c,h,p=i.createElement("div");p.setAttribute("className","t"),p.innerHTML=" <link/><table></table><a href='/a'>a</a><input type='checkbox'/>",n=p.getElementsByTagName("*"),r=p.getElementsByTagName("a")[0];if(!n||!r||!n.length)return{};s=i.createElement("select"),o=s.appendChild(i.createElement("option")),u=p.getElementsByTagName("input")[0],r.style.cssText="top:1px;float:left;opacity:.5",t={leadingWhitespace:p.firstChild.nodeType===3,tbody:!p.getElementsByTagName("tbody").length,htmlSerialize:!!p.getElementsByTagName("link").length,style:/top/.test(r.getAttribute("style")),hrefNormalized:r.getAttribute("href")==="/a",opacity:/^0.5/.test(r.style.opacity),cssFloat:!!r.style.cssFloat,checkOn:u.value==="on",optSelected:o.selected,getSetAttribute:p.className!=="t",enctype:!!i.createElement("form").enctype,html5Clone:i.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",boxModel:i.compatMode==="CSS1Compat",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0,boxSizingReliable:!0,pixelPosition:!1},u.checked=!0,t.noCloneChecked=u.cloneNode(!0).checked,s.disabled=!0,t.optDisabled=!o.disabled;try{delete 
p.test}catch(d){t.deleteExpando=!1}!p.addEventListener&&p.attachEvent&&p.fireEvent&&(p.attachEvent("onclick",h=function(){t.noCloneEvent=!1}),p.cloneNode(!0).fireEvent("onclick"),p.detachEvent("onclick",h)),u=i.createElement("input"),u.value="t",u.setAttribute("type","radio"),t.radioValue=u.value==="t",u.setAttribute("checked","checked"),u.setAttribute("name","t"),p.appendChild(u),a=i.createDocumentFragment(),a.appendChild(p.lastChild),t.checkClone=a.cloneNode(!0).cloneNode(!0).lastChild.checked,t.appendChecked=u.checked,a.removeChild(u),a.appendChild(p);if(p.attachEvent)for(l in{submit:!0,change:!0,focusin:!0})f="on"+l,c=f in p,c||(p.setAttribute(f,"return;"),c=typeof p[f]=="function"),t[l+"Bubbles"]=c;return v(function(){var n,r,s,o,u="padding:0;margin:0;border:0;display:block;overflow:hidden;",a=i.getElementsByTagName("body")[0];if(!a)return;n=i.createElement("div"),n.style.cssText="visibility:hidden;border:0;width:0;height:0;position:static;top:0;margin-top:1px",a.insertBefore(n,a.firstChild),r=i.createElement("div"),n.appendChild(r),r.innerHTML="<table><tr><td></td><td>t</td></tr></table>",s=r.getElementsByTagName("td"),s[0].style.cssText="padding:0;margin:0;border:0;display:none",c=s[0].offsetHeight===0,s[0].style.display="",s[1].style.display="none",t.reliableHiddenOffsets=c&&s[0].offsetHeight===0,r.innerHTML="",r.style.cssText="box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;",t.boxSizing=r.offsetWidth===4,t.doesNotIncludeMarginInBodyOffset=a.offsetTop!==1,e.getComputedStyle&&(t.pixelPosition=(e.getComputedStyle(r,null)||{}).top!=="1%",t.boxSizingReliable=(e.getComputedStyle(r,null)||{width:"4px"}).width==="4px",o=i.createElement("div"),o.style.cssText=r.style.cssText=u,o.style.marginRight=o.style.width="0",r.style.width="1px",r.appendChild(o),t.reliableMarginRight=!parseFloat((e.getComputedStyle(o,null)||{}).marginRight)),typeof 
r.style.zoom!="undefined"&&(r.innerHTML="",r.style.cssText=u+"width:1px;padding:1px;display:inline;zoom:1",t.inlineBlockNeedsLayout=r.offsetWidth===3,r.style.display="block",r.style.overflow="visible",r.innerHTML="<div></div>",r.firstChild.style.width="5px",t.shrinkWrapBlocks=r.offsetWidth!==3,n.style.zoom=1),a.removeChild(n),n=r=s=o=null}),a.removeChild(p),n=r=s=o=u=a=p=null,t}();var D=/(?:\{[\s\S]*\}|\[[\s\S]*\])$/,P=/([A-Z])/g;v.extend({cache:{},deletedIds:[],uuid:0,expando:"jQuery"+(v.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(e){return e=e.nodeType?v.cache[e[v.expando]]:e[v.expando],!!e&&!B(e)},data:function(e,n,r,i){if(!v.acceptData(e))return;var s,o,u=v.expando,a=typeof n=="string",f=e.nodeType,l=f?v.cache:e,c=f?e[u]:e[u]&&u;if((!c||!l[c]||!i&&!l[c].data)&&a&&r===t)return;c||(f?e[u]=c=v.deletedIds.pop()||v.guid++:c=u),l[c]||(l[c]={},f||(l[c].toJSON=v.noop));if(typeof n=="object"||typeof n=="function")i?l[c]=v.extend(l[c],n):l[c].data=v.extend(l[c].data,n);return s=l[c],i||(s.data||(s.data={}),s=s.data),r!==t&&(s[v.camelCase(n)]=r),a?(o=s[n],o==null&&(o=s[v.camelCase(n)])):o=s,o},removeData:function(e,t,n){if(!v.acceptData(e))return;var r,i,s,o=e.nodeType,u=o?v.cache:e,a=o?e[v.expando]:v.expando;if(!u[a])return;if(t){r=n?u[a]:u[a].data;if(r){v.isArray(t)||(t in r?t=[t]:(t=v.camelCase(t),t in r?t=[t]:t=t.split(" ")));for(i=0,s=t.length;i<s;i++)delete r[t[i]];if(!(n?B:v.isEmptyObject)(r))return}}if(!n){delete u[a].data;if(!B(u[a]))return}o?v.cleanData([e],!0):v.support.deleteExpando||u!=u.window?delete u[a]:u[a]=null},_data:function(e,t,n){return v.data(e,t,n,!0)},acceptData:function(e){var t=e.nodeName&&v.noData[e.nodeName.toLowerCase()];return!t||t!==!0&&e.getAttribute("classid")===t}}),v.fn.extend({data:function(e,n){var 
r,i,s,o,u,a=this[0],f=0,l=null;if(e===t){if(this.length){l=v.data(a);if(a.nodeType===1&&!v._data(a,"parsedAttrs")){s=a.attributes;for(u=s.length;f<u;f++)o=s[f].name,o.indexOf("data-")||(o=v.camelCase(o.substring(5)),H(a,o,l[o]));v._data(a,"parsedAttrs",!0)}}return l}return typeof e=="object"?this.each(function(){v.data(this,e)}):(r=e.split(".",2),r[1]=r[1]?"."+r[1]:"",i=r[1]+"!",v.access(this,function(n){if(n===t)return l=this.triggerHandler("getData"+i,[r[0]]),l===t&&a&&(l=v.data(a,e),l=H(a,e,l)),l===t&&r[1]?this.data(r[0]):l;r[1]=n,this.each(function(){var t=v(this);t.triggerHandler("setData"+i,r),v.data(this,e,n),t.triggerHandler("changeData"+i,r)})},null,n,arguments.length>1,null,!1))},removeData:function(e){return this.each(function(){v.removeData(this,e)})}}),v.extend({queue:function(e,t,n){var r;if(e)return t=(t||"fx")+"queue",r=v._data(e,t),n&&(!r||v.isArray(n)?r=v._data(e,t,v.makeArray(n)):r.push(n)),r||[]},dequeue:function(e,t){t=t||"fx";var n=v.queue(e,t),r=n.length,i=n.shift(),s=v._queueHooks(e,t),o=function(){v.dequeue(e,t)};i==="inprogress"&&(i=n.shift(),r--),i&&(t==="fx"&&n.unshift("inprogress"),delete s.stop,i.call(e,o,s)),!r&&s&&s.empty.fire()},_queueHooks:function(e,t){var n=t+"queueHooks";return v._data(e,n)||v._data(e,n,{empty:v.Callbacks("once memory").add(function(){v.removeData(e,t+"queue",!0),v.removeData(e,n,!0)})})}}),v.fn.extend({queue:function(e,n){var r=2;return typeof e!="string"&&(n=e,e="fx",r--),arguments.length<r?v.queue(this[0],e):n===t?this:this.each(function(){var t=v.queue(this,e,n);v._queueHooks(this,e),e==="fx"&&t[0]!=="inprogress"&&v.dequeue(this,e)})},dequeue:function(e){return this.each(function(){v.dequeue(this,e)})},delay:function(e,t){return e=v.fx?v.fx.speeds[e]||e:e,t=t||"fx",this.queue(t,function(t,n){var r=setTimeout(t,e);n.stop=function(){clearTimeout(r)}})},clearQueue:function(e){return this.queue(e||"fx",[])},promise:function(e,n){var 
r,i=1,s=v.Deferred(),o=this,u=this.length,a=function(){--i||s.resolveWith(o,[o])};typeof e!="string"&&(n=e,e=t),e=e||"fx";while(u--)r=v._data(o[u],e+"queueHooks"),r&&r.empty&&(i++,r.empty.add(a));return a(),s.promise(n)}});var j,F,I,q=/[\t\r\n]/g,R=/\r/g,U=/^(?:button|input)$/i,z=/^(?:button|input|object|select|textarea)$/i,W=/^a(?:rea|)$/i,X=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,V=v.support.getSetAttribute;v.fn.extend({attr:function(e,t){return v.access(this,v.attr,e,t,arguments.length>1)},removeAttr:function(e){return this.each(function(){v.removeAttr(this,e)})},prop:function(e,t){return v.access(this,v.prop,e,t,arguments.length>1)},removeProp:function(e){return e=v.propFix[e]||e,this.each(function(){try{this[e]=t,delete this[e]}catch(n){}})},addClass:function(e){var t,n,r,i,s,o,u;if(v.isFunction(e))return this.each(function(t){v(this).addClass(e.call(this,t,this.className))});if(e&&typeof e=="string"){t=e.split(y);for(n=0,r=this.length;n<r;n++){i=this[n];if(i.nodeType===1)if(!i.className&&t.length===1)i.className=e;else{s=" "+i.className+" ";for(o=0,u=t.length;o<u;o++)s.indexOf(" "+t[o]+" ")<0&&(s+=t[o]+" ");i.className=v.trim(s)}}}return this},removeClass:function(e){var n,r,i,s,o,u,a;if(v.isFunction(e))return this.each(function(t){v(this).removeClass(e.call(this,t,this.className))});if(e&&typeof e=="string"||e===t){n=(e||"").split(y);for(u=0,a=this.length;u<a;u++){i=this[u];if(i.nodeType===1&&i.className){r=(" "+i.className+" ").replace(q," ");for(s=0,o=n.length;s<o;s++)while(r.indexOf(" "+n[s]+" ")>=0)r=r.replace(" "+n[s]+" "," ");i.className=e?v.trim(r):""}}}return this},toggleClass:function(e,t){var n=typeof e,r=typeof t=="boolean";return v.isFunction(e)?this.each(function(n){v(this).toggleClass(e.call(this,n,this.className,t),t)}):this.each(function(){if(n==="string"){var 
i,s=0,o=v(this),u=t,a=e.split(y);while(i=a[s++])u=r?u:!o.hasClass(i),o[u?"addClass":"removeClass"](i)}else if(n==="undefined"||n==="boolean")this.className&&v._data(this,"__className__",this.className),this.className=this.className||e===!1?"":v._data(this,"__className__")||""})},hasClass:function(e){var t=" "+e+" ",n=0,r=this.length;for(;n<r;n++)if(this[n].nodeType===1&&(" "+this[n].className+" ").replace(q," ").indexOf(t)>=0)return!0;return!1},val:function(e){var n,r,i,s=this[0];if(!arguments.length){if(s)return n=v.valHooks[s.type]||v.valHooks[s.nodeName.toLowerCase()],n&&"get"in n&&(r=n.get(s,"value"))!==t?r:(r=s.value,typeof r=="string"?r.replace(R,""):r==null?"":r);return}return i=v.isFunction(e),this.each(function(r){var s,o=v(this);if(this.nodeType!==1)return;i?s=e.call(this,r,o.val()):s=e,s==null?s="":typeof s=="number"?s+="":v.isArray(s)&&(s=v.map(s,function(e){return e==null?"":e+""})),n=v.valHooks[this.type]||v.valHooks[this.nodeName.toLowerCase()];if(!n||!("set"in n)||n.set(this,s,"value")===t)this.value=s})}}),v.extend({valHooks:{option:{get:function(e){var t=e.attributes.value;return!t||t.specified?e.value:e.text}},select:{get:function(e){var t,n,r=e.options,i=e.selectedIndex,s=e.type==="select-one"||i<0,o=s?null:[],u=s?i+1:r.length,a=i<0?u:s?i:0;for(;a<u;a++){n=r[a];if((n.selected||a===i)&&(v.support.optDisabled?!n.disabled:n.getAttribute("disabled")===null)&&(!n.parentNode.disabled||!v.nodeName(n.parentNode,"optgroup"))){t=v(n).val();if(s)return t;o.push(t)}}return o},set:function(e,t){var n=v.makeArray(t);return v(e).find("option").each(function(){this.selected=v.inArray(v(this).val(),n)>=0}),n.length||(e.selectedIndex=-1),n}}},attrFn:{},attr:function(e,n,r,i){var s,o,u,a=e.nodeType;if(!e||a===3||a===8||a===2)return;if(i&&v.isFunction(v.fn[n]))return v(e)[n](r);if(typeof e.getAttribute=="undefined")return 
v.prop(e,n,r);u=a!==1||!v.isXMLDoc(e),u&&(n=n.toLowerCase(),o=v.attrHooks[n]||(X.test(n)?F:j));if(r!==t){if(r===null){v.removeAttr(e,n);return}return o&&"set"in o&&u&&(s=o.set(e,r,n))!==t?s:(e.setAttribute(n,r+""),r)}return o&&"get"in o&&u&&(s=o.get(e,n))!==null?s:(s=e.getAttribute(n),s===null?t:s)},removeAttr:function(e,t){var n,r,i,s,o=0;if(t&&e.nodeType===1){r=t.split(y);for(;o<r.length;o++)i=r[o],i&&(n=v.propFix[i]||i,s=X.test(i),s||v.attr(e,i,""),e.removeAttribute(V?i:n),s&&n in e&&(e[n]=!1))}},attrHooks:{type:{set:function(e,t){if(U.test(e.nodeName)&&e.parentNode)v.error("type property can't be changed");else if(!v.support.radioValue&&t==="radio"&&v.nodeName(e,"input")){var n=e.value;return e.setAttribute("type",t),n&&(e.value=n),t}}},value:{get:function(e,t){return j&&v.nodeName(e,"button")?j.get(e,t):t in e?e.value:null},set:function(e,t,n){if(j&&v.nodeName(e,"button"))return j.set(e,t,n);e.value=t}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(e,n,r){var i,s,o,u=e.nodeType;if(!e||u===3||u===8||u===2)return;return o=u!==1||!v.isXMLDoc(e),o&&(n=v.propFix[n]||n,s=v.propHooks[n]),r!==t?s&&"set"in s&&(i=s.set(e,r,n))!==t?i:e[n]=r:s&&"get"in s&&(i=s.get(e,n))!==null?i:e[n]},propHooks:{tabIndex:{get:function(e){var n=e.getAttributeNode("tabindex");return n&&n.specified?parseInt(n.value,10):z.test(e.nodeName)||W.test(e.nodeName)&&e.href?0:t}}}}),F={get:function(e,n){var r,i=v.prop(e,n);return i===!0||typeof i!="boolean"&&(r=e.getAttributeNode(n))&&r.nodeValue!==!1?n.toLowerCase():t},set:function(e,t,n){var r;return t===!1?v.removeAttr(e,n):(r=v.propFix[n]||n,r in e&&(e[r]=!0),e.setAttribute(n,n.toLowerCase())),n}},V||(I={name:!0,id:!0,coords:!0},j=v.valHooks.button={get:function(e,n){var r;return 
r=e.getAttributeNode(n),r&&(I[n]?r.value!=="":r.specified)?r.value:t},set:function(e,t,n){var r=e.getAttributeNode(n);return r||(r=i.createAttribute(n),e.setAttributeNode(r)),r.value=t+""}},v.each(["width","height"],function(e,t){v.attrHooks[t]=v.extend(v.attrHooks[t],{set:function(e,n){if(n==="")return e.setAttribute(t,"auto"),n}})}),v.attrHooks.contenteditable={get:j.get,set:function(e,t,n){t===""&&(t="false"),j.set(e,t,n)}}),v.support.hrefNormalized||v.each(["href","src","width","height"],function(e,n){v.attrHooks[n]=v.extend(v.attrHooks[n],{get:function(e){var r=e.getAttribute(n,2);return r===null?t:r}})}),v.support.style||(v.attrHooks.style={get:function(e){return e.style.cssText.toLowerCase()||t},set:function(e,t){return e.style.cssText=t+""}}),v.support.optSelected||(v.propHooks.selected=v.extend(v.propHooks.selected,{get:function(e){var t=e.parentNode;return t&&(t.selectedIndex,t.parentNode&&t.parentNode.selectedIndex),null}})),v.support.enctype||(v.propFix.enctype="encoding"),v.support.checkOn||v.each(["radio","checkbox"],function(){v.valHooks[this]={get:function(e){return e.getAttribute("value")===null?"on":e.value}}}),v.each(["radio","checkbox"],function(){v.valHooks[this]=v.extend(v.valHooks[this],{set:function(e,t){if(v.isArray(t))return e.checked=v.inArray(v(e).val(),t)>=0}})});var $=/^(?:textarea|input|select)$/i,J=/^([^\.]*|)(?:\.(.+)|)$/,K=/(?:^|\s)hover(\.\S+|)\b/,Q=/^key/,G=/^(?:mouse|contextmenu)|click/,Y=/^(?:focusinfocus|focusoutblur)$/,Z=function(e){return v.event.special.hover?e:e.replace(K,"mouseenter$1 mouseleave$1")};v.event={add:function(e,n,r,i,s){var o,u,a,f,l,c,h,p,d,m,g;if(e.nodeType===3||e.nodeType===8||!n||!r||!(o=v._data(e)))return;r.handler&&(d=r,r=d.handler,s=d.selector),r.guid||(r.guid=v.guid++),a=o.events,a||(o.events=a={}),u=o.handle,u||(o.handle=u=function(e){return typeof v=="undefined"||!!e&&v.event.triggered===e.type?t:v.event.dispatch.apply(u.elem,arguments)},u.elem=e),n=v.trim(Z(n)).split(" 
");for(f=0;f<n.length;f++){l=J.exec(n[f])||[],c=l[1],h=(l[2]||"").split(".").sort(),g=v.event.special[c]||{},c=(s?g.delegateType:g.bindType)||c,g=v.event.special[c]||{},p=v.extend({type:c,origType:l[1],data:i,handler:r,guid:r.guid,selector:s,needsContext:s&&v.expr.match.needsContext.test(s),namespace:h.join(".")},d),m=a[c];if(!m){m=a[c]=[],m.delegateCount=0;if(!g.setup||g.setup.call(e,i,h,u)===!1)e.addEventListener?e.addEventListener(c,u,!1):e.attachEvent&&e.attachEvent("on"+c,u)}g.add&&(g.add.call(e,p),p.handler.guid||(p.handler.guid=r.guid)),s?m.splice(m.delegateCount++,0,p):m.push(p),v.event.global[c]=!0}e=null},global:{},remove:function(e,t,n,r,i){var s,o,u,a,f,l,c,h,p,d,m,g=v.hasData(e)&&v._data(e);if(!g||!(h=g.events))return;t=v.trim(Z(t||"")).split(" ");for(s=0;s<t.length;s++){o=J.exec(t[s])||[],u=a=o[1],f=o[2];if(!u){for(u in h)v.event.remove(e,u+t[s],n,r,!0);continue}p=v.event.special[u]||{},u=(r?p.delegateType:p.bindType)||u,d=h[u]||[],l=d.length,f=f?new RegExp("(^|\\.)"+f.split(".").sort().join("\\.(?:.*\\.|)")+"(\\.|$)"):null;for(c=0;c<d.length;c++)m=d[c],(i||a===m.origType)&&(!n||n.guid===m.guid)&&(!f||f.test(m.namespace))&&(!r||r===m.selector||r==="**"&&m.selector)&&(d.splice(c--,1),m.selector&&d.delegateCount--,p.remove&&p.remove.call(e,m));d.length===0&&l!==d.length&&((!p.teardown||p.teardown.call(e,f,g.handle)===!1)&&v.removeEvent(e,u,g.handle),delete h[u])}v.isEmptyObject(h)&&(delete g.handle,v.removeData(e,"events",!0))},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(n,r,s,o){if(!s||s.nodeType!==3&&s.nodeType!==8){var u,a,f,l,c,h,p,d,m,g,y=n.type||n,b=[];if(Y.test(y+v.event.triggered))return;y.indexOf("!")>=0&&(y=y.slice(0,-1),a=!0),y.indexOf(".")>=0&&(b=y.split("."),y=b.shift(),b.sort());if((!s||v.event.customEvent[y])&&!v.event.global[y])return;n=typeof n=="object"?n[v.expando]?n:new v.Event(y,n):new v.Event(y),n.type=y,n.isTrigger=!0,n.exclusive=a,n.namespace=b.join("."),n.namespace_re=n.namespace?new 
RegExp("(^|\\.)"+b.join("\\.(?:.*\\.|)")+"(\\.|$)"):null,h=y.indexOf(":")<0?"on"+y:"";if(!s){u=v.cache;for(f in u)u[f].events&&u[f].events[y]&&v.event.trigger(n,r,u[f].handle.elem,!0);return}n.result=t,n.target||(n.target=s),r=r!=null?v.makeArray(r):[],r.unshift(n),p=v.event.special[y]||{};if(p.trigger&&p.trigger.apply(s,r)===!1)return;m=[[s,p.bindType||y]];if(!o&&!p.noBubble&&!v.isWindow(s)){g=p.delegateType||y,l=Y.test(g+y)?s:s.parentNode;for(c=s;l;l=l.parentNode)m.push([l,g]),c=l;c===(s.ownerDocument||i)&&m.push([c.defaultView||c.parentWindow||e,g])}for(f=0;f<m.length&&!n.isPropagationStopped();f++)l=m[f][0],n.type=m[f][1],d=(v._data(l,"events")||{})[n.type]&&v._data(l,"handle"),d&&d.apply(l,r),d=h&&l[h],d&&v.acceptData(l)&&d.apply&&d.apply(l,r)===!1&&n.preventDefault();return n.type=y,!o&&!n.isDefaultPrevented()&&(!p._default||p._default.apply(s.ownerDocument,r)===!1)&&(y!=="click"||!v.nodeName(s,"a"))&&v.acceptData(s)&&h&&s[y]&&(y!=="focus"&&y!=="blur"||n.target.offsetWidth!==0)&&!v.isWindow(s)&&(c=s[h],c&&(s[h]=null),v.event.triggered=y,s[y](),v.event.triggered=t,c&&(s[h]=c)),n.result}return},dispatch:function(n){n=v.event.fix(n||e.event);var 
r,i,s,o,u,a,f,c,h,p,d=(v._data(this,"events")||{})[n.type]||[],m=d.delegateCount,g=l.call(arguments),y=!n.exclusive&&!n.namespace,b=v.event.special[n.type]||{},w=[];g[0]=n,n.delegateTarget=this;if(b.preDispatch&&b.preDispatch.call(this,n)===!1)return;if(m&&(!n.button||n.type!=="click"))for(s=n.target;s!=this;s=s.parentNode||this)if(s.disabled!==!0||n.type!=="click"){u={},f=[];for(r=0;r<m;r++)c=d[r],h=c.selector,u[h]===t&&(u[h]=c.needsContext?v(h,this).index(s)>=0:v.find(h,this,null,[s]).length),u[h]&&f.push(c);f.length&&w.push({elem:s,matches:f})}d.length>m&&w.push({elem:this,matches:d.slice(m)});for(r=0;r<w.length&&!n.isPropagationStopped();r++){a=w[r],n.currentTarget=a.elem;for(i=0;i<a.matches.length&&!n.isImmediatePropagationStopped();i++){c=a.matches[i];if(y||!n.namespace&&!c.namespace||n.namespace_re&&n.namespace_re.test(c.namespace))n.data=c.data,n.handleObj=c,o=((v.event.special[c.origType]||{}).handle||c.handler).apply(a.elem,g),o!==t&&(n.result=o,o===!1&&(n.preventDefault(),n.stopPropagation()))}}return b.postDispatch&&b.postDispatch.call(this,n),n.result},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(e,t){return e.which==null&&(e.which=t.charCode!=null?t.charCode:t.keyCode),e}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(e,n){var r,s,o,u=n.button,a=n.fromElement;return 
e.pageX==null&&n.clientX!=null&&(r=e.target.ownerDocument||i,s=r.documentElement,o=r.body,e.pageX=n.clientX+(s&&s.scrollLeft||o&&o.scrollLeft||0)-(s&&s.clientLeft||o&&o.clientLeft||0),e.pageY=n.clientY+(s&&s.scrollTop||o&&o.scrollTop||0)-(s&&s.clientTop||o&&o.clientTop||0)),!e.relatedTarget&&a&&(e.relatedTarget=a===e.target?n.toElement:a),!e.which&&u!==t&&(e.which=u&1?1:u&2?3:u&4?2:0),e}},fix:function(e){if(e[v.expando])return e;var t,n,r=e,s=v.event.fixHooks[e.type]||{},o=s.props?this.props.concat(s.props):this.props;e=v.Event(r);for(t=o.length;t;)n=o[--t],e[n]=r[n];return e.target||(e.target=r.srcElement||i),e.target.nodeType===3&&(e.target=e.target.parentNode),e.metaKey=!!e.metaKey,s.filter?s.filter(e,r):e},special:{load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(e,t,n){v.isWindow(this)&&(this.onbeforeunload=n)},teardown:function(e,t){this.onbeforeunload===t&&(this.onbeforeunload=null)}}},simulate:function(e,t,n,r){var i=v.extend(new v.Event,n,{type:e,isSimulated:!0,originalEvent:{}});r?v.event.trigger(i,null,t):v.event.dispatch.call(t,i),i.isDefaultPrevented()&&n.preventDefault()}},v.event.handle=v.event.dispatch,v.removeEvent=i.removeEventListener?function(e,t,n){e.removeEventListener&&e.removeEventListener(t,n,!1)}:function(e,t,n){var r="on"+t;e.detachEvent&&(typeof e[r]=="undefined"&&(e[r]=null),e.detachEvent(r,n))},v.Event=function(e,t){if(!(this instanceof v.Event))return new v.Event(e,t);e&&e.type?(this.originalEvent=e,this.type=e.type,this.isDefaultPrevented=e.defaultPrevented||e.returnValue===!1||e.getPreventDefault&&e.getPreventDefault()?tt:et):this.type=e,t&&v.extend(this,t),this.timeStamp=e&&e.timeStamp||v.now(),this[v.expando]=!0},v.Event.prototype={preventDefault:function(){this.isDefaultPrevented=tt;var e=this.originalEvent;if(!e)return;e.preventDefault?e.preventDefault():e.returnValue=!1},stopPropagation:function(){this.isPropagationStopped=tt;var 
e=this.originalEvent;if(!e)return;e.stopPropagation&&e.stopPropagation(),e.cancelBubble=!0},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=tt,this.stopPropagation()},isDefaultPrevented:et,isPropagationStopped:et,isImmediatePropagationStopped:et},v.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(e,t){v.event.special[e]={delegateType:t,bindType:t,handle:function(e){var n,r=this,i=e.relatedTarget,s=e.handleObj,o=s.selector;if(!i||i!==r&&!v.contains(r,i))e.type=s.origType,n=s.handler.apply(this,arguments),e.type=t;return n}}}),v.support.submitBubbles||(v.event.special.submit={setup:function(){if(v.nodeName(this,"form"))return!1;v.event.add(this,"click._submit keypress._submit",function(e){var n=e.target,r=v.nodeName(n,"input")||v.nodeName(n,"button")?n.form:t;r&&!v._data(r,"_submit_attached")&&(v.event.add(r,"submit._submit",function(e){e._submit_bubble=!0}),v._data(r,"_submit_attached",!0))})},postDispatch:function(e){e._submit_bubble&&(delete e._submit_bubble,this.parentNode&&!e.isTrigger&&v.event.simulate("submit",this.parentNode,e,!0))},teardown:function(){if(v.nodeName(this,"form"))return!1;v.event.remove(this,"._submit")}}),v.support.changeBubbles||(v.event.special.change={setup:function(){if($.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")v.event.add(this,"propertychange._change",function(e){e.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),v.event.add(this,"click._change",function(e){this._just_changed&&!e.isTrigger&&(this._just_changed=!1),v.event.simulate("change",this,e,!0)});return!1}v.event.add(this,"beforeactivate._change",function(e){var t=e.target;$.test(t.nodeName)&&!v._data(t,"_change_attached")&&(v.event.add(t,"change._change",function(e){this.parentNode&&!e.isSimulated&&!e.isTrigger&&v.event.simulate("change",this.parentNode,e,!0)}),v._data(t,"_change_attached",!0))})},handle:function(e){var 
t=e.target;if(this!==t||e.isSimulated||e.isTrigger||t.type!=="radio"&&t.type!=="checkbox")return e.handleObj.handler.apply(this,arguments)},teardown:function(){return v.event.remove(this,"._change"),!$.test(this.nodeName)}}),v.support.focusinBubbles||v.each({focus:"focusin",blur:"focusout"},function(e,t){var n=0,r=function(e){v.event.simulate(t,e.target,v.event.fix(e),!0)};v.event.special[t]={setup:function(){n++===0&&i.addEventListener(e,r,!0)},teardown:function(){--n===0&&i.removeEventListener(e,r,!0)}}}),v.fn.extend({on:function(e,n,r,i,s){var o,u;if(typeof e=="object"){typeof n!="string"&&(r=r||n,n=t);for(u in e)this.on(u,n,r,e[u],s);return this}r==null&&i==null?(i=n,r=n=t):i==null&&(typeof n=="string"?(i=r,r=t):(i=r,r=n,n=t));if(i===!1)i=et;else if(!i)return this;return s===1&&(o=i,i=function(e){return v().off(e),o.apply(this,arguments)},i.guid=o.guid||(o.guid=v.guid++)),this.each(function(){v.event.add(this,e,i,r,n)})},one:function(e,t,n,r){return this.on(e,t,n,r,1)},off:function(e,n,r){var i,s;if(e&&e.preventDefault&&e.handleObj)return i=e.handleObj,v(e.delegateTarget).off(i.namespace?i.origType+"."+i.namespace:i.origType,i.selector,i.handler),this;if(typeof e=="object"){for(s in e)this.off(s,n,e[s]);return this}if(n===!1||typeof n=="function")r=n,n=t;return r===!1&&(r=et),this.each(function(){v.event.remove(this,e,r,n)})},bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},live:function(e,t,n){return v(this.context).on(e,this.selector,t,n),this},die:function(e,t){return v(this.context).off(e,this.selector||"**",t),this},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return arguments.length===1?this.off(e,"**"):this.off(t,e||"**",n)},trigger:function(e,t){return this.each(function(){v.event.trigger(e,t,this)})},triggerHandler:function(e,t){if(this[0])return v.event.trigger(e,t,this[0],!0)},toggle:function(e){var t=arguments,n=e.guid||v.guid++,r=0,i=function(n){var 
i=(v._data(this,"lastToggle"+e.guid)||0)%r;return v._data(this,"lastToggle"+e.guid,i+1),n.preventDefault(),t[i].apply(this,arguments)||!1};i.guid=n;while(r<t.length)t[r++].guid=n;return this.click(i)},hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),v.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(e,t){v.fn[t]=function(e,n){return n==null&&(n=e,e=null),arguments.length>0?this.on(t,null,e,n):this.trigger(t)},Q.test(t)&&(v.event.fixHooks[t]=v.event.keyHooks),G.test(t)&&(v.event.fixHooks[t]=v.event.mouseHooks)}),function(e,t){function nt(e,t,n,r){n=n||[],t=t||g;var i,s,a,f,l=t.nodeType;if(!e||typeof e!="string")return n;if(l!==1&&l!==9)return[];a=o(t);if(!a&&!r)if(i=R.exec(e))if(f=i[1]){if(l===9){s=t.getElementById(f);if(!s||!s.parentNode)return n;if(s.id===f)return n.push(s),n}else if(t.ownerDocument&&(s=t.ownerDocument.getElementById(f))&&u(t,s)&&s.id===f)return n.push(s),n}else{if(i[2])return S.apply(n,x.call(t.getElementsByTagName(e),0)),n;if((f=i[3])&&Z&&t.getElementsByClassName)return S.apply(n,x.call(t.getElementsByClassName(f),0)),n}return vt(e.replace(j,"$1"),t,n,r,a)}function rt(e){return function(t){var n=t.nodeName.toLowerCase();return n==="input"&&t.type===e}}function it(e){return function(t){var n=t.nodeName.toLowerCase();return(n==="input"||n==="button")&&t.type===e}}function st(e){return N(function(t){return t=+t,N(function(n,r){var i,s=e([],n.length,t),o=s.length;while(o--)n[i=s[o]]&&(n[i]=!(r[i]=n[i]))})})}function ot(e,t,n){if(e===t)return n;var r=e.nextSibling;while(r){if(r===t)return-1;r=r.nextSibling}return 1}function ut(e,t){var n,r,s,o,u,a,f,l=L[d][e+" "];if(l)return t?0:l.slice(0);u=e,a=[],f=i.preFilter;while(u){if(!n||(r=F.exec(u)))r&&(u=u.slice(r[0].length)||u),a.push(s=[]);n=!1;if(r=I.exec(u))s.push(n=new 
m(r.shift())),u=u.slice(n.length),n.type=r[0].replace(j," ");for(o in i.filter)(r=J[o].exec(u))&&(!f[o]||(r=f[o](r)))&&(s.push(n=new m(r.shift())),u=u.slice(n.length),n.type=o,n.matches=r);if(!n)break}return t?u.length:u?nt.error(e):L(e,a).slice(0)}function at(e,t,r){var i=t.dir,s=r&&t.dir==="parentNode",o=w++;return t.first?function(t,n,r){while(t=t[i])if(s||t.nodeType===1)return e(t,n,r)}:function(t,r,u){if(!u){var a,f=b+" "+o+" ",l=f+n;while(t=t[i])if(s||t.nodeType===1){if((a=t[d])===l)return t.sizset;if(typeof a=="string"&&a.indexOf(f)===0){if(t.sizset)return t}else{t[d]=l;if(e(t,r,u))return t.sizset=!0,t;t.sizset=!1}}}else while(t=t[i])if(s||t.nodeType===1)if(e(t,r,u))return t}}function ft(e){return e.length>1?function(t,n,r){var i=e.length;while(i--)if(!e[i](t,n,r))return!1;return!0}:e[0]}function lt(e,t,n,r,i){var s,o=[],u=0,a=e.length,f=t!=null;for(;u<a;u++)if(s=e[u])if(!n||n(s,r,i))o.push(s),f&&t.push(u);return o}function ct(e,t,n,r,i,s){return r&&!r[d]&&(r=ct(r)),i&&!i[d]&&(i=ct(i,s)),N(function(s,o,u,a){var f,l,c,h=[],p=[],d=o.length,v=s||dt(t||"*",u.nodeType?[u]:u,[]),m=e&&(s||!t)?lt(v,h,e,u,a):v,g=n?i||(s?e:d||r)?[]:o:m;n&&n(m,g,u,a);if(r){f=lt(g,p),r(f,[],u,a),l=f.length;while(l--)if(c=f[l])g[p[l]]=!(m[p[l]]=c)}if(s){if(i||e){if(i){f=[],l=g.length;while(l--)(c=g[l])&&f.push(m[l]=c);i(null,g=[],f,a)}l=g.length;while(l--)(c=g[l])&&(f=i?T.call(s,c):h[l])>-1&&(s[f]=!(o[f]=c))}}else g=lt(g===o?g.splice(d,g.length):g),i?i(null,o,g,a):S.apply(o,g)})}function ht(e){var t,n,r,s=e.length,o=i.relative[e[0].type],u=o||i.relative[" "],a=o?1:0,f=at(function(e){return e===t},u,!0),l=at(function(e){return T.call(t,e)>-1},u,!0),h=[function(e,n,r){return!o&&(r||n!==c)||((t=n).nodeType?f(e,n,r):l(e,n,r))}];for(;a<s;a++)if(n=i.relative[e[a].type])h=[at(ft(h),n)];else{n=i.filter[e[a].type].apply(null,e[a].matches);if(n[d]){r=++a;for(;r<s;r++)if(i.relative[e[r].type])break;return 
ct(a>1&&ft(h),a>1&&e.slice(0,a-1).join("").replace(j,"$1"),n,a<r&&ht(e.slice(a,r)),r<s&&ht(e=e.slice(r)),r<s&&e.join(""))}h.push(n)}return ft(h)}function pt(e,t){var r=t.length>0,s=e.length>0,o=function(u,a,f,l,h){var p,d,v,m=[],y=0,w="0",x=u&&[],T=h!=null,N=c,C=u||s&&i.find.TAG("*",h&&a.parentNode||a),k=b+=N==null?1:Math.E;T&&(c=a!==g&&a,n=o.el);for(;(p=C[w])!=null;w++){if(s&&p){for(d=0;v=e[d];d++)if(v(p,a,f)){l.push(p);break}T&&(b=k,n=++o.el)}r&&((p=!v&&p)&&y--,u&&x.push(p))}y+=w;if(r&&w!==y){for(d=0;v=t[d];d++)v(x,m,a,f);if(u){if(y>0)while(w--)!x[w]&&!m[w]&&(m[w]=E.call(l));m=lt(m)}S.apply(l,m),T&&!u&&m.length>0&&y+t.length>1&&nt.uniqueSort(l)}return T&&(b=k,c=N),x};return o.el=0,r?N(o):o}function dt(e,t,n){var r=0,i=t.length;for(;r<i;r++)nt(e,t[r],n);return n}function vt(e,t,n,r,s){var o,u,f,l,c,h=ut(e),p=h.length;if(!r&&h.length===1){u=h[0]=h[0].slice(0);if(u.length>2&&(f=u[0]).type==="ID"&&t.nodeType===9&&!s&&i.relative[u[1].type]){t=i.find.ID(f.matches[0].replace($,""),t,s)[0];if(!t)return n;e=e.slice(u.shift().length)}for(o=J.POS.test(e)?-1:u.length-1;o>=0;o--){f=u[o];if(i.relative[l=f.type])break;if(c=i.find[l])if(r=c(f.matches[0].replace($,""),z.test(u[0].type)&&t.parentNode||t,s)){u.splice(o,1),e=r.length&&u.join("");if(!e)return S.apply(n,x.call(r,0)),n;break}}}return a(e,h)(r,t,s,n,z.test(e)),n}function mt(){}var n,r,i,s,o,u,a,f,l,c,h=!0,p="undefined",d=("sizcache"+Math.random()).replace(".",""),m=String,g=e.document,y=g.documentElement,b=0,w=0,E=[].pop,S=[].push,x=[].slice,T=[].indexOf||function(e){var t=0,n=this.length;for(;t<n;t++)if(this[t]===e)return t;return-1},N=function(e,t){return e[d]=t==null||t,e},C=function(){var e={},t=[];return N(function(n,r){return t.push(n)>i.cacheLength&&delete e[t.shift()],e[n+" 
"]=r},e)},k=C(),L=C(),A=C(),O="[\\x20\\t\\r\\n\\f]",M="(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+",_=M.replace("w","w#"),D="([*^$|!~]?=)",P="\\["+O+"*("+M+")"+O+"*(?:"+D+O+"*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|("+_+")|)|)"+O+"*\\]",H=":("+M+")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:"+P+")|[^:]|\\\\.)*|.*))\\)|)",B=":(even|odd|eq|gt|lt|nth|first|last)(?:\\("+O+"*((?:-\\d)?\\d*)"+O+"*\\)|)(?=[^-]|$)",j=new RegExp("^"+O+"+|((?:^|[^\\\\])(?:\\\\.)*)"+O+"+$","g"),F=new RegExp("^"+O+"*,"+O+"*"),I=new RegExp("^"+O+"*([\\x20\\t\\r\\n\\f>+~])"+O+"*"),q=new RegExp(H),R=/^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/,U=/^:not/,z=/[\x20\t\r\n\f]*[+~]/,W=/:not\($/,X=/h\d/i,V=/input|select|textarea|button/i,$=/\\(?!\\)/g,J={ID:new RegExp("^#("+M+")"),CLASS:new RegExp("^\\.("+M+")"),NAME:new RegExp("^\\[name=['\"]?("+M+")['\"]?\\]"),TAG:new RegExp("^("+M.replace("w","w*")+")"),ATTR:new RegExp("^"+P),PSEUDO:new RegExp("^"+H),POS:new RegExp(B,"i"),CHILD:new RegExp("^:(only|nth|first|last)-child(?:\\("+O+"*(even|odd|(([+-]|)(\\d*)n|)"+O+"*(?:([+-]|)"+O+"*(\\d+)|))"+O+"*\\)|)","i"),needsContext:new RegExp("^"+O+"*[>+~]|"+B,"i")},K=function(e){var t=g.createElement("div");try{return e(t)}catch(n){return!1}finally{t=null}},Q=K(function(e){return e.appendChild(g.createComment("")),!e.getElementsByTagName("*").length}),G=K(function(e){return e.innerHTML="<a href='#'></a>",e.firstChild&&typeof e.firstChild.getAttribute!==p&&e.firstChild.getAttribute("href")==="#"}),Y=K(function(e){e.innerHTML="<select></select>";var t=typeof e.lastChild.getAttribute("multiple");return t!=="boolean"&&t!=="string"}),Z=K(function(e){return e.innerHTML="<div class='hidden e'></div><div class='hidden'></div>",!e.getElementsByClassName||!e.getElementsByClassName("e").length?!1:(e.lastChild.className="e",e.getElementsByClassName("e").length===2)}),et=K(function(e){e.id=d+0,e.innerHTML="<a name='"+d+"'></a><div name='"+d+"'></div>",y.insertBefore(e,y.firstChild);var 
t=g.getElementsByName&&g.getElementsByName(d).length===2+g.getElementsByName(d+0).length;return r=!g.getElementById(d),y.removeChild(e),t});try{x.call(y.childNodes,0)[0].nodeType}catch(tt){x=function(e){var t,n=[];for(;t=this[e];e++)n.push(t);return n}}nt.matches=function(e,t){return nt(e,null,null,t)},nt.matchesSelector=function(e,t){return nt(t,null,null,[e]).length>0},s=nt.getText=function(e){var t,n="",r=0,i=e.nodeType;if(i){if(i===1||i===9||i===11){if(typeof e.textContent=="string")return e.textContent;for(e=e.firstChild;e;e=e.nextSibling)n+=s(e)}else if(i===3||i===4)return e.nodeValue}else for(;t=e[r];r++)n+=s(t);return n},o=nt.isXML=function(e){var t=e&&(e.ownerDocument||e).documentElement;return t?t.nodeName!=="HTML":!1},u=nt.contains=y.contains?function(e,t){var n=e.nodeType===9?e.documentElement:e,r=t&&t.parentNode;return e===r||!!(r&&r.nodeType===1&&n.contains&&n.contains(r))}:y.compareDocumentPosition?function(e,t){return t&&!!(e.compareDocumentPosition(t)&16)}:function(e,t){while(t=t.parentNode)if(t===e)return!0;return!1},nt.attr=function(e,t){var n,r=o(e);return r||(t=t.toLowerCase()),(n=i.attrHandle[t])?n(e):r||Y?e.getAttribute(t):(n=e.getAttributeNode(t),n?typeof e[t]=="boolean"?e[t]?t:null:n.specified?n.value:null:null)},i=nt.selectors={cacheLength:50,createPseudo:N,match:J,attrHandle:G?{}:{href:function(e){return e.getAttribute("href",2)},type:function(e){return e.getAttribute("type")}},find:{ID:r?function(e,t,n){if(typeof t.getElementById!==p&&!n){var r=t.getElementById(e);return r&&r.parentNode?[r]:[]}}:function(e,n,r){if(typeof n.getElementById!==p&&!r){var i=n.getElementById(e);return i?i.id===e||typeof i.getAttributeNode!==p&&i.getAttributeNode("id").value===e?[i]:t:[]}},TAG:Q?function(e,t){if(typeof t.getElementsByTagName!==p)return t.getElementsByTagName(e)}:function(e,t){var n=t.getElementsByTagName(e);if(e==="*"){var r,i=[],s=0;for(;r=n[s];s++)r.nodeType===1&&i.push(r);return i}return n},NAME:et&&function(e,t){if(typeof 
t.getElementsByName!==p)return t.getElementsByName(name)},CLASS:Z&&function(e,t,n){if(typeof t.getElementsByClassName!==p&&!n)return t.getElementsByClassName(e)}},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace($,""),e[3]=(e[4]||e[5]||"").replace($,""),e[2]==="~="&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),e[1]==="nth"?(e[2]||nt.error(e[0]),e[3]=+(e[3]?e[4]+(e[5]||1):2*(e[2]==="even"||e[2]==="odd")),e[4]=+(e[6]+e[7]||e[2]==="odd")):e[2]&&nt.error(e[0]),e},PSEUDO:function(e){var t,n;if(J.CHILD.test(e[0]))return null;if(e[3])e[2]=e[3];else if(t=e[4])q.test(t)&&(n=ut(t,!0))&&(n=t.indexOf(")",t.length-n)-t.length)&&(t=t.slice(0,n),e[0]=e[0].slice(0,n)),e[2]=t;return e.slice(0,3)}},filter:{ID:r?function(e){return e=e.replace($,""),function(t){return t.getAttribute("id")===e}}:function(e){return e=e.replace($,""),function(t){var n=typeof t.getAttributeNode!==p&&t.getAttributeNode("id");return n&&n.value===e}},TAG:function(e){return e==="*"?function(){return!0}:(e=e.replace($,"").toLowerCase(),function(t){return t.nodeName&&t.nodeName.toLowerCase()===e})},CLASS:function(e){var t=k[d][e+" "];return t||(t=new RegExp("(^|"+O+")"+e+"("+O+"|$)"))&&k(e,function(e){return t.test(e.className||typeof e.getAttribute!==p&&e.getAttribute("class")||"")})},ATTR:function(e,t,n){return function(r,i){var s=nt.attr(r,e);return s==null?t==="!=":t?(s+="",t==="="?s===n:t==="!="?s!==n:t==="^="?n&&s.indexOf(n)===0:t==="*="?n&&s.indexOf(n)>-1:t==="$="?n&&s.substr(s.length-n.length)===n:t==="~="?(" "+s+" ").indexOf(n)>-1:t==="|="?s===n||s.substr(0,n.length+1)===n+"-":!1):!0}},CHILD:function(e,t,n,r){return e==="nth"?function(e){var t,i,s=e.parentNode;if(n===1&&r===0)return!0;if(s){i=0;for(t=s.firstChild;t;t=t.nextSibling)if(t.nodeType===1){i++;if(e===t)break}}return i-=r,i===n||i%n===0&&i/n>=0}:function(t){var 
n=t;switch(e){case"only":case"first":while(n=n.previousSibling)if(n.nodeType===1)return!1;if(e==="first")return!0;n=t;case"last":while(n=n.nextSibling)if(n.nodeType===1)return!1;return!0}}},PSEUDO:function(e,t){var n,r=i.pseudos[e]||i.setFilters[e.toLowerCase()]||nt.error("unsupported pseudo: "+e);return r[d]?r(t):r.length>1?(n=[e,e,"",t],i.setFilters.hasOwnProperty(e.toLowerCase())?N(function(e,n){var i,s=r(e,t),o=s.length;while(o--)i=T.call(e,s[o]),e[i]=!(n[i]=s[o])}):function(e){return r(e,0,n)}):r}},pseudos:{not:N(function(e){var t=[],n=[],r=a(e.replace(j,"$1"));return r[d]?N(function(e,t,n,i){var s,o=r(e,null,i,[]),u=e.length;while(u--)if(s=o[u])e[u]=!(t[u]=s)}):function(e,i,s){return t[0]=e,r(t,null,s,n),!n.pop()}}),has:N(function(e){return function(t){return nt(e,t).length>0}}),contains:N(function(e){return function(t){return(t.textContent||t.innerText||s(t)).indexOf(e)>-1}}),enabled:function(e){return e.disabled===!1},disabled:function(e){return e.disabled===!0},checked:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&!!e.checked||t==="option"&&!!e.selected},selected:function(e){return e.parentNode&&e.parentNode.selectedIndex,e.selected===!0},parent:function(e){return!i.pseudos.empty(e)},empty:function(e){var t;e=e.firstChild;while(e){if(e.nodeName>"@"||(t=e.nodeType)===3||t===4)return!1;e=e.nextSibling}return!0},header:function(e){return X.test(e.nodeName)},text:function(e){var t,n;return e.nodeName.toLowerCase()==="input"&&(t=e.type)==="text"&&((n=e.getAttribute("type"))==null||n.toLowerCase()===t)},radio:rt("radio"),checkbox:rt("checkbox"),file:rt("file"),password:rt("password"),image:rt("image"),submit:it("submit"),reset:it("reset"),button:function(e){var t=e.nodeName.toLowerCase();return t==="input"&&e.type==="button"||t==="button"},input:function(e){return V.test(e.nodeName)},focus:function(e){var t=e.ownerDocument;return e===t.activeElement&&(!t.hasFocus||t.hasFocus())&&!!(e.type||e.href||~e.tabIndex)},active:function(e){return 
e===e.ownerDocument.activeElement},first:st(function(){return[0]}),last:st(function(e,t){return[t-1]}),eq:st(function(e,t,n){return[n<0?n+t:n]}),even:st(function(e,t){for(var n=0;n<t;n+=2)e.push(n);return e}),odd:st(function(e,t){for(var n=1;n<t;n+=2)e.push(n);return e}),lt:st(function(e,t,n){for(var r=n<0?n+t:n;--r>=0;)e.push(r);return e}),gt:st(function(e,t,n){for(var r=n<0?n+t:n;++r<t;)e.push(r);return e})}},f=y.compareDocumentPosition?function(e,t){return e===t?(l=!0,0):(!e.compareDocumentPosition||!t.compareDocumentPosition?e.compareDocumentPosition:e.compareDocumentPosition(t)&4)?-1:1}:function(e,t){if(e===t)return l=!0,0;if(e.sourceIndex&&t.sourceIndex)return e.sourceIndex-t.sourceIndex;var n,r,i=[],s=[],o=e.parentNode,u=t.parentNode,a=o;if(o===u)return ot(e,t);if(!o)return-1;if(!u)return 1;while(a)i.unshift(a),a=a.parentNode;a=u;while(a)s.unshift(a),a=a.parentNode;n=i.length,r=s.length;for(var f=0;f<n&&f<r;f++)if(i[f]!==s[f])return ot(i[f],s[f]);return f===n?ot(e,s[f],-1):ot(i[f],t,1)},[0,0].sort(f),h=!l,nt.uniqueSort=function(e){var t,n=[],r=1,i=0;l=h,e.sort(f);if(l){for(;t=e[r];r++)t===e[r-1]&&(i=n.push(r));while(i--)e.splice(n[i],1)}return e},nt.error=function(e){throw new Error("Syntax error, unrecognized expression: "+e)},a=nt.compile=function(e,t){var n,r=[],i=[],s=A[d][e+" "];if(!s){t||(t=ut(e)),n=t.length;while(n--)s=ht(t[n]),s[d]?r.push(s):i.push(s);s=A(e,pt(i,r))}return s},g.querySelectorAll&&function(){var e,t=vt,n=/'|\\/g,r=/\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g,i=[":focus"],s=[":active"],u=y.matchesSelector||y.mozMatchesSelector||y.webkitMatchesSelector||y.oMatchesSelector||y.msMatchesSelector;K(function(e){e.innerHTML="<select><option selected=''></option></select>",e.querySelectorAll("[selected]").length||i.push("\\["+O+"*(?:checked|disabled|ismap|multiple|readonly|selected|value)"),e.querySelectorAll(":checked").length||i.push(":checked")}),K(function(e){e.innerHTML="<p 
test=''></p>",e.querySelectorAll("[test^='']").length&&i.push("[*^$]="+O+"*(?:\"\"|'')"),e.innerHTML="<input type='hidden'/>",e.querySelectorAll(":enabled").length||i.push(":enabled",":disabled")}),i=new RegExp(i.join("|")),vt=function(e,r,s,o,u){if(!o&&!u&&!i.test(e)){var a,f,l=!0,c=d,h=r,p=r.nodeType===9&&e;if(r.nodeType===1&&r.nodeName.toLowerCase()!=="object"){a=ut(e),(l=r.getAttribute("id"))?c=l.replace(n,"\\$&"):r.setAttribute("id",c),c="[id='"+c+"'] ",f=a.length;while(f--)a[f]=c+a[f].join("");h=z.test(e)&&r.parentNode||r,p=a.join(",")}if(p)try{return S.apply(s,x.call(h.querySelectorAll(p),0)),s}catch(v){}finally{l||r.removeAttribute("id")}}return t(e,r,s,o,u)},u&&(K(function(t){e=u.call(t,"div");try{u.call(t,"[test!='']:sizzle"),s.push("!=",H)}catch(n){}}),s=new RegExp(s.join("|")),nt.matchesSelector=function(t,n){n=n.replace(r,"='$1']");if(!o(t)&&!s.test(n)&&!i.test(n))try{var a=u.call(t,n);if(a||e||t.document&&t.document.nodeType!==11)return a}catch(f){}return nt(n,null,null,[t]).length>0})}(),i.pseudos.nth=i.pseudos.eq,i.filters=mt.prototype=i.pseudos,i.setFilters=new mt,nt.attr=v.attr,v.find=nt,v.expr=nt.selectors,v.expr[":"]=v.expr.pseudos,v.unique=nt.uniqueSort,v.text=nt.getText,v.isXMLDoc=nt.isXML,v.contains=nt.contains}(e);var nt=/Until$/,rt=/^(?:parents|prev(?:Until|All))/,it=/^.[^:#\[\.,]*$/,st=v.expr.match.needsContext,ot={children:!0,contents:!0,next:!0,prev:!0};v.fn.extend({find:function(e){var t,n,r,i,s,o,u=this;if(typeof e!="string")return v(e).filter(function(){for(t=0,n=u.length;t<n;t++)if(v.contains(u[t],this))return!0});o=this.pushStack("","find",e);for(t=0,n=this.length;t<n;t++){r=o.length,v.find(e,this[t],o);if(t>0)for(i=r;i<o.length;i++)for(s=0;s<r;s++)if(o[s]===o[i]){o.splice(i--,1);break}}return o},has:function(e){var t,n=v(e,this),r=n.length;return this.filter(function(){for(t=0;t<r;t++)if(v.contains(this,n[t]))return!0})},not:function(e){return this.pushStack(ft(this,e,!1),"not",e)},filter:function(e){return 
this.pushStack(ft(this,e,!0),"filter",e)},is:function(e){return!!e&&(typeof e=="string"?st.test(e)?v(e,this.context).index(this[0])>=0:v.filter(e,this).length>0:this.filter(e).length>0)},closest:function(e,t){var n,r=0,i=this.length,s=[],o=st.test(e)||typeof e!="string"?v(e,t||this.context):0;for(;r<i;r++){n=this[r];while(n&&n.ownerDocument&&n!==t&&n.nodeType!==11){if(o?o.index(n)>-1:v.find.matchesSelector(n,e)){s.push(n);break}n=n.parentNode}}return s=s.length>1?v.unique(s):s,this.pushStack(s,"closest",e)},index:function(e){return e?typeof e=="string"?v.inArray(this[0],v(e)):v.inArray(e.jquery?e[0]:e,this):this[0]&&this[0].parentNode?this.prevAll().length:-1},add:function(e,t){var n=typeof e=="string"?v(e,t):v.makeArray(e&&e.nodeType?[e]:e),r=v.merge(this.get(),n);return this.pushStack(ut(n[0])||ut(r[0])?r:v.unique(r))},addBack:function(e){return this.add(e==null?this.prevObject:this.prevObject.filter(e))}}),v.fn.andSelf=v.fn.addBack,v.each({parent:function(e){var t=e.parentNode;return t&&t.nodeType!==11?t:null},parents:function(e){return v.dir(e,"parentNode")},parentsUntil:function(e,t,n){return v.dir(e,"parentNode",n)},next:function(e){return at(e,"nextSibling")},prev:function(e){return at(e,"previousSibling")},nextAll:function(e){return v.dir(e,"nextSibling")},prevAll:function(e){return v.dir(e,"previousSibling")},nextUntil:function(e,t,n){return v.dir(e,"nextSibling",n)},prevUntil:function(e,t,n){return v.dir(e,"previousSibling",n)},siblings:function(e){return v.sibling((e.parentNode||{}).firstChild,e)},children:function(e){return v.sibling(e.firstChild)},contents:function(e){return v.nodeName(e,"iframe")?e.contentDocument||e.contentWindow.document:v.merge([],e.childNodes)}},function(e,t){v.fn[e]=function(n,r){var i=v.map(this,t,n);return nt.test(e)||(r=n),r&&typeof 
r=="string"&&(i=v.filter(r,i)),i=this.length>1&&!ot[e]?v.unique(i):i,this.length>1&&rt.test(e)&&(i=i.reverse()),this.pushStack(i,e,l.call(arguments).join(","))}}),v.extend({filter:function(e,t,n){return n&&(e=":not("+e+")"),t.length===1?v.find.matchesSelector(t[0],e)?[t[0]]:[]:v.find.matches(e,t)},dir:function(e,n,r){var i=[],s=e[n];while(s&&s.nodeType!==9&&(r===t||s.nodeType!==1||!v(s).is(r)))s.nodeType===1&&i.push(s),s=s[n];return i},sibling:function(e,t){var n=[];for(;e;e=e.nextSibling)e.nodeType===1&&e!==t&&n.push(e);return n}});var ct="abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",ht=/ jQuery\d+="(?:null|\d+)"/g,pt=/^\s+/,dt=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,vt=/<([\w:]+)/,mt=/<tbody/i,gt=/<|&#?\w+;/,yt=/<(?:script|style|link)/i,bt=/<(?:script|object|embed|option|style)/i,wt=new RegExp("<(?:"+ct+")[\\s/>]","i"),Et=/^(?:checkbox|radio)$/,St=/checked\s*(?:[^=]|=\s*.checked.)/i,xt=/\/(java|ecma)script/i,Tt=/^\s*<!(?:\[CDATA\[|\-\-)|[\]\-]{2}>\s*$/g,Nt={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},Ct=lt(i),kt=Ct.appendChild(i.createElement("div"));Nt.optgroup=Nt.option,Nt.tbody=Nt.tfoot=Nt.colgroup=Nt.caption=Nt.thead,Nt.th=Nt.td,v.support.htmlSerialize||(Nt._default=[1,"X<div>","</div>"]),v.fn.extend({text:function(e){return v.access(this,function(e){return e===t?v.text(this):this.empty().append((this[0]&&this[0].ownerDocument||i).createTextNode(e))},null,e,arguments.length)},wrapAll:function(e){if(v.isFunction(e))return this.each(function(t){v(this).wrapAll(e.call(this,t))});if(this[0]){var 
t=v(e,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&t.insertBefore(this[0]),t.map(function(){var e=this;while(e.firstChild&&e.firstChild.nodeType===1)e=e.firstChild;return e}).append(this)}return this},wrapInner:function(e){return v.isFunction(e)?this.each(function(t){v(this).wrapInner(e.call(this,t))}):this.each(function(){var t=v(this),n=t.contents();n.length?n.wrapAll(e):t.append(e)})},wrap:function(e){var t=v.isFunction(e);return this.each(function(n){v(this).wrapAll(t?e.call(this,n):e)})},unwrap:function(){return this.parent().each(function(){v.nodeName(this,"body")||v(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.appendChild(e)})},prepend:function(){return this.domManip(arguments,!0,function(e){(this.nodeType===1||this.nodeType===11)&&this.insertBefore(e,this.firstChild)})},before:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(e,this),"before",this.selector)}},after:function(){if(!ut(this[0]))return this.domManip(arguments,!1,function(e){this.parentNode.insertBefore(e,this.nextSibling)});if(arguments.length){var e=v.clean(arguments);return this.pushStack(v.merge(this,e),"after",this.selector)}},remove:function(e,t){var n,r=0;for(;(n=this[r])!=null;r++)if(!e||v.filter(e,[n]).length)!t&&n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),v.cleanData([n])),n.parentNode&&n.parentNode.removeChild(n);return this},empty:function(){var e,t=0;for(;(e=this[t])!=null;t++){e.nodeType===1&&v.cleanData(e.getElementsByTagName("*"));while(e.firstChild)e.removeChild(e.firstChild)}return this},clone:function(e,t){return e=e==null?!1:e,t=t==null?e:t,this.map(function(){return v.clone(this,e,t)})},html:function(e){return v.access(this,function(e){var n=this[0]||{},r=0,i=this.length;if(e===t)return 
n.nodeType===1?n.innerHTML.replace(ht,""):t;if(typeof e=="string"&&!yt.test(e)&&(v.support.htmlSerialize||!wt.test(e))&&(v.support.leadingWhitespace||!pt.test(e))&&!Nt[(vt.exec(e)||["",""])[1].toLowerCase()]){e=e.replace(dt,"<$1></$2>");try{for(;r<i;r++)n=this[r]||{},n.nodeType===1&&(v.cleanData(n.getElementsByTagName("*")),n.innerHTML=e);n=0}catch(s){}}n&&this.empty().append(e)},null,e,arguments.length)},replaceWith:function(e){return ut(this[0])?this.length?this.pushStack(v(v.isFunction(e)?e():e),"replaceWith",e):this:v.isFunction(e)?this.each(function(t){var n=v(this),r=n.html();n.replaceWith(e.call(this,t,r))}):(typeof e!="string"&&(e=v(e).detach()),this.each(function(){var t=this.nextSibling,n=this.parentNode;v(this).remove(),t?v(t).before(e):v(n).append(e)}))},detach:function(e){return this.remove(e,!0)},domManip:function(e,n,r){e=[].concat.apply([],e);var i,s,o,u,a=0,f=e[0],l=[],c=this.length;if(!v.support.checkClone&&c>1&&typeof f=="string"&&St.test(f))return this.each(function(){v(this).domManip(e,n,r)});if(v.isFunction(f))return this.each(function(i){var s=v(this);e[0]=f.call(this,i,n?s.html():t),s.domManip(e,n,r)});if(this[0]){i=v.buildFragment(e,this,l),o=i.fragment,s=o.firstChild,o.childNodes.length===1&&(o=s);if(s){n=n&&v.nodeName(s,"tr");for(u=i.cacheable||c-1;a<c;a++)r.call(n&&v.nodeName(this[a],"table")?Lt(this[a],"tbody"):this[a],a===u?o:v.clone(o,!0,!0))}o=s=null,l.length&&v.each(l,function(e,t){t.src?v.ajax?v.ajax({url:t.src,type:"GET",dataType:"script",async:!1,global:!1,"throws":!0}):v.error("no ajax"):v.globalEval((t.text||t.textContent||t.innerHTML||"").replace(Tt,"")),t.parentNode&&t.parentNode.removeChild(t)})}return this}}),v.buildFragment=function(e,n,r){var s,o,u,a=e[0];return n=n||i,n=!n.nodeType&&n[0]||n,n=n.ownerDocument||n,e.length===1&&typeof 
a=="string"&&a.length<512&&n===i&&a.charAt(0)==="<"&&!bt.test(a)&&(v.support.checkClone||!St.test(a))&&(v.support.html5Clone||!wt.test(a))&&(o=!0,s=v.fragments[a],u=s!==t),s||(s=n.createDocumentFragment(),v.clean(e,n,s,r),o&&(v.fragments[a]=u&&s)),{fragment:s,cacheable:o}},v.fragments={},v.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(e,t){v.fn[e]=function(n){var r,i=0,s=[],o=v(n),u=o.length,a=this.length===1&&this[0].parentNode;if((a==null||a&&a.nodeType===11&&a.childNodes.length===1)&&u===1)return o[t](this[0]),this;for(;i<u;i++)r=(i>0?this.clone(!0):this).get(),v(o[i])[t](r),s=s.concat(r);return this.pushStack(s,e,o.selector)}}),v.extend({clone:function(e,t,n){var r,i,s,o;v.support.html5Clone||v.isXMLDoc(e)||!wt.test("<"+e.nodeName+">")?o=e.cloneNode(!0):(kt.innerHTML=e.outerHTML,kt.removeChild(o=kt.firstChild));if((!v.support.noCloneEvent||!v.support.noCloneChecked)&&(e.nodeType===1||e.nodeType===11)&&!v.isXMLDoc(e)){Ot(e,o),r=Mt(e),i=Mt(o);for(s=0;r[s];++s)i[s]&&Ot(r[s],i[s])}if(t){At(e,o);if(n){r=Mt(e),i=Mt(o);for(s=0;r[s];++s)At(r[s],i[s])}}return r=i=null,o},clean:function(e,t,n,r){var s,o,u,a,f,l,c,h,p,d,m,g,y=t===i&&Ct,b=[];if(!t||typeof t.createDocumentFragment=="undefined")t=i;for(s=0;(u=e[s])!=null;s++){typeof u=="number"&&(u+="");if(!u)continue;if(typeof 
u=="string")if(!gt.test(u))u=t.createTextNode(u);else{y=y||lt(t),c=t.createElement("div"),y.appendChild(c),u=u.replace(dt,"<$1></$2>"),a=(vt.exec(u)||["",""])[1].toLowerCase(),f=Nt[a]||Nt._default,l=f[0],c.innerHTML=f[1]+u+f[2];while(l--)c=c.lastChild;if(!v.support.tbody){h=mt.test(u),p=a==="table"&&!h?c.firstChild&&c.firstChild.childNodes:f[1]==="<table>"&&!h?c.childNodes:[];for(o=p.length-1;o>=0;--o)v.nodeName(p[o],"tbody")&&!p[o].childNodes.length&&p[o].parentNode.removeChild(p[o])}!v.support.leadingWhitespace&&pt.test(u)&&c.insertBefore(t.createTextNode(pt.exec(u)[0]),c.firstChild),u=c.childNodes,c.parentNode.removeChild(c)}u.nodeType?b.push(u):v.merge(b,u)}c&&(u=c=y=null);if(!v.support.appendChecked)for(s=0;(u=b[s])!=null;s++)v.nodeName(u,"input")?_t(u):typeof u.getElementsByTagName!="undefined"&&v.grep(u.getElementsByTagName("input"),_t);if(n){m=function(e){if(!e.type||xt.test(e.type))return r?r.push(e.parentNode?e.parentNode.removeChild(e):e):n.appendChild(e)};for(s=0;(u=b[s])!=null;s++)if(!v.nodeName(u,"script")||!m(u))n.appendChild(u),typeof u.getElementsByTagName!="undefined"&&(g=v.grep(v.merge([],u.getElementsByTagName("script")),m),b.splice.apply(b,[s+1,0].concat(g)),s+=g.length)}return b},cleanData:function(e,t){var n,r,i,s,o=0,u=v.expando,a=v.cache,f=v.support.deleteExpando,l=v.event.special;for(;(i=e[o])!=null;o++)if(t||v.acceptData(i)){r=i[u],n=r&&a[r];if(n){if(n.events)for(s in n.events)l[s]?v.event.remove(i,s):v.removeEvent(i,s,n.handle);a[r]&&(delete a[r],f?delete i[u]:i.removeAttribute?i.removeAttribute(u):i[u]=null,v.deletedIds.push(r))}}}}),function(){var e,t;v.uaMatch=function(e){e=e.toLowerCase();var t=/(chrome)[ \/]([\w.]+)/.exec(e)||/(webkit)[ \/]([\w.]+)/.exec(e)||/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(e)||/(msie) ([\w.]+)/.exec(e)||e.indexOf("compatible")<0&&/(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(e)||[];return{browser:t[1]||"",version:t[2]||"0"}},e=v.uaMatch(o.userAgent),t={},e.browser&&(t[e.browser]=!0,t.version=e.version),t.chrome?t.webkit=!0:t.webkit&&(t.safari=!0),v.browser=t,v.sub=function(){function e(t,n){return new e.fn.init(t,n)}v.extend(!0,e,this),e.superclass=this,e.fn=e.prototype=this(),e.fn.constructor=e,e.sub=this.sub,e.fn.init=function(r,i){return i&&i instanceof v&&!(i instanceof e)&&(i=e(i)),v.fn.init.call(this,r,i,t)},e.fn.init.prototype=e.fn;var t=e(i);return e}}();var Dt,Pt,Ht,Bt=/alpha\([^)]*\)/i,jt=/opacity=([^)]*)/,Ft=/^(top|right|bottom|left)$/,It=/^(none|table(?!-c[ea]).+)/,qt=/^margin/,Rt=new RegExp("^("+m+")(.*)$","i"),Ut=new RegExp("^("+m+")(?!px)[a-z%]+$","i"),zt=new RegExp("^([-+])=("+m+")","i"),Wt={BODY:"block"},Xt={position:"absolute",visibility:"hidden",display:"block"},Vt={letterSpacing:0,fontWeight:400},$t=["Top","Right","Bottom","Left"],Jt=["Webkit","O","Moz","ms"],Kt=v.fn.toggle;v.fn.extend({css:function(e,n){return v.access(this,function(e,n,r){return r!==t?v.style(e,n,r):v.css(e,n)},e,n,arguments.length>1)},show:function(){return Yt(this,!0)},hide:function(){return Yt(this)},toggle:function(e,t){var n=typeof e=="boolean";return v.isFunction(e)&&v.isFunction(t)?Kt.apply(this,arguments):this.each(function(){(n?e:Gt(this))?v(this).show():v(this).hide()})}}),v.extend({cssHooks:{opacity:{get:function(e,t){if(t){var n=Dt(e,"opacity");return n===""?"1":n}}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":v.support.cssFloat?"cssFloat":"styleFloat"},style:function(e,n,r,i){if(!e||e.nodeType===3||e.nodeType===8||!e.style)return;var s,o,u,a=v.camelCase(n),f=e.style;n=v.cssProps[a]||(v.cssProps[a]=Qt(f,a)),u=v.cssHooks[n]||v.cssHooks[a];if(r===t)return u&&"get"in u&&(s=u.get(e,!1,i))!==t?s:f[n];o=typeof 
r,o==="string"&&(s=zt.exec(r))&&(r=(s[1]+1)*s[2]+parseFloat(v.css(e,n)),o="number");if(r==null||o==="number"&&isNaN(r))return;o==="number"&&!v.cssNumber[a]&&(r+="px");if(!u||!("set"in u)||(r=u.set(e,r,i))!==t)try{f[n]=r}catch(l){}},css:function(e,n,r,i){var s,o,u,a=v.camelCase(n);return n=v.cssProps[a]||(v.cssProps[a]=Qt(e.style,a)),u=v.cssHooks[n]||v.cssHooks[a],u&&"get"in u&&(s=u.get(e,!0,i)),s===t&&(s=Dt(e,n)),s==="normal"&&n in Vt&&(s=Vt[n]),r||i!==t?(o=parseFloat(s),r||v.isNumeric(o)?o||0:s):s},swap:function(e,t,n){var r,i,s={};for(i in t)s[i]=e.style[i],e.style[i]=t[i];r=n.call(e);for(i in t)e.style[i]=s[i];return r}}),e.getComputedStyle?Dt=function(t,n){var r,i,s,o,u=e.getComputedStyle(t,null),a=t.style;return u&&(r=u.getPropertyValue(n)||u[n],r===""&&!v.contains(t.ownerDocument,t)&&(r=v.style(t,n)),Ut.test(r)&&qt.test(n)&&(i=a.width,s=a.minWidth,o=a.maxWidth,a.minWidth=a.maxWidth=a.width=r,r=u.width,a.width=i,a.minWidth=s,a.maxWidth=o)),r}:i.documentElement.currentStyle&&(Dt=function(e,t){var n,r,i=e.currentStyle&&e.currentStyle[t],s=e.style;return i==null&&s&&s[t]&&(i=s[t]),Ut.test(i)&&!Ft.test(t)&&(n=s.left,r=e.runtimeStyle&&e.runtimeStyle.left,r&&(e.runtimeStyle.left=e.currentStyle.left),s.left=t==="fontSize"?"1em":i,i=s.pixelLeft+"px",s.left=n,r&&(e.runtimeStyle.left=r)),i===""?"auto":i}),v.each(["height","width"],function(e,t){v.cssHooks[t]={get:function(e,n,r){if(n)return e.offsetWidth===0&&It.test(Dt(e,"display"))?v.swap(e,Xt,function(){return tn(e,t,r)}):tn(e,t,r)},set:function(e,n,r){return Zt(e,n,r?en(e,t,r,v.support.boxSizing&&v.css(e,"boxSizing")==="border-box"):0)}}}),v.support.opacity||(v.cssHooks.opacity={get:function(e,t){return jt.test((t&&e.currentStyle?e.currentStyle.filter:e.style.filter)||"")?.01*parseFloat(RegExp.$1)+"":t?"1":""},set:function(e,t){var 
n=e.style,r=e.currentStyle,i=v.isNumeric(t)?"alpha(opacity="+t*100+")":"",s=r&&r.filter||n.filter||"";n.zoom=1;if(t>=1&&v.trim(s.replace(Bt,""))===""&&n.removeAttribute){n.removeAttribute("filter");if(r&&!r.filter)return}n.filter=Bt.test(s)?s.replace(Bt,i):s+" "+i}}),v(function(){v.support.reliableMarginRight||(v.cssHooks.marginRight={get:function(e,t){return v.swap(e,{display:"inline-block"},function(){if(t)return Dt(e,"marginRight")})}}),!v.support.pixelPosition&&v.fn.position&&v.each(["top","left"],function(e,t){v.cssHooks[t]={get:function(e,n){if(n){var r=Dt(e,t);return Ut.test(r)?v(e).position()[t]+"px":r}}}})}),v.expr&&v.expr.filters&&(v.expr.filters.hidden=function(e){return e.offsetWidth===0&&e.offsetHeight===0||!v.support.reliableHiddenOffsets&&(e.style&&e.style.display||Dt(e,"display"))==="none"},v.expr.filters.visible=function(e){return!v.expr.filters.hidden(e)}),v.each({margin:"",padding:"",border:"Width"},function(e,t){v.cssHooks[e+t]={expand:function(n){var r,i=typeof n=="string"?n.split(" "):[n],s={};for(r=0;r<4;r++)s[e+$t[r]+t]=i[r]||i[r-2]||i[0];return s}},qt.test(e)||(v.cssHooks[e+t].set=Zt)});var rn=/%20/g,sn=/\[\]$/,on=/\r?\n/g,un=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,an=/^(?:select|textarea)/i;v.fn.extend({serialize:function(){return v.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?v.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||an.test(this.nodeName)||un.test(this.type))}).map(function(e,t){var n=v(this).val();return n==null?null:v.isArray(n)?v.map(n,function(e,n){return{name:t.name,value:e.replace(on,"\r\n")}}):{name:t.name,value:n.replace(on,"\r\n")}}).get()}}),v.param=function(e,n){var 
r,i=[],s=function(e,t){t=v.isFunction(t)?t():t==null?"":t,i[i.length]=encodeURIComponent(e)+"="+encodeURIComponent(t)};n===t&&(n=v.ajaxSettings&&v.ajaxSettings.traditional);if(v.isArray(e)||e.jquery&&!v.isPlainObject(e))v.each(e,function(){s(this.name,this.value)});else for(r in e)fn(r,e[r],n,s);return i.join("&").replace(rn,"+")};var ln,cn,hn=/#.*$/,pn=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,dn=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,vn=/^(?:GET|HEAD)$/,mn=/^\/\//,gn=/\?/,yn=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bn=/([?&])_=[^&]*/,wn=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+)|)|)/,En=v.fn.load,Sn={},xn={},Tn=["*/"]+["*"];try{cn=s.href}catch(Nn){cn=i.createElement("a"),cn.href="",cn=cn.href}ln=wn.exec(cn.toLowerCase())||[],v.fn.load=function(e,n,r){if(typeof e!="string"&&En)return En.apply(this,arguments);if(!this.length)return this;var i,s,o,u=this,a=e.indexOf(" ");return a>=0&&(i=e.slice(a,e.length),e=e.slice(0,a)),v.isFunction(n)?(r=n,n=t):n&&typeof n=="object"&&(s="POST"),v.ajax({url:e,type:s,dataType:"html",data:n,complete:function(e,t){r&&u.each(r,o||[e.responseText,t,e])}}).done(function(e){o=arguments,u.html(i?v("<div>").append(e.replace(yn,"")).find(i):e)}),this},v.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(e,t){v.fn[t]=function(e){return this.on(t,e)}}),v.each(["get","post"],function(e,n){v[n]=function(e,r,i,s){return v.isFunction(r)&&(s=s||i,i=r,r=t),v.ajax({type:n,url:e,data:r,success:i,dataType:s})}}),v.extend({getScript:function(e,n){return v.get(e,t,n,"script")},getJSON:function(e,t,n){return v.get(e,t,n,"json")},ajaxSetup:function(e,t){return t?Ln(e,v.ajaxSettings):(t=e,e=v.ajaxSettings),Ln(e,t),e},ajaxSettings:{url:cn,isLocal:dn.test(ln[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded; charset=UTF-8",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, 
text/javascript","*":Tn},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":e.String,"text html":!0,"text json":v.parseJSON,"text xml":v.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:Cn(Sn),ajaxTransport:Cn(xn),ajax:function(e,n){function T(e,n,s,a){var l,y,b,w,S,T=n;if(E===2)return;E=2,u&&clearTimeout(u),o=t,i=a||"",x.readyState=e>0?4:0,s&&(w=An(c,x,s));if(e>=200&&e<300||e===304)c.ifModified&&(S=x.getResponseHeader("Last-Modified"),S&&(v.lastModified[r]=S),S=x.getResponseHeader("Etag"),S&&(v.etag[r]=S)),e===304?(T="notmodified",l=!0):(l=On(c,w),T=l.state,y=l.data,b=l.error,l=!b);else{b=T;if(!T||e)T="error",e<0&&(e=0)}x.status=e,x.statusText=(n||T)+"",l?d.resolveWith(h,[y,T,x]):d.rejectWith(h,[x,T,b]),x.statusCode(g),g=t,f&&p.trigger("ajax"+(l?"Success":"Error"),[x,c,l?y:b]),m.fireWith(h,[x,T]),f&&(p.trigger("ajaxComplete",[x,c]),--v.active||v.event.trigger("ajaxStop"))}typeof e=="object"&&(n=e,e=t),n=n||{};var r,i,s,o,u,a,f,l,c=v.ajaxSetup({},n),h=c.context||c,p=h!==c&&(h.nodeType||h instanceof v)?v(h):v.event,d=v.Deferred(),m=v.Callbacks("once memory"),g=c.statusCode||{},b={},w={},E=0,S="canceled",x={readyState:0,setRequestHeader:function(e,t){if(!E){var n=e.toLowerCase();e=w[n]=w[n]||e,b[e]=t}return this},getAllResponseHeaders:function(){return E===2?i:null},getResponseHeader:function(e){var n;if(E===2){if(!s){s={};while(n=pn.exec(i))s[n[1].toLowerCase()]=n[2]}n=s[e.toLowerCase()]}return n===t?null:n},overrideMimeType:function(e){return E||(c.mimeType=e),this},abort:function(e){return e=e||S,o&&o.abort(e),T(0,e),this}};d.promise(x),x.success=x.done,x.error=x.fail,x.complete=m.add,x.statusCode=function(e){if(e){var t;if(E<2)for(t in e)g[t]=[g[t],e[t]];else t=e[x.status],x.always(t)}return 
this},c.url=((e||c.url)+"").replace(hn,"").replace(mn,ln[1]+"//"),c.dataTypes=v.trim(c.dataType||"*").toLowerCase().split(y),c.crossDomain==null&&(a=wn.exec(c.url.toLowerCase()),c.crossDomain=!(!a||a[1]===ln[1]&&a[2]===ln[2]&&(a[3]||(a[1]==="http:"?80:443))==(ln[3]||(ln[1]==="http:"?80:443)))),c.data&&c.processData&&typeof c.data!="string"&&(c.data=v.param(c.data,c.traditional)),kn(Sn,c,n,x);if(E===2)return x;f=c.global,c.type=c.type.toUpperCase(),c.hasContent=!vn.test(c.type),f&&v.active++===0&&v.event.trigger("ajaxStart");if(!c.hasContent){c.data&&(c.url+=(gn.test(c.url)?"&":"?")+c.data,delete c.data),r=c.url;if(c.cache===!1){var N=v.now(),C=c.url.replace(bn,"$1_="+N);c.url=C+(C===c.url?(gn.test(c.url)?"&":"?")+"_="+N:"")}}(c.data&&c.hasContent&&c.contentType!==!1||n.contentType)&&x.setRequestHeader("Content-Type",c.contentType),c.ifModified&&(r=r||c.url,v.lastModified[r]&&x.setRequestHeader("If-Modified-Since",v.lastModified[r]),v.etag[r]&&x.setRequestHeader("If-None-Match",v.etag[r])),x.setRequestHeader("Accept",c.dataTypes[0]&&c.accepts[c.dataTypes[0]]?c.accepts[c.dataTypes[0]]+(c.dataTypes[0]!=="*"?", "+Tn+"; q=0.01":""):c.accepts["*"]);for(l in c.headers)x.setRequestHeader(l,c.headers[l]);if(!c.beforeSend||c.beforeSend.call(h,x,c)!==!1&&E!==2){S="abort";for(l in{success:1,error:1,complete:1})x[l](c[l]);o=kn(xn,c,n,x);if(!o)T(-1,"No Transport");else{x.readyState=1,f&&p.trigger("ajaxSend",[x,c]),c.async&&c.timeout>0&&(u=setTimeout(function(){x.abort("timeout")},c.timeout));try{E=1,o.send(b,T)}catch(k){if(!(E<2))throw k;T(-1,k)}}return x}return x.abort()},active:0,lastModified:{},etag:{}});var Mn=[],_n=/\?/,Dn=/(=)\?(?=&|$)|\?\?/,Pn=v.now();v.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=Mn.pop()||v.expando+"_"+Pn++;return this[e]=!0,e}}),v.ajaxPrefilter("json jsonp",function(n,r,i){var s,o,u,a=n.data,f=n.url,l=n.jsonp!==!1,c=l&&Dn.test(f),h=l&&!c&&typeof 
a=="string"&&!(n.contentType||"").indexOf("application/x-www-form-urlencoded")&&Dn.test(a);if(n.dataTypes[0]==="jsonp"||c||h)return s=n.jsonpCallback=v.isFunction(n.jsonpCallback)?n.jsonpCallback():n.jsonpCallback,o=e[s],c?n.url=f.replace(Dn,"$1"+s):h?n.data=a.replace(Dn,"$1"+s):l&&(n.url+=(_n.test(f)?"&":"?")+n.jsonp+"="+s),n.converters["script json"]=function(){return u||v.error(s+" was not called"),u[0]},n.dataTypes[0]="json",e[s]=function(){u=arguments},i.always(function(){e[s]=o,n[s]&&(n.jsonpCallback=r.jsonpCallback,Mn.push(s)),u&&v.isFunction(o)&&o(u[0]),u=o=t}),"script"}),v.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(e){return v.globalEval(e),e}}}),v.ajaxPrefilter("script",function(e){e.cache===t&&(e.cache=!1),e.crossDomain&&(e.type="GET",e.global=!1)}),v.ajaxTransport("script",function(e){if(e.crossDomain){var n,r=i.head||i.getElementsByTagName("head")[0]||i.documentElement;return{send:function(s,o){n=i.createElement("script"),n.async="async",e.scriptCharset&&(n.charset=e.scriptCharset),n.src=e.url,n.onload=n.onreadystatechange=function(e,i){if(i||!n.readyState||/loaded|complete/.test(n.readyState))n.onload=n.onreadystatechange=null,r&&n.parentNode&&r.removeChild(n),n=t,i||o(200,"success")},r.insertBefore(n,r.firstChild)},abort:function(){n&&n.onload(0,1)}}}});var Hn,Bn=e.ActiveXObject?function(){for(var e in Hn)Hn[e](0,1)}:!1,jn=0;v.ajaxSettings.xhr=e.ActiveXObject?function(){return!this.isLocal&&Fn()||In()}:Fn,function(e){v.extend(v.support,{ajax:!!e,cors:!!e&&"withCredentials"in e})}(v.ajaxSettings.xhr()),v.support.ajax&&v.ajaxTransport(function(n){if(!n.crossDomain||v.support.cors){var r;return{send:function(i,s){var o,u,a=n.xhr();n.username?a.open(n.type,n.url,n.async,n.username,n.password):a.open(n.type,n.url,n.async);if(n.xhrFields)for(u in 
n.xhrFields)a[u]=n.xhrFields[u];n.mimeType&&a.overrideMimeType&&a.overrideMimeType(n.mimeType),!n.crossDomain&&!i["X-Requested-With"]&&(i["X-Requested-With"]="XMLHttpRequest");try{for(u in i)a.setRequestHeader(u,i[u])}catch(f){}a.send(n.hasContent&&n.data||null),r=function(e,i){var u,f,l,c,h;try{if(r&&(i||a.readyState===4)){r=t,o&&(a.onreadystatechange=v.noop,Bn&&delete Hn[o]);if(i)a.readyState!==4&&a.abort();else{u=a.status,l=a.getAllResponseHeaders(),c={},h=a.responseXML,h&&h.documentElement&&(c.xml=h);try{c.text=a.responseText}catch(p){}try{f=a.statusText}catch(p){f=""}!u&&n.isLocal&&!n.crossDomain?u=c.text?200:404:u===1223&&(u=204)}}}catch(d){i||s(-1,d)}c&&s(u,f,c,l)},n.async?a.readyState===4?setTimeout(r,0):(o=++jn,Bn&&(Hn||(Hn={},v(e).unload(Bn)),Hn[o]=r),a.onreadystatechange=r):r()},abort:function(){r&&r(0,1)}}}});var qn,Rn,Un=/^(?:toggle|show|hide)$/,zn=new RegExp("^(?:([-+])=|)("+m+")([a-z%]*)$","i"),Wn=/queueHooks$/,Xn=[Gn],Vn={"*":[function(e,t){var n,r,i=this.createTween(e,t),s=zn.exec(t),o=i.cur(),u=+o||0,a=1,f=20;if(s){n=+s[2],r=s[3]||(v.cssNumber[e]?"":"px");if(r!=="px"&&u){u=v.css(i.elem,e,!0)||n||1;do a=a||".5",u/=a,v.style(i.elem,e,u+r);while(a!==(a=i.cur()/o)&&a!==1&&--f)}i.unit=r,i.start=u,i.end=s[1]?u+(s[1]+1)*n:n}return i}]};v.Animation=v.extend(Kn,{tweener:function(e,t){v.isFunction(e)?(t=e,e=["*"]):e=e.split(" ");var n,r=0,i=e.length;for(;r<i;r++)n=e[r],Vn[n]=Vn[n]||[],Vn[n].unshift(t)},prefilter:function(e,t){t?Xn.unshift(e):Xn.push(e)}}),v.Tween=Yn,Yn.prototype={constructor:Yn,init:function(e,t,n,r,i,s){this.elem=e,this.prop=n,this.easing=i||"swing",this.options=t,this.start=this.now=this.cur(),this.end=r,this.unit=s||(v.cssNumber[n]?"":"px")},cur:function(){var e=Yn.propHooks[this.prop];return e&&e.get?e.get(this):Yn.propHooks._default.get(this)},run:function(e){var t,n=Yn.propHooks[this.prop];return 
this.options.duration?this.pos=t=v.easing[this.easing](e,this.options.duration*e,0,1,this.options.duration):this.pos=t=e,this.now=(this.end-this.start)*t+this.start,this.options.step&&this.options.step.call(this.elem,this.now,this),n&&n.set?n.set(this):Yn.propHooks._default.set(this),this}},Yn.prototype.init.prototype=Yn.prototype,Yn.propHooks={_default:{get:function(e){var t;return e.elem[e.prop]==null||!!e.elem.style&&e.elem.style[e.prop]!=null?(t=v.css(e.elem,e.prop,!1,""),!t||t==="auto"?0:t):e.elem[e.prop]},set:function(e){v.fx.step[e.prop]?v.fx.step[e.prop](e):e.elem.style&&(e.elem.style[v.cssProps[e.prop]]!=null||v.cssHooks[e.prop])?v.style(e.elem,e.prop,e.now+e.unit):e.elem[e.prop]=e.now}}},Yn.propHooks.scrollTop=Yn.propHooks.scrollLeft={set:function(e){e.elem.nodeType&&e.elem.parentNode&&(e.elem[e.prop]=e.now)}},v.each(["toggle","show","hide"],function(e,t){var n=v.fn[t];v.fn[t]=function(r,i,s){return r==null||typeof r=="boolean"||!e&&v.isFunction(r)&&v.isFunction(i)?n.apply(this,arguments):this.animate(Zn(t,!0),r,i,s)}}),v.fn.extend({fadeTo:function(e,t,n,r){return this.filter(Gt).css("opacity",0).show().end().animate({opacity:t},e,n,r)},animate:function(e,t,n,r){var i=v.isEmptyObject(e),s=v.speed(t,n,r),o=function(){var t=Kn(this,v.extend({},e),s);i&&t.stop(!0)};return i||s.queue===!1?this.each(o):this.queue(s.queue,o)},stop:function(e,n,r){var i=function(e){var t=e.stop;delete e.stop,t(r)};return typeof e!="string"&&(r=n,n=e,e=t),n&&e!==!1&&this.queue(e||"fx",[]),this.each(function(){var t=!0,n=e!=null&&e+"queueHooks",s=v.timers,o=v._data(this);if(n)o[n]&&o[n].stop&&i(o[n]);else for(n in o)o[n]&&o[n].stop&&Wn.test(n)&&i(o[n]);for(n=s.length;n--;)s[n].elem===this&&(e==null||s[n].queue===e)&&(s[n].anim.stop(r),t=!1,s.splice(n,1));(t||!r)&&v.dequeue(this,e)})}}),v.each({slideDown:Zn("show"),slideUp:Zn("hide"),slideToggle:Zn("toggle"),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(e,t){v.fn[e]=function(e,n,r){return 
this.animate(t,e,n,r)}}),v.speed=function(e,t,n){var r=e&&typeof e=="object"?v.extend({},e):{complete:n||!n&&t||v.isFunction(e)&&e,duration:e,easing:n&&t||t&&!v.isFunction(t)&&t};r.duration=v.fx.off?0:typeof r.duration=="number"?r.duration:r.duration in v.fx.speeds?v.fx.speeds[r.duration]:v.fx.speeds._default;if(r.queue==null||r.queue===!0)r.queue="fx";return r.old=r.complete,r.complete=function(){v.isFunction(r.old)&&r.old.call(this),r.queue&&v.dequeue(this,r.queue)},r},v.easing={linear:function(e){return e},swing:function(e){return.5-Math.cos(e*Math.PI)/2}},v.timers=[],v.fx=Yn.prototype.init,v.fx.tick=function(){var e,n=v.timers,r=0;qn=v.now();for(;r<n.length;r++)e=n[r],!e()&&n[r]===e&&n.splice(r--,1);n.length||v.fx.stop(),qn=t},v.fx.timer=function(e){e()&&v.timers.push(e)&&!Rn&&(Rn=setInterval(v.fx.tick,v.fx.interval))},v.fx.interval=13,v.fx.stop=function(){clearInterval(Rn),Rn=null},v.fx.speeds={slow:600,fast:200,_default:400},v.fx.step={},v.expr&&v.expr.filters&&(v.expr.filters.animated=function(e){return v.grep(v.timers,function(t){return e===t.elem}).length});var er=/^(?:body|html)$/i;v.fn.offset=function(e){if(arguments.length)return e===t?this:this.each(function(t){v.offset.setOffset(this,e,t)});var n,r,i,s,o,u,a,f={top:0,left:0},l=this[0],c=l&&l.ownerDocument;if(!c)return;return(r=c.body)===l?v.offset.bodyOffset(l):(n=c.documentElement,v.contains(n,l)?(typeof l.getBoundingClientRect!="undefined"&&(f=l.getBoundingClientRect()),i=tr(c),s=n.clientTop||r.clientTop||0,o=n.clientLeft||r.clientLeft||0,u=i.pageYOffset||n.scrollTop,a=i.pageXOffset||n.scrollLeft,{top:f.top+u-s,left:f.left+a-o}):f)},v.offset={bodyOffset:function(e){var t=e.offsetTop,n=e.offsetLeft;return v.support.doesNotIncludeMarginInBodyOffset&&(t+=parseFloat(v.css(e,"marginTop"))||0,n+=parseFloat(v.css(e,"marginLeft"))||0),{top:t,left:n}},setOffset:function(e,t,n){var r=v.css(e,"position");r==="static"&&(e.style.position="relative");var 
i=v(e),s=i.offset(),o=v.css(e,"top"),u=v.css(e,"left"),a=(r==="absolute"||r==="fixed")&&v.inArray("auto",[o,u])>-1,f={},l={},c,h;a?(l=i.position(),c=l.top,h=l.left):(c=parseFloat(o)||0,h=parseFloat(u)||0),v.isFunction(t)&&(t=t.call(e,n,s)),t.top!=null&&(f.top=t.top-s.top+c),t.left!=null&&(f.left=t.left-s.left+h),"using"in t?t.using.call(e,f):i.css(f)}},v.fn.extend({position:function(){if(!this[0])return;var e=this[0],t=this.offsetParent(),n=this.offset(),r=er.test(t[0].nodeName)?{top:0,left:0}:t.offset();return n.top-=parseFloat(v.css(e,"marginTop"))||0,n.left-=parseFloat(v.css(e,"marginLeft"))||0,r.top+=parseFloat(v.css(t[0],"borderTopWidth"))||0,r.left+=parseFloat(v.css(t[0],"borderLeftWidth"))||0,{top:n.top-r.top,left:n.left-r.left}},offsetParent:function(){return this.map(function(){var e=this.offsetParent||i.body;while(e&&!er.test(e.nodeName)&&v.css(e,"position")==="static")e=e.offsetParent;return e||i.body})}}),v.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(e,n){var r=/Y/.test(n);v.fn[e]=function(i){return v.access(this,function(e,i,s){var o=tr(e);if(s===t)return o?n in o?o[n]:o.document.documentElement[i]:e[i];o?o.scrollTo(r?v(o).scrollLeft():s,r?s:v(o).scrollTop()):e[i]=s},e,i,arguments.length,null)}}),v.each({Height:"height",Width:"width"},function(e,n){v.each({padding:"inner"+e,content:n,"":"outer"+e},function(r,i){v.fn[i]=function(i,s){var o=arguments.length&&(r||typeof i!="boolean"),u=r||(i===!0||s===!0?"margin":"border");return v.access(this,function(n,r,i){var s;return v.isWindow(n)?n.document.documentElement["client"+e]:n.nodeType===9?(s=n.documentElement,Math.max(n.body["scroll"+e],s["scroll"+e],n.body["offset"+e],s["offset"+e],s["client"+e])):i===t?v.css(n,r,i,u):v.style(n,r,i,u)},n,o?i:t,o,null)}})}),e.jQuery=e.$=v,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return v})})(window); \ No newline at end of file
diff --git a/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js b/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js
index e5a3bb0b..65b44825 100644
--- a/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js
+++ b/sphinx/themes/bizstyle/static/css3-mediaqueries_src.js
@@ -1,1104 +1,1104 @@
-/*
-css3-mediaqueries.js - CSS Helper and CSS3 Media Queries Enabler
-
-author: Wouter van der Graaf <wouter at dynora nl>
-version: 1.0 (20110330)
-license: MIT
-website: http://code.google.com/p/css3-mediaqueries-js/
-
-W3C spec: http://www.w3.org/TR/css3-mediaqueries/
-
-Note: use of embedded <style> is not recommended when using media queries, because IE has no way of returning the raw literal css text from a <style> element.
-*/
-
-
-// true prototypal inheritance (http://javascript.crockford.com/prototypal.html)
-if (typeof Object.create !== 'function') {
- Object.create = function (o) {
- function F() {}
- F.prototype = o;
- return new F();
- };
-}
-
-
-// user agent sniffing shortcuts
-var ua = {
- toString: function () {
- return navigator.userAgent;
- },
- test: function (s) {
- return this.toString().toLowerCase().indexOf(s.toLowerCase()) > -1;
- }
-};
-ua.version = (ua.toString().toLowerCase().match(/[\s\S]+(?:rv|it|ra|ie)[\/: ]([\d.]+)/) || [])[1];
-ua.webkit = ua.test('webkit');
-ua.gecko = ua.test('gecko') && !ua.webkit;
-ua.opera = ua.test('opera');
-ua.ie = ua.test('msie') && !ua.opera;
-ua.ie6 = ua.ie && document.compatMode && typeof document.documentElement.style.maxHeight === 'undefined';
-ua.ie7 = ua.ie && document.documentElement && typeof document.documentElement.style.maxHeight !== 'undefined' && typeof XDomainRequest === 'undefined';
-ua.ie8 = ua.ie && typeof XDomainRequest !== 'undefined';
-
-
-
-// initialize when DOM content is loaded
-var domReady = function () {
- var fns = [];
- var init = function () {
- if (!arguments.callee.done) { // run init functions once
- arguments.callee.done = true;
- for (var i = 0; i < fns.length; i++) {
- fns[i]();
- }
- }
- };
-
- // listeners for different browsers
- if (document.addEventListener) {
- document.addEventListener('DOMContentLoaded', init, false);
- }
- if (ua.ie) {
- (function () {
- try {
- // throws errors until after ondocumentready
- document.documentElement.doScroll('left');
- }
- catch (e) {
- setTimeout(arguments.callee, 50);
- return;
- }
- // no errors, fire
- init();
- })();
- // trying to always fire before onload
- document.onreadystatechange = function () {
- if (document.readyState === 'complete') {
- document.onreadystatechange = null;
- init();
- }
- };
- }
- if (ua.webkit && document.readyState) {
- (function () {
- if (document.readyState !== 'loading') {
- init();
- }
- else {
- setTimeout(arguments.callee, 10);
- }
- })();
- }
- window.onload = init; // fallback
-
- return function (fn) { // add fn to init functions
- if (typeof fn === 'function') {
- fns[fns.length] = fn;
- }
- return fn;
- };
-}();
-
-
-
-// helper library for parsing css to objects
-var cssHelper = function () {
-
- var regExp = {
- BLOCKS: /[^\s{;][^{;]*\{(?:[^{}]*\{[^{}]*\}[^{}]*|[^{}]*)*\}/g,
- BLOCKS_INSIDE: /[^\s{][^{]*\{[^{}]*\}/g,
- DECLARATIONS: /[a-zA-Z\-]+[^;]*:[^;]+;/g,
- RELATIVE_URLS: /url\(['"]?([^\/\)'"][^:\)'"]+)['"]?\)/g,
- // strip whitespace and comments, @import is evil
- REDUNDANT_COMPONENTS: /(?:\/\*([^*\\\\]|\*(?!\/))+\*\/|@import[^;]+;)/g,
- REDUNDANT_WHITESPACE: /\s*(,|:|;|\{|\})\s*/g,
- WHITESPACE_IN_PARENTHESES: /\(\s*(\S*)\s*\)/g,
- MORE_WHITESPACE: /\s{2,}/g,
- FINAL_SEMICOLONS: /;\}/g,
- NOT_WHITESPACE: /\S+/g
- };
-
- var parsed, parsing = false;
-
- var waiting = [];
- var wait = function (fn) {
- if (typeof fn === 'function') {
- waiting[waiting.length] = fn;
- }
- };
- var ready = function () {
- for (var i = 0; i < waiting.length; i++) {
- waiting[i](parsed);
- }
- };
- var events = {};
- var broadcast = function (n, v) {
- if (events[n]) {
- var listeners = events[n].listeners;
- if (listeners) {
- for (var i = 0; i < listeners.length; i++) {
- listeners[i](v);
- }
- }
- }
- };
-
- var requestText = function (url, fnSuccess, fnFailure) {
- if (ua.ie && !window.XMLHttpRequest) {
- window.XMLHttpRequest = function () {
- return new ActiveXObject('Microsoft.XMLHTTP');
- };
- }
- if (!XMLHttpRequest) {
- return '';
- }
- var r = new XMLHttpRequest();
- try {
- r.open('get', url, true);
- r.setRequestHeader('X_REQUESTED_WITH', 'XMLHttpRequest');
- }
- catch (e) {
- fnFailure();
- return;
- }
- var done = false;
- setTimeout(function () {
- done = true;
- }, 5000);
- document.documentElement.style.cursor = 'progress';
- r.onreadystatechange = function () {
- if (r.readyState === 4 && !done) {
- if (!r.status && location.protocol === 'file:' ||
- (r.status >= 200 && r.status < 300) ||
- r.status === 304 ||
- navigator.userAgent.indexOf('Safari') > -1 && typeof r.status === 'undefined') {
- fnSuccess(r.responseText);
- }
- else {
- fnFailure();
- }
- document.documentElement.style.cursor = '';
- r = null; // avoid memory leaks
- }
- };
- r.send('');
- };
-
- var sanitize = function (text) {
- text = text.replace(regExp.REDUNDANT_COMPONENTS, '');
- text = text.replace(regExp.REDUNDANT_WHITESPACE, '$1');
- text = text.replace(regExp.WHITESPACE_IN_PARENTHESES, '($1)');
- text = text.replace(regExp.MORE_WHITESPACE, ' ');
- text = text.replace(regExp.FINAL_SEMICOLONS, '}'); // optional final semicolons
- return text;
- };
-
- var objects = {
- stylesheet: function (el) {
- var o = {};
- var amqs = [], mqls = [], rs = [], rsw = [];
- var s = el.cssHelperText;
-
- // add attribute media queries
- var attr = el.getAttribute('media');
- if (attr) {
- var qts = attr.toLowerCase().split(',')
- }
- else {
- var qts = ['all'] // imply 'all'
- }
- for (var i = 0; i < qts.length; i++) {
- amqs[amqs.length] = objects.mediaQuery(qts[i], o);
- }
-
- // add media query lists and rules (top down order)
- var blocks = s.match(regExp.BLOCKS); // @charset is not a block
- if (blocks !== null) {
- for (var i = 0; i < blocks.length; i++) {
- if (blocks[i].substring(0, 7) === '@media ') { // media query (list)
- var mql = objects.mediaQueryList(blocks[i], o);
- rs = rs.concat(mql.getRules());
- mqls[mqls.length] = mql;
- }
- else { // regular rule set, page context (@page) or font description (@font-face)
- rs[rs.length] = rsw[rsw.length] = objects.rule(blocks[i], o, null);
- }
- }
- }
-
- o.element = el;
- o.getCssText = function () {
- return s;
- };
- o.getAttrMediaQueries = function () {
- return amqs;
- };
- o.getMediaQueryLists = function () {
- return mqls;
- };
- o.getRules = function () {
- return rs;
- };
- o.getRulesWithoutMQ = function () {
- return rsw;
- };
- return o;
- },
-
- mediaQueryList: function (s, stsh) {
- var o = {};
- var idx = s.indexOf('{');
- var lt = s.substring(0, idx);
- s = s.substring(idx + 1, s.length - 1);
- var mqs = [], rs = [];
-
- // add media queries
- var qts = lt.toLowerCase().substring(7).split(',');
- for (var i = 0; i < qts.length; i++) { // parse each media query
- mqs[mqs.length] = objects.mediaQuery(qts[i], o);
- }
-
- // add rule sets
- var rts = s.match(regExp.BLOCKS_INSIDE);
- if (rts !== null) {
- for (i = 0; i < rts.length; i++) {
- rs[rs.length] = objects.rule(rts[i], stsh, o);
- }
- }
-
- o.type = 'mediaQueryList';
- o.getMediaQueries = function () {
- return mqs;
- };
- o.getRules = function () {
- return rs;
- };
- o.getListText = function () {
- return lt;
- };
- o.getCssText = function () {
- return s;
- };
- return o;
- },
-
- mediaQuery: function (s, listOrSheet) {
- s = s || '';
- var mql, stsh;
- if (listOrSheet.type === 'mediaQueryList') {
- mql = listOrSheet;
- }
- else {
- stsh = listOrSheet;
- }
- var not = false, type;
- var expr = [];
- var valid = true;
- var tokens = s.match(regExp.NOT_WHITESPACE);
-
-
-
- for (var i = 0; i < tokens.length; i++) {
- var token = tokens[i];
- if (!type && (token === 'not' || token === 'only')) { // 'not' and 'only' keywords
- // keyword 'only' does nothing, as if it was not present
- if (token === 'not') {
- not = true;
- }
- }
- else if (!type) { // media type
- type = token;
- }
- else if (token.charAt(0) === '(') { // media feature expression
- var pair = token.substring(1, token.length - 1).split(':');
- expr[expr.length] = {
- mediaFeature: pair[0],
- value: pair[1] || null
- };
- }
- }
-
- return {
- getQueryText: function () {
- return s;
- },
- getAttrStyleSheet: function () {
- return stsh || null;
- },
- getList: function () {
- return mql || null;
- },
- getValid: function () {
- return valid;
- },
- getNot: function () {
- return not;
- },
- getMediaType: function () {
- return type;
- },
- getExpressions: function () {
- return expr;
- }
- };
- },
-
- rule: function (s, stsh, mql) {
- var o = {};
- var idx = s.indexOf('{');
- var st = s.substring(0, idx);
- var ss = st.split(',');
- var ds = [];
- var dts = s.substring(idx + 1, s.length - 1).split(';');
- for (var i = 0; i < dts.length; i++) {
- ds[ds.length] = objects.declaration(dts[i], o);
- }
-
- o.getStylesheet = function () {
- return stsh || null;
- };
- o.getMediaQueryList = function () {
- return mql || null;
- };
- o.getSelectors = function () {
- return ss;
- };
- o.getSelectorText = function () {
- return st;
- };
- o.getDeclarations = function () {
- return ds;
- };
- o.getPropertyValue = function (n) {
- for (var i = 0; i < ds.length; i++) {
- if (ds[i].getProperty() === n) {
- return ds[i].getValue();
- }
- }
- return null;
- };
- return o;
- },
-
- declaration: function (s, r) {
- var idx = s.indexOf(':');
- var p = s.substring(0, idx);
- var v = s.substring(idx + 1);
- return {
- getRule: function () {
- return r || null;
- },
- getProperty: function () {
- return p;
- },
- getValue: function () {
- return v;
- }
- };
- }
- };
-
- var parseText = function (el) {
- if (typeof el.cssHelperText !== 'string') {
- return;
- }
- var o = {
- stylesheet: null,
- mediaQueryLists: [],
- rules: [],
- selectors: {},
- declarations: [],
- properties: {}
- };
-
- // build stylesheet object
- var stsh = o.stylesheet = objects.stylesheet(el);
-
- // collect media query lists
- var mqls = o.mediaQueryLists = stsh.getMediaQueryLists();
-
- // collect all rules
- var ors = o.rules = stsh.getRules();
-
- // collect all selectors
- var oss = o.selectors;
- var collectSelectors = function (r) {
- var ss = r.getSelectors();
- for (var i = 0; i < ss.length; i++) {
- var n = ss[i];
- if (!oss[n]) {
- oss[n] = [];
- }
- oss[n][oss[n].length] = r;
- }
- };
- for (i = 0; i < ors.length; i++) {
- collectSelectors(ors[i]);
- }
-
- // collect all declarations
- var ods = o.declarations;
- for (i = 0; i < ors.length; i++) {
- ods = o.declarations = ods.concat(ors[i].getDeclarations());
- }
-
- // collect all properties
- var ops = o.properties;
- for (i = 0; i < ods.length; i++) {
- var n = ods[i].getProperty();
- if (!ops[n]) {
- ops[n] = [];
- }
- ops[n][ops[n].length] = ods[i];
- }
-
- el.cssHelperParsed = o;
- parsed[parsed.length] = el;
- return o;
- };
-
- var parseEmbedded = function (el, s) {
- return;
- // This function doesn't work because of a bug in IE, where innerHTML gives us parsed css instead of raw literal.
- el.cssHelperText = sanitize(s || el.innerHTML);
- return parseText(el);
- };
-
- var parse = function () {
- parsing = true;
- parsed = [];
- var linked = [];
- var finish = function () {
- for (var i = 0; i < linked.length; i++) {
- parseText(linked[i]);
- }
- var styles = document.getElementsByTagName('style');
- for (i = 0; i < styles.length; i++) {
- parseEmbedded(styles[i]);
- }
- parsing = false;
- ready();
- };
- var links = document.getElementsByTagName('link');
- for (var i = 0; i < links.length; i++) {
- var link = links[i];
- if (link.getAttribute('rel').indexOf('style') > -1 && link.href && link.href.length !== 0 && !link.disabled) {
- linked[linked.length] = link;
- }
- }
- if (linked.length > 0) {
- var c = 0;
- var checkForFinish = function () {
- c++;
- if (c === linked.length) { // parse in right order, so after last link is read
- finish();
- }
- };
- var processLink = function (link) {
- var href = link.href;
- requestText(href, function (text) {
- // fix url's
- text = sanitize(text).replace(regExp.RELATIVE_URLS, 'url(' + href.substring(0, href.lastIndexOf('/')) + '/$1)');
- link.cssHelperText = text;
- checkForFinish();
- }, checkForFinish);
- };
- for (i = 0; i < linked.length; i++) {
- processLink(linked[i]);
- }
- }
- else {
- finish();
- }
- };
-
- var types = {
- stylesheets: 'array',
- mediaQueryLists: 'array',
- rules: 'array',
- selectors: 'object',
- declarations: 'array',
- properties: 'object'
- };
-
- var collections = {
- stylesheets: null,
- mediaQueryLists: null,
- rules: null,
- selectors: null,
- declarations: null,
- properties: null
- };
-
- var addToCollection = function (name, v) {
- if (collections[name] !== null) {
- if (types[name] === 'array') {
- return (collections[name] = collections[name].concat(v));
- }
- else {
- var c = collections[name];
- for (var n in v) {
- if (v.hasOwnProperty(n)) {
- if (!c[n]) {
- c[n] = v[n];
- }
- else {
- c[n] = c[n].concat(v[n]);
- }
- }
- }
- return c;
- }
- }
- };
-
- var collect = function (name) {
- collections[name] = (types[name] === 'array') ? [] : {};
- for (var i = 0; i < parsed.length; i++) {
- var pname = name === 'stylesheets' ? 'stylesheet' : name; // the exception
- addToCollection(name, parsed[i].cssHelperParsed[pname]);
- }
- return collections[name];
- };
-
- // viewport size
- var getViewportSize = function (d) {
- if (typeof window.innerWidth != 'undefined') {
- return window['inner' + d];
- }
- else if (typeof document.documentElement !== 'undefined'
- && typeof document.documentElement.clientWidth !== 'undefined'
- && document.documentElement.clientWidth != 0) {
- return document.documentElement['client' + d];
- }
- };
-
- // public static functions
- return {
- addStyle: function (s, mediaTypes, process) {
- var el = document.createElement('style');
- el.setAttribute('type', 'text/css');
- if (mediaTypes && mediaTypes.length > 0) {
- el.setAttribute('media', mediaTypes.join(','));
- }
- document.getElementsByTagName('head')[0].appendChild(el);
- if (el.styleSheet) { // IE
- el.styleSheet.cssText = s;
- }
- else {
- el.appendChild(document.createTextNode(s));
- }
- el.addedWithCssHelper = true;
- if (typeof process === 'undefined' || process === true) {
- cssHelper.parsed(function (parsed) {
- var o = parseEmbedded(el, s);
- for (var n in o) {
- if (o.hasOwnProperty(n)) {
- addToCollection(n, o[n]);
- }
- }
- broadcast('newStyleParsed', el);
- });
- }
- else {
- el.parsingDisallowed = true;
- }
- return el;
- },
-
- removeStyle: function (el) {
- return el.parentNode.removeChild(el);
- },
-
- parsed: function (fn) {
- if (parsing) {
- wait(fn);
- }
- else {
- if (typeof parsed !== 'undefined') {
- if (typeof fn === 'function') {
- fn(parsed);
- }
- }
- else {
- wait(fn);
- parse();
- }
- }
- },
-
- stylesheets: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.stylesheets || collect('stylesheets'));
- });
- },
-
- mediaQueryLists: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.mediaQueryLists || collect('mediaQueryLists'));
- });
- },
-
- rules: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.rules || collect('rules'));
- });
- },
-
- selectors: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.selectors || collect('selectors'));
- });
- },
-
- declarations: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.declarations || collect('declarations'));
- });
- },
-
- properties: function (fn) {
- cssHelper.parsed(function (parsed) {
- fn(collections.properties || collect('properties'));
- });
- },
-
- broadcast: broadcast,
-
- addListener: function (n, fn) { // in case n is 'styleadd': added function is called everytime style is added and parsed
- if (typeof fn === 'function') {
- if (!events[n]) {
- events[n] = {
- listeners: []
- };
- }
- events[n].listeners[events[n].listeners.length] = fn;
- }
- },
-
- removeListener: function (n, fn) {
- if (typeof fn === 'function' && events[n]) {
- var ls = events[n].listeners;
- for (var i = 0; i < ls.length; i++) {
- if (ls[i] === fn) {
- ls.splice(i, 1);
- i -= 1;
- }
- }
- }
- },
-
- getViewportWidth: function () {
- return getViewportSize('Width');
- },
-
- getViewportHeight: function () {
- return getViewportSize('Height');
- }
- };
-}();
-
-
-
-// function to test and apply parsed media queries against browser capabilities
-domReady(function enableCssMediaQueries() {
- var meter;
-
- var regExp = {
- LENGTH_UNIT: /[0-9]+(em|ex|px|in|cm|mm|pt|pc)$/,
- RESOLUTION_UNIT: /[0-9]+(dpi|dpcm)$/,
- ASPECT_RATIO: /^[0-9]+\/[0-9]+$/,
- ABSOLUTE_VALUE: /^[0-9]*(\.[0-9]+)*$/
- };
-
- var styles = [];
-
- var nativeSupport = function () {
- // check support for media queries
- var id = 'css3-mediaqueries-test';
- var el = document.createElement('div');
- el.id = id;
- var style = cssHelper.addStyle('@media all and (width) { #' + id +
- ' { width: 1px !important; } }', [], false); // false means don't parse this temp style
- document.body.appendChild(el);
- var ret = el.offsetWidth === 1;
- style.parentNode.removeChild(style);
- el.parentNode.removeChild(el);
- nativeSupport = function () {
- return ret;
- };
- return ret;
- };
-
- var createMeter = function () { // create measuring element
- meter = document.createElement('div');
- meter.style.cssText = 'position:absolute;top:-9999em;left:-9999em;' +
- 'margin:0;border:none;padding:0;width:1em;font-size:1em;'; // cssText is needed for IE, works for the others
- document.body.appendChild(meter);
- // meter must have browser default font size of 16px
- if (meter.offsetWidth !== 16) {
- meter.style.fontSize = 16 / meter.offsetWidth + 'em';
- }
- meter.style.width = '';
- };
-
- var measure = function (value) {
- meter.style.width = value;
- var amount = meter.offsetWidth;
- meter.style.width = '';
- return amount;
- };
-
- var testMediaFeature = function (feature, value) {
- // non-testable features: monochrome|min-monochrome|max-monochrome|scan|grid
- var l = feature.length;
- var min = (feature.substring(0, 4) === 'min-');
- var max = (!min && feature.substring(0, 4) === 'max-');
-
- if (value !== null) { // determine value type and parse to usable amount
- var valueType;
- var amount;
- if (regExp.LENGTH_UNIT.exec(value)) {
- valueType = 'length';
- amount = measure(value);
- }
- else if (regExp.RESOLUTION_UNIT.exec(value)) {
- valueType = 'resolution';
- amount = parseInt(value, 10);
- var unit = value.substring((amount + '').length);
- }
- else if (regExp.ASPECT_RATIO.exec(value)) {
- valueType = 'aspect-ratio';
- amount = value.split('/');
- }
- else if (regExp.ABSOLUTE_VALUE) {
- valueType = 'absolute';
- amount = value;
- }
- else {
- valueType = 'unknown';
- }
- }
-
- var width, height;
- if ('device-width' === feature.substring(l - 12, l)) { // screen width
- width = screen.width;
- if (value !== null) {
- if (valueType === 'length') {
- return ((min && width >= amount) || (max && width < amount) || (!min && !max && width === amount));
- }
- else {
- return false;
- }
- }
- else { // test width without value
- return width > 0;
- }
- }
- else if ('device-height' === feature.substring(l - 13, l)) { // screen height
- height = screen.height;
- if (value !== null) {
- if (valueType === 'length') {
- return ((min && height >= amount) || (max && height < amount) || (!min && !max && height === amount));
- }
- else {
- return false;
- }
- }
- else { // test height without value
- return height > 0;
- }
- }
- else if ('width' === feature.substring(l - 5, l)) { // viewport width
- width = document.documentElement.clientWidth || document.body.clientWidth; // the latter for IE quirks mode
- if (value !== null) {
- if (valueType === 'length') {
- return ((min && width >= amount) || (max && width < amount) || (!min && !max && width === amount));
- }
- else {
- return false;
- }
- }
- else { // test width without value
- return width > 0;
- }
- }
- else if ('height' === feature.substring(l - 6, l)) { // viewport height
- height = document.documentElement.clientHeight || document.body.clientHeight; // the latter for IE quirks mode
- if (value !== null) {
- if (valueType === 'length') {
- return ((min && height >= amount) || (max && height < amount) || (!min && !max && height === amount));
- }
- else {
- return false;
- }
- }
- else { // test height without value
- return height > 0;
- }
- }
- else if ('device-aspect-ratio' === feature.substring(l - 19, l)) { // screen aspect ratio
- return valueType === 'aspect-ratio' && screen.width * amount[1] === screen.height * amount[0];
- }
- else if ('color-index' === feature.substring(l - 11, l)) { // number of colors
- var colors = Math.pow(2, screen.colorDepth);
- if (value !== null) {
- if (valueType === 'absolute') {
- return ((min && colors >= amount) || (max && colors < amount) || (!min && !max && colors === amount));
- }
- else {
- return false;
- }
- }
- else { // test height without value
- return colors > 0;
- }
- }
- else if ('color' === feature.substring(l - 5, l)) { // bits per color component
- var color = screen.colorDepth;
- if (value !== null) {
- if (valueType === 'absolute') {
- return ((min && color >= amount) || (max && color < amount) || (!min && !max && color === amount));
- }
- else {
- return false;
- }
- }
- else { // test height without value
- return color > 0;
- }
- }
- else if ('resolution' === feature.substring(l - 10, l)) {
- var res;
- if (unit === 'dpcm') {
- res = measure('1cm');
- }
- else {
- res = measure('1in');
- }
- if (value !== null) {
- if (valueType === 'resolution') {
- return ((min && res >= amount) || (max && res < amount) || (!min && !max && res === amount));
- }
- else {
- return false;
- }
- }
- else { // test height without value
- return res > 0;
- }
- }
- else {
- return false;
- }
- };
-
- var testMediaQuery = function (mq) {
- var test = mq.getValid();
- var expressions = mq.getExpressions();
- var l = expressions.length;
- if (l > 0) {
- for (var i = 0; i < l && test; i++) {
- test = testMediaFeature(expressions[i].mediaFeature, expressions[i].value);
- }
- var not = mq.getNot();
- return (test && !not || not && !test);
- }
- return test;
- };
-
- var testMediaQueryList = function (mql, ts) {
- // ts is null or an array with any media type but 'all'.
- var mqs = mql.getMediaQueries();
- var t = {};
- for (var i = 0; i < mqs.length; i++) {
- var type = mqs[i].getMediaType();
- if (mqs[i].getExpressions().length === 0) {
- continue;
- // TODO: Browser check! Assuming old browsers do apply the bare media types, even in a list with media queries.
- }
- var typeAllowed = true;
- if (type !== 'all' && ts && ts.length > 0) {
- typeAllowed = false;
- for (var j = 0; j < ts.length; j++) {
- if (ts[j] === type) {
- typeAllowed = true;
- }
- }
- }
- if (typeAllowed && testMediaQuery(mqs[i])) {
- t[type] = true;
- }
- }
- var s = [], c = 0;
- for (var n in t) {
- if (t.hasOwnProperty(n)) {
- if (c > 0) {
- s[c++] = ',';
- }
- s[c++] = n;
- }
- }
- if (s.length > 0) {
- styles[styles.length] = cssHelper.addStyle('@media ' + s.join('') + '{' + mql.getCssText() + '}', ts, false);
- }
- };
-
- var testMediaQueryLists = function (mqls, ts) {
- for (var i = 0; i < mqls.length; i++) {
- testMediaQueryList(mqls[i], ts);
- }
- };
-
- var testStylesheet = function (stsh) {
- var amqs = stsh.getAttrMediaQueries();
- var allPassed = false;
- var t = {};
- for (var i = 0; i < amqs.length; i++) {
- if (testMediaQuery(amqs[i])) {
- t[amqs[i].getMediaType()] = amqs[i].getExpressions().length > 0;
- }
- }
- var ts = [], tswe = [];
- for (var n in t) {
- if (t.hasOwnProperty(n)) {
- ts[ts.length] = n;
- if (t[n]) {
- tswe[tswe.length] = n
- }
- if (n === 'all') {
- allPassed = true;
- }
- }
- }
- if (tswe.length > 0) { // types with query expressions that passed the test
- styles[styles.length] = cssHelper.addStyle(stsh.getCssText(), tswe, false);
- }
- var mqls = stsh.getMediaQueryLists();
- if (allPassed) {
- // If 'all' in media attribute passed the test, then test all @media types in linked CSS and create style with those types.
- testMediaQueryLists(mqls);
- }
- else {
- // Or else, test only media attribute types that passed the test and also 'all'.
- // For positive '@media all', create style with attribute types that passed their test.
- testMediaQueryLists(mqls, ts);
- }
- };
-
- var testStylesheets = function (stshs) {
- for (var i = 0; i < stshs.length; i++) {
- testStylesheet(stshs[i]);
- }
- if (ua.ie) {
- // force repaint in IE
- document.documentElement.style.display = 'block';
- setTimeout(function () {
- document.documentElement.style.display = '';
- }, 0);
- // delay broadcast somewhat for IE
- setTimeout(function () {
- cssHelper.broadcast('cssMediaQueriesTested');
- }, 100);
- }
- else {
- cssHelper.broadcast('cssMediaQueriesTested');
- }
- };
-
- var test = function () {
- for (var i = 0; i < styles.length; i++) {
- cssHelper.removeStyle(styles[i]);
- }
- styles = [];
- cssHelper.stylesheets(testStylesheets);
- };
-
- var scrollbarWidth = 0;
- var checkForResize = function () {
- var cvpw = cssHelper.getViewportWidth();
- var cvph = cssHelper.getViewportHeight();
-
- // determine scrollbar width in IE, see resizeHandler
- if (ua.ie) {
- var el = document.createElement('div');
- el.style.position = 'absolute';
- el.style.top = '-9999em';
- el.style.overflow = 'scroll';
- document.body.appendChild(el);
- scrollbarWidth = el.offsetWidth - el.clientWidth;
- document.body.removeChild(el);
- }
-
- var timer;
- var resizeHandler = function () {
- var vpw = cssHelper.getViewportWidth();
- var vph = cssHelper.getViewportHeight();
- // check whether vp size has really changed, because IE also triggers resize event when body size changes
- // 20px allowance to accomodate short appearance of scrollbars in IE in some cases
- if (Math.abs(vpw - cvpw) > scrollbarWidth || Math.abs(vph - cvph) > scrollbarWidth) {
- cvpw = vpw;
- cvph = vph;
- clearTimeout(timer);
- timer = setTimeout(function () {
- if (!nativeSupport()) {
- test();
- }
- else {
- cssHelper.broadcast('cssMediaQueriesTested');
- }
- }, 500);
- }
- };
-
- window.onresize = function () {
- var x = window.onresize || function () {}; // save original
- return function () {
- x();
- resizeHandler();
- };
- }();
- };
-
- // prevent jumping of layout by hiding everything before painting <body>
- var docEl = document.documentElement;
- docEl.style.marginLeft = '-32767px';
-
- // make sure it comes back after a while
- setTimeout(function () {
- docEl.style.marginLeft = '';
- }, 5000);
-
- return function () {
- if (!nativeSupport()) { // if browser doesn't support media queries
- cssHelper.addListener('newStyleParsed', function (el) {
- testStylesheet(el.cssHelperParsed.stylesheet);
- });
- // return visibility after media queries are tested
- cssHelper.addListener('cssMediaQueriesTested', function () {
- // force repaint in IE by changing width
- if (ua.ie) {
- docEl.style.width = '1px';
- }
- setTimeout(function () {
- docEl.style.width = ''; // undo width
- docEl.style.marginLeft = ''; // undo hide
- }, 0);
- // remove this listener to prevent following execution
- cssHelper.removeListener('cssMediaQueriesTested', arguments.callee);
- });
- createMeter();
- test();
- }
- else {
- docEl.style.marginLeft = ''; // undo visibility hidden
- }
- checkForResize();
- };
-}());
-
-
-// bonus: hotfix for IE6 SP1 (bug KB823727)
-try {
- document.execCommand('BackgroundImageCache', false, true);
-} catch (e) {}
+/*
+css3-mediaqueries.js - CSS Helper and CSS3 Media Queries Enabler
+
+author: Wouter van der Graaf <wouter at dynora nl>
+version: 1.0 (20110330)
+license: MIT
+website: http://code.google.com/p/css3-mediaqueries-js/
+
+W3C spec: http://www.w3.org/TR/css3-mediaqueries/
+
+Note: use of embedded <style> is not recommended when using media queries, because IE has no way of returning the raw literal css text from a <style> element.
+*/
+
+
+// true prototypal inheritance (http://javascript.crockford.com/prototypal.html)
+if (typeof Object.create !== 'function') {
+ Object.create = function (o) {
+ function F() {}
+ F.prototype = o;
+ return new F();
+ };
+}
+
+
+// user agent sniffing shortcuts
+var ua = {
+ toString: function () {
+ return navigator.userAgent;
+ },
+ test: function (s) {
+ return this.toString().toLowerCase().indexOf(s.toLowerCase()) > -1;
+ }
+};
+ua.version = (ua.toString().toLowerCase().match(/[\s\S]+(?:rv|it|ra|ie)[\/: ]([\d.]+)/) || [])[1];
+ua.webkit = ua.test('webkit');
+ua.gecko = ua.test('gecko') && !ua.webkit;
+ua.opera = ua.test('opera');
+ua.ie = ua.test('msie') && !ua.opera;
+ua.ie6 = ua.ie && document.compatMode && typeof document.documentElement.style.maxHeight === 'undefined';
+ua.ie7 = ua.ie && document.documentElement && typeof document.documentElement.style.maxHeight !== 'undefined' && typeof XDomainRequest === 'undefined';
+ua.ie8 = ua.ie && typeof XDomainRequest !== 'undefined';
+
+
+
+// initialize when DOM content is loaded
+var domReady = function () {
+ var fns = [];
+ var init = function () {
+ if (!arguments.callee.done) { // run init functions once
+ arguments.callee.done = true;
+ for (var i = 0; i < fns.length; i++) {
+ fns[i]();
+ }
+ }
+ };
+
+ // listeners for different browsers
+ if (document.addEventListener) {
+ document.addEventListener('DOMContentLoaded', init, false);
+ }
+ if (ua.ie) {
+ (function () {
+ try {
+ // throws errors until after ondocumentready
+ document.documentElement.doScroll('left');
+ }
+ catch (e) {
+ setTimeout(arguments.callee, 50);
+ return;
+ }
+ // no errors, fire
+ init();
+ })();
+ // trying to always fire before onload
+ document.onreadystatechange = function () {
+ if (document.readyState === 'complete') {
+ document.onreadystatechange = null;
+ init();
+ }
+ };
+ }
+ if (ua.webkit && document.readyState) {
+ (function () {
+ if (document.readyState !== 'loading') {
+ init();
+ }
+ else {
+ setTimeout(arguments.callee, 10);
+ }
+ })();
+ }
+ window.onload = init; // fallback
+
+ return function (fn) { // add fn to init functions
+ if (typeof fn === 'function') {
+ fns[fns.length] = fn;
+ }
+ return fn;
+ };
+}();
+
+
+
+// helper library for parsing css to objects
+var cssHelper = function () {
+
+ var regExp = {
+ BLOCKS: /[^\s{;][^{;]*\{(?:[^{}]*\{[^{}]*\}[^{}]*|[^{}]*)*\}/g,
+ BLOCKS_INSIDE: /[^\s{][^{]*\{[^{}]*\}/g,
+ DECLARATIONS: /[a-zA-Z\-]+[^;]*:[^;]+;/g,
+ RELATIVE_URLS: /url\(['"]?([^\/\)'"][^:\)'"]+)['"]?\)/g,
+ // strip whitespace and comments, @import is evil
+ REDUNDANT_COMPONENTS: /(?:\/\*([^*\\\\]|\*(?!\/))+\*\/|@import[^;]+;)/g,
+ REDUNDANT_WHITESPACE: /\s*(,|:|;|\{|\})\s*/g,
+ WHITESPACE_IN_PARENTHESES: /\(\s*(\S*)\s*\)/g,
+ MORE_WHITESPACE: /\s{2,}/g,
+ FINAL_SEMICOLONS: /;\}/g,
+ NOT_WHITESPACE: /\S+/g
+ };
+
+ var parsed, parsing = false;
+
+ var waiting = [];
+ var wait = function (fn) {
+ if (typeof fn === 'function') {
+ waiting[waiting.length] = fn;
+ }
+ };
+ var ready = function () {
+ for (var i = 0; i < waiting.length; i++) {
+ waiting[i](parsed);
+ }
+ };
+ var events = {};
+ var broadcast = function (n, v) {
+ if (events[n]) {
+ var listeners = events[n].listeners;
+ if (listeners) {
+ for (var i = 0; i < listeners.length; i++) {
+ listeners[i](v);
+ }
+ }
+ }
+ };
+
+ var requestText = function (url, fnSuccess, fnFailure) {
+ if (ua.ie && !window.XMLHttpRequest) {
+ window.XMLHttpRequest = function () {
+ return new ActiveXObject('Microsoft.XMLHTTP');
+ };
+ }
+ if (!XMLHttpRequest) {
+ return '';
+ }
+ var r = new XMLHttpRequest();
+ try {
+ r.open('get', url, true);
+ r.setRequestHeader('X_REQUESTED_WITH', 'XMLHttpRequest');
+ }
+ catch (e) {
+ fnFailure();
+ return;
+ }
+ var done = false;
+ setTimeout(function () {
+ done = true;
+ }, 5000);
+ document.documentElement.style.cursor = 'progress';
+ r.onreadystatechange = function () {
+ if (r.readyState === 4 && !done) {
+ if (!r.status && location.protocol === 'file:' ||
+ (r.status >= 200 && r.status < 300) ||
+ r.status === 304 ||
+ navigator.userAgent.indexOf('Safari') > -1 && typeof r.status === 'undefined') {
+ fnSuccess(r.responseText);
+ }
+ else {
+ fnFailure();
+ }
+ document.documentElement.style.cursor = '';
+ r = null; // avoid memory leaks
+ }
+ };
+ r.send('');
+ };
+
+ var sanitize = function (text) {
+ text = text.replace(regExp.REDUNDANT_COMPONENTS, '');
+ text = text.replace(regExp.REDUNDANT_WHITESPACE, '$1');
+ text = text.replace(regExp.WHITESPACE_IN_PARENTHESES, '($1)');
+ text = text.replace(regExp.MORE_WHITESPACE, ' ');
+ text = text.replace(regExp.FINAL_SEMICOLONS, '}'); // optional final semicolons
+ return text;
+ };
+
+ var objects = {
+ stylesheet: function (el) {
+ var o = {};
+ var amqs = [], mqls = [], rs = [], rsw = [];
+ var s = el.cssHelperText;
+
+ // add attribute media queries
+ var attr = el.getAttribute('media');
+ if (attr) {
+ var qts = attr.toLowerCase().split(',')
+ }
+ else {
+ var qts = ['all'] // imply 'all'
+ }
+ for (var i = 0; i < qts.length; i++) {
+ amqs[amqs.length] = objects.mediaQuery(qts[i], o);
+ }
+
+ // add media query lists and rules (top down order)
+ var blocks = s.match(regExp.BLOCKS); // @charset is not a block
+ if (blocks !== null) {
+ for (var i = 0; i < blocks.length; i++) {
+ if (blocks[i].substring(0, 7) === '@media ') { // media query (list)
+ var mql = objects.mediaQueryList(blocks[i], o);
+ rs = rs.concat(mql.getRules());
+ mqls[mqls.length] = mql;
+ }
+ else { // regular rule set, page context (@page) or font description (@font-face)
+ rs[rs.length] = rsw[rsw.length] = objects.rule(blocks[i], o, null);
+ }
+ }
+ }
+
+ o.element = el;
+ o.getCssText = function () {
+ return s;
+ };
+ o.getAttrMediaQueries = function () {
+ return amqs;
+ };
+ o.getMediaQueryLists = function () {
+ return mqls;
+ };
+ o.getRules = function () {
+ return rs;
+ };
+ o.getRulesWithoutMQ = function () {
+ return rsw;
+ };
+ return o;
+ },
+
+ mediaQueryList: function (s, stsh) {
+ var o = {};
+ var idx = s.indexOf('{');
+ var lt = s.substring(0, idx);
+ s = s.substring(idx + 1, s.length - 1);
+ var mqs = [], rs = [];
+
+ // add media queries
+ var qts = lt.toLowerCase().substring(7).split(',');
+ for (var i = 0; i < qts.length; i++) { // parse each media query
+ mqs[mqs.length] = objects.mediaQuery(qts[i], o);
+ }
+
+ // add rule sets
+ var rts = s.match(regExp.BLOCKS_INSIDE);
+ if (rts !== null) {
+ for (i = 0; i < rts.length; i++) {
+ rs[rs.length] = objects.rule(rts[i], stsh, o);
+ }
+ }
+
+ o.type = 'mediaQueryList';
+ o.getMediaQueries = function () {
+ return mqs;
+ };
+ o.getRules = function () {
+ return rs;
+ };
+ o.getListText = function () {
+ return lt;
+ };
+ o.getCssText = function () {
+ return s;
+ };
+ return o;
+ },
+
+ mediaQuery: function (s, listOrSheet) {
+ s = s || '';
+ var mql, stsh;
+ if (listOrSheet.type === 'mediaQueryList') {
+ mql = listOrSheet;
+ }
+ else {
+ stsh = listOrSheet;
+ }
+ var not = false, type;
+ var expr = [];
+ var valid = true;
+ var tokens = s.match(regExp.NOT_WHITESPACE);
+
+
+
+ for (var i = 0; i < tokens.length; i++) {
+ var token = tokens[i];
+ if (!type && (token === 'not' || token === 'only')) { // 'not' and 'only' keywords
+ // keyword 'only' does nothing, as if it was not present
+ if (token === 'not') {
+ not = true;
+ }
+ }
+ else if (!type) { // media type
+ type = token;
+ }
+ else if (token.charAt(0) === '(') { // media feature expression
+ var pair = token.substring(1, token.length - 1).split(':');
+ expr[expr.length] = {
+ mediaFeature: pair[0],
+ value: pair[1] || null
+ };
+ }
+ }
+
+ return {
+ getQueryText: function () {
+ return s;
+ },
+ getAttrStyleSheet: function () {
+ return stsh || null;
+ },
+ getList: function () {
+ return mql || null;
+ },
+ getValid: function () {
+ return valid;
+ },
+ getNot: function () {
+ return not;
+ },
+ getMediaType: function () {
+ return type;
+ },
+ getExpressions: function () {
+ return expr;
+ }
+ };
+ },
+
+ rule: function (s, stsh, mql) {
+ var o = {};
+ var idx = s.indexOf('{');
+ var st = s.substring(0, idx);
+ var ss = st.split(',');
+ var ds = [];
+ var dts = s.substring(idx + 1, s.length - 1).split(';');
+ for (var i = 0; i < dts.length; i++) {
+ ds[ds.length] = objects.declaration(dts[i], o);
+ }
+
+ o.getStylesheet = function () {
+ return stsh || null;
+ };
+ o.getMediaQueryList = function () {
+ return mql || null;
+ };
+ o.getSelectors = function () {
+ return ss;
+ };
+ o.getSelectorText = function () {
+ return st;
+ };
+ o.getDeclarations = function () {
+ return ds;
+ };
+ o.getPropertyValue = function (n) {
+ for (var i = 0; i < ds.length; i++) {
+ if (ds[i].getProperty() === n) {
+ return ds[i].getValue();
+ }
+ }
+ return null;
+ };
+ return o;
+ },
+
+ declaration: function (s, r) {
+ var idx = s.indexOf(':');
+ var p = s.substring(0, idx);
+ var v = s.substring(idx + 1);
+ return {
+ getRule: function () {
+ return r || null;
+ },
+ getProperty: function () {
+ return p;
+ },
+ getValue: function () {
+ return v;
+ }
+ };
+ }
+ };
+
+ var parseText = function (el) {
+ if (typeof el.cssHelperText !== 'string') {
+ return;
+ }
+ var o = {
+ stylesheet: null,
+ mediaQueryLists: [],
+ rules: [],
+ selectors: {},
+ declarations: [],
+ properties: {}
+ };
+
+ // build stylesheet object
+ var stsh = o.stylesheet = objects.stylesheet(el);
+
+ // collect media query lists
+ var mqls = o.mediaQueryLists = stsh.getMediaQueryLists();
+
+ // collect all rules
+ var ors = o.rules = stsh.getRules();
+
+ // collect all selectors
+ var oss = o.selectors;
+ var collectSelectors = function (r) {
+ var ss = r.getSelectors();
+ for (var i = 0; i < ss.length; i++) {
+ var n = ss[i];
+ if (!oss[n]) {
+ oss[n] = [];
+ }
+ oss[n][oss[n].length] = r;
+ }
+ };
+ for (i = 0; i < ors.length; i++) {
+ collectSelectors(ors[i]);
+ }
+
+ // collect all declarations
+ var ods = o.declarations;
+ for (i = 0; i < ors.length; i++) {
+ ods = o.declarations = ods.concat(ors[i].getDeclarations());
+ }
+
+ // collect all properties
+ var ops = o.properties;
+ for (i = 0; i < ods.length; i++) {
+ var n = ods[i].getProperty();
+ if (!ops[n]) {
+ ops[n] = [];
+ }
+ ops[n][ops[n].length] = ods[i];
+ }
+
+ el.cssHelperParsed = o;
+ parsed[parsed.length] = el;
+ return o;
+ };
+
+ var parseEmbedded = function (el, s) {
+ return;
+ // This function doesn't work because of a bug in IE, where innerHTML gives us parsed css instead of raw literal.
+ el.cssHelperText = sanitize(s || el.innerHTML);
+ return parseText(el);
+ };
+
+ var parse = function () {
+ parsing = true;
+ parsed = [];
+ var linked = [];
+ var finish = function () {
+ for (var i = 0; i < linked.length; i++) {
+ parseText(linked[i]);
+ }
+ var styles = document.getElementsByTagName('style');
+ for (i = 0; i < styles.length; i++) {
+ parseEmbedded(styles[i]);
+ }
+ parsing = false;
+ ready();
+ };
+ var links = document.getElementsByTagName('link');
+ for (var i = 0; i < links.length; i++) {
+ var link = links[i];
+ if (link.getAttribute('rel').indexOf('style') > -1 && link.href && link.href.length !== 0 && !link.disabled) {
+ linked[linked.length] = link;
+ }
+ }
+ if (linked.length > 0) {
+ var c = 0;
+ var checkForFinish = function () {
+ c++;
+ if (c === linked.length) { // parse in right order, so after last link is read
+ finish();
+ }
+ };
+ var processLink = function (link) {
+ var href = link.href;
+ requestText(href, function (text) {
+ // fix url's
+ text = sanitize(text).replace(regExp.RELATIVE_URLS, 'url(' + href.substring(0, href.lastIndexOf('/')) + '/$1)');
+ link.cssHelperText = text;
+ checkForFinish();
+ }, checkForFinish);
+ };
+ for (i = 0; i < linked.length; i++) {
+ processLink(linked[i]);
+ }
+ }
+ else {
+ finish();
+ }
+ };
+
+ var types = {
+ stylesheets: 'array',
+ mediaQueryLists: 'array',
+ rules: 'array',
+ selectors: 'object',
+ declarations: 'array',
+ properties: 'object'
+ };
+
+ var collections = {
+ stylesheets: null,
+ mediaQueryLists: null,
+ rules: null,
+ selectors: null,
+ declarations: null,
+ properties: null
+ };
+
+ var addToCollection = function (name, v) {
+ if (collections[name] !== null) {
+ if (types[name] === 'array') {
+ return (collections[name] = collections[name].concat(v));
+ }
+ else {
+ var c = collections[name];
+ for (var n in v) {
+ if (v.hasOwnProperty(n)) {
+ if (!c[n]) {
+ c[n] = v[n];
+ }
+ else {
+ c[n] = c[n].concat(v[n]);
+ }
+ }
+ }
+ return c;
+ }
+ }
+ };
+
+ var collect = function (name) {
+ collections[name] = (types[name] === 'array') ? [] : {};
+ for (var i = 0; i < parsed.length; i++) {
+ var pname = name === 'stylesheets' ? 'stylesheet' : name; // the exception
+ addToCollection(name, parsed[i].cssHelperParsed[pname]);
+ }
+ return collections[name];
+ };
+
+ // viewport size
+ var getViewportSize = function (d) {
+ if (typeof window.innerWidth != 'undefined') {
+ return window['inner' + d];
+ }
+ else if (typeof document.documentElement !== 'undefined'
+ && typeof document.documentElement.clientWidth !== 'undefined'
+ && document.documentElement.clientWidth != 0) {
+ return document.documentElement['client' + d];
+ }
+ };
+
+ // public static functions
+ return {
+ addStyle: function (s, mediaTypes, process) {
+ var el = document.createElement('style');
+ el.setAttribute('type', 'text/css');
+ if (mediaTypes && mediaTypes.length > 0) {
+ el.setAttribute('media', mediaTypes.join(','));
+ }
+ document.getElementsByTagName('head')[0].appendChild(el);
+ if (el.styleSheet) { // IE
+ el.styleSheet.cssText = s;
+ }
+ else {
+ el.appendChild(document.createTextNode(s));
+ }
+ el.addedWithCssHelper = true;
+ if (typeof process === 'undefined' || process === true) {
+ cssHelper.parsed(function (parsed) {
+ var o = parseEmbedded(el, s);
+ for (var n in o) {
+ if (o.hasOwnProperty(n)) {
+ addToCollection(n, o[n]);
+ }
+ }
+ broadcast('newStyleParsed', el);
+ });
+ }
+ else {
+ el.parsingDisallowed = true;
+ }
+ return el;
+ },
+
+ removeStyle: function (el) {
+ return el.parentNode.removeChild(el);
+ },
+
+ parsed: function (fn) {
+ if (parsing) {
+ wait(fn);
+ }
+ else {
+ if (typeof parsed !== 'undefined') {
+ if (typeof fn === 'function') {
+ fn(parsed);
+ }
+ }
+ else {
+ wait(fn);
+ parse();
+ }
+ }
+ },
+
+ stylesheets: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.stylesheets || collect('stylesheets'));
+ });
+ },
+
+ mediaQueryLists: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.mediaQueryLists || collect('mediaQueryLists'));
+ });
+ },
+
+ rules: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.rules || collect('rules'));
+ });
+ },
+
+ selectors: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.selectors || collect('selectors'));
+ });
+ },
+
+ declarations: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.declarations || collect('declarations'));
+ });
+ },
+
+ properties: function (fn) {
+ cssHelper.parsed(function (parsed) {
+ fn(collections.properties || collect('properties'));
+ });
+ },
+
+ broadcast: broadcast,
+
+ addListener: function (n, fn) { // in case n is 'styleadd': added function is called everytime style is added and parsed
+ if (typeof fn === 'function') {
+ if (!events[n]) {
+ events[n] = {
+ listeners: []
+ };
+ }
+ events[n].listeners[events[n].listeners.length] = fn;
+ }
+ },
+
+ removeListener: function (n, fn) {
+ if (typeof fn === 'function' && events[n]) {
+ var ls = events[n].listeners;
+ for (var i = 0; i < ls.length; i++) {
+ if (ls[i] === fn) {
+ ls.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ },
+
+ getViewportWidth: function () {
+ return getViewportSize('Width');
+ },
+
+ getViewportHeight: function () {
+ return getViewportSize('Height');
+ }
+ };
+}();
+
+
+
+// function to test and apply parsed media queries against browser capabilities
+domReady(function enableCssMediaQueries() {
+ var meter;
+
+ var regExp = {
+ LENGTH_UNIT: /[0-9]+(em|ex|px|in|cm|mm|pt|pc)$/,
+ RESOLUTION_UNIT: /[0-9]+(dpi|dpcm)$/,
+ ASPECT_RATIO: /^[0-9]+\/[0-9]+$/,
+ ABSOLUTE_VALUE: /^[0-9]*(\.[0-9]+)*$/
+ };
+
+ var styles = [];
+
+ var nativeSupport = function () {
+ // check support for media queries
+ var id = 'css3-mediaqueries-test';
+ var el = document.createElement('div');
+ el.id = id;
+ var style = cssHelper.addStyle('@media all and (width) { #' + id +
+ ' { width: 1px !important; } }', [], false); // false means don't parse this temp style
+ document.body.appendChild(el);
+ var ret = el.offsetWidth === 1;
+ style.parentNode.removeChild(style);
+ el.parentNode.removeChild(el);
+ nativeSupport = function () {
+ return ret;
+ };
+ return ret;
+ };
+
+ var createMeter = function () { // create measuring element
+ meter = document.createElement('div');
+ meter.style.cssText = 'position:absolute;top:-9999em;left:-9999em;' +
+ 'margin:0;border:none;padding:0;width:1em;font-size:1em;'; // cssText is needed for IE, works for the others
+ document.body.appendChild(meter);
+ // meter must have browser default font size of 16px
+ if (meter.offsetWidth !== 16) {
+ meter.style.fontSize = 16 / meter.offsetWidth + 'em';
+ }
+ meter.style.width = '';
+ };
+
+ var measure = function (value) {
+ meter.style.width = value;
+ var amount = meter.offsetWidth;
+ meter.style.width = '';
+ return amount;
+ };
+
+ var testMediaFeature = function (feature, value) {
+ // non-testable features: monochrome|min-monochrome|max-monochrome|scan|grid
+ var l = feature.length;
+ var min = (feature.substring(0, 4) === 'min-');
+ var max = (!min && feature.substring(0, 4) === 'max-');
+
+ if (value !== null) { // determine value type and parse to usable amount
+ var valueType;
+ var amount;
+ if (regExp.LENGTH_UNIT.exec(value)) {
+ valueType = 'length';
+ amount = measure(value);
+ }
+ else if (regExp.RESOLUTION_UNIT.exec(value)) {
+ valueType = 'resolution';
+ amount = parseInt(value, 10);
+ var unit = value.substring((amount + '').length);
+ }
+ else if (regExp.ASPECT_RATIO.exec(value)) {
+ valueType = 'aspect-ratio';
+ amount = value.split('/');
+ }
+ else if (regExp.ABSOLUTE_VALUE) {
+ valueType = 'absolute';
+ amount = value;
+ }
+ else {
+ valueType = 'unknown';
+ }
+ }
+
+ var width, height;
+ if ('device-width' === feature.substring(l - 12, l)) { // screen width
+ width = screen.width;
+ if (value !== null) {
+ if (valueType === 'length') {
+ return ((min && width >= amount) || (max && width < amount) || (!min && !max && width === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test width without value
+ return width > 0;
+ }
+ }
+ else if ('device-height' === feature.substring(l - 13, l)) { // screen height
+ height = screen.height;
+ if (value !== null) {
+ if (valueType === 'length') {
+ return ((min && height >= amount) || (max && height < amount) || (!min && !max && height === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test height without value
+ return height > 0;
+ }
+ }
+ else if ('width' === feature.substring(l - 5, l)) { // viewport width
+ width = document.documentElement.clientWidth || document.body.clientWidth; // the latter for IE quirks mode
+ if (value !== null) {
+ if (valueType === 'length') {
+ return ((min && width >= amount) || (max && width < amount) || (!min && !max && width === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test width without value
+ return width > 0;
+ }
+ }
+ else if ('height' === feature.substring(l - 6, l)) { // viewport height
+ height = document.documentElement.clientHeight || document.body.clientHeight; // the latter for IE quirks mode
+ if (value !== null) {
+ if (valueType === 'length') {
+ return ((min && height >= amount) || (max && height < amount) || (!min && !max && height === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test height without value
+ return height > 0;
+ }
+ }
+ else if ('device-aspect-ratio' === feature.substring(l - 19, l)) { // screen aspect ratio
+ return valueType === 'aspect-ratio' && screen.width * amount[1] === screen.height * amount[0];
+ }
+ else if ('color-index' === feature.substring(l - 11, l)) { // number of colors
+ var colors = Math.pow(2, screen.colorDepth);
+ if (value !== null) {
+ if (valueType === 'absolute') {
+ return ((min && colors >= amount) || (max && colors < amount) || (!min && !max && colors === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test height without value
+ return colors > 0;
+ }
+ }
+ else if ('color' === feature.substring(l - 5, l)) { // bits per color component
+ var color = screen.colorDepth;
+ if (value !== null) {
+ if (valueType === 'absolute') {
+ return ((min && color >= amount) || (max && color < amount) || (!min && !max && color === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test height without value
+ return color > 0;
+ }
+ }
+ else if ('resolution' === feature.substring(l - 10, l)) {
+ var res;
+ if (unit === 'dpcm') {
+ res = measure('1cm');
+ }
+ else {
+ res = measure('1in');
+ }
+ if (value !== null) {
+ if (valueType === 'resolution') {
+ return ((min && res >= amount) || (max && res < amount) || (!min && !max && res === amount));
+ }
+ else {
+ return false;
+ }
+ }
+ else { // test height without value
+ return res > 0;
+ }
+ }
+ else {
+ return false;
+ }
+ };
+
+ var testMediaQuery = function (mq) {
+ var test = mq.getValid();
+ var expressions = mq.getExpressions();
+ var l = expressions.length;
+ if (l > 0) {
+ for (var i = 0; i < l && test; i++) {
+ test = testMediaFeature(expressions[i].mediaFeature, expressions[i].value);
+ }
+ var not = mq.getNot();
+ return (test && !not || not && !test);
+ }
+ return test;
+ };
+
+ var testMediaQueryList = function (mql, ts) {
+ // ts is null or an array with any media type but 'all'.
+ var mqs = mql.getMediaQueries();
+ var t = {};
+ for (var i = 0; i < mqs.length; i++) {
+ var type = mqs[i].getMediaType();
+ if (mqs[i].getExpressions().length === 0) {
+ continue;
+ // TODO: Browser check! Assuming old browsers do apply the bare media types, even in a list with media queries.
+ }
+ var typeAllowed = true;
+ if (type !== 'all' && ts && ts.length > 0) {
+ typeAllowed = false;
+ for (var j = 0; j < ts.length; j++) {
+ if (ts[j] === type) {
+ typeAllowed = true;
+ }
+ }
+ }
+ if (typeAllowed && testMediaQuery(mqs[i])) {
+ t[type] = true;
+ }
+ }
+ var s = [], c = 0;
+ for (var n in t) {
+ if (t.hasOwnProperty(n)) {
+ if (c > 0) {
+ s[c++] = ',';
+ }
+ s[c++] = n;
+ }
+ }
+ if (s.length > 0) {
+ styles[styles.length] = cssHelper.addStyle('@media ' + s.join('') + '{' + mql.getCssText() + '}', ts, false);
+ }
+ };
+
+ var testMediaQueryLists = function (mqls, ts) {
+ for (var i = 0; i < mqls.length; i++) {
+ testMediaQueryList(mqls[i], ts);
+ }
+ };
+
+ var testStylesheet = function (stsh) {
+ var amqs = stsh.getAttrMediaQueries();
+ var allPassed = false;
+ var t = {};
+ for (var i = 0; i < amqs.length; i++) {
+ if (testMediaQuery(amqs[i])) {
+ t[amqs[i].getMediaType()] = amqs[i].getExpressions().length > 0;
+ }
+ }
+ var ts = [], tswe = [];
+ for (var n in t) {
+ if (t.hasOwnProperty(n)) {
+ ts[ts.length] = n;
+ if (t[n]) {
+ tswe[tswe.length] = n
+ }
+ if (n === 'all') {
+ allPassed = true;
+ }
+ }
+ }
+ if (tswe.length > 0) { // types with query expressions that passed the test
+ styles[styles.length] = cssHelper.addStyle(stsh.getCssText(), tswe, false);
+ }
+ var mqls = stsh.getMediaQueryLists();
+ if (allPassed) {
+ // If 'all' in media attribute passed the test, then test all @media types in linked CSS and create style with those types.
+ testMediaQueryLists(mqls);
+ }
+ else {
+ // Or else, test only media attribute types that passed the test and also 'all'.
+ // For positive '@media all', create style with attribute types that passed their test.
+ testMediaQueryLists(mqls, ts);
+ }
+ };
+
+ var testStylesheets = function (stshs) {
+ for (var i = 0; i < stshs.length; i++) {
+ testStylesheet(stshs[i]);
+ }
+ if (ua.ie) {
+ // force repaint in IE
+ document.documentElement.style.display = 'block';
+ setTimeout(function () {
+ document.documentElement.style.display = '';
+ }, 0);
+ // delay broadcast somewhat for IE
+ setTimeout(function () {
+ cssHelper.broadcast('cssMediaQueriesTested');
+ }, 100);
+ }
+ else {
+ cssHelper.broadcast('cssMediaQueriesTested');
+ }
+ };
+
+ var test = function () {
+ for (var i = 0; i < styles.length; i++) {
+ cssHelper.removeStyle(styles[i]);
+ }
+ styles = [];
+ cssHelper.stylesheets(testStylesheets);
+ };
+
+ var scrollbarWidth = 0;
+ var checkForResize = function () {
+ var cvpw = cssHelper.getViewportWidth();
+ var cvph = cssHelper.getViewportHeight();
+
+ // determine scrollbar width in IE, see resizeHandler
+ if (ua.ie) {
+ var el = document.createElement('div');
+ el.style.position = 'absolute';
+ el.style.top = '-9999em';
+ el.style.overflow = 'scroll';
+ document.body.appendChild(el);
+ scrollbarWidth = el.offsetWidth - el.clientWidth;
+ document.body.removeChild(el);
+ }
+
+ var timer;
+ var resizeHandler = function () {
+ var vpw = cssHelper.getViewportWidth();
+ var vph = cssHelper.getViewportHeight();
+ // check whether vp size has really changed, because IE also triggers resize event when body size changes
+ // 20px allowance to accomodate short appearance of scrollbars in IE in some cases
+ if (Math.abs(vpw - cvpw) > scrollbarWidth || Math.abs(vph - cvph) > scrollbarWidth) {
+ cvpw = vpw;
+ cvph = vph;
+ clearTimeout(timer);
+ timer = setTimeout(function () {
+ if (!nativeSupport()) {
+ test();
+ }
+ else {
+ cssHelper.broadcast('cssMediaQueriesTested');
+ }
+ }, 500);
+ }
+ };
+
+ window.onresize = function () {
+ var x = window.onresize || function () {}; // save original
+ return function () {
+ x();
+ resizeHandler();
+ };
+ }();
+ };
+
+ // prevent jumping of layout by hiding everything before painting <body>
+ var docEl = document.documentElement;
+ docEl.style.marginLeft = '-32767px';
+
+ // make sure it comes back after a while
+ setTimeout(function () {
+ docEl.style.marginLeft = '';
+ }, 5000);
+
+ return function () {
+ if (!nativeSupport()) { // if browser doesn't support media queries
+ cssHelper.addListener('newStyleParsed', function (el) {
+ testStylesheet(el.cssHelperParsed.stylesheet);
+ });
+ // return visibility after media queries are tested
+ cssHelper.addListener('cssMediaQueriesTested', function () {
+ // force repaint in IE by changing width
+ if (ua.ie) {
+ docEl.style.width = '1px';
+ }
+ setTimeout(function () {
+ docEl.style.width = ''; // undo width
+ docEl.style.marginLeft = ''; // undo hide
+ }, 0);
+ // remove this listener to prevent following execution
+ cssHelper.removeListener('cssMediaQueriesTested', arguments.callee);
+ });
+ createMeter();
+ test();
+ }
+ else {
+ docEl.style.marginLeft = ''; // undo visibility hidden
+ }
+ checkForResize();
+ };
+}());
+
+
+// bonus: hotfix for IE6 SP1 (bug KB823727)
+try {
+ document.execCommand('BackgroundImageCache', false, true);
+} catch (e) {}
diff --git a/sphinx/theming.py b/sphinx/theming.py
index 41cbcae9..100a8931 100644
--- a/sphinx/theming.py
+++ b/sphinx/theming.py
@@ -30,6 +30,7 @@ from sphinx.errors import ThemeError
NODEFAULT = object()
THEMECONF = 'theme.conf'
+
class Theme(object):
"""
Represents the theme chosen in the configuration.
@@ -94,7 +95,8 @@ class Theme(object):
self.themedir = tempfile.mkdtemp('sxt')
self.themedir_created = True
for name in tinfo.namelist():
- if name.endswith('/'): continue
+ if name.endswith('/'):
+ continue
dirname = path.dirname(name)
if not path.isdir(path.join(self.themedir, dirname)):
os.makedirs(path.join(self.themedir, dirname))
diff --git a/sphinx/transforms.py b/sphinx/transforms.py
index 0e0f8334..42abea58 100644
--- a/sphinx/transforms.py
+++ b/sphinx/transforms.py
@@ -34,6 +34,7 @@ default_substitutions = set([
'today',
])
+
class DefaultSubstitutions(Transform):
"""
Replace some substitutions if they aren't defined in the document.
@@ -69,9 +70,9 @@ class MoveModuleTargets(Transform):
if not node['ids']:
continue
if ('ismod' in node and
- node.parent.__class__ is nodes.section and
- # index 0 is the section title node
- node.parent.index(node) == 1):
+ node.parent.__class__ is nodes.section and
+ # index 0 is the section title node
+ node.parent.index(node) == 1):
node.parent['ids'][0:0] = node['ids']
node.parent.remove(node)
@@ -86,10 +87,10 @@ class HandleCodeBlocks(Transform):
# move doctest blocks out of blockquotes
for node in self.document.traverse(nodes.block_quote):
if all(isinstance(child, nodes.doctest_block) for child
- in node.children):
+ in node.children):
node.replace_self(node.children)
# combine successive doctest blocks
- #for node in self.document.traverse(nodes.doctest_block):
+ # for node in self.document.traverse(nodes.doctest_block):
# if node not in node.parent.children:
# continue
# parindex = node.parent.index(node)
@@ -173,7 +174,7 @@ class Locale(Transform):
parser = RSTParser()
- #phase1: replace reference ids with translated names
+ # phase1: replace reference ids with translated names
for node, msg in extract_messages(self.document):
msgstr = catalog.gettext(msg)
# XXX add marker to untranslated parts
@@ -198,7 +199,7 @@ class Locale(Transform):
pass
# XXX doctest and other block markup
if not isinstance(patch, nodes.paragraph):
- continue # skip for now
+ continue # skip for now
processed = False # skip flag
@@ -281,15 +282,14 @@ class Locale(Transform):
node.children = patch.children
node['translated'] = True
-
- #phase2: translation
+ # phase2: translation
for node, msg in extract_messages(self.document):
if node.get('translated', False):
continue
msgstr = catalog.gettext(msg)
# XXX add marker to untranslated parts
- if not msgstr or msgstr == msg: # as-of-yet untranslated
+ if not msgstr or msgstr == msg: # as-of-yet untranslated
continue
# Avoid "Literal block expected; none found." warnings.
@@ -309,12 +309,13 @@ class Locale(Transform):
pass
# XXX doctest and other block markup
if not isinstance(patch, nodes.paragraph):
- continue # skip for now
+ continue # skip for now
# auto-numbered foot note reference should use original 'ids'.
def is_autonumber_footnote_ref(node):
return isinstance(node, nodes.footnote_reference) and \
node.get('auto') == 1
+
def list_replace_or_append(lst, old, new):
if old in lst:
lst[lst.index(old)] = new
@@ -339,7 +340,7 @@ class Locale(Transform):
for id in new['ids']:
self.document.ids[id] = new
list_replace_or_append(
- self.document.autofootnote_refs, old, new)
+ self.document.autofootnote_refs, old, new)
if refname:
list_replace_or_append(
self.document.footnote_refs.setdefault(refname, []),
@@ -404,6 +405,7 @@ class Locale(Transform):
if len(old_refs) != len(new_refs):
env.warn_node('inconsistent term references in '
'translated message', node)
+
def get_ref_key(node):
case = node["refdomain"], node["reftype"]
if case == ('std', 'term'):
diff --git a/sphinx/util/__init__.py b/sphinx/util/__init__.py
index 3a4334e7..e7277520 100644
--- a/sphinx/util/__init__.py
+++ b/sphinx/util/__init__.py
@@ -29,15 +29,16 @@ from docutils.utils import relative_path
import jinja2
import sphinx
-from sphinx.errors import PycodeError
+from sphinx.errors import PycodeError, SphinxParallelError
from sphinx.util.console import strip_colors
+from sphinx.util.osutil import fs_encoding
# import other utilities; partly for backwards compatibility, so don't
# prune unused ones indiscriminately
from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, walk, \
- mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime
+ mtimes_of_files, movefile, copyfile, copytimes, make_filename, ustrftime
from sphinx.util.nodes import nested_parse_with_titles, split_explicit_title, \
- explicit_title_re, caption_ref_re
+ explicit_title_re, caption_ref_re
from sphinx.util.matching import patfilter
# Generally useful regular expressions.
@@ -129,6 +130,11 @@ class FilenameUniqDict(dict):
del self[filename]
self._existing.discard(unique)
+ def merge_other(self, docnames, other):
+ for filename, (docs, unique) in other.items():
+ for doc in docs & docnames:
+ self.add_file(doc, filename)
+
def __getstate__(self):
return self._existing
@@ -185,7 +191,11 @@ _DEBUG_HEADER = '''\
def save_traceback(app):
"""Save the current exception's traceback in a temporary file."""
import platform
- exc = traceback.format_exc()
+ exc = sys.exc_info()[1]
+ if isinstance(exc, SphinxParallelError):
+ exc_format = '(Error in parallel process)\n' + exc.traceback
+ else:
+ exc_format = traceback.format_exc()
fd, path = tempfile.mkstemp('.log', 'sphinx-err-')
last_msgs = ''
if app is not None:
@@ -200,11 +210,13 @@ def save_traceback(app):
last_msgs)).encode('utf-8'))
if app is not None:
for extname, extmod in iteritems(app._extensions):
+ modfile = getattr(extmod, '__file__', 'unknown')
+ if isinstance(modfile, bytes):
+ modfile = modfile.decode(fs_encoding, 'replace')
os.write(fd, ('# %s (%s) from %s\n' % (
- extname, app._extension_versions[extname],
- getattr(extmod, '__file__', 'unknown'))
- ).encode('utf-8'))
- os.write(fd, exc.encode('utf-8'))
+ extname, app._extension_metadata[extname]['version'],
+ modfile)).encode('utf-8'))
+ os.write(fd, exc_format.encode('utf-8'))
os.close(fd)
return path
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index 8e61c12b..58906781 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -1,89 +1,89 @@
-# -*- coding: utf-8 -*-
-"""
- sphinx.util.i18n
- ~~~~~~~~~~~~~~~~
-
- Builder superclass for all builders.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from os import path
-from collections import namedtuple
-
-from babel.messages.pofile import read_po
-from babel.messages.mofile import write_mo
-
-from sphinx.util.osutil import walk
-
-
-LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain')
-
-
-class CatalogInfo(LocaleFileInfoBase):
-
- @property
- def po_file(self):
- return self.domain + '.po'
-
- @property
- def mo_file(self):
- return self.domain + '.mo'
-
- @property
- def po_path(self):
- return path.join(self.base_dir, self.po_file)
-
- @property
- def mo_path(self):
- return path.join(self.base_dir, self.mo_file)
-
- def is_outdated(self):
- return (
- not path.exists(self.mo_path) or
- path.getmtime(self.mo_path) < path.getmtime(self.po_path))
-
- def write_mo(self, locale):
- with open(self.po_path, 'rt') as po:
- with open(self.mo_path, 'wb') as mo:
- write_mo(mo, read_po(po, locale))
-
-
-def get_catalogs(locale_dirs, locale, gettext_compact=False, force_all=False):
- """
- :param list locale_dirs:
- list of path as `['locale_dir1', 'locale_dir2', ...]` to find
- translation catalogs. Each path contains a structure such as
- `<locale>/LC_MESSAGES/domain.po`.
- :param str locale: a language as `'en'`
- :param boolean gettext_compact:
- * False: keep domains directory structure (default).
- * True: domains in the sub directory will be merged into 1 file.
- :param boolean force_all:
- Set True if you want to get all catalogs rather than updated catalogs.
- default is False.
- :return: [CatalogInfo(), ...]
- """
- if not locale:
- return [] # locale is not specified
-
- catalogs = set()
- for locale_dir in locale_dirs:
- base_dir = path.join(locale_dir, locale, 'LC_MESSAGES')
-
- if not path.exists(base_dir):
- continue # locale path is not found
-
- for dirpath, dirnames, filenames in walk(base_dir, followlinks=True):
- filenames = [f for f in filenames if f.endswith('.po')]
- for filename in filenames:
- base = path.splitext(filename)[0]
- domain = path.relpath(path.join(dirpath, base), base_dir)
- if gettext_compact and path.sep in domain:
- domain = path.split(domain)[0]
- cat = CatalogInfo(base_dir, domain)
- if force_all or cat.is_outdated():
- catalogs.add(cat)
-
- return catalogs
+# -*- coding: utf-8 -*-
+"""
+ sphinx.util.i18n
+ ~~~~~~~~~~~~~~~~
+
+ Builder superclass for all builders.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from os import path
+from collections import namedtuple
+
+from babel.messages.pofile import read_po
+from babel.messages.mofile import write_mo
+
+from sphinx.util.osutil import walk
+
+
+LocaleFileInfoBase = namedtuple('CatalogInfo', 'base_dir,domain')
+
+
+class CatalogInfo(LocaleFileInfoBase):
+
+ @property
+ def po_file(self):
+ return self.domain + '.po'
+
+ @property
+ def mo_file(self):
+ return self.domain + '.mo'
+
+ @property
+ def po_path(self):
+ return path.join(self.base_dir, self.po_file)
+
+ @property
+ def mo_path(self):
+ return path.join(self.base_dir, self.mo_file)
+
+ def is_outdated(self):
+ return (
+ not path.exists(self.mo_path) or
+ path.getmtime(self.mo_path) < path.getmtime(self.po_path))
+
+ def write_mo(self, locale):
+ with open(self.po_path, 'rt') as po:
+ with open(self.mo_path, 'wb') as mo:
+ write_mo(mo, read_po(po, locale))
+
+
+def get_catalogs(locale_dirs, locale, gettext_compact=False, force_all=False):
+ """
+ :param list locale_dirs:
+ list of path as `['locale_dir1', 'locale_dir2', ...]` to find
+ translation catalogs. Each path contains a structure such as
+ `<locale>/LC_MESSAGES/domain.po`.
+ :param str locale: a language as `'en'`
+ :param boolean gettext_compact:
+ * False: keep domains directory structure (default).
+ * True: domains in the sub directory will be merged into 1 file.
+ :param boolean force_all:
+ Set True if you want to get all catalogs rather than updated catalogs.
+ default is False.
+ :return: [CatalogInfo(), ...]
+ """
+ if not locale:
+ return [] # locale is not specified
+
+ catalogs = set()
+ for locale_dir in locale_dirs:
+ base_dir = path.join(locale_dir, locale, 'LC_MESSAGES')
+
+ if not path.exists(base_dir):
+ continue # locale path is not found
+
+ for dirpath, dirnames, filenames in walk(base_dir, followlinks=True):
+ filenames = [f for f in filenames if f.endswith('.po')]
+ for filename in filenames:
+ base = path.splitext(filename)[0]
+ domain = path.relpath(path.join(dirpath, base), base_dir)
+ if gettext_compact and path.sep in domain:
+ domain = path.split(domain)[0]
+ cat = CatalogInfo(base_dir, domain)
+ if force_all or cat.is_outdated():
+ catalogs.add(cat)
+
+ return catalogs
diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py
index 9b5f58b7..58ee31b7 100644
--- a/sphinx/util/osutil.py
+++ b/sphinx/util/osutil.py
@@ -194,3 +194,9 @@ def abspath(pathdir):
if isinstance(pathdir, bytes):
pathdir = pathdir.decode(fs_encoding)
return pathdir
+
+
+def getcwd():
+ if hasattr(os, 'getcwdu'):
+ return os.getcwdu()
+ return os.getcwd()
diff --git a/sphinx/util/parallel.py b/sphinx/util/parallel.py
new file mode 100644
index 00000000..5f9e8eff
--- /dev/null
+++ b/sphinx/util/parallel.py
@@ -0,0 +1,131 @@
+# -*- coding: utf-8 -*-
+"""
+ sphinx.util.parallel
+ ~~~~~~~~~~~~~~~~~~~~
+
+ Parallel building utilities.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import os
+import traceback
+
+try:
+ import multiprocessing
+ import threading
+except ImportError:
+ multiprocessing = threading = None
+
+from six.moves import queue
+
+from sphinx.errors import SphinxParallelError
+
+# our parallel functionality only works for the forking Process
+parallel_available = multiprocessing and (os.name == 'posix')
+
+
+class SerialTasks(object):
+ """Has the same interface as ParallelTasks, but executes tasks directly."""
+
+ def __init__(self, nproc=1):
+ pass
+
+ def add_task(self, task_func, arg=None, result_func=None):
+ if arg is not None:
+ res = task_func(arg)
+ else:
+ res = task_func()
+ if result_func:
+ result_func(res)
+
+ def join(self):
+ pass
+
+
+class ParallelTasks(object):
+ """Executes *nproc* tasks in parallel after forking."""
+
+ def __init__(self, nproc):
+ self.nproc = nproc
+ # list of threads to join when waiting for completion
+ self._taskid = 0
+ self._threads = {}
+ self._nthreads = 0
+ # queue of result objects to process
+ self.result_queue = queue.Queue()
+ self._nprocessed = 0
+ # maps tasks to result functions
+ self._result_funcs = {}
+ # allow only "nproc" worker processes at once
+ self._semaphore = threading.Semaphore(self.nproc)
+
+ def _process(self, pipe, func, arg):
+ try:
+ if arg is None:
+ ret = func()
+ else:
+ ret = func(arg)
+ pipe.send((False, ret))
+ except BaseException as err:
+ pipe.send((True, (err, traceback.format_exc())))
+
+ def _process_thread(self, tid, func, arg):
+ precv, psend = multiprocessing.Pipe(False)
+ proc = multiprocessing.Process(target=self._process,
+ args=(psend, func, arg))
+ proc.start()
+ result = precv.recv()
+ self.result_queue.put((tid, arg) + result)
+ proc.join()
+ self._semaphore.release()
+
+ def add_task(self, task_func, arg=None, result_func=None):
+ tid = self._taskid
+ self._taskid += 1
+ self._semaphore.acquire()
+ thread = threading.Thread(target=self._process_thread,
+ args=(tid, task_func, arg))
+ thread.setDaemon(True)
+ thread.start()
+ self._nthreads += 1
+ self._threads[tid] = thread
+ self._result_funcs[tid] = result_func or (lambda *x: None)
+ # try processing results already in parallel
+ try:
+ tid, arg, exc, result = self.result_queue.get(False)
+ except queue.Empty:
+ pass
+ else:
+ del self._threads[tid]
+ if exc:
+ raise SphinxParallelError(*result)
+ self._result_funcs.pop(tid)(arg, result)
+ self._nprocessed += 1
+
+ def join(self):
+ while self._nprocessed < self._nthreads:
+ tid, arg, exc, result = self.result_queue.get()
+ del self._threads[tid]
+ if exc:
+ raise SphinxParallelError(*result)
+ self._result_funcs.pop(tid)(arg, result)
+ self._nprocessed += 1
+
+ # there shouldn't be any threads left...
+ for t in self._threads.values():
+ t.join()
+
+
+def make_chunks(arguments, nproc, maxbatch=10):
+ # determine how many documents to read in one go
+ nargs = len(arguments)
+ chunksize = min(nargs // nproc, maxbatch)
+ if chunksize == 0:
+ chunksize = 1
+ nchunks, rest = divmod(nargs, chunksize)
+ if rest:
+ nchunks += 1
+ # partition documents in "chunks" that will be written by one Process
+ return [arguments[i*chunksize:(i+1)*chunksize] for i in range(nchunks)]
diff --git a/tests/path.py b/tests/path.py
index 3e2c8f89..573d3d3c 100755
--- a/tests/path.py
+++ b/tests/path.py
@@ -123,6 +123,9 @@ class path(text_type):
"""
os.unlink(self)
+ def utime(self, arg):
+ os.utime(self, arg)
+
def write_text(self, text, **kwargs):
"""
Writes the given `text` to the file.
@@ -195,6 +198,9 @@ class path(text_type):
"""
return self.__class__(os.path.join(self, *map(self.__class__, args)))
+ def listdir(self):
+ return os.listdir(self)
+
__div__ = __truediv__ = joinpath
def __repr__(self):
diff --git a/tests/root/conf.py b/tests/root/conf.py
index f0d40148..d12e8167 100644
--- a/tests/root/conf.py
+++ b/tests/root/conf.py
@@ -3,12 +3,9 @@
import sys, os
sys.path.append(os.path.abspath('.'))
-sys.path.append(os.path.abspath('..'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.jsmath', 'sphinx.ext.todo',
- 'sphinx.ext.coverage', 'sphinx.ext.autosummary',
- 'sphinx.ext.doctest', 'sphinx.ext.extlinks',
- 'sphinx.ext.viewcode', 'ext']
+ 'sphinx.ext.coverage', 'sphinx.ext.extlinks', 'ext']
jsmath_path = 'dummy.js'
@@ -18,7 +15,7 @@ master_doc = 'contents'
source_suffix = '.txt'
project = 'Sphinx <Tests>'
-copyright = '2010, Georg Brandl & Team'
+copyright = '2010-2014, Georg Brandl & Team'
# If this is changed, remember to update the versionchanges!
version = '0.6'
release = '0.6alpha1'
@@ -34,7 +31,8 @@ html_theme = 'testtheme'
html_theme_path = ['.']
html_theme_options = {'testopt': 'testoverride'}
html_sidebars = {'**': 'customsb.html',
- 'contents': ['contentssb.html', 'localtoc.html'] }
+ 'contents': ['contentssb.html', 'localtoc.html',
+ 'globaltoc.html']}
html_style = 'default.css'
html_static_path = ['_static', 'templated.css_t']
html_extra_path = ['robots.txt']
@@ -44,15 +42,15 @@ html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'}
htmlhelp_basename = 'SphinxTestsdoc'
latex_documents = [
- ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation',
- 'Georg Brandl \\and someone else', 'manual'),
+ ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation',
+ 'Georg Brandl \\and someone else', 'manual'),
]
latex_additional_files = ['svgimg.svg']
texinfo_documents = [
- ('contents', 'SphinxTests', 'Sphinx Tests',
- 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'),
+ ('contents', 'SphinxTests', 'Sphinx Tests',
+ 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'),
]
man_pages = [
@@ -65,8 +63,6 @@ value_from_conf_py = 84
coverage_c_path = ['special/*.h']
coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'}
-autosummary_generate = ['autosummary']
-
extlinks = {'issue': ('http://bugs.python.org/issue%s', 'issue '),
'pyurl': ('http://python.org/%s', None)}
@@ -80,35 +76,13 @@ autodoc_mock_imports = [
# modify tags from conf.py
tags.add('confpytag')
-# -- linkcode
-
-if 'test_linkcode' in tags:
- import glob
-
- extensions.remove('sphinx.ext.viewcode')
- extensions.append('sphinx.ext.linkcode')
-
- exclude_patterns.extend(glob.glob('*.txt') + glob.glob('*/*.txt'))
- exclude_patterns.remove('contents.txt')
- exclude_patterns.remove('objects.txt')
-
- def linkcode_resolve(domain, info):
- if domain == 'py':
- fn = info['module'].replace('.', '/')
- return "http://foobar/source/%s.py" % fn
- elif domain == "js":
- return "http://foobar/js/" + info['fullname']
- elif domain in ("c", "cpp"):
- return "http://foobar/%s/%s" % (domain, "".join(info['names']))
- else:
- raise AssertionError()
-
# -- extension API
from docutils import nodes
from sphinx import addnodes
from sphinx.util.compat import Directive
+
def userdesc_parse(env, sig, signode):
x, y = sig.split(':')
signode += addnodes.desc_name(x, x)
@@ -116,15 +90,19 @@ def userdesc_parse(env, sig, signode):
signode[-1] += addnodes.desc_parameter(y, y)
return x
+
def functional_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
return [nodes.strong(text='from function: %s' % options['opt'])]
+
class ClassDirective(Directive):
option_spec = {'opt': lambda x: x}
+
def run(self):
return [nodes.strong(text='from class: %s' % self.options['opt'])]
+
def setup(app):
app.add_config_value('value_from_conf_py', 42, False)
app.add_directive('funcdir', functional_directive, opt=lambda x: x)
diff --git a/tests/root/contents.txt b/tests/root/contents.txt
index c6b75c63..d786b914 100644
--- a/tests/root/contents.txt
+++ b/tests/root/contents.txt
@@ -21,15 +21,14 @@ Contents:
bom
math
autodoc
- autosummary
metadata
extensions
- doctest
extensions
- versioning/index
footnote
lists
+ http://sphinx-doc.org/
+ Latest reference <http://sphinx-doc.org/latest/>
Python <http://python.org/>
Indices and tables
@@ -44,3 +43,13 @@ References
.. [Ref1] Reference target.
.. [Ref_1] Reference target 2.
+
+Test for issue #1157
+====================
+
+This used to crash:
+
+.. toctree::
+
+.. toctree::
+ :hidden:
diff --git a/tests/root/markup.txt b/tests/root/markup.txt
index f6f955e2..6ed396ac 100644
--- a/tests/root/markup.txt
+++ b/tests/root/markup.txt
@@ -142,6 +142,7 @@ Adding \n to test unescaping.
* :ref:`here <some-label>`
* :ref:`my-figure`
* :ref:`my-table`
+* :ref:`my-code-block`
* :doc:`subdir/includes`
* ``:download:`` is tested in includes.txt
* :option:`Python -c option <python -c>`
@@ -228,8 +229,11 @@ Version markup
Code blocks
-----------
+.. _my-code-block:
+
.. code-block:: ruby
:linenos:
+ :caption: my ruby code
def ruby?
false
@@ -356,6 +360,25 @@ Only directive
Always present, because set through conf.py/command line.
+Any role
+--------
+
+.. default-role:: any
+
+Test referencing to `headings <with>` and `objects <func_without_body>`.
+Also `modules <mod>` and `classes <Time>`.
+
+More domains:
+
+* `JS <bar.baz>`
+* `C <SphinxType>`
+* `myobj` (user markup)
+* `n::Array`
+* `perl -c`
+
+.. default-role::
+
+
.. rubric:: Footnotes
.. [#] Like footnotes.
diff --git a/tests/root/objects.txt b/tests/root/objects.txt
index 73661d22..ebed06ea 100644
--- a/tests/root/objects.txt
+++ b/tests/root/objects.txt
@@ -170,6 +170,10 @@ Others
.. cmdoption:: -c
+.. option:: +p
+
+Link to :option:`perl +p`.
+
User markup
===========
diff --git a/tests/root/undecodable.txt b/tests/root/undecodable.txt
new file mode 100644
index 00000000..a4cf5c37
--- /dev/null
+++ b/tests/root/undecodable.txt
@@ -0,0 +1,3 @@
+:orphan:
+
+here: »
diff --git a/tests/roots/test-api-set-translator/conf.py b/tests/roots/test-api-set-translator/conf.py
index ab458e60..3c160664 100644
--- a/tests/roots/test-api-set-translator/conf.py
+++ b/tests/roots/test-api-set-translator/conf.py
@@ -1,80 +1,80 @@
-# -*- coding: utf-8 -*-
-## set this by test
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
-
-from sphinx.writers.html import HTMLTranslator
-from sphinx.writers.latex import LaTeXTranslator
-from sphinx.writers.manpage import ManualPageTranslator
-from sphinx.writers.texinfo import TexinfoTranslator
-from sphinx.writers.text import TextTranslator
-from sphinx.writers.websupport import WebSupportTranslator
-from docutils.writers.docutils_xml import XMLTranslator
-
-
-project = 'test'
-master_doc = 'index'
-
-
-class ConfHTMLTranslator(HTMLTranslator):
- pass
-
-
-class ConfDirHTMLTranslator(HTMLTranslator):
- pass
-
-
-class ConfSingleHTMLTranslator(HTMLTranslator):
- pass
-
-
-class ConfPickleTranslator(HTMLTranslator):
- pass
-
-
-class ConfJsonTranslator(HTMLTranslator):
- pass
-
-
-class ConfLaTeXTranslator(LaTeXTranslator):
- pass
-
-
-class ConfManualPageTranslator(ManualPageTranslator):
- pass
-
-
-class ConfTexinfoTranslator(TexinfoTranslator):
- pass
-
-
-class ConfTextTranslator(TextTranslator):
- pass
-
-
-class ConfWebSupportTranslator(WebSupportTranslator):
- pass
-
-
-class ConfXMLTranslator(XMLTranslator):
- pass
-
-
-class ConfPseudoXMLTranslator(XMLTranslator):
- pass
-
-
-def setup(app):
- app.set_translator('html', ConfHTMLTranslator)
- app.set_translator('dirhtml', ConfDirHTMLTranslator)
- app.set_translator('singlehtml', ConfSingleHTMLTranslator)
- app.set_translator('pickle', ConfPickleTranslator)
- app.set_translator('json', ConfJsonTranslator)
- app.set_translator('latex', ConfLaTeXTranslator)
- app.set_translator('man', ConfManualPageTranslator)
- app.set_translator('texinfo', ConfTexinfoTranslator)
- app.set_translator('text', ConfTextTranslator)
- app.set_translator('websupport', ConfWebSupportTranslator)
- app.set_translator('xml', ConfXMLTranslator)
- app.set_translator('pseudoxml', ConfPseudoXMLTranslator)
+# -*- coding: utf-8 -*-
+## set this by test
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+
+from sphinx.writers.html import HTMLTranslator
+from sphinx.writers.latex import LaTeXTranslator
+from sphinx.writers.manpage import ManualPageTranslator
+from sphinx.writers.texinfo import TexinfoTranslator
+from sphinx.writers.text import TextTranslator
+from sphinx.writers.websupport import WebSupportTranslator
+from docutils.writers.docutils_xml import XMLTranslator
+
+
+project = 'test'
+master_doc = 'index'
+
+
+class ConfHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfDirHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfSingleHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfPickleTranslator(HTMLTranslator):
+ pass
+
+
+class ConfJsonTranslator(HTMLTranslator):
+ pass
+
+
+class ConfLaTeXTranslator(LaTeXTranslator):
+ pass
+
+
+class ConfManualPageTranslator(ManualPageTranslator):
+ pass
+
+
+class ConfTexinfoTranslator(TexinfoTranslator):
+ pass
+
+
+class ConfTextTranslator(TextTranslator):
+ pass
+
+
+class ConfWebSupportTranslator(WebSupportTranslator):
+ pass
+
+
+class ConfXMLTranslator(XMLTranslator):
+ pass
+
+
+class ConfPseudoXMLTranslator(XMLTranslator):
+ pass
+
+
+def setup(app):
+ app.set_translator('html', ConfHTMLTranslator)
+ app.set_translator('dirhtml', ConfDirHTMLTranslator)
+ app.set_translator('singlehtml', ConfSingleHTMLTranslator)
+ app.set_translator('pickle', ConfPickleTranslator)
+ app.set_translator('json', ConfJsonTranslator)
+ app.set_translator('latex', ConfLaTeXTranslator)
+ app.set_translator('man', ConfManualPageTranslator)
+ app.set_translator('texinfo', ConfTexinfoTranslator)
+ app.set_translator('text', ConfTextTranslator)
+ app.set_translator('websupport', ConfWebSupportTranslator)
+ app.set_translator('xml', ConfXMLTranslator)
+ app.set_translator('pseudoxml', ConfPseudoXMLTranslator)
diff --git a/tests/roots/test-api-set-translator/index.rst b/tests/roots/test-api-set-translator/index.rst
index 101bd39a..e5a29cf2 100644
--- a/tests/roots/test-api-set-translator/index.rst
+++ b/tests/roots/test-api-set-translator/index.rst
@@ -1,3 +1,3 @@
-=======================
-Test API set_translator
+=======================
+Test API set_translator
======================= \ No newline at end of file
diff --git a/tests/roots/test-api-set-translator/nonext/conf.py b/tests/roots/test-api-set-translator/nonext/conf.py
index a07b3c27..5a92f736 100644
--- a/tests/roots/test-api-set-translator/nonext/conf.py
+++ b/tests/roots/test-api-set-translator/nonext/conf.py
@@ -1,9 +1,9 @@
-# -*- coding: utf-8 -*-
-
-import os
-import sys
-
-sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
-
-project = 'test'
-master_doc = 'index'
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
+
+project = 'test'
+master_doc = 'index'
diff --git a/tests/roots/test-api-set-translator/translator.py b/tests/roots/test-api-set-translator/translator.py
index d5c23d39..015b4aa2 100644
--- a/tests/roots/test-api-set-translator/translator.py
+++ b/tests/roots/test-api-set-translator/translator.py
@@ -1,6 +1,6 @@
-# -*- coding: utf-8 -*-
-
-from sphinx.writers.html import HTMLTranslator
-
-class ExtHTMLTranslator(HTMLTranslator):
- pass
+# -*- coding: utf-8 -*-
+
+from sphinx.writers.html import HTMLTranslator
+
+class ExtHTMLTranslator(HTMLTranslator):
+ pass
diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-autosummary/conf.py
index 542696e9..d9a44748 100644
--- a/tests/roots/test-autosummary/conf.py
+++ b/tests/roots/test-autosummary/conf.py
@@ -1,3 +1,7 @@
+import sys, os
+
+sys.path.insert(0, os.path.abspath('.'))
+
extensions = ['sphinx.ext.autosummary']
# The suffix of source filenames.
diff --git a/tests/roots/test-autosummary/contents.rst b/tests/roots/test-autosummary/contents.rst
index 32390a32..cd4b7c5e 100644
--- a/tests/roots/test-autosummary/contents.rst
+++ b/tests/roots/test-autosummary/contents.rst
@@ -1,6 +1,7 @@
-
-.. autosummary::
- :nosignatures:
- :toctree:
-
- dummy_module
+
+.. autosummary::
+ :nosignatures:
+ :toctree:
+
+ dummy_module
+ sphinx
diff --git a/tests/root/autosummary.txt b/tests/roots/test-autosummary/sphinx.rst
index fc1a35a0..fc1a35a0 100644
--- a/tests/root/autosummary.txt
+++ b/tests/roots/test-autosummary/sphinx.rst
diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py
new file mode 100644
index 00000000..1ba342a6
--- /dev/null
+++ b/tests/roots/test-build-text/conf.py
@@ -0,0 +1,2 @@
+master_doc = 'contents'
+source_suffix = '.txt'
diff --git a/tests/roots/test-build-text/contents.txt b/tests/roots/test-build-text/contents.txt
new file mode 100644
index 00000000..420d1428
--- /dev/null
+++ b/tests/roots/test-build-text/contents.txt
@@ -0,0 +1,8 @@
+.. toctree::
+
+ maxwidth
+ lineblock
+ nonascii_title
+ nonascii_table
+ nonascii_maxwidth
+ table
diff --git a/tests/roots/test-build-text/lineblock.txt b/tests/roots/test-build-text/lineblock.txt
new file mode 100644
index 00000000..b9cd0ed7
--- /dev/null
+++ b/tests/roots/test-build-text/lineblock.txt
@@ -0,0 +1,6 @@
+* one
+
+ | line-block 1
+ | line-block 2
+
+followed paragraph.
diff --git a/tests/roots/test-build-text/maxwidth.txt b/tests/roots/test-build-text/maxwidth.txt
new file mode 100644
index 00000000..c36f8a02
--- /dev/null
+++ b/tests/roots/test-build-text/maxwidth.txt
@@ -0,0 +1,6 @@
+.. seealso:: ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+
+* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+
+spam egg
diff --git a/tests/roots/test-build-text/nonascii_maxwidth.txt b/tests/roots/test-build-text/nonascii_maxwidth.txt
new file mode 100644
index 00000000..e9f0fd9b
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_maxwidth.txt
@@ -0,0 +1,5 @@
+abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc
+
+日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語
+
+abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語
diff --git a/tests/roots/test-build-text/nonascii_table.txt b/tests/roots/test-build-text/nonascii_table.txt
new file mode 100644
index 00000000..709e0f2f
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_table.txt
@@ -0,0 +1,7 @@
+.. list-table::
+
+ - - spam
+ - egg
+
+ - - 日本語
+ - 日本語
diff --git a/tests/roots/test-build-text/nonascii_title.txt b/tests/roots/test-build-text/nonascii_title.txt
new file mode 100644
index 00000000..6d3b1f61
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_title.txt
@@ -0,0 +1,2 @@
+日本語
+======
diff --git a/tests/roots/test-build-text/table.txt b/tests/roots/test-build-text/table.txt
new file mode 100644
index 00000000..84328940
--- /dev/null
+++ b/tests/roots/test-build-text/table.txt
@@ -0,0 +1,7 @@
+ +-----+-----+
+ | XXX | XXX |
+ +-----+-----+
+ | | XXX |
+ +-----+-----+
+ | XXX | |
+ +-----+-----+
diff --git a/tests/roots/test-circular/conf.py b/tests/roots/test-circular/conf.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/roots/test-circular/conf.py
diff --git a/tests/roots/test-circular/contents.rst b/tests/roots/test-circular/contents.rst
new file mode 100644
index 00000000..294e674d
--- /dev/null
+++ b/tests/roots/test-circular/contents.rst
@@ -0,0 +1,4 @@
+.. toctree::
+
+ sub
+
diff --git a/tests/roots/test-circular/sub.rst b/tests/roots/test-circular/sub.rst
new file mode 100644
index 00000000..070c3974
--- /dev/null
+++ b/tests/roots/test-circular/sub.rst
@@ -0,0 +1,3 @@
+.. toctree::
+
+ contents
diff --git a/tests/roots/test-directive-code/dedent.rst b/tests/roots/test-directive-code/dedent.rst
index d29e2cfa..9ec1c0ee 100644
--- a/tests/roots/test-directive-code/dedent.rst
+++ b/tests/roots/test-directive-code/dedent.rst
@@ -1,22 +1,35 @@
Dedent
======
-Code blocks
------------
+Literal Include
+---------------
-.. code-block:: ruby
- :linenos:
- :dedent: 4
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 0
- def ruby?
- false
- end
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 1
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 2
-Literal Include
----------------
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 3
.. literalinclude:: literal.inc
:language: python
:lines: 10-11
:dedent: 4
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 1000
diff --git a/tests/roots/test-directive-code/dedent_code.rst b/tests/roots/test-directive-code/dedent_code.rst
new file mode 100644
index 00000000..3e8dacd6
--- /dev/null
+++ b/tests/roots/test-directive-code/dedent_code.rst
@@ -0,0 +1,53 @@
+Dedent
+======
+
+Code blocks
+-----------
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 0
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 1
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 2
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 3
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 4
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 1000
+
+ def ruby?
+ false
+ end
diff --git a/tests/roots/test-doctest/conf.py b/tests/roots/test-doctest/conf.py
new file mode 100644
index 00000000..f6a12edb
--- /dev/null
+++ b/tests/roots/test-doctest/conf.py
@@ -0,0 +1,5 @@
+extensions = ['sphinx.ext.doctest']
+
+project = 'test project for doctest'
+master_doc = 'doctest.txt'
+source_suffix = '.txt'
diff --git a/tests/root/doctest.txt b/tests/roots/test-doctest/doctest.txt
index d029cd88..ce4d88bd 100644
--- a/tests/root/doctest.txt
+++ b/tests/roots/test-doctest/doctest.txt
@@ -125,5 +125,5 @@ Special directives
.. testcleanup:: *
- import test_doctest
- test_doctest.cleanup_call()
+ import test_ext_doctest
+ test_ext_doctest.cleanup_call()
diff --git a/tests/roots/test-docutilsconf/contents.txt b/tests/roots/test-docutilsconf/contents.txt
index 3d0003b8..b20204e6 100644
--- a/tests/roots/test-docutilsconf/contents.txt
+++ b/tests/roots/test-docutilsconf/contents.txt
@@ -1,15 +1,15 @@
-docutils conf
-=============
-
-field-name-limit
-----------------
-
-:short: desc
-:long long long long: long title
-
-option-limit
-------------
-
---short short desc
---long-long-long-long long desc
-
+docutils conf
+=============
+
+field-name-limit
+----------------
+
+:short: desc
+:long long long long: long title
+
+option-limit
+------------
+
+--short short desc
+--long-long-long-long long desc
+
diff --git a/tests/roots/test-ext-viewcode/conf.py b/tests/roots/test-ext-viewcode/conf.py
index 946cb786..a99a72bb 100644
--- a/tests/roots/test-ext-viewcode/conf.py
+++ b/tests/roots/test-ext-viewcode/conf.py
@@ -1,8 +1,24 @@
-# -*- coding: utf-8 -*-
-
-import sys
-import os
-
-sys.path.insert(0, os.path.abspath('.'))
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
-master_doc = 'index'
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+
+sys.path.insert(0, os.path.abspath('.'))
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+master_doc = 'index'
+
+
+if 'test_linkcode' in tags:
+ extensions.remove('sphinx.ext.viewcode')
+ extensions.append('sphinx.ext.linkcode')
+
+ def linkcode_resolve(domain, info):
+ if domain == 'py':
+ fn = info['module'].replace('.', '/')
+ return "http://foobar/source/%s.py" % fn
+ elif domain == "js":
+ return "http://foobar/js/" + info['fullname']
+ elif domain in ("c", "cpp"):
+ return "http://foobar/%s/%s" % (domain, "".join(info['names']))
+ else:
+ raise AssertionError()
diff --git a/tests/roots/test-ext-viewcode/index.rst b/tests/roots/test-ext-viewcode/index.rst
index 72e94321..b5776cfa 100644
--- a/tests/roots/test-ext-viewcode/index.rst
+++ b/tests/roots/test-ext-viewcode/index.rst
@@ -1,29 +1,34 @@
-viewcode
-========
-
-.. py:module:: spam
-
-.. autofunction:: func1
-
-.. autofunction:: func2
-
-.. autofunction:: spam.mod1.func1
-
-.. autofunction:: spam.mod2.func2
-
-.. autofunction:: Class1
-
-.. autofunction:: Class2
-
-.. autofunction:: spam.mod1.Class1
-
-.. autofunction:: spam.mod2.Class2
-
-
-.. literalinclude:: spam/__init__.py
- :language: python
- :pyobject: func1
-
-.. literalinclude:: spam/mod1.py
- :language: python
- :pyobject: func1
+viewcode
+========
+
+.. py:module:: spam
+
+.. autofunction:: func1
+
+.. autofunction:: func2
+
+.. autofunction:: spam.mod1.func1
+
+.. autofunction:: spam.mod2.func2
+
+.. autofunction:: Class1
+
+.. autofunction:: Class2
+
+.. autofunction:: spam.mod1.Class1
+
+.. autofunction:: spam.mod2.Class2
+
+
+.. literalinclude:: spam/__init__.py
+ :language: python
+ :pyobject: func1
+
+.. literalinclude:: spam/mod1.py
+ :language: python
+ :pyobject: func1
+
+
+.. toctree::
+
+ objects
diff --git a/tests/roots/test-ext-viewcode/objects.rst b/tests/roots/test-ext-viewcode/objects.rst
new file mode 100644
index 00000000..8d304bec
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/objects.rst
@@ -0,0 +1,169 @@
+Testing object descriptions
+===========================
+
+.. function:: func_without_module(a, b, *c[, d])
+
+ Does something.
+
+.. function:: func_without_body()
+
+.. function:: func_noindex
+ :noindex:
+
+.. function:: func_with_module
+ :module: foolib
+
+Referring to :func:`func with no index <func_noindex>`.
+Referring to :func:`nothing <>`.
+
+.. module:: mod
+ :synopsis: Module synopsis.
+ :platform: UNIX
+
+.. function:: func_in_module
+
+.. class:: Cls
+
+ .. method:: meth1
+
+ .. staticmethod:: meths
+
+ .. attribute:: attr
+
+.. explicit class given
+.. method:: Cls.meth2
+
+.. explicit module given
+.. exception:: Error(arg1, arg2)
+ :module: errmod
+
+.. data:: var
+
+
+.. currentmodule:: None
+
+.. function:: func_without_module2() -> annotation
+
+.. object:: long(parameter, \
+ list)
+ another one
+
+.. class:: TimeInt
+
+ Has only one parameter (triggers special behavior...)
+
+ :param moo: |test|
+ :type moo: |test|
+
+.. |test| replace:: Moo
+
+.. class:: Time(hour, minute, isdst)
+
+ :param year: The year.
+ :type year: TimeInt
+ :param TimeInt minute: The minute.
+ :param isdst: whether it's DST
+ :type isdst: * some complex
+ * expression
+ :returns: a new :class:`Time` instance
+ :rtype: :class:`Time`
+ :raises ValueError: if the values are out of range
+ :ivar int hour: like *hour*
+ :ivar minute: like *minute*
+ :vartype minute: int
+ :param hour: Some parameter
+ :type hour: DuplicateType
+ :param hour: Duplicate param. Should not lead to crashes.
+ :type hour: DuplicateType
+ :param .Cls extcls: A class from another module.
+
+
+C items
+=======
+
+.. c:function:: Sphinx_DoSomething()
+
+.. c:member:: SphinxStruct.member
+
+.. c:macro:: SPHINX_USE_PYTHON
+
+.. c:type:: SphinxType
+
+.. c:var:: sphinx_global
+
+
+Javascript items
+================
+
+.. js:function:: foo()
+
+.. js:data:: bar
+
+.. documenting the method of any object
+.. js:function:: bar.baz(href, callback[, errback])
+
+ :param string href: The location of the resource.
+ :param callback: Get's called with the data returned by the resource.
+ :throws InvalidHref: If the `href` is invalid.
+ :returns: `undefined`
+
+.. js:attribute:: bar.spam
+
+References
+==========
+
+Referencing :class:`mod.Cls` or :Class:`mod.Cls` should be the same.
+
+With target: :c:func:`Sphinx_DoSomething()` (parentheses are handled),
+:c:member:`SphinxStruct.member`, :c:macro:`SPHINX_USE_PYTHON`,
+:c:type:`SphinxType *` (pointer is handled), :c:data:`sphinx_global`.
+
+Without target: :c:func:`CFunction`. :c:func:`!malloc`.
+
+:js:func:`foo()`
+:js:func:`foo`
+
+:js:data:`bar`
+:js:func:`bar.baz()`
+:js:func:`bar.baz`
+:js:func:`~bar.baz()`
+
+:js:attr:`bar.baz`
+
+
+Others
+======
+
+.. envvar:: HOME
+
+.. program:: python
+
+.. cmdoption:: -c command
+
+.. program:: perl
+
+.. cmdoption:: -c
+
+.. option:: +p
+
+Link to :option:`perl +p`.
+
+
+User markup
+===========
+
+.. userdesc:: myobj:parameter
+
+ Description of userdesc.
+
+
+Referencing :userdescrole:`myobj`.
+
+
+CPP domain
+==========
+
+.. cpp:class:: n::Array<T,d>
+
+ .. cpp:function:: T& operator[]( unsigned j )
+ const T& operator[]( unsigned j ) const
diff --git a/tests/roots/test-ext-viewcode/spam/__init__.py b/tests/roots/test-ext-viewcode/spam/__init__.py
index 980e9b8a..2c8603c1 100644
--- a/tests/roots/test-ext-viewcode/spam/__init__.py
+++ b/tests/roots/test-ext-viewcode/spam/__init__.py
@@ -1,7 +1,7 @@
-from __future__ import absolute_import
-
-from .mod1 import func1, Class1
-from .mod2 import (
- func2,
- Class2,
-)
+from __future__ import absolute_import
+
+from .mod1 import func1, Class1
+from .mod2 import (
+ func2,
+ Class2,
+)
diff --git a/tests/roots/test-ext-viewcode/spam/mod1.py b/tests/roots/test-ext-viewcode/spam/mod1.py
index e5eb0d47..7133fc82 100644
--- a/tests/roots/test-ext-viewcode/spam/mod1.py
+++ b/tests/roots/test-ext-viewcode/spam/mod1.py
@@ -1,15 +1,15 @@
-"""
-mod1
-"""
-
-def func1(a, b):
- """
- this is func1
- """
- return a, b
-
-
-class Class1(object):
- """
- this is Class1
- """
+"""
+mod1
+"""
+
+def func1(a, b):
+ """
+ this is func1
+ """
+ return a, b
+
+
+class Class1(object):
+ """
+ this is Class1
+ """
diff --git a/tests/roots/test-ext-viewcode/spam/mod2.py b/tests/roots/test-ext-viewcode/spam/mod2.py
index 1841db1e..79834b66 100644
--- a/tests/roots/test-ext-viewcode/spam/mod2.py
+++ b/tests/roots/test-ext-viewcode/spam/mod2.py
@@ -1,15 +1,15 @@
-"""
-mod2
-"""
-
-def func2(a, b):
- """
- this is func2
- """
- return a, b
-
-
-class Class2(object):
- """
- this is Class2
- """
+"""
+mod2
+"""
+
+def func2(a, b):
+ """
+ this is func2
+ """
+ return a, b
+
+
+class Class2(object):
+ """
+ this is Class2
+ """
diff --git a/tests/roots/test-intl/refs_python_domain.txt b/tests/roots/test-intl/refs_python_domain.txt
index 20a8bc50..2b021f2e 100644
--- a/tests/roots/test-intl/refs_python_domain.txt
+++ b/tests/roots/test-intl/refs_python_domain.txt
@@ -1,15 +1,15 @@
-:tocdepth: 2
-
-i18n with python domain refs
-=============================
-
-.. currentmodule:: sensitive
-
-See this decorator: :func:`sensitive_variables`.
-
-.. function:: sensitive_variables(*variables)
-
- Some description
-
-.. currentmodule:: reporting
-
+:tocdepth: 2
+
+i18n with python domain refs
+=============================
+
+.. currentmodule:: sensitive
+
+See this decorator: :func:`sensitive_variables`.
+
+.. function:: sensitive_variables(*variables)
+
+ Some description
+
+.. currentmodule:: reporting
+
diff --git a/tests/roots/test-intl/subdir/contents.txt b/tests/roots/test-intl/subdir/contents.txt
index b6509baf..7578ce38 100644
--- a/tests/roots/test-intl/subdir/contents.txt
+++ b/tests/roots/test-intl/subdir/contents.txt
@@ -1,2 +1,2 @@
-subdir contents
-===============
+subdir contents
+===============
diff --git a/tests/roots/test-numbered-circular/conf.py b/tests/roots/test-numbered-circular/conf.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/roots/test-numbered-circular/conf.py
diff --git a/tests/roots/test-numbered-circular/contents.rst b/tests/roots/test-numbered-circular/contents.rst
new file mode 100644
index 00000000..c3129cd4
--- /dev/null
+++ b/tests/roots/test-numbered-circular/contents.rst
@@ -0,0 +1,5 @@
+.. toctree::
+ :numbered:
+
+ sub
+
diff --git a/tests/roots/test-numbered-circular/sub.rst b/tests/roots/test-numbered-circular/sub.rst
new file mode 100644
index 00000000..070c3974
--- /dev/null
+++ b/tests/roots/test-numbered-circular/sub.rst
@@ -0,0 +1,3 @@
+.. toctree::
+
+ contents
diff --git a/tests/roots/test-setup/doc/contents.txt b/tests/roots/test-setup/doc/contents.txt
index cb52405f..56960f53 100644
--- a/tests/roots/test-setup/doc/contents.txt
+++ b/tests/roots/test-setup/doc/contents.txt
@@ -1,5 +1,5 @@
-contents
-=========
-
-spam egg ham
-
+contents
+=========
+
+spam egg ham
+
diff --git a/tests/roots/test-templating/autosummary_templating.txt b/tests/roots/test-templating/autosummary_templating.txt
index 05643a02..6b396a3f 100644
--- a/tests/roots/test-templating/autosummary_templating.txt
+++ b/tests/roots/test-templating/autosummary_templating.txt
@@ -4,10 +4,4 @@ Autosummary templating test
.. autosummary::
:toctree: generated
- sphinx.application.Sphinx
-
-.. currentmodule:: sphinx.application
-
-.. autoclass:: TemplateBridge
-
- .. automethod:: render
+ sphinx.application.TemplateBridge
diff --git a/tests/root/versioning/added.txt b/tests/roots/test-versioning/added.txt
index 22a70739..22a70739 100644
--- a/tests/root/versioning/added.txt
+++ b/tests/roots/test-versioning/added.txt
diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py
new file mode 100644
index 00000000..edcf9295
--- /dev/null
+++ b/tests/roots/test-versioning/conf.py
@@ -0,0 +1,3 @@
+project = 'versioning test root'
+master_doc = 'index'
+source_suffix = '.txt'
diff --git a/tests/root/versioning/deleted.txt b/tests/roots/test-versioning/deleted.txt
index a1a9c4c9..a1a9c4c9 100644
--- a/tests/root/versioning/deleted.txt
+++ b/tests/roots/test-versioning/deleted.txt
diff --git a/tests/root/versioning/deleted_end.txt b/tests/roots/test-versioning/deleted_end.txt
index f30e6300..f30e6300 100644
--- a/tests/root/versioning/deleted_end.txt
+++ b/tests/roots/test-versioning/deleted_end.txt
diff --git a/tests/root/versioning/index.txt b/tests/roots/test-versioning/index.txt
index 9d098f75..9d098f75 100644
--- a/tests/root/versioning/index.txt
+++ b/tests/roots/test-versioning/index.txt
diff --git a/tests/root/versioning/insert.txt b/tests/roots/test-versioning/insert.txt
index 1c157cc9..1c157cc9 100644
--- a/tests/root/versioning/insert.txt
+++ b/tests/roots/test-versioning/insert.txt
diff --git a/tests/root/versioning/insert_beginning.txt b/tests/roots/test-versioning/insert_beginning.txt
index 57102a76..57102a76 100644
--- a/tests/root/versioning/insert_beginning.txt
+++ b/tests/roots/test-versioning/insert_beginning.txt
diff --git a/tests/root/versioning/insert_similar.txt b/tests/roots/test-versioning/insert_similar.txt
index ee9b5305..ee9b5305 100644
--- a/tests/root/versioning/insert_similar.txt
+++ b/tests/roots/test-versioning/insert_similar.txt
diff --git a/tests/root/versioning/modified.txt b/tests/roots/test-versioning/modified.txt
index 49cdad93..49cdad93 100644
--- a/tests/root/versioning/modified.txt
+++ b/tests/roots/test-versioning/modified.txt
diff --git a/tests/root/versioning/original.txt b/tests/roots/test-versioning/original.txt
index b3fe0609..b3fe0609 100644
--- a/tests/root/versioning/original.txt
+++ b/tests/roots/test-versioning/original.txt
diff --git a/tests/run.py b/tests/run.py
index b903165d..e143ac15 100755
--- a/tests/run.py
+++ b/tests/run.py
@@ -11,47 +11,41 @@
"""
from __future__ import print_function
+import os
import sys
-from os import path, chdir, listdir, environ
-import shutil
-
-
-testroot = path.dirname(__file__) or '.'
-if 'BUILD_TEST_PATH' in environ:
- # for tox testing
- newroot = environ['BUILD_TEST_PATH']
- # tox installs the sphinx package, no need for sys.path.insert
-else:
- newroot = path.join(testroot, path.pardir, 'build')
- newroot = path.join(newroot, listdir(newroot)[0], 'tests')
-
-shutil.rmtree(newroot, ignore_errors=True)
-# just copying test directory to parallel testing
-print('Copying sources to build/lib/tests...')
-shutil.copytree(testroot, newroot)
-
-# always test the sphinx package from build/lib/
-sys.path.insert(0, path.abspath(path.join(newroot, path.pardir)))
-# switch to the copy/converted dir so nose tests the right tests
-chdir(newroot)
-
-try:
- import nose
-except ImportError:
- print('The nose package is needed to run the Sphinx test suite.')
- sys.exit(1)
-
-try:
- import docutils
-except ImportError:
- print('Sphinx requires the docutils package to be installed.')
- sys.exit(1)
-
-try:
- import jinja2
-except ImportError:
- print('Sphinx requires the jinja2 package to be installed.')
- sys.exit(1)
+import traceback
+
+from path import path
+
+testroot = os.path.dirname(__file__) or '.'
+sys.path.insert(0, os.path.abspath(os.path.join(testroot, os.path.pardir)))
+
+# check dependencies before testing
+print('Checking dependencies...')
+for modname in ('nose', 'mock', 'six', 'docutils', 'jinja2', 'pygments',
+ 'snowballstemmer', 'babel'):
+ try:
+ __import__(modname)
+ except ImportError as err:
+ if modname == 'mock' and sys.version_info[0] == 3:
+ continue
+ traceback.print_exc()
+ print('The %r package is needed to run the Sphinx test suite.' % modname)
+ sys.exit(1)
+
+# find a temp dir for testing and clean it up now
+os.environ['SPHINX_TEST_TEMPDIR'] = \
+ os.path.abspath(os.path.join(testroot, 'build')) \
+ if 'SPHINX_TEST_TEMPDIR' not in os.environ \
+ else os.path.abspath(os.environ['SPHINX_TEST_TEMPDIR'])
+tempdir = path(os.environ['SPHINX_TEST_TEMPDIR'])
+print('Temporary files will be placed in %s.' % tempdir)
+if tempdir.exists():
+ tempdir.rmtree()
+tempdir.makedirs()
print('Running Sphinx test suite...')
+sys.stdout.flush()
+
+import nose
nose.main()
diff --git a/tests/test_api_translator.py b/tests/test_api_translator.py
index 9fa1b3ea..e0ba5e0f 100644
--- a/tests/test_api_translator.py
+++ b/tests/test_api_translator.py
@@ -8,82 +8,57 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys
-from nose.tools import with_setup
+import sys
-from util import with_app, test_roots
+from util import with_app, rootdir
def setup_module():
- sys.path.insert(0, test_roots / 'test-api-set-translator')
+ sys.path.insert(0, rootdir / 'roots' / 'test-api-set-translator')
def teardown_module():
- sys.path.remove(test_roots / 'test-api-set-translator')
-
-
-def teardown_websupport():
- (test_roots / 'test-api-set-translator' / 'generated').rmtree(True)
- (test_roots / 'test-api-set-translator' / 'websupport').rmtree(True)
+ sys.path.remove(rootdir / 'roots' / 'test-api-set-translator')
-@with_app(
- buildername='html',
- srcdir=(test_roots / 'test-api-set-translator'),
- confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
-)
-def test_html_translator(app):
+@with_app('html')
+def test_html_translator(app, status, warning):
# no set_translator(), no html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'SmartyPantsHTMLTranslator'
-@with_app(
- buildername='html',
- srcdir=(test_roots / 'test-api-set-translator'),
- confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
- confoverrides={
- 'html_translator_class': 'translator.ExtHTMLTranslator'},
-)
-def test_html_with_html_translator_class(app):
+@with_app('html', confoverrides={
+ 'html_translator_class': 'translator.ExtHTMLTranslator'})
+def test_html_with_html_translator_class(app, status, warning):
# no set_translator(), but html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ExtHTMLTranslator'
-@with_app(
- buildername='html',
- srcdir=(test_roots / 'test-api-set-translator'),
- confdir=(test_roots / 'test-api-set-translator' / 'nonext'),
- confoverrides={'html_use_smartypants': False},
-)
-def test_html_with_smartypants(app):
+@with_app('html',
+ confoverrides={'html_use_smartypants': False})
+def test_html_with_smartypants(app, status, warning):
# no set_translator(), html_use_smartypants=False
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'HTMLTranslator'
-@with_app(
- buildername='html',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_html_(app):
+@with_app('html', testroot='api-set-translator')
+def test_html_with_set_translator_for_html_(app, status, warning):
# use set_translator(), no html_translator_class
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfHTMLTranslator'
-@with_app(
- buildername='html',
- srcdir=(test_roots / 'test-api-set-translator'),
- confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'},
-)
-def test_html_with_set_translator_for_html_and_html_translator_class(app):
+@with_app('html', testroot='api-set-translator',
+ confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'})
+def test_html_with_set_translator_for_html_and_html_translator_class(app, status, warning):
# use set_translator() and html_translator_class.
# set_translator() is given priority over html_translator_clas.
translator_class = app.builder.translator_class
@@ -96,108 +71,70 @@ def test_html_with_set_translator_for_html_and_html_translator_class(app):
# buildername='dirhtml',
# srcdir=(test_roots / 'test-api-set-translator'),
# )
-# def test_dirhtml_set_translator_for_dirhtml(app):
+# def test_dirhtml_set_translator_for_dirhtml(app, status, warning):
# translator_class = app.builder.translator_class
# assert translator_class
# assert translator_class.__name__ == 'ConfDirHTMLTranslator'
-@with_app(
- buildername='singlehtml',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_singlehtml_set_translator_for_singlehtml(app):
+@with_app('singlehtml', testroot='api-set-translator')
+def test_singlehtml_set_translator_for_singlehtml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfSingleHTMLTranslator'
-@with_app(
- buildername='pickle',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_pickle_set_translator_for_pickle(app):
+@with_app('pickle', testroot='api-set-translator')
+def test_pickle_set_translator_for_pickle(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfPickleTranslator'
-@with_app(
- buildername='json',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_json_set_translator_for_json(app):
+@with_app('json', testroot='api-set-translator')
+def test_json_set_translator_for_json(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfJsonTranslator'
-@with_app(
- buildername='latex',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_latex(app):
+@with_app('latex', testroot='api-set-translator')
+def test_html_with_set_translator_for_latex(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfLaTeXTranslator'
-@with_app(
- buildername='man',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_man(app):
+@with_app('man', testroot='api-set-translator')
+def test_html_with_set_translator_for_man(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfManualPageTranslator'
-@with_app(
- buildername='texinfo',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_texinfo(app):
+@with_app('texinfo', testroot='api-set-translator')
+def test_html_with_set_translator_for_texinfo(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfTexinfoTranslator'
-@with_app(
- buildername='text',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_text(app):
+@with_app('text', testroot='api-set-translator')
+def test_html_with_set_translator_for_text(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfTextTranslator'
-@with_setup(teardown=teardown_websupport)
-@with_app(
- buildername='websupport',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_websupport(app):
- translator_class = app.builder.translator_class
- assert translator_class
- assert translator_class.__name__ == 'ConfWebSupportTranslator'
-
-
-@with_app(
- buildername='xml',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_xml(app):
+@with_app('xml', testroot='api-set-translator')
+def test_html_with_set_translator_for_xml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfXMLTranslator'
-@with_app(
- buildername='pseudoxml',
- srcdir=(test_roots / 'test-api-set-translator'),
-)
-def test_html_with_set_translator_for_pseudoxml(app):
+@with_app('pseudoxml', testroot='api-set-translator')
+def test_html_with_set_translator_for_pseudoxml(app, status, warning):
translator_class = app.builder.translator_class
assert translator_class
assert translator_class.__name__ == 'ConfPseudoXMLTranslator'
diff --git a/tests/test_application.py b/tests/test_application.py
index 49c27452..1f188de1 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -9,22 +9,21 @@
:license: BSD, see LICENSE for details.
"""
-from six import StringIO
from docutils import nodes
from sphinx.application import ExtensionError
from sphinx.domains import Domain
-from util import with_app, raises_msg, TestApp
+from util import with_app, raises_msg
@with_app()
-def test_events(app):
- def empty(): pass
+def test_events(app, status, warning):
+ def empty():
+ pass
raises_msg(ExtensionError, "Unknown event name: invalid",
app.connect, "invalid", empty)
-
app.add_event("my_event")
raises_msg(ExtensionError, "Event 'my_event' already present",
app.add_event, "my_event")
@@ -43,57 +42,49 @@ def test_events(app):
@with_app()
-def test_emit_with_nonascii_name_node(app):
+def test_emit_with_nonascii_name_node(app, status, warning):
node = nodes.section(names=[u'\u65e5\u672c\u8a9e'])
app.emit('my_event', node)
-def test_output():
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- status.truncate(0) # __init__ writes to status
- status.seek(0)
- app.info("Nothing here...")
- assert status.getvalue() == "Nothing here...\n"
- status.truncate(0)
- status.seek(0)
- app.info("Nothing here...", True)
- assert status.getvalue() == "Nothing here..."
-
- old_count = app._warncount
- app.warn("Bad news!")
- assert warnings.getvalue() == "WARNING: Bad news!\n"
- assert app._warncount == old_count + 1
- finally:
- app.cleanup()
-
-
-def test_extensions():
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- app.setup_extension('shutil')
- assert warnings.getvalue().startswith("WARNING: extension 'shutil'")
- finally:
- app.cleanup()
-
-def test_domain_override():
+@with_app()
+def test_output(app, status, warning):
+ status.truncate(0) # __init__ writes to status
+ status.seek(0)
+ app.info("Nothing here...")
+ assert status.getvalue() == "Nothing here...\n"
+ status.truncate(0)
+ status.seek(0)
+ app.info("Nothing here...", True)
+ assert status.getvalue() == "Nothing here..."
+
+ old_count = app._warncount
+ app.warn("Bad news!")
+ assert warning.getvalue() == "WARNING: Bad news!\n"
+ assert app._warncount == old_count + 1
+
+
+@with_app()
+def test_extensions(app, status, warning):
+ app.setup_extension('shutil')
+ assert warning.getvalue().startswith("WARNING: extension 'shutil'")
+
+
+@with_app()
+def test_domain_override(app, status, warning):
class A(Domain):
name = 'foo'
+
class B(A):
name = 'foo'
+
class C(Domain):
name = 'foo'
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- # No domain know named foo.
- raises_msg(ExtensionError, 'domain foo not yet registered',
- app.override_domain, A)
- assert app.add_domain(A) is None
- assert app.override_domain(B) is None
- raises_msg(ExtensionError, 'new domain not a subclass of registered '
- 'foo domain', app.override_domain, C)
- finally:
- app.cleanup()
+
+ # No domain know named foo.
+ raises_msg(ExtensionError, 'domain foo not yet registered',
+ app.override_domain, A)
+ assert app.add_domain(A) is None
+ assert app.override_domain(B) is None
+ raises_msg(ExtensionError, 'new domain not a subclass of registered '
+ 'foo domain', app.override_domain, C)
diff --git a/tests/test_autodoc.py b/tests/test_autodoc.py
index e0d39f09..f4ae0c08 100644
--- a/tests/test_autodoc.py
+++ b/tests/test_autodoc.py
@@ -18,7 +18,7 @@ from six import StringIO
from docutils.statemachine import ViewList
from sphinx.ext.autodoc import AutoDirective, add_documenter, \
- ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
+ ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
app = None
@@ -123,24 +123,24 @@ def test_parse_name():
directive.env.temp_data['autodoc:module'] = 'util'
verify('function', 'raises', ('util', ['raises'], None, None))
del directive.env.temp_data['autodoc:module']
- directive.env.temp_data['py:module'] = 'util'
+ directive.env.ref_context['py:module'] = 'util'
verify('function', 'raises', ('util', ['raises'], None, None))
verify('class', 'TestApp', ('util', ['TestApp'], None, None))
# for members
- directive.env.temp_data['py:module'] = 'foo'
+ directive.env.ref_context['py:module'] = 'foo'
verify('method', 'util.TestApp.cleanup',
('util', ['TestApp', 'cleanup'], None, None))
- directive.env.temp_data['py:module'] = 'util'
- directive.env.temp_data['py:class'] = 'Foo'
+ directive.env.ref_context['py:module'] = 'util'
+ directive.env.ref_context['py:class'] = 'Foo'
directive.env.temp_data['autodoc:class'] = 'TestApp'
verify('method', 'cleanup', ('util', ['TestApp', 'cleanup'], None, None))
verify('method', 'TestApp.cleanup',
('util', ['TestApp', 'cleanup'], None, None))
# and clean up
- del directive.env.temp_data['py:module']
- del directive.env.temp_data['py:class']
+ del directive.env.ref_context['py:module']
+ del directive.env.ref_context['py:class']
del directive.env.temp_data['autodoc:class']
@@ -584,7 +584,7 @@ def test_generate():
'method', 'test_autodoc.Class.foobar', more_content=None)
# test auto and given content mixing
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
assert_result_contains(' Function.', 'method', 'Class.meth')
add_content = ViewList()
add_content.append('Content.', '', 0)
@@ -682,12 +682,12 @@ def test_generate():
'attribute', 'test_autodoc.Class.descr')
# test generation for C modules (which have no source file)
- directive.env.temp_data['py:module'] = 'time'
+ directive.env.ref_context['py:module'] = 'time'
assert_processes([('function', 'time.asctime')], 'function', 'asctime')
assert_processes([('function', 'time.asctime')], 'function', 'asctime')
# test autodoc_member_order == 'source'
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
assert_order(['.. py:class:: Class(arg)',
' .. py:attribute:: Class.descr',
' .. py:method:: Class.meth()',
@@ -704,7 +704,7 @@ def test_generate():
' .. py:method:: Class.inheritedmeth()',
],
'class', 'Class', member_order='bysource', all_members=True)
- del directive.env.temp_data['py:module']
+ del directive.env.ref_context['py:module']
# test attribute initialized to class instance from other module
directive.env.temp_data['autodoc:class'] = 'test_autodoc.Class'
@@ -729,7 +729,7 @@ def test_generate():
'test_autodoc.Class.moore')
# test new attribute documenter behavior
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
options.undoc_members = True
assert_processes([('class', 'test_autodoc.AttCls'),
('attribute', 'test_autodoc.AttCls.a1'),
@@ -743,7 +743,7 @@ def test_generate():
# test explicit members with instance attributes
del directive.env.temp_data['autodoc:class']
del directive.env.temp_data['autodoc:module']
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
options.inherited_members = False
options.undoc_members = False
options.members = ALL
@@ -765,7 +765,7 @@ def test_generate():
], 'class', 'InstAttCls')
del directive.env.temp_data['autodoc:class']
del directive.env.temp_data['autodoc:module']
- del directive.env.temp_data['py:module']
+ del directive.env.ref_context['py:module']
# test descriptor class documentation
options.members = ['CustomDataDescriptor']
diff --git a/tests/test_build.py b/tests/test_build.py
index 56fdf826..fe38cfaf 100644
--- a/tests/test_build.py
+++ b/tests/test_build.py
@@ -3,114 +3,86 @@
test_build
~~~~~~~~~~
- Test all builders that have no special checks.
+ Test all builders.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from util import with_app, test_root, path, SkipTest, TestApp
+from six import BytesIO
+
from textwrap import dedent
+from util import with_app, rootdir, tempdir, SkipTest, TestApp
+
try:
from docutils.writers.manpage import Writer as ManWriter
except ImportError:
ManWriter = None
-def teardown_module():
- (test_root / '_build').rmtree(True)
+class MockOpener(object):
+ def open(self, req, **kwargs):
+ class result(BytesIO):
+ headers = None
+ url = req.url
+ return result()
+import sphinx.builders.linkcheck
+sphinx.builders.linkcheck.opener = MockOpener()
-def test_build():
- for buildername in ('pickle', 'json', 'linkcheck', 'text', 'htmlhelp',
- 'qthelp', 'epub', 'changes', 'singlehtml', 'xml',
- 'pseudoxml'):
- app = TestApp(buildername=buildername)
- yield lambda app: app.builder.build_all(), app
- app.cleanup()
-
-@with_app(buildername='man')
-def test_man(app):
- if ManWriter is None:
+def verify_build(buildername, srcdir):
+ if buildername == 'man' and ManWriter is None:
raise SkipTest('man writer is not available')
- app.builder.build_all()
- assert (app.outdir / 'SphinxTests.1').exists()
-
-
-def _test_nonascii_path(app):
- srcdir = path(app.srcdir)
- mb_name = u'\u65e5\u672c\u8a9e'
+ app = TestApp(buildername=buildername, srcdir=srcdir)
try:
- (srcdir / mb_name).makedirs()
- except UnicodeEncodeError:
- from path import FILESYSTEMENCODING
- raise SkipTest(
- 'nonascii filename not supported on this filesystem encoding: '
- '%s', FILESYSTEMENCODING)
-
- (srcdir / mb_name / (mb_name + '.txt')).write_text(dedent("""
- multi byte file name page
- ==========================
- """))
-
- master_doc = srcdir / 'contents.txt'
- master_doc.write_bytes((master_doc.text() + dedent("""
- .. toctree::
-
- %(mb_name)s/%(mb_name)s
- """ % {'mb_name': mb_name})
- ).encode('utf-8'))
- app.builder.build_all()
-
-
-def test_nonascii_path():
- (test_root / '_build').rmtree(True) #keep this to build first gettext
-
- builder_names = ['gettext', 'html', 'dirhtml', 'singlehtml', 'latex',
- 'texinfo', 'pickle', 'json', 'linkcheck', 'text',
- 'htmlhelp', 'qthelp', 'epub', 'changes', 'xml',
- 'pseudoxml']
- if ManWriter is not None:
- builder_names.append('man')
-
- for buildername in builder_names:
- app = TestApp(buildername=buildername, _copy_to_temp=True)
- yield _test_nonascii_path, app
+ app.builder.build_all()
+ finally:
app.cleanup()
-@with_app(buildername='text', srcdir='(empty)')
-def test_circular_toctree(app):
- contents = (".. toctree::\n"
- "\n"
- " sub\n")
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
-
- contents = (".. toctree::\n"
- "\n"
- " contents\n")
- (app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8')
+def test_build_all():
+ # If supported, build in a non-ASCII source dir
+ test_name = u'\u65e5\u672c\u8a9e'
+ try:
+ srcdir = tempdir / test_name
+ (rootdir / 'root').copytree(tempdir / test_name)
+ except UnicodeEncodeError:
+ srcdir = tempdir / 'all'
+ else:
+ # add a doc with a non-ASCII file name to the source dir
+ (srcdir / (test_name + '.txt')).write_text(dedent("""
+ nonascii file name page
+ =======================
+ """))
+
+ master_doc = srcdir / 'contents.txt'
+ master_doc.write_bytes((master_doc.text() + dedent("""
+ .. toctree::
+
+ %(test_name)s/%(test_name)s
+ """ % {'test_name': test_name})
+ ).encode('utf-8'))
+
+ # note: no 'html' - if it's ok with dirhtml it's ok with html
+ for buildername in ['dirhtml', 'singlehtml', 'latex', 'texinfo',
+ 'pickle', 'json', 'text', 'htmlhelp', 'qthelp', 'epub',
+ 'changes', 'xml', 'pseudoxml', 'man', 'linkcheck']:
+ yield verify_build, buildername, srcdir
+
+
+@with_app(buildername='text', testroot='circular')
+def test_circular_toctree(app, status, warning):
app.builder.build_all()
- warnings = "".join(app._warning.content)
+ warnings = warning.getvalue()
assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings
assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings
-@with_app(buildername='text', srcdir='(empty)')
-def test_numbered_circular_toctree(app):
- contents = (".. toctree::\n"
- " :numbered:\n"
- "\n"
- " sub\n")
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
-
- contents = (".. toctree::\n"
- "\n"
- " contents\n")
- (app.srcdir / 'sub.rst').write_text(contents, encoding='utf-8')
+@with_app(buildername='text', testroot='numbered-circular')
+def test_numbered_circular_toctree(app, status, warning):
app.builder.build_all()
- warnings = "\n".join(app._warning.content)
+ warnings = warning.getvalue()
assert 'circular toctree references detected, ignoring: sub <- contents <- sub' in warnings
assert 'circular toctree references detected, ignoring: contents <- sub <- contents' in warnings
diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py
index e7eda179..d7189443 100644
--- a/tests/test_build_gettext.py
+++ b/tests/test_build_gettext.py
@@ -10,46 +10,30 @@
"""
from __future__ import print_function
-import gettext
import os
import re
+import gettext
from subprocess import Popen, PIPE
-from util import test_root, test_roots, with_app, SkipTest
+from nose.tools import assert_true, assert_equal
+from util import with_app, gen_with_app, SkipTest, assert_in
-def teardown_module():
- (test_root / '_build').rmtree(True)
- (test_roots / 'test-intl' / '_build').rmtree(True),
-
-@with_app(buildername='gettext')
-def test_all(app):
+@gen_with_app('gettext', srcdir='root-gettext')
+def test_all(app, status, warning):
# Generic build; should fail only when the builder is horribly broken.
app.builder.build_all()
-
-@with_app(buildername='gettext')
-def test_build(app):
# Do messages end up in the correct location?
- app.builder.build(['extapi', 'subdir/includes'])
# top-level documents end up in a message catalog
- assert (app.outdir / 'extapi.pot').isfile()
+ yield assert_true, (app.outdir / 'extapi.pot').isfile()
# directory items are grouped into sections
- assert (app.outdir / 'subdir.pot').isfile()
+ yield assert_true, (app.outdir / 'subdir.pot').isfile()
-
-@with_app(buildername='gettext')
-def test_seealso(app):
# regression test for issue #960
- app.builder.build(['markup'])
catalog = (app.outdir / 'markup.pot').text(encoding='utf-8')
- assert 'msgid "something, something else, something more"' in catalog
-
-
-@with_app(buildername='gettext')
-def test_gettext(app):
- app.builder.build(['markup'])
+ yield assert_in, 'msgid "something, something else, something more"', catalog
(app.outdir / 'en' / 'LC_MESSAGES').makedirs()
cwd = os.getcwd()
@@ -58,7 +42,7 @@ def test_gettext(app):
try:
p = Popen(['msginit', '--no-translator', '-i', 'markup.pot',
'--locale', 'en_US'],
- stdout=PIPE, stderr=PIPE)
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msginit was not found
else:
@@ -67,12 +51,12 @@ def test_gettext(app):
print(stdout)
print(stderr)
assert False, 'msginit exited with return code %s' % \
- p.returncode
- assert (app.outdir / 'en_US.po').isfile(), 'msginit failed'
+ p.returncode
+ yield assert_true, (app.outdir / 'en_US.po').isfile(), 'msginit failed'
try:
p = Popen(['msgfmt', 'en_US.po', '-o',
- os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
- stdout=PIPE, stderr=PIPE)
+ os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
@@ -81,25 +65,24 @@ def test_gettext(app):
print(stdout)
print(stderr)
assert False, 'msgfmt exited with return code %s' % \
- p.returncode
- assert (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \
- 'msgfmt failed'
+ p.returncode
+ yield assert_true, (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \
+ 'msgfmt failed'
finally:
os.chdir(cwd)
_ = gettext.translation('test_root', app.outdir, languages=['en']).gettext
- assert _("Testing various markup") == u"Testing various markup"
+ yield assert_equal, _("Testing various markup"), u"Testing various markup"
-@with_app(buildername='gettext',
- srcdir=(test_roots / 'test-intl'),
- doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'),
+@with_app('gettext', testroot='intl',
confoverrides={'gettext_compact': False})
-def test_gettext_index_entries(app):
+def test_gettext_index_entries(app, status, warning):
# regression test for #976
app.builder.build(['index_entries'])
_msgid_getter = re.compile(r'msgid "(.*)"').search
+
def msgid_getter(msgid):
m = _msgid_getter(msgid)
if m:
@@ -139,10 +122,8 @@ def test_gettext_index_entries(app):
assert msgids == []
-@with_app(buildername='gettext',
- srcdir=(test_roots / 'test-intl'),
- doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'))
-def test_gettext_template(app):
+@with_app(buildername='gettext', testroot='intl')
+def test_gettext_template(app, status, warning):
app.builder.build_all()
assert (app.outdir / 'sphinx.pot').isfile()
diff --git a/tests/test_build_html.py b/tests/test_build_html.py
index 17a09eae..62dcccd2 100644
--- a/tests/test_build_html.py
+++ b/tests/test_build_html.py
@@ -12,39 +12,30 @@
import os
import re
-from six import PY3, iteritems, StringIO
+from six import PY3, iteritems
from six.moves import html_entities
-try:
- import pygments
-except ImportError:
- pygments = None
-
from sphinx import __version__
-from util import test_root, test_roots, remove_unicode_literals, gen_with_app, with_app
+from util import remove_unicode_literals, gen_with_app
from etree13 import ElementTree as ET
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-html_warnfile = StringIO()
-
ENV_WARNINGS = """\
-%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
+(%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
WARNING: Explicit markup ends without a blank line; unexpected \
unindent\\.\\n?
-%(root)s/images.txt:9: WARNING: image file not readable: foo.png
+)?%(root)s/images.txt:9: WARNING: image file not readable: foo.png
%(root)s/images.txt:23: WARNING: nonlocal image URI found: \
http://www.python.org/logo.png
%(root)s/includes.txt:\\d*: WARNING: Encoding 'utf-8-sig' used for \
reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \
:encoding: option\\n?
%(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png
-%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \
-not contain option marker - or -- or /
-"""
+(%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \
+not contain option marker - or -- or / or \\+
+%(root)s/undecodable.txt:3: WARNING: undecodable source characters, replacing \
+with "\\?": b?'here: >>>\\\\xbb<<<'
+)?"""
HTML_WARNINGS = ENV_WARNINGS + """\
%(root)s/images.txt:20: WARNING: no matching candidate for image URI u'foo.\\*'
@@ -61,6 +52,7 @@ if PY3:
def tail_check(check):
rex = re.compile(check)
+
def checker(nodes):
for node in nodes:
if node.tail and rex.search(node.tail):
@@ -84,6 +76,8 @@ HTML_XPATH = {
(".//a[@href='../_downloads/img.png']", ''),
(".//img[@src='../_images/img.png']", ''),
(".//p", 'This is an include file.'),
+ (".//pre/span", 'line 1'),
+ (".//pre/span", 'line 2'),
],
'includes.html': [
(".//pre", u'Max Strauß'),
@@ -91,6 +85,23 @@ HTML_XPATH = {
(".//a[@href='_downloads/img1.png']", ''),
(".//pre", u'"quotes"'),
(".//pre", u"'included'"),
+ (".//pre/span[@class='s']", u'üöä'),
+ (".//div[@class='inc-pyobj1 highlight-text']//pre",
+ r'^class Foo:\n pass\n\s*$'),
+ (".//div[@class='inc-pyobj2 highlight-text']//pre",
+ r'^ def baz\(\):\n pass\n\s*$'),
+ (".//div[@class='inc-lines highlight-text']//pre",
+ r'^class Foo:\n pass\nclass Bar:\n$'),
+ (".//div[@class='inc-startend highlight-text']//pre",
+ u'^foo = "Including Unicode characters: üöä"\\n$'),
+ (".//div[@class='inc-preappend highlight-text']//pre",
+ r'(?m)^START CODE$'),
+ (".//div[@class='inc-pyobj-dedent highlight-python']//span",
+ r'def'),
+ (".//div[@class='inc-tab3 highlight-text']//pre",
+ r'-| |-'),
+ (".//div[@class='inc-tab8 highlight-python']//pre/span",
+ r'-| |-'),
],
'autodoc.html': [
(".//dt[@id='test_autodoc.Class']", ''),
@@ -144,7 +155,7 @@ HTML_XPATH = {
(".//a[@href='subdir/includes.html']"
"[@class='reference internal']/em", 'Including in subdir'),
(".//a[@href='objects.html#cmdoption-python-c']"
- "[@class='reference internal']/em", 'Python -c option'),
+ "[@class='reference internal']/code/span[@class='pre']", '-c'),
# abbreviations
(".//abbr[@title='abbreviation']", '^abbr$'),
# version stuff
@@ -175,6 +186,9 @@ HTML_XPATH = {
(".//p", 'In HTML.'),
(".//p", 'In both.'),
(".//p", 'Always present'),
+ # tests for ``any`` role
+ (".//a[@href='#with']/em", 'headings'),
+ (".//a[@href='objects.html#func_without_body']/code/span", 'objects'),
],
'objects.html': [
(".//dt[@id='mod.Cls.meth1']", ''),
@@ -212,12 +226,10 @@ HTML_XPATH = {
(".//h4", 'Custom sidebar'),
# docfields
(".//td[@class='field-body']/strong", '^moo$'),
- (".//td[@class='field-body']/strong",
- tail_check(r'\(Moo\) .* Moo')),
+ (".//td[@class='field-body']/strong", tail_check(r'\(Moo\) .* Moo')),
(".//td[@class='field-body']/ul/li/strong", '^hour$'),
(".//td[@class='field-body']/ul/li/em", '^DuplicateType$'),
- (".//td[@class='field-body']/ul/li/em",
- tail_check(r'.* Some parameter')),
+ (".//td[@class='field-body']/ul/li/em", tail_check(r'.* Some parameter')),
],
'contents.html': [
(".//meta[@name='hc'][@content='hcval']", ''),
@@ -238,6 +250,11 @@ HTML_XPATH = {
(".//h4", 'Contents sidebar'),
# custom JavaScript
(".//script[@src='file://moo.js']", ''),
+ # URL in contents
+ (".//a[@class='reference external'][@href='http://sphinx-doc.org/']",
+ 'http://sphinx-doc.org/'),
+ (".//a[@class='reference external'][@href='http://sphinx-doc.org/latest/']",
+ 'Latest reference'),
],
'bom.html': [
(".//title", " File with UTF-8 BOM"),
@@ -257,33 +274,19 @@ HTML_XPATH = {
(".//a/strong", "Other"),
(".//a", "entry"),
(".//dt/a", "double"),
- ]
+ ],
+ 'footnote.html': [
+ (".//a[@class='footnote-reference'][@href='#id5'][@id='id1']", r"\[1\]"),
+ (".//a[@class='footnote-reference'][@href='#id6'][@id='id2']", r"\[2\]"),
+ (".//a[@class='footnote-reference'][@href='#foo'][@id='id3']", r"\[3\]"),
+ (".//a[@class='reference internal'][@href='#bar'][@id='id4']", r"\[bar\]"),
+ (".//a[@class='fn-backref'][@href='#id1']", r"\[1\]"),
+ (".//a[@class='fn-backref'][@href='#id2']", r"\[2\]"),
+ (".//a[@class='fn-backref'][@href='#id3']", r"\[3\]"),
+ (".//a[@class='fn-backref'][@href='#id4']", r"\[bar\]"),
+ ],
}
-if pygments:
- HTML_XPATH['includes.html'].extend([
- (".//pre/span[@class='s']", u'üöä'),
- (".//div[@class='inc-pyobj1 highlight-text']//pre",
- r'^class Foo:\n pass\n\s*$'),
- (".//div[@class='inc-pyobj2 highlight-text']//pre",
- r'^ def baz\(\):\n pass\n\s*$'),
- (".//div[@class='inc-lines highlight-text']//pre",
- r'^class Foo:\n pass\nclass Bar:\n$'),
- (".//div[@class='inc-startend highlight-text']//pre",
- u'^foo = "Including Unicode characters: üöä"\\n$'),
- (".//div[@class='inc-preappend highlight-text']//pre",
- r'(?m)^START CODE$'),
- (".//div[@class='inc-pyobj-dedent highlight-python']//span",
- r'def'),
- (".//div[@class='inc-tab3 highlight-text']//pre",
- r'-| |-'),
- (".//div[@class='inc-tab8 highlight-python']//pre/span",
- r'-| |-'),
- ])
- HTML_XPATH['subdir/includes.html'].extend([
- (".//pre/span", 'line 1'),
- (".//pre/span", 'line 2'),
- ])
class NslessParser(ET.XMLParser):
"""XMLParser that throws away namespaces in tag names."""
@@ -317,7 +320,8 @@ def check_xpath(etree, fname, path, check, be_found=True):
else:
assert False, ('%r not found in any node matching '
'path %s in %s: %r' % (check, path, fname,
- [node.text for node in nodes]))
+ [node.text for node in nodes]))
+
def check_static_entries(outdir):
staticdir = outdir / '_static'
@@ -332,21 +336,23 @@ def check_static_entries(outdir):
# a file from _static, but matches exclude_patterns
assert not (staticdir / 'excluded.css').exists()
+
def check_extra_entries(outdir):
assert (outdir / 'robots.txt').isfile()
-@gen_with_app(buildername='html', warning=html_warnfile, cleanenv=True,
+
+@gen_with_app(buildername='html',
confoverrides={'html_context.hckey_co': 'hcval_co'},
tags=['testtag'])
-def test_html(app):
+def test_html_output(app, status, warning):
app.builder.build_all()
- html_warnings = html_warnfile.getvalue().replace(os.sep, '/')
+ html_warnings = warning.getvalue().replace(os.sep, '/')
html_warnings_exp = HTML_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(html_warnings_exp + '$', html_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + html_warnings_exp + \
- '--- Got:\n' + html_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + html_warnings_exp + \
+ '--- Got:\n' + html_warnings
for fname, paths in iteritems(HTML_XPATH):
parser = NslessParser()
@@ -362,23 +368,9 @@ def test_html(app):
check_static_entries(app.builder.outdir)
check_extra_entries(app.builder.outdir)
-@with_app(buildername='html', srcdir='(empty)',
- confoverrides={'html_sidebars': {'*': ['globaltoc.html']}},
- )
-def test_html_with_globaltoc_and_hidden_toctree(app):
- # issue #1157: combination of 'globaltoc.html' and hidden toctree cause
- # exception.
- (app.srcdir / 'contents.rst').write_text(
- '\n.. toctree::'
- '\n'
- '\n.. toctree::'
- '\n :hidden:'
- '\n')
- app.builder.build_all()
-
-@gen_with_app(buildername='html', srcdir=(test_roots / 'test-tocdepth'))
-def test_tocdepth(app):
+@gen_with_app(buildername='html', testroot='tocdepth')
+def test_tocdepth(app, status, warning):
# issue #1251
app.builder.build_all()
@@ -388,14 +380,14 @@ def test_tocdepth(app):
(".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
(".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False),
(".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False),
- ],
+ ],
'foo.html': [
(".//h1", '1. Foo', True),
(".//h2", '1.1. Foo A', True),
(".//h3", '1.1.1. Foo A1', True),
(".//h2", '1.2. Foo B', True),
(".//h3", '1.2.1. Foo B1', True),
- ],
+ ],
'bar.html': [
(".//h1", '2. Bar', True),
(".//h2", '2.1. Bar A', True),
@@ -420,8 +412,8 @@ def test_tocdepth(app):
yield check_xpath, etree, fname, xpath, check, be_found
-@gen_with_app(buildername='singlehtml', srcdir=(test_roots / 'test-tocdepth'))
-def test_tocdepth_singlehtml(app):
+@gen_with_app(buildername='singlehtml', testroot='tocdepth')
+def test_tocdepth_singlehtml(app, status, warning):
app.builder.build_all()
expects = {
@@ -463,18 +455,3 @@ def test_tocdepth_singlehtml(app):
for xpath, check, be_found in paths:
yield check_xpath, etree, fname, xpath, check, be_found
-
-
-@with_app(buildername='html', srcdir='(empty)')
-def test_url_in_toctree(app):
- contents = (".. toctree::\n"
- "\n"
- " http://sphinx-doc.org/\n"
- " Latest reference <http://sphinx-doc.org/latest/>\n")
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
-
- result = (app.outdir / 'contents.html').text(encoding='utf-8')
- assert '<a class="reference external" href="http://sphinx-doc.org/">http://sphinx-doc.org/</a>' in result
- assert '<a class="reference external" href="http://sphinx-doc.org/latest/">Latest reference</a>' in result
diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py
index 41ae03df..9e4c11d5 100644
--- a/tests/test_build_latex.py
+++ b/tests/test_build_latex.py
@@ -14,20 +14,14 @@ import os
import re
from subprocess import Popen, PIPE
-from six import PY3, StringIO
+from six import PY3
from sphinx.writers.latex import LaTeXTranslator
-from util import test_root, SkipTest, remove_unicode_literals, with_app
+from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-latex_warnfile = StringIO()
-
LATEX_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
@@ -39,17 +33,17 @@ if PY3:
LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS)
-@with_app(buildername='latex', warning=latex_warnfile, cleanenv=True)
-def test_latex(app):
+@with_app(buildername='latex')
+def test_latex(app, status, warning):
LaTeXTranslator.ignore_missing_images = True
app.builder.build_all()
- latex_warnings = latex_warnfile.getvalue().replace(os.sep, '/')
+ latex_warnings = warning.getvalue().replace(os.sep, '/')
latex_warnings_exp = LATEX_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(latex_warnings_exp + '$', latex_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + latex_warnings_exp + \
- '--- Got:\n' + latex_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + latex_warnings_exp + \
+ '--- Got:\n' + latex_warnings
# file from latex_additional_files
assert (app.outdir / 'svgimg.svg').isfile()
diff --git a/tests/test_build_texinfo.py b/tests/test_build_texinfo.py
index fbe8a173..bb10f8fa 100644
--- a/tests/test_build_texinfo.py
+++ b/tests/test_build_texinfo.py
@@ -14,20 +14,14 @@ import os
import re
from subprocess import Popen, PIPE
-from six import PY3, StringIO
+from six import PY3
from sphinx.writers.texinfo import TexinfoTranslator
-from util import test_root, SkipTest, remove_unicode_literals, with_app
+from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-texinfo_warnfile = StringIO()
-
TEXINFO_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
@@ -38,17 +32,17 @@ if PY3:
TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS)
-@with_app(buildername='texinfo', warning=texinfo_warnfile, cleanenv=True)
-def test_texinfo(app):
+@with_app('texinfo')
+def test_texinfo(app, status, warning):
TexinfoTranslator.ignore_missing_images = True
app.builder.build_all()
- texinfo_warnings = texinfo_warnfile.getvalue().replace(os.sep, '/')
+ texinfo_warnings = warning.getvalue().replace(os.sep, '/')
texinfo_warnings_exp = TEXINFO_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(texinfo_warnings_exp + '$', texinfo_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + texinfo_warnings_exp + \
- '--- Got:\n' + texinfo_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + texinfo_warnings_exp + \
+ '--- Got:\n' + texinfo_warnings
# now, try to run makeinfo over it
cwd = os.getcwd()
os.chdir(app.outdir)
diff --git a/tests/test_build_text.py b/tests/test_build_text.py
index e6e4d5be..d486bed2 100644
--- a/tests/test_build_text.py
+++ b/tests/test_build_text.py
@@ -18,29 +18,16 @@ from util import with_app
def with_text_app(*args, **kw):
default_kw = {
'buildername': 'text',
- 'srcdir': '(empty)',
- 'confoverrides': {
- 'project': 'text',
- 'master_doc': 'contents',
- },
+ 'testroot': 'build-text',
}
default_kw.update(kw)
return with_app(*args, **default_kw)
@with_text_app()
-def test_maxwitdh_with_prefix(app):
- long_string = u' '.join([u"ham"] * 30)
- contents = (
- u".. seealso:: %(long_string)s\n\n"
- u"* %(long_string)s\n"
- u"* %(long_string)s\n"
- u"\nspam egg\n"
- ) % locals()
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
+def test_maxwitdh_with_prefix(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'maxwidth.txt').text(encoding='utf-8')
lines = result.splitlines()
line_widths = [column_width(line) for line in lines]
@@ -58,105 +45,52 @@ def test_maxwitdh_with_prefix(app):
@with_text_app()
-def test_lineblock(app):
+def test_lineblock(app, status, warning):
# regression test for #1109: need empty line after line block
- contents = (
- u"* one\n"
- u"\n"
- u" | line-block 1\n"
- u" | line-block 2\n"
- u"\n"
- u"followed paragraph.\n"
- )
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+ app.builder.build_update()
+ result = (app.outdir / 'lineblock.txt').text(encoding='utf-8')
expect = (
- u"* one\n"
- u"\n"
- u" line-block 1\n"
- u" line-block 2\n"
- u"\n"
- u"followed paragraph.\n"
- )
-
+ u"* one\n"
+ u"\n"
+ u" line-block 1\n"
+ u" line-block 2\n"
+ u"\n"
+ u"followed paragraph.\n"
+ )
assert result == expect
@with_text_app()
-def test_nonascii_title_line(app):
- title = u'\u65e5\u672c\u8a9e'
- underline = u'=' * column_width(title)
- content = u'\n'.join((title, underline, u''))
-
- (app.srcdir / 'contents.rst').write_text(content, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
- expect_underline = underline.replace('=', '*')
+def test_nonascii_title_line(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_title.txt').text(encoding='utf-8')
+ expect_underline = '******'
result_underline = result.splitlines()[2].strip()
assert expect_underline == result_underline
@with_text_app()
-def test_nonascii_table(app):
- text = u'\u65e5\u672c\u8a9e'
- contents = (u"\n.. list-table::"
- "\n"
- "\n - - spam"
- "\n - egg"
- "\n"
- "\n - - %(text)s"
- "\n - %(text)s"
- "\n" % locals())
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_nonascii_table(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert len(set(line_widths)) == 1 # same widths
@with_text_app()
-def test_nonascii_maxwidth(app):
- sb_text = u'abc' #length=3
- mb_text = u'\u65e5\u672c\u8a9e' #length=3
-
- sb_line = ' '.join([sb_text] * int(MAXWIDTH / 3))
- mb_line = ' '.join([mb_text] * int(MAXWIDTH / 3))
- mix_line = ' '.join([sb_text, mb_text] * int(MAXWIDTH / 6))
-
- contents = u'\n\n'.join((sb_line, mb_line, mix_line))
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_nonascii_maxwidth(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_maxwidth.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert max(line_widths) < MAXWIDTH
@with_text_app()
-def test_table_with_empty_cell(app):
- contents = (u"""
- +-----+-----+
- | XXX | XXX |
- +-----+-----+
- | | XXX |
- +-----+-----+
- | XXX | |
- +-----+-----+
- """)
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_table_with_empty_cell(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
assert lines[0] == "+-------+-------+"
assert lines[1] == "| XXX | XXX |"
diff --git a/tests/test_build_base.py b/tests/test_catalogs.py
index ee270626..c4f5c08f 100644
--- a/tests/test_build_base.py
+++ b/tests/test_catalogs.py
@@ -1,77 +1,78 @@
-# -*- coding: utf-8 -*-
-"""
- test_build_base
- ~~~~~~~~~~~~~~~
-
- Test the base build process.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import shutil
-
-from nose.tools import with_setup
-
-from util import test_roots, with_app, find_files
-
-root = test_roots / 'test-intl'
-build_dir = root / '_build'
-locale_dir = build_dir / 'locale'
-
-
-def setup_test():
- # Delete remnants left over after failed build
- locale_dir.rmtree(True)
- # copy all catalogs into locale layout directory
- for po in find_files(root, '.po'):
- copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
- if not copy_po.parent.exists():
- copy_po.parent.makedirs()
- shutil.copy(root / po, copy_po)
-
-
-def teardown_test():
- build_dir.rmtree(True),
-
-
-@with_setup(setup_test, teardown_test)
-@with_app(buildername='html', srcdir=root,
- confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
-def test_compile_all_catalogs(app):
- app.builder.compile_all_catalogs()
-
- catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
- expect = set([
- x.replace('.po', '.mo')
- for x in find_files(catalog_dir, '.po')
- ])
- actual = set(find_files(catalog_dir, '.mo'))
- assert actual # not empty
- assert actual == expect
-
-
-@with_setup(setup_test, teardown_test)
-@with_app(buildername='html', srcdir=root,
- confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
-def test_compile_specific_catalogs(app):
- app.builder.compile_specific_catalogs(['admonitions'])
-
- catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
- actual = set(find_files(catalog_dir, '.mo'))
- assert actual == set(['admonitions.mo'])
-
-
-@with_setup(setup_test, teardown_test)
-@with_app(buildername='html', srcdir=root,
- confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
-def test_compile_update_catalogs(app):
- app.builder.compile_update_catalogs()
-
- catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
- expect = set([
- x.replace('.po', '.mo')
- for x in find_files(catalog_dir, '.po')
- ])
- actual = set(find_files(catalog_dir, '.mo'))
- assert actual # not empty
- assert actual == expect
+# -*- coding: utf-8 -*-
+"""
+ test_build_base
+ ~~~~~~~~~~~~~~~
+
+ Test the base build process.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+import shutil
+
+from nose.tools import with_setup
+
+from util import with_app, find_files, rootdir, tempdir
+
+root = tempdir / 'test-intl'
+build_dir = root / '_build'
+locale_dir = build_dir / 'locale'
+
+
+def setup_test():
+ # delete remnants left over after failed build
+ root.rmtree(True)
+ (rootdir / 'roots' / 'test-intl').copytree(root)
+ # copy all catalogs into locale layout directory
+ for po in find_files(root, '.po'):
+ copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
+ if not copy_po.parent.exists():
+ copy_po.parent.makedirs()
+ shutil.copy(root / po, copy_po)
+
+
+def teardown_test():
+ build_dir.rmtree(True)
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_all_catalogs(app, status, warning):
+ app.builder.compile_all_catalogs()
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ expect = set([
+ x.replace('.po', '.mo')
+ for x in find_files(catalog_dir, '.po')
+ ])
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual # not empty
+ assert actual == expect
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_specific_catalogs(app, status, warning):
+ app.builder.compile_specific_catalogs(['admonitions'])
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual == set(['admonitions.mo'])
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_update_catalogs(app, status, warning):
+ app.builder.compile_update_catalogs()
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ expect = set([
+ x.replace('.po', '.mo')
+ for x in find_files(catalog_dir, '.po')
+ ])
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual # not empty
+ assert actual == expect
diff --git a/tests/test_config.py b/tests/test_config.py
index 36a8d957..0dcf3fa3 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -20,7 +20,7 @@ from sphinx.errors import ExtensionError, ConfigError, VersionRequirementError
@with_app(confoverrides={'master_doc': 'master', 'nonexisting_value': 'True',
'latex_elements.docclass': 'scrartcl',
'modindex_common_prefix': 'path1,path2'})
-def test_core_config(app):
+def test_core_config(app, status, warning):
cfg = app.config
# simple values
@@ -36,7 +36,7 @@ def test_core_config(app):
# simple default values
assert 'locale_dirs' not in cfg.__dict__
assert cfg.locale_dirs == []
- assert cfg.trim_footnote_reference_space == False
+ assert cfg.trim_footnote_reference_space is False
# complex default values
assert 'html_title' not in cfg.__dict__
@@ -68,7 +68,7 @@ def test_core_config(app):
@with_app()
-def test_extension_values(app):
+def test_extension_values(app, status, warning):
cfg = app.config
# default value
diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py
index f2f64970..d29c9910 100644
--- a/tests/test_directive_code.py
+++ b/tests/test_directive_code.py
@@ -1,171 +1,111 @@
-# -*- coding: utf-8 -*-
-"""
- test_directive_code
- ~~~~~~~~~~~~~~~~~~~
-
- Test the code-block directive.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-from xml.etree import ElementTree
-
-from util import with_app, test_roots
-
-
-def teardown_module():
- (test_roots / 'test-directive-code' / '_build').rmtree(True)
-
-
-@with_app(buildername='xml',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_code_block(app):
- app.builder.build('index')
- et = ElementTree.parse(app.outdir / 'index.xml')
- secs = et.findall('./section/section')
- code_block = secs[0].findall('literal_block')
- assert len(code_block) > 0
- actual = code_block[0].text
- expect = (
- " def ruby?\n" +
- " false\n" +
- " end"
- )
- assert actual == expect
-
-
-@with_app(buildername='xml',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_code_block_dedent(app):
- outdir = app.outdir
-
- def get_dedent_actual(dedent):
- dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8')
- dedent_text = re.sub(
- r':dedent: \d', ':dedent: %d' % dedent, dedent_text)
- (app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8')
-
- # use another output dir to force rebuild
- app.outdir = outdir / str(dedent)
- app._init_env(freshenv=True)
- app._init_builder(app.builder.name)
- app.builder.build(['dedent'], method='specific')
-
- et = ElementTree.parse(app.outdir / 'dedent.xml')
- secs = et.findall('./section/section')
- code_block = secs[0].findall('literal_block')
-
- assert len(code_block) > 0
- actual = code_block[0].text
- return actual
-
- for i in range(5): # 0-4
- actual = get_dedent_actual(i)
- indent = " " * (4 - i)
- expect = (
- indent + "def ruby?\n" +
- indent + " false\n" +
- indent + "end"
- )
- assert (i, actual) == (i, expect)
-
- actual = get_dedent_actual(1000)
- assert actual == '\n\n'
-
-
-@with_app(buildername='html',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_code_block_caption_html(app):
- app.builder.build('index')
- html = (app.outdir / 'caption.html').text()
- caption = '<div class="code-block-caption"><code>caption-test.rb</code></div>'
- assert caption in html
-
-
-@with_app(buildername='latex',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_code_block_caption_latex(app):
- app.builder.build('index')
- latex = (app.outdir / 'Python.tex').text()
- caption = '\\caption{caption-test.rb}'
- assert caption in latex
-
-
-@with_app(buildername='xml',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_literal_include(app):
- app.builder.build('index')
- et = ElementTree.parse(app.outdir / 'index.xml')
- secs = et.findall('./section/section')
- literal_include = secs[1].findall('literal_block')
- literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
- assert len(literal_include) > 0
- actual = literal_include[0].text
- assert actual == literal_src
-
-
-@with_app(buildername='xml',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_literal_include_dedent(app):
- outdir = app.outdir
- literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
- literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]]
-
- def get_dedent_actual(dedent):
- dedent_text = (app.srcdir / 'dedent.rst').text(encoding='utf-8')
- dedent_text = re.sub(
- r':dedent: \d', ':dedent: %d' % dedent, dedent_text)
- (app.srcdir / 'dedent.rst').write_text(dedent_text, encoding='utf-8')
-
- # use another output dir to force rebuild
- app.outdir = outdir / str(dedent)
- app._init_env(freshenv=True)
- app._init_builder(app.builder.name)
- app.builder.build(['dedent'])
-
- et = ElementTree.parse(app.outdir / 'dedent.xml')
- secs = et.findall('./section/section')
- literal_include = secs[1].findall('literal_block')
-
- assert len(literal_include) > 0
- actual = literal_include[0].text
- return actual
-
-
- for i in range(5): # 0-4
- actual = get_dedent_actual(i)
- indent = " " * (4 - i)
- expect = '\n'.join(indent + l for l in literal_lines) + '\n'
- assert (i, actual) == (i, expect)
-
-
- actual = get_dedent_actual(1000)
- assert actual == '\n\n'
-
-
-@with_app(buildername='html',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_literalinclude_caption_html(app):
- app.builder.build('index')
- html = (app.outdir / 'caption.html').text()
- caption = '<div class="code-block-caption"><code>caption-test.py</code></div>'
- assert caption in html
-
-
-@with_app(buildername='latex',
- srcdir=(test_roots / 'test-directive-code'),
- _copy_to_temp=True)
-def test_literalinclude_caption_latex(app):
- app.builder.build('index')
- latex = (app.outdir / 'Python.tex').text()
- caption = '\\caption{caption-test.py}'
- assert caption in latex
+# -*- coding: utf-8 -*-
+"""
+ test_directive_code
+ ~~~~~~~~~~~~~~~~~~~
+
+ Test the code-block directive.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from xml.etree import ElementTree
+
+from util import with_app
+
+
+@with_app('xml', testroot='directive-code')
+def test_code_block(app, status, warning):
+ app.builder.build('index')
+ et = ElementTree.parse(app.outdir / 'index.xml')
+ secs = et.findall('./section/section')
+ code_block = secs[0].findall('literal_block')
+ assert len(code_block) > 0
+ actual = code_block[0].text
+ expect = (
+ " def ruby?\n" +
+ " false\n" +
+ " end"
+ )
+ assert actual == expect
+
+
+@with_app('xml', testroot='directive-code')
+def test_code_block_dedent(app, status, warning):
+ app.builder.build(['dedent_code'])
+ et = ElementTree.parse(app.outdir / 'dedent_code.xml')
+ blocks = et.findall('./section/section/literal_block')
+
+ for i in range(5): # 0-4
+ actual = blocks[i].text
+ indent = " " * (4 - i)
+ expect = (
+ indent + "def ruby?\n" +
+ indent + " false\n" +
+ indent + "end"
+ )
+ assert (i, actual) == (i, expect)
+
+ assert blocks[5].text == '\n\n' # dedent: 1000
+
+
+@with_app('html', testroot='directive-code')
+def test_code_block_caption_html(app, status, warning):
+ app.builder.build(['caption'])
+ html = (app.outdir / 'caption.html').text()
+ caption = '<div class="code-block-caption"><code>caption-test.rb</code></div>'
+ assert caption in html
+
+
+@with_app('latex', testroot='directive-code')
+def test_code_block_caption_latex(app, status, warning):
+ app.builder.build_all()
+ latex = (app.outdir / 'Python.tex').text()
+ caption = '\\caption{caption-test.rb}'
+ assert caption in latex
+
+
+@with_app('xml', testroot='directive-code')
+def test_literal_include(app, status, warning):
+ app.builder.build(['index'])
+ et = ElementTree.parse(app.outdir / 'index.xml')
+ secs = et.findall('./section/section')
+ literal_include = secs[1].findall('literal_block')
+ literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
+ assert len(literal_include) > 0
+ actual = literal_include[0].text
+ assert actual == literal_src
+
+
+@with_app('xml', testroot='directive-code')
+def test_literal_include_dedent(app, status, warning):
+ literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
+ literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]]
+
+ app.builder.build(['dedent'])
+ et = ElementTree.parse(app.outdir / 'dedent.xml')
+ blocks = et.findall('./section/section/literal_block')
+
+ for i in range(5): # 0-4
+ actual = blocks[i].text
+ indent = ' ' * (4 - i)
+ expect = '\n'.join(indent + l for l in literal_lines) + '\n'
+ assert (i, actual) == (i, expect)
+
+ assert blocks[5].text == '\n\n' # dedent: 1000
+
+
+@with_app('html', testroot='directive-code')
+def test_literalinclude_caption_html(app, status, warning):
+ app.builder.build('index')
+ html = (app.outdir / 'caption.html').text()
+ caption = '<div class="code-block-caption"><code>caption-test.py</code></div>'
+ assert caption in html
+
+
+@with_app('latex', testroot='directive-code')
+def test_literalinclude_caption_latex(app, status, warning):
+ app.builder.build('index')
+ latex = (app.outdir / 'Python.tex').text()
+ caption = '\\caption{caption-test.py}'
+ assert caption in latex
diff --git a/tests/test_directive_only.py b/tests/test_directive_only.py
index 7fb1f5bb..0cf44663 100644
--- a/tests/test_directive_only.py
+++ b/tests/test_directive_only.py
@@ -13,15 +13,11 @@ import re
from docutils import nodes
-from util import with_app, test_roots
+from util import with_app
-def teardown_module():
- (test_roots / 'test-directive-only' / '_build').rmtree(True)
-
-
-@with_app(buildername='text', srcdir=(test_roots / 'test-directive-only'))
-def test_sectioning(app):
+@with_app('text', testroot='directive-only')
+def test_sectioning(app, status, warning):
def getsects(section):
if not isinstance(section, nodes.section):
diff --git a/tests/test_docutilsconf.py b/tests/test_docutilsconf.py
index c0ee4a17..90fa5db3 100644
--- a/tests/test_docutilsconf.py
+++ b/tests/test_docutilsconf.py
@@ -9,50 +9,17 @@
:license: BSD, see LICENSE for details.
"""
-import os
import re
-from functools import wraps
-
-from six import StringIO
-
-from util import test_roots, TestApp, path, SkipTest
-
-
-html_warnfile = StringIO()
-root = test_roots / 'test-docutilsconf'
-
-
-# need cleanenv to rebuild everytime.
-# docutils.conf change did not effect to rebuild.
-def with_conf_app(docutilsconf='', *args, **kwargs):
- default_kw = {
- 'srcdir': root,
- 'cleanenv': True,
- }
- default_kw.update(kwargs)
- def generator(func):
- @wraps(func)
- def deco(*args2, **kwargs2):
- app = TestApp(*args, **default_kw)
- (app.srcdir / 'docutils.conf').write_text(docutilsconf)
- try:
- cwd = os.getcwd()
- os.chdir(app.srcdir)
- func(app, *args2, **kwargs2)
- finally:
- os.chdir(cwd)
- # don't execute cleanup if test failed
- app.cleanup()
- return deco
- return generator
+
+from util import with_app, path, SkipTest
def regex_count(expr, result):
return len(re.findall(expr, result))
-@with_conf_app(buildername='html')
-def test_html_with_default_docutilsconf(app):
+@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf='')
+def test_html_with_default_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@@ -62,13 +29,13 @@ def test_html_with_default_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 1
-@with_conf_app(buildername='html', docutilsconf=(
+@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf=(
'\n[html4css1 writer]'
'\noption-limit:1'
'\nfield-name-limit:1'
'\n')
)
-def test_html_with_docutilsconf(app):
+def test_html_with_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@@ -78,41 +45,32 @@ def test_html_with_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 2
-@with_conf_app(buildername='html', warning=html_warnfile)
-def test_html(app):
+@with_app('html', testroot='docutilsconf')
+def test_html(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='latex', warning=html_warnfile)
-def test_latex(app):
+@with_app('latex', testroot='docutilsconf')
+def test_latex(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='man', warning=html_warnfile)
-def test_man(app):
+@with_app('man', testroot='docutilsconf')
+def test_man(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='texinfo', warning=html_warnfile)
-def test_texinfo(app):
+@with_app('texinfo', testroot='docutilsconf')
+def test_texinfo(app, status, warning):
app.builder.build(['contents'])
-@with_conf_app(buildername='html', srcdir='(empty)',
- docutilsconf='[general]\nsource_link=true\n')
-def test_docutils_source_link(app):
- srcdir = path(app.srcdir)
- (srcdir / 'conf.py').write_text('')
- (srcdir / 'contents.rst').write_text('')
- app.builder.build_all()
-
-
-@with_conf_app(buildername='html', srcdir='(empty)',
- docutilsconf='[general]\nsource_link=true\n')
-def test_docutils_source_link_with_nonascii_file(app):
+@with_app('html', testroot='docutilsconf',
+ docutilsconf='[general]\nsource_link=true\n')
+def test_docutils_source_link_with_nonascii_file(app, status, warning):
srcdir = path(app.srcdir)
mb_name = u'\u65e5\u672c\u8a9e'
try:
@@ -123,7 +81,4 @@ def test_docutils_source_link_with_nonascii_file(app):
'nonascii filename not supported on this filesystem encoding: '
'%s', FILESYSTEMENCODING)
- (srcdir / 'conf.py').write_text('')
- (srcdir / 'contents.rst').write_text('')
-
app.builder.build_all()
diff --git a/tests/test_py_domain.py b/tests/test_domain_py.py
index 87f6eb98..1d0fcc5f 100644
--- a/tests/test_py_domain.py
+++ b/tests/test_domain_py.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
- test_py_domain
+ test_domain_py
~~~~~~~~~~~~~~
Tests the Python Domain
diff --git a/tests/test_rst_domain.py b/tests/test_domain_rst.py
index 9f70f5b1..9f70f5b1 100644
--- a/tests/test_rst_domain.py
+++ b/tests/test_domain_rst.py
diff --git a/tests/test_domain_std.py b/tests/test_domain_std.py
index 81dbe6a4..a1e5bdc1 100644
--- a/tests/test_domain_std.py
+++ b/tests/test_domain_std.py
@@ -1,80 +1,80 @@
-# -*- coding: utf-8 -*-
-"""
- test_domain_std
- ~~~~~~~~~~~~~~~
-
- Tests the std domain
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from docutils import nodes
-
-from sphinx.domains.std import StandardDomain
-from util import mock
-
-
-def test_process_doc_handle_figure_caption():
- env = mock.Mock(domaindata={})
- figure_node = nodes.figure(
- '',
- nodes.caption('caption text', 'caption text'),
- )
- document = mock.Mock(
- nametypes={'testname': True},
- nameids={'testname': 'testid'},
- ids={'testid': figure_node},
- )
-
- domain = StandardDomain(env)
- if 'testname' in domain.data['labels']:
- del domain.data['labels']['testname']
- domain.process_doc(env, 'testdoc', document)
- assert 'testname' in domain.data['labels']
- assert domain.data['labels']['testname'] == (
- 'testdoc', 'testid', 'caption text')
-
-
-def test_process_doc_handle_image_parent_figure_caption():
- env = mock.Mock(domaindata={})
- img_node = nodes.image('', alt='image alt')
- figure_node = nodes.figure(
- '',
- nodes.caption('caption text', 'caption text'),
- img_node,
- )
- document = mock.Mock(
- nametypes={'testname': True},
- nameids={'testname': 'testid'},
- ids={'testid': img_node},
- )
-
- domain = StandardDomain(env)
- if 'testname' in domain.data['labels']:
- del domain.data['labels']['testname']
- domain.process_doc(env, 'testdoc', document)
- assert 'testname' in domain.data['labels']
- assert domain.data['labels']['testname'] == (
- 'testdoc', 'testid', 'caption text')
-
-
-def test_process_doc_handle_table_title():
- env = mock.Mock(domaindata={})
- table_node = nodes.table(
- '',
- nodes.title('title text', 'title text'),
- )
- document = mock.Mock(
- nametypes={'testname': True},
- nameids={'testname': 'testid'},
- ids={'testid': table_node},
- )
-
- domain = StandardDomain(env)
- if 'testname' in domain.data['labels']:
- del domain.data['labels']['testname']
- domain.process_doc(env, 'testdoc', document)
- assert 'testname' in domain.data['labels']
- assert domain.data['labels']['testname'] == (
- 'testdoc', 'testid', 'title text')
+# -*- coding: utf-8 -*-
+"""
+ test_domain_std
+ ~~~~~~~~~~~~~~~
+
+ Tests the std domain
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from docutils import nodes
+
+from sphinx.domains.std import StandardDomain
+from util import mock
+
+
+def test_process_doc_handle_figure_caption():
+ env = mock.Mock(domaindata={})
+ figure_node = nodes.figure(
+ '',
+ nodes.caption('caption text', 'caption text'),
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': figure_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'caption text')
+
+
+def test_process_doc_handle_image_parent_figure_caption():
+ env = mock.Mock(domaindata={})
+ img_node = nodes.image('', alt='image alt')
+ figure_node = nodes.figure(
+ '',
+ nodes.caption('caption text', 'caption text'),
+ img_node,
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': img_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'caption text')
+
+
+def test_process_doc_handle_table_title():
+ env = mock.Mock(domaindata={})
+ table_node = nodes.table(
+ '',
+ nodes.title('title text', 'title text'),
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': table_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'title text')
diff --git a/tests/test_env.py b/tests/test_environment.py
index 3dc7431b..b5da325f 100644
--- a/tests/test_env.py
+++ b/tests/test_environment.py
@@ -8,9 +8,10 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+
from six import PY3
-from util import TestApp, remove_unicode_literals, path, with_app
+from util import TestApp, remove_unicode_literals, path
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.builders.latex import LaTeXBuilder
@@ -18,34 +19,35 @@ from sphinx.builders.latex import LaTeXBuilder
app = env = None
warnings = []
+
def setup_module():
global app, env
- app = TestApp(freshenv=True, _copy_to_temp=True)
+ app = TestApp(srcdir='root-envtest')
env = app.env
env.set_warnfunc(lambda *args: warnings.append(args))
+
def teardown_module():
app.cleanup()
+
def warning_emitted(file, text):
for warning in warnings:
if len(warning) == 2 and file in warning[1] and text in warning[0]:
return True
return False
+
# Tests are run in the order they appear in the file, therefore we can
# afford to not run update() in the setup but in its own test
def test_first_update():
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- assert msg.endswith('%d added, 0 changed, 0 removed' % len(env.found_docs))
- docnames = set()
- for docname in it: # the generator does all the work
- docnames.add(docname)
- assert docnames == env.found_docs == set(env.all_docs)
+ updated = env.update(app.config, app.srcdir, app.doctreedir, app)
+ assert set(updated) == env.found_docs == set(env.all_docs)
# test if exclude_patterns works ok
assert 'subdir/excluded' not in env.found_docs
+
def test_images():
assert warning_emitted('images', 'image file not readable: foo.png')
assert warning_emitted('images', 'nonlocal image URI found: '
@@ -75,6 +77,7 @@ def test_images():
assert set(latexbuilder.images.values()) == \
set(['img.pdf', 'img.png', 'img1.png', 'simg.png', 'svgimg.pdf'])
+
def test_second_update():
# delete, add and "edit" (change saved mtime) some files and update again
env.all_docs['contents'] = 0
@@ -83,30 +86,33 @@ def test_second_update():
# the contents.txt toctree; otherwise section numbers would shift
(root / 'autodoc.txt').unlink()
(root / 'new.txt').write_text('New file\n========\n')
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- assert '1 added, 3 changed, 1 removed' in msg
- docnames = set()
- for docname in it:
- docnames.add(docname)
+ updated = env.update(app.config, app.srcdir, app.doctreedir, app)
# "includes" and "images" are in there because they contain references
# to nonexisting downloadable or image files, which are given another
# chance to exist
- assert docnames == set(['contents', 'new', 'includes', 'images'])
+ assert set(updated) == set(['contents', 'new', 'includes', 'images'])
assert 'autodoc' not in env.all_docs
assert 'autodoc' not in env.found_docs
-@with_app(srcdir='(empty)')
-def test_undecodable_source_reading_emit_warnings(app):
- # issue #1524
- warnings[:] = []
- app.env.set_warnfunc(lambda *args: warnings.append(args))
- (app.srcdir / 'contents.rst').write_bytes(b'1\xbb2')
- _, _, it = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- list(it) # the generator does all the work
- assert warning_emitted(
- 'contents', 'undecodable source characters, replacing with "?":'
- )
+def test_env_read_docs():
+ """By default, docnames are read in alphanumeric order"""
+ def on_env_read_docs_1(app, env, docnames):
+ pass
+
+ app.connect('env-before-read-docs', on_env_read_docs_1)
+
+ read_docnames = env.update(app.config, app.srcdir, app.doctreedir, app)
+ assert len(read_docnames) > 2 and read_docnames == sorted(read_docnames)
+
+ def on_env_read_docs_2(app, env, docnames):
+ docnames.reverse()
+
+ app.connect('env-before-read-docs', on_env_read_docs_2)
+
+ read_docnames = env.update(app.config, app.srcdir, app.doctreedir, app)
+ reversed_read_docnames = sorted(read_docnames, reverse=True)
+ assert len(read_docnames) > 2 and read_docnames == reversed_read_docnames
def test_object_inventory():
diff --git a/tests/test_autosummary.py b/tests/test_ext_autosummary.py
index 8803f88d..363c11e9 100644
--- a/tests/test_autosummary.py
+++ b/tests/test_ext_autosummary.py
@@ -8,49 +8,24 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys
-from functools import wraps
from six import iteritems, StringIO
from sphinx.ext.autosummary import mangle_signature
-from util import test_roots, TestApp
+from util import with_app
html_warnfile = StringIO()
-def with_autosummary_app(*args, **kw):
- default_kw = {
- 'srcdir': (test_roots / 'test-autosummary'),
- 'confoverrides': {
- 'extensions': ['sphinx.ext.autosummary'],
- 'autosummary_generate': True,
- 'source_suffix': '.rst'
- }
+default_kw = {
+ 'testroot': 'autosummary',
+ 'confoverrides': {
+ 'extensions': ['sphinx.ext.autosummary'],
+ 'autosummary_generate': True,
+ 'source_suffix': '.rst'
}
- default_kw.update(kw)
- def generator(func):
- @wraps(func)
- def deco(*args2, **kwargs2):
- # Now, modify the python path...
- srcdir = default_kw['srcdir']
- sys.path.insert(0, srcdir)
- try:
- app = TestApp(*args, **default_kw)
- func(app, *args2, **kwargs2)
- finally:
- if srcdir in sys.path:
- sys.path.remove(srcdir)
- # remove the auto-generated dummy_module.rst
- dummy_rst = srcdir / 'dummy_module.rst'
- if dummy_rst.isfile():
- dummy_rst.unlink()
-
- # don't execute cleanup if test failed
- app.cleanup()
- return deco
- return generator
+}
def test_mangle_signature():
@@ -79,10 +54,8 @@ def test_mangle_signature():
assert res == outp, (u"'%s' -> '%s' != '%s'" % (inp, res, outp))
-@with_autosummary_app(buildername='html', warning=html_warnfile)
-def test_get_items_summary(app):
- app.builddir.rmtree(True)
-
+@with_app(buildername='html', **default_kw)
+def test_get_items_summary(app, status, warning):
# monkey-patch Autosummary.get_items so we can easily get access to it's
# results..
import sphinx.ext.autosummary
@@ -96,13 +69,17 @@ def test_get_items_summary(app):
autosummary_items[name] = result
return results
+ def handler(app, what, name, obj, options, lines):
+ assert isinstance(lines, list)
+ app.connect('autodoc-process-docstring', handler)
+
sphinx.ext.autosummary.Autosummary.get_items = new_get_items
try:
app.builder.build_all()
finally:
sphinx.ext.autosummary.Autosummary.get_items = orig_get_items
- html_warnings = html_warnfile.getvalue()
+ html_warnings = warning.getvalue()
assert html_warnings == ''
expected_values = {
@@ -118,13 +95,3 @@ def test_get_items_summary(app):
for key, expected in iteritems(expected_values):
assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
' expected %r' % (key, autosummary_items[key], expected)
-
-
-@with_autosummary_app(buildername='html')
-def test_process_doc_event(app):
- app.builddir.rmtree(True)
-
- def handler(app, what, name, obj, options, lines):
- assert isinstance(lines, list)
- app.connect('autodoc-process-docstring', handler)
- app.builder.build_all()
diff --git a/tests/test_coverage.py b/tests/test_ext_coverage.py
index bfa76a98..ec1916d9 100644
--- a/tests/test_coverage.py
+++ b/tests/test_ext_coverage.py
@@ -15,7 +15,7 @@ from util import with_app
@with_app(buildername='coverage')
-def test_build(app):
+def test_build(app, status, warning):
app.builder.build_all()
py_undoc = (app.outdir / 'python.txt').text()
diff --git a/tests/test_doctest.py b/tests/test_ext_doctest.py
index 9fb8a2ea..002afff6 100644
--- a/tests/test_doctest.py
+++ b/tests/test_ext_doctest.py
@@ -8,30 +8,24 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from __future__ import print_function
-
-import sys
-
-from six import StringIO
from util import with_app
-
-status = StringIO()
cleanup_called = 0
-@with_app(buildername='doctest', status=status)
-def test_build(app):
+
+@with_app(buildername='doctest', testroot='doctest')
+def test_build(app, status, warning):
global cleanup_called
cleanup_called = 0
app.builder.build_all()
if app.statuscode != 0:
- print(status.getvalue(), file=sys.stderr)
- assert False, 'failures in doctests'
+ assert False, 'failures in doctests:' + status.getvalue()
# in doctest.txt, there are two named groups and the default group,
# so the cleanup function must be called three times
assert cleanup_called == 3, 'testcleanup did not get executed enough times'
+
def cleanup_call():
global cleanup_called
cleanup_called += 1
diff --git a/tests/test_intersphinx.py b/tests/test_ext_intersphinx.py
index dd71c6fb..fb540668 100644
--- a/tests/test_intersphinx.py
+++ b/tests/test_ext_intersphinx.py
@@ -17,7 +17,7 @@ from docutils import nodes
from sphinx import addnodes
from sphinx.ext.intersphinx import read_inventory_v1, read_inventory_v2, \
- load_mappings, missing_reference
+ load_mappings, missing_reference
from util import with_app, with_tempdir
@@ -49,9 +49,9 @@ def test_read_inventory_v1():
f.readline()
invdata = read_inventory_v1(f, '/util', posixpath.join)
assert invdata['py:module']['module'] == \
- ('foo', '1.0', '/util/foo.html#module-module', '-')
+ ('foo', '1.0', '/util/foo.html#module-module', '-')
assert invdata['py:class']['module.cls'] == \
- ('foo', '1.0', '/util/foo.html#module.cls', '-')
+ ('foo', '1.0', '/util/foo.html#module.cls', '-')
def test_read_inventory_v2():
@@ -68,19 +68,19 @@ def test_read_inventory_v2():
assert len(invdata1['py:module']) == 2
assert invdata1['py:module']['module1'] == \
- ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
+ ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
assert invdata1['py:module']['module2'] == \
- ('foo', '2.0', '/util/foo.html#module-module2', '-')
+ ('foo', '2.0', '/util/foo.html#module-module2', '-')
assert invdata1['py:function']['module1.func'][2] == \
- '/util/sub/foo.html#module1.func'
+ '/util/sub/foo.html#module1.func'
assert invdata1['c:function']['CFunc'][2] == '/util/cfunc.html#CFunc'
assert invdata1['std:term']['a term'][2] == \
- '/util/glossary.html#term-a-term'
+ '/util/glossary.html#term-a-term'
@with_app()
@with_tempdir
-def test_missing_reference(tempdir, app):
+def test_missing_reference(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
inv_file.write_bytes(inventory_v2)
app.config.intersphinx_mapping = {
@@ -94,7 +94,7 @@ def test_missing_reference(tempdir, app):
inv = app.env.intersphinx_inventory
assert inv['py:module']['module2'] == \
- ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
+ ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
# create fake nodes and check referencing
@@ -156,7 +156,7 @@ def test_missing_reference(tempdir, app):
@with_app()
@with_tempdir
-def test_load_mappings_warnings(tempdir, app):
+def test_load_mappings_warnings(tempdir, app, status, warning):
"""
load_mappings issues a warning if new-style mapping
identifiers are not alphanumeric
@@ -174,4 +174,4 @@ def test_load_mappings_warnings(tempdir, app):
app.config.intersphinx_cache_limit = 0
# load the inventory and check if it's done correctly
load_mappings(app)
- assert len(app._warning.content) == 2
+ assert warning.getvalue().count('\n') == 2
diff --git a/tests/test_napoleon.py b/tests/test_ext_napoleon.py
index e2790d38..e2790d38 100644
--- a/tests/test_napoleon.py
+++ b/tests/test_ext_napoleon.py
diff --git a/tests/test_napoleon_docstring.py b/tests/test_ext_napoleon_docstring.py
index 3b5c4fc7..3b5c4fc7 100644
--- a/tests/test_napoleon_docstring.py
+++ b/tests/test_ext_napoleon_docstring.py
diff --git a/tests/test_napoleon_iterators.py b/tests/test_ext_napoleon_iterators.py
index 320047e5..320047e5 100644
--- a/tests/test_napoleon_iterators.py
+++ b/tests/test_ext_napoleon_iterators.py
diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py
index 60ab7941..fb24f765 100644
--- a/tests/test_ext_viewcode.py
+++ b/tests/test_ext_viewcode.py
@@ -1,43 +1,44 @@
-# -*- coding: utf-8 -*-
-"""
- test_ext_viewcode
- ~~~~~~~~~~~~~~~~~
-
- Test sphinx.ext.viewcode extension.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from six import StringIO
-
-from util import test_roots, with_app
-
-
-warnfile = StringIO()
-root = test_roots / 'test-ext-viewcode'
-doctreedir = root / '_build' / 'doctree'
-
-
-def teardown_module():
- (root / '_build').rmtree(True)
-
-
-@with_app(srcdir=root, warning=warnfile)
-def test_simple(app):
- app.builder.build_all()
-
- warnings = re.sub(r'\\+', '/', warnfile.getvalue())
- assert re.findall(
- r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
- r"file .*/spam/__init__.py'",
- warnings
- )
-
- result = (app.outdir / 'index.html').text(encoding='utf-8')
- assert result.count('href="_modules/spam/mod1.html#func1"') == 2
- assert result.count('href="_modules/spam/mod2.html#func2"') == 2
- assert result.count('href="_modules/spam/mod1.html#Class1"') == 2
- assert result.count('href="_modules/spam/mod2.html#Class2"') == 2
+# -*- coding: utf-8 -*-
+"""
+ test_ext_viewcode
+ ~~~~~~~~~~~~~~~~~
+
+ Test sphinx.ext.viewcode extension.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from util import with_app
+
+
+@with_app(testroot='ext-viewcode')
+def test_viewcode(app, status, warning):
+ app.builder.build_all()
+
+ warnings = re.sub(r'\\+', '/', warning.getvalue())
+ assert re.findall(
+ r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
+ r"file .*/spam/__init__.py'",
+ warnings
+ )
+
+ result = (app.outdir / 'index.html').text(encoding='utf-8')
+ assert result.count('href="_modules/spam/mod1.html#func1"') == 2
+ assert result.count('href="_modules/spam/mod2.html#func2"') == 2
+ assert result.count('href="_modules/spam/mod1.html#Class1"') == 2
+ assert result.count('href="_modules/spam/mod2.html#Class2"') == 2
+
+
+@with_app(testroot='ext-viewcode', tags=['test_linkcode'])
+def test_linkcode(app, status, warning):
+ app.builder.build(['objects'])
+
+ stuff = (app.outdir / 'objects.html').text(encoding='utf-8')
+
+ assert 'http://foobar/source/foolib.py' in stuff
+ assert 'http://foobar/js/' in stuff
+ assert 'http://foobar/c/' in stuff
+ assert 'http://foobar/cpp/' in stuff
diff --git a/tests/test_footnote.py b/tests/test_footnote.py
deleted file mode 100644
index 964bb3e7..00000000
--- a/tests/test_footnote.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- test_footnote
- ~~~~~~~~~~~~~
-
- Test for footnote and citation.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from util import test_root, with_app
-
-
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-@with_app(buildername='html')
-def test_html(app):
- app.builder.build(['footnote'])
- result = (app.outdir / 'footnote.html').text(encoding='utf-8')
- expects = [
- '<a class="footnote-reference" href="#id5" id="id1">[1]</a>',
- '<a class="footnote-reference" href="#id6" id="id2">[2]</a>',
- '<a class="footnote-reference" href="#foo" id="id3">[3]</a>',
- '<a class="reference internal" href="#bar" id="id4">[bar]</a>',
- '<a class="fn-backref" href="#id1">[1]</a>',
- '<a class="fn-backref" href="#id2">[2]</a>',
- '<a class="fn-backref" href="#id3">[3]</a>',
- '<a class="fn-backref" href="#id4">[bar]</a>',
- ]
- for expect in expects:
- matches = re.findall(re.escape(expect), result)
- assert len(matches) == 1
diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py
index b4e5149f..5044ab0e 100644
--- a/tests/test_highlighting.py
+++ b/tests/test_highlighting.py
@@ -15,12 +15,7 @@ from pygments.formatters.html import HtmlFormatter
from sphinx.highlighting import PygmentsBridge
-from util import with_app, SkipTest
-
-try:
- import pygments
-except ImportError:
- raise SkipTest('pygments not available')
+from util import with_app
class MyLexer(RegexLexer):
@@ -46,13 +41,14 @@ class ComplainOnUnhighlighted(PygmentsBridge):
@with_app()
-def test_add_lexer(app):
+def test_add_lexer(app, status, warning):
app.add_lexer('test', MyLexer())
bridge = PygmentsBridge('html')
ret = bridge.highlight_block('ab', 'test')
assert '<span class="n">a</span>b' in ret
+
def test_detect_interactive():
bridge = ComplainOnUnhighlighted('html')
blocks = [
@@ -60,11 +56,12 @@ def test_detect_interactive():
>>> testing()
True
""",
- ]
+ ]
for block in blocks:
ret = bridge.highlight_block(block.lstrip(), 'python')
assert ret.startswith("<div class=\"highlight\">")
+
def test_set_formatter():
PygmentsBridge.html_formatter = MyFormatter
try:
@@ -74,6 +71,7 @@ def test_set_formatter():
finally:
PygmentsBridge.html_formatter = HtmlFormatter
+
def test_trim_doctest_flags():
PygmentsBridge.html_formatter = MyFormatter
try:
diff --git a/tests/test_i18n.py b/tests/test_i18n.py
index 06f6b28b..8144663c 100644
--- a/tests/test_i18n.py
+++ b/tests/test_i18n.py
@@ -13,5 +13,5 @@ from util import with_app
@with_app(confoverrides={'language': 'de'})
-def test_i18n(app):
+def test_i18n(app, status, warning):
app.builder.build_all()
diff --git a/tests/test_intl.py b/tests/test_intl.py
index bb54e5df..69437c1a 100644
--- a/tests/test_intl.py
+++ b/tests/test_intl.py
@@ -16,45 +16,45 @@ import re
from subprocess import Popen, PIPE
from xml.etree import ElementTree
-from six import StringIO, string_types
+from nose.tools import assert_equal
+from six import string_types
-from util import test_roots, path, with_app, SkipTest
+from util import tempdir, rootdir, path, gen_with_app, SkipTest, \
+ assert_re_search, assert_not_re_search, assert_in, assert_not_in, \
+ assert_startswith
-warnfile = StringIO()
-root = test_roots / 'test-intl'
-doctreedir = root / '_build' / 'doctree'
+root = tempdir / 'test-intl'
-def with_intl_app(*args, **kw):
+def gen_with_intl_app(*args, **kw):
default_kw = {
- 'srcdir': root,
- 'doctreedir': doctreedir,
+ 'testroot': 'intl',
'confoverrides': {
'language': 'xx', 'locale_dirs': ['.'],
'gettext_compact': False,
},
}
default_kw.update(kw)
- return with_app(*args, **default_kw)
+ return gen_with_app(*args, **default_kw)
def setup_module():
+ if not root.exists():
+ (rootdir / 'roots' / 'test-intl').copytree(root)
# Delete remnants left over after failed build
- (root / 'xx').rmtree(True)
- (root / 'xx' / 'LC_MESSAGES').makedirs()
# Compile all required catalogs into binary format (*.mo).
for dirpath, dirs, files in os.walk(root):
dirpath = path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
mo = root / 'xx' / 'LC_MESSAGES' / (
- os.path.relpath(po[:-3], root) + '.mo')
+ os.path.relpath(po[:-3], root) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
try:
p = Popen(['msgfmt', po, '-o', mo],
- stdout=PIPE, stderr=PIPE)
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
@@ -67,11 +67,6 @@ def setup_module():
assert mo.isfile(), 'msgfmt failed'
-def teardown_module():
- (root / '_build').rmtree(True)
- (root / 'xx').rmtree(True)
-
-
def elem_gettexts(elem):
def itertext(self):
# this function copied from Python-2.7 'ElementTree.itertext'.
@@ -105,125 +100,38 @@ def assert_elem(elem, texts=None, refs=None, names=None):
assert _names == names
-@with_intl_app(buildername='text')
-def test_simple(app):
- app.builder.build(['bom'])
- result = (app.outdir / 'bom.txt').text(encoding='utf-8')
- expect = (u"\nDatei mit UTF-8"
- u"\n***************\n" # underline matches new translation
- u"\nThis file has umlauts: äöü.\n")
- assert result == expect
-
+@gen_with_intl_app('text', freshenv=True)
+def test_text_builder(app, status, warning):
+ app.builder.build_all()
-@with_intl_app(buildername='text')
-def test_subdir(app):
- app.builder.build(['subdir/contents'])
- result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
- assert result.startswith(u"\nsubdir contents\n***************\n")
+ # --- warnings in translation
+ warnings = warning.getvalue().replace(os.sep, '/')
+ warning_expr = u'.*/warnings.txt:4: ' \
+ u'WARNING: Inline literal start-string without end-string.\n'
+ yield assert_re_search, warning_expr, warnings
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_warnings_in_translation(app):
- app.builddir.rmtree(True)
- app.builder.build(['warnings'])
result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REST WARNINGS"
u"\n***********************\n"
u"\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n")
+ yield assert_equal, result, expect
- assert result == expect
-
- warnings = warnfile.getvalue().replace(os.sep, '/')
- warning_expr = u'.*/warnings.txt:4: ' \
- u'WARNING: Inline literal start-string without end-string.\n'
- assert re.search(warning_expr, warnings)
-
-
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_footnote_break_refid(app):
- """test for #955 cant-build-html-with-footnotes-when-using"""
- app.builder.build(['footnote'])
- result = (app.outdir / 'footnote.html').text(encoding='utf-8')
- # expect no error by build
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_footnote_regression(app):
- # regression test for fix #955, #1176
- app.builddir.rmtree(True)
- app.builder.build(['footnote'])
- et = ElementTree.parse(app.outdir / 'footnote.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
- '2', '[ref]', '1', '100', '.'],
- refs=['i18n-with-footnote', 'ref'])
-
- footnote0 = secs[0].findall('footnote')
- assert_elem(
- footnote0[0],
- texts=['1','THIS IS A AUTO NUMBERED FOOTNOTE.'],
- names=['1'])
- assert_elem(
- footnote0[1],
- texts=['100','THIS IS A NUMBERED FOOTNOTE.'],
- names=['100'])
- assert_elem(
- footnote0[2],
- texts=['2','THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
- names=['named'])
-
- citation0 = secs[0].findall('citation')
- assert_elem(
- citation0[0],
- texts=['ref','THIS IS A NAMED FOOTNOTE.'],
- names=['ref'])
-
- warnings = warnfile.getvalue().replace(os.sep, '/')
- warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
- assert not re.search(warning_expr, warnings)
-
-
-@with_intl_app(buildername='xml', cleanenv=True)
-def test_i18n_footnote_backlink(app):
- # i18n test for #1058
- app.builder.build(['footnote'])
- et = ElementTree.parse(app.outdir / 'footnote.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- refs0 = para0[0].findall('footnote_reference')
- refid2id = dict([
- (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
-
- footnote0 = secs[0].findall('footnote')
- for footnote in footnote0:
- ids = footnote.attrib.get('ids')
- backrefs = footnote.attrib.get('backrefs')
- assert refid2id[ids] == backrefs
+ # --- simple translation; check title underlines
+ result = (app.outdir / 'bom.txt').text(encoding='utf-8')
+ expect = (u"\nDatei mit UTF-8"
+ u"\n***************\n" # underline matches new translation
+ u"\nThis file has umlauts: äöü.\n")
+ yield assert_equal, result, expect
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_refs_python_domain(app):
- app.builder.build(['refs_python_domain'])
- et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
- secs = et.findall('section')
+ # --- check translation in subdirs
- # regression test for fix #1363
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
- refs=['sensitive.sensitive_variables'])
+ result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
+ yield assert_startswith, result, u"\nsubdir contents\n***************\n"
+ # --- check warnings for inconsistency in number of references
-@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
-def test_i18n_warn_for_number_of_references_inconsistency(app):
- app.builddir.rmtree(True)
- app.builder.build(['refs_inconsistency'])
result = (app.outdir / 'refs_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REFS INCONSISTENCY"
u"\n****************************\n"
@@ -233,98 +141,19 @@ def test_i18n_warn_for_number_of_references_inconsistency(app):
u"\n[1] THIS IS A AUTO NUMBERED FOOTNOTE.\n"
u"\n[ref2] THIS IS A NAMED FOOTNOTE.\n"
u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n")
- assert result == expect
+ yield assert_equal, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
warning_fmt = u'.*/refs_inconsistency.txt:\\d+: ' \
- u'WARNING: inconsistent %s in translated message\n'
+ u'WARNING: inconsistent %s in translated message\n'
expected_warning_expr = (
warning_fmt % 'footnote references' +
warning_fmt % 'references' +
warning_fmt % 'references')
- assert re.search(expected_warning_expr, warnings)
-
-
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_link_to_undefined_reference(app):
- app.builder.build(['refs_inconsistency'])
- result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')
-
- expected_expr = ('<a class="reference external" '
- 'href="http://www.example.com">reference</a>')
- assert len(re.findall(expected_expr, result)) == 2
-
- expected_expr = ('<a class="reference internal" '
- 'href="#reference">reference</a>')
- assert len(re.findall(expected_expr, result)) == 0
-
- expected_expr = ('<a class="reference internal" '
- 'href="#i18n-with-refs-inconsistency">I18N WITH '
- 'REFS INCONSISTENCY</a>')
- assert len(re.findall(expected_expr, result)) == 1
-
-
-@with_intl_app(buildername='xml', cleanenv=True)
-def test_i18n_keep_external_links(app):
- # regression test for #1044
- app.builder.build(['external_links'])
- et = ElementTree.parse(app.outdir / 'external_links.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- # external link check
- assert_elem(
- para0[0],
- texts=['EXTERNAL LINK TO', 'Python', '.'],
- refs=['http://python.org/index.html'])
-
- # internal link check
- assert_elem(
- para0[1],
- texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
- refs=['i18n-with-external-links'])
-
- # inline link check
- assert_elem(
- para0[2],
- texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'],
- refs=['http://sphinx-doc.org'])
-
- # unnamed link check
- assert_elem(
- para0[3],
- texts=['UNNAMED', 'LINK', '.'],
- refs=['http://google.com'])
+ yield assert_re_search, expected_warning_expr, warnings
- # link target swapped translation
- para1 = secs[1].findall('paragraph')
- assert_elem(
- para1[0],
- texts=['LINK TO', 'external2', 'AND', 'external1', '.'],
- refs=['http://example.com/external2',
- 'http://example.com/external1'])
- assert_elem(
- para1[1],
- texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE',
- '.'],
- refs=['http://python.org', 'http://sphinx-doc.org'])
+ # --- check warning for literal block
- # multiple references in the same line
- para2 = secs[2].findall('paragraph')
- assert_elem(
- para2[0],
- texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
- 'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
- 'THE PYTHON SITE', '.'],
- refs=['i18n-with-external-links', 'http://python.org/index.html',
- 'http://sphinx-doc.org', 'http://google.com',
- 'http://python.org'])
-
-
-@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
-def test_i18n_literalblock_warning(app):
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['literalblock'])
result = (app.outdir / 'literalblock.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH LITERAL BLOCK"
u"\n***********************\n"
@@ -333,18 +162,15 @@ def test_i18n_literalblock_warning(app):
u"\n literal block\n"
u"\nMISSING LITERAL BLOCK:\n"
u"\n<SYSTEM MESSAGE:")
- assert result.startswith(expect)
+ yield assert_startswith, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = u'.*/literalblock.txt:\\d+: ' \
- u'WARNING: Literal block expected; none found.'
- assert re.search(expected_warning_expr, warnings)
+ u'WARNING: Literal block expected; none found.'
+ yield assert_re_search, expected_warning_expr, warnings
+ # --- definition terms: regression test for #975
-@with_intl_app(buildername='text')
-def test_i18n_definition_terms(app):
- # regression test for #975
- app.builder.build(['definition_terms'])
result = (app.outdir / 'definition_terms.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH DEFINITION TERMS"
u"\n**************************\n"
@@ -352,15 +178,10 @@ def test_i18n_definition_terms(app):
u"\n THE CORRESPONDING DEFINITION\n"
u"\nSOME OTHER TERM"
u"\n THE CORRESPONDING DEFINITION #2\n")
+ yield assert_equal, result, expect
- assert result == expect
-
+ # --- glossary terms: regression test for #1090
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_glossary_terms(app):
- # regression test for #1090
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['glossary_terms'])
result = (app.outdir / 'glossary_terms.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS"
u"\n************************\n"
@@ -369,146 +190,26 @@ def test_i18n_glossary_terms(app):
u"\nSOME OTHER NEW TERM"
u"\n THE CORRESPONDING GLOSSARY #2\n"
u"\nLINK TO *SOME NEW TERM*.\n")
- assert result == expect
+ yield assert_equal, result, expect
+ warnings = warning.getvalue().replace(os.sep, '/')
+ yield assert_not_in, 'term not in glossary', warnings
- warnings = warnfile.getvalue().replace(os.sep, '/')
- assert 'term not in glossary' not in warnings
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_role_xref(app):
- # regression test for #1090, #1193
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['role_xref'])
- et = ElementTree.parse(app.outdir / 'role_xref.xml')
- sec1, sec2 = et.findall('section')
-
- para1, = sec1.findall('paragraph')
- assert_elem(
- para1,
- texts=['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
- 'SOME NEW TERM', '.'],
- refs=['i18n-role-xref', 'contents',
- 'glossary_terms#term-some-term'])
-
- para2 = sec2.findall('paragraph')
- assert_elem(
- para2[0],
- texts=['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM',
- '.'],
- refs=['glossary_terms#term-some-other-term',
- 'glossary_terms#term-some-term'])
- assert_elem(
- para2[1],
- texts=['LINK TO', 'SAME TYPE LINKS', 'AND',
- "I18N ROCK'N ROLE XREF", '.'],
- refs=['same-type-links', 'i18n-role-xref'])
- assert_elem(
- para2[2],
- texts=['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS',
- '.'],
- refs=['glossary_terms', 'contents'])
- assert_elem(
- para2[3],
- texts=['LINK TO', '--module', 'AND', '-m', '.'],
- refs=['cmdoption--module', 'cmdoption-m'])
- assert_elem(
- para2[4],
- texts=['LINK TO', 'env2', 'AND', 'env1', '.'],
- refs=['envvar-env2', 'envvar-env1'])
- assert_elem(
- para2[5],
- texts=['LINK TO', 'token2', 'AND', 'token1', '.'],
- refs=[]) #TODO: how do I link token role to productionlist?
- assert_elem(
- para2[6],
- texts=['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
- refs=['same-type-links', 'i18n-role-xref'])
-
- #warnings
- warnings = warnfile.getvalue().replace(os.sep, '/')
- assert 'term not in glossary' not in warnings
- assert 'undefined label' not in warnings
- assert 'unknown document' not in warnings
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_label_target(app):
- # regression test for #1193, #1265
- app.builder.build(['label_target'])
- et = ElementTree.parse(app.outdir / 'label_target.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
- 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
- refs=['implicit-target', 'section-and-label'])
-
- para1 = secs[1].findall('paragraph')
- assert_elem(
- para1[0],
- texts=['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
- 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
- '.'],
- refs=['explicit-target', 'id1'])
-
- para2 = secs[2].findall('paragraph')
- assert_elem(
- para2[0],
- texts=['X IMPLICIT SECTION NAME', 'POINT TO',
- 'implicit-section-name', '.'],
- refs=['implicit-section-name'])
+ # --- glossary term inconsistencies: regression test for #1090
- sec2 = secs[2].findall('section')
-
- para2_0 = sec2[0].findall('paragraph')
- assert_elem(
- para2_0[0],
- texts=['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
- refs=[])
-
- para3 = secs[3].findall('paragraph')
- assert_elem(
- para3[0],
- texts=['X', 'bridge label',
- 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
- 'SECTION TITLE.'],
- refs=['label-bridged-target-section'])
- assert_elem(
- para3[1],
- texts=['X', 'bridge label', 'POINT TO',
- 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
- 'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
- 'bridge label2', 'POINT TO CORRECT TARGET.'],
- refs=['label-bridged-target-section',
- 'section-and-label',
- 'section-and-label'])
-
-
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_glossary_terms_inconsistency(app):
- # regression test for #1090
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['glossary_terms_inconsistency'])
- result = (app.outdir / 'glossary_terms_inconsistency.txt'
- ).text(encoding='utf-8')
+ result = (app.outdir / 'glossary_terms_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS INCONSISTENCY"
u"\n**************************************\n"
u"\n1. LINK TO *SOME NEW TERM*.\n")
- assert result == expect
+ yield assert_equal, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = (
- u'.*/glossary_terms_inconsistency.txt:\\d+: '
- u'WARNING: inconsistent term references in translated message\n')
- assert re.search(expected_warning_expr, warnings)
+ u'.*/glossary_terms_inconsistency.txt:\\d+: '
+ u'WARNING: inconsistent term references in translated message\n')
+ yield assert_re_search, expected_warning_expr, warnings
+ # --- seealso
-@with_intl_app(buildername='text')
-def test_seealso(app):
- app.builder.build(['seealso'])
result = (app.outdir / 'seealso.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH SEEALSO"
u"\n*****************\n"
@@ -516,13 +217,10 @@ def test_seealso(app):
u"\nSee also: LONG TEXT 1\n"
u"\nSee also: SHORT TEXT 2\n"
u"\n LONG TEXT 2\n")
- assert result == expect
+ yield assert_equal, result, expect
+ # --- figure captions: regression test for #940
-@with_intl_app(buildername='text')
-def test_i18n_figure_caption(app):
- # regression test for #940
- app.builder.build(['figure_caption'])
result = (app.outdir / 'figure_caption.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH FIGURE CAPTION"
u"\n************************\n"
@@ -536,14 +234,10 @@ def test_i18n_figure_caption(app):
u"\n [image]MY CAPTION OF THE FIGURE\n"
u"\n MY DESCRIPTION PARAGRAPH1 OF THE FIGURE.\n"
u"\n MY DESCRIPTION PARAGRAPH2 OF THE FIGURE.\n")
+ yield assert_equal, result, expect
- assert result == expect
-
+ # --- rubric: regression test for pull request #190
-@with_intl_app(buildername='text')
-def test_i18n_rubric(app):
- # regression test for pull request #190
- app.builder.build(['rubric'])
result = (app.outdir / 'rubric.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH RUBRIC"
u"\n****************\n"
@@ -553,14 +247,73 @@ def test_i18n_rubric(app):
u"\n===================\n"
u"\nBLOCK\n"
u"\n -[ RUBRIC TITLE ]-\n")
+ yield assert_equal, result, expect
+
+ # --- docfields
+
+ result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
+ expect = (u"\nI18N WITH DOCFIELDS"
+ u"\n*******************\n"
+ u"\nclass class Cls1\n"
+ u"\n Parameters:"
+ u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
+ u"\nclass class Cls2\n"
+ u"\n Parameters:"
+ u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
+ u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
+ u"\nclass class Cls3(values)\n"
+ u"\n Raises ValueError:"
+ u"\n IF THE VALUES ARE OUT OF RANGE\n"
+ u"\nclass class Cls4(values)\n"
+ u"\n Raises:"
+ u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
+ u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
+ u"\nclass class Cls5\n"
+ u"\n Returns:"
+ u'\n A NEW "Cls3" INSTANCE\n')
+ yield assert_equal, result, expect
+
+ # --- admonitions
+ # #1206: gettext did not translate admonition directive's title
+ # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
- assert result == expect
+ result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
+ directives = (
+ "attention", "caution", "danger", "error", "hint",
+ "important", "note", "tip", "warning", "admonition")
+ for d in directives:
+ yield assert_in, d.upper() + " TITLE", result
+ yield assert_in, d.upper() + " BODY", result
+
+
+@gen_with_intl_app('html', freshenv=True)
+def test_html_builder(app, status, warning):
+ app.builder.build_all()
+
+ # --- test for #955 cant-build-html-with-footnotes-when-using
+
+ # expect no error by build
+ (app.outdir / 'footnote.html').text(encoding='utf-8')
+
+ # --- links to undefined reference
+
+ result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')
+
+ expected_expr = ('<a class="reference external" '
+ 'href="http://www.example.com">reference</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 2
+
+ expected_expr = ('<a class="reference internal" '
+ 'href="#reference">reference</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 0
+
+ expected_expr = ('<a class="reference internal" '
+ 'href="#i18n-with-refs-inconsistency">I18N WITH '
+ 'REFS INCONSISTENCY</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 1
+ # --- index entries: regression test for #976
-@with_intl_app(buildername='html')
-def test_i18n_index_entries(app):
- # regression test for #976
- app.builder.build(['index_entries'])
result = (app.outdir / 'genindex.html').text(encoding='utf-8')
def wrap(tag, keyword):
@@ -586,12 +339,10 @@ def test_i18n_index_entries(app):
wrap('a', 'BUILTIN'),
]
for expr in expected_exprs:
- assert re.search(expr, result, re.M)
+ yield assert_re_search, expr, result, re.M
+ # --- versionchanges
-@with_intl_app(buildername='html', cleanenv=True)
-def test_versionchange(app):
- app.builder.build(['versionchange'])
result = (app.outdir / 'versionchange.html').text(encoding='utf-8')
def get_content(result, name):
@@ -607,83 +358,266 @@ def test_versionchange(app):
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF DEPRECATED.</p>\n"""
u"""<p>THIS IS THE <em>SECOND</em> PARAGRAPH OF DEPRECATED.</p>\n""")
matched_content = get_content(result, "deprecated")
- assert expect1 == matched_content
+ yield assert_equal, expect1, matched_content
expect2 = (
u"""<p><span class="versionmodified">New in version 1.0: </span>"""
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONADDED.</p>\n""")
matched_content = get_content(result, "versionadded")
- assert expect2 == matched_content
+ yield assert_equal, expect2, matched_content
expect3 = (
u"""<p><span class="versionmodified">Changed in version 1.0: </span>"""
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONCHANGED.</p>\n""")
matched_content = get_content(result, "versionchanged")
- assert expect3 == matched_content
+ yield assert_equal, expect3, matched_content
+ # --- docfields
-@with_intl_app(buildername='text', cleanenv=True)
-def test_i18n_docfields(app):
- app.builder.build(['docfields'])
- result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
- expect = (u"\nI18N WITH DOCFIELDS"
- u"\n*******************\n"
- u"\nclass class Cls1\n"
- u"\n Parameters:"
- u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
- u"\nclass class Cls2\n"
- u"\n Parameters:"
- u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
- u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
- u"\nclass class Cls3(values)\n"
- u"\n Raises ValueError:"
- u"\n IF THE VALUES ARE OUT OF RANGE\n"
- u"\nclass class Cls4(values)\n"
- u"\n Raises:"
- u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
- u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
- u"\nclass class Cls5\n"
- u"\n Returns:"
- u'\n A NEW "Cls3" INSTANCE\n')
- assert result == expect
+ # expect no error by build
+ (app.outdir / 'docfields.html').text(encoding='utf-8')
+ # --- gettext template
-@with_intl_app(buildername='text', cleanenv=True)
-def test_i18n_admonitions(app):
- # #1206: gettext did not translate admonition directive's title
- # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
- app.builder.build(['admonitions'])
- result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
- directives = (
- "attention", "caution", "danger", "error", "hint",
- "important", "note", "tip", "warning", "admonition",)
- for d in directives:
- assert d.upper() + " TITLE" in result
- assert d.upper() + " BODY" in result
+ result = (app.outdir / 'index.html').text(encoding='utf-8')
+ yield assert_in, "WELCOME", result
+ yield assert_in, "SPHINX 2013.120", result
+ # --- rebuild by .mo mtime
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_docfields_html(app):
- app.builder.build(['docfields'])
- result = (app.outdir / 'docfields.html').text(encoding='utf-8')
- # expect no error by build
+ app.builder.build_update()
+ updated = app.env.update(app.config, app.srcdir, app.doctreedir, app)
+ yield assert_equal, len(updated), 0
+
+ (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime(None)
+ updated = app.env.update(app.config, app.srcdir, app.doctreedir, app)
+ yield assert_equal, len(updated), 1
-@with_intl_app(buildername='html')
-def test_gettext_template(app):
+@gen_with_intl_app('xml', freshenv=True)
+def test_xml_builder(app, status, warning):
app.builder.build_all()
- result = (app.outdir / 'index.html').text(encoding='utf-8')
- assert "WELCOME" in result
- assert "SPHINX 2013.120" in result
+ # --- footnotes: regression test for fix #955, #1176
-@with_intl_app(buildername='html')
-def test_rebuild_by_mo_mtime(app):
- app.builder.build_update()
- _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- assert count == 0
+ et = ElementTree.parse(app.outdir / 'footnote.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
+ '2', '[ref]', '1', '100', '.'],
+ ['i18n-with-footnote', 'ref'])
+
+ footnote0 = secs[0].findall('footnote')
+ yield (assert_elem,
+ footnote0[0],
+ ['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'],
+ None,
+ ['1'])
+ yield (assert_elem,
+ footnote0[1],
+ ['100', 'THIS IS A NUMBERED FOOTNOTE.'],
+ None,
+ ['100'])
+ yield (assert_elem,
+ footnote0[2],
+ ['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
+ None,
+ ['named'])
+
+ citation0 = secs[0].findall('citation')
+ yield (assert_elem,
+ citation0[0],
+ ['ref', 'THIS IS A NAMED FOOTNOTE.'],
+ None,
+ ['ref'])
+
+ warnings = warning.getvalue().replace(os.sep, '/')
+ warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
+ yield assert_not_re_search, warning_expr, warnings
+
+ # --- footnote backlinks: i18n test for #1058
+
+ et = ElementTree.parse(app.outdir / 'footnote.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ refs0 = para0[0].findall('footnote_reference')
+ refid2id = dict([
+ (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
+
+ footnote0 = secs[0].findall('footnote')
+ for footnote in footnote0:
+ ids = footnote.attrib.get('ids')
+ backrefs = footnote.attrib.get('backrefs')
+ yield assert_equal, refid2id[ids], backrefs
+
+ # --- refs in the Python domain
- mo = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').bytes()
- (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').write_bytes(mo)
- _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- assert count == 1
+ et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
+ secs = et.findall('section')
+
+ # regression test for fix #1363
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
+ ['sensitive.sensitive_variables'])
+
+ # --- keep external links: regression test for #1044
+
+ et = ElementTree.parse(app.outdir / 'external_links.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ # external link check
+ yield (assert_elem,
+ para0[0],
+ ['EXTERNAL LINK TO', 'Python', '.'],
+ ['http://python.org/index.html'])
+
+ # internal link check
+ yield (assert_elem,
+ para0[1],
+ ['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
+ ['i18n-with-external-links'])
+
+ # inline link check
+ yield (assert_elem,
+ para0[2],
+ ['INLINE LINK BY', 'THE SPHINX SITE', '.'],
+ ['http://sphinx-doc.org'])
+
+ # unnamed link check
+ yield (assert_elem,
+ para0[3],
+ ['UNNAMED', 'LINK', '.'],
+ ['http://google.com'])
+
+ # link target swapped translation
+ para1 = secs[1].findall('paragraph')
+ yield (assert_elem,
+ para1[0],
+ ['LINK TO', 'external2', 'AND', 'external1', '.'],
+ ['http://example.com/external2',
+ 'http://example.com/external1'])
+ yield (assert_elem,
+ para1[1],
+ ['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', '.'],
+ ['http://python.org', 'http://sphinx-doc.org'])
+
+ # multiple references in the same line
+ para2 = secs[2].findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
+ 'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
+ 'THE PYTHON SITE', '.'],
+ ['i18n-with-external-links', 'http://python.org/index.html',
+ 'http://sphinx-doc.org', 'http://google.com',
+ 'http://python.org'])
+
+ # --- role xref: regression test for #1090, #1193
+
+ et = ElementTree.parse(app.outdir / 'role_xref.xml')
+ sec1, sec2 = et.findall('section')
+
+ para1, = sec1.findall('paragraph')
+ yield (assert_elem,
+ para1,
+ ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
+ 'SOME NEW TERM', '.'],
+ ['i18n-role-xref', 'contents',
+ 'glossary_terms#term-some-term'])
+
+ para2 = sec2.findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'],
+ ['glossary_terms#term-some-other-term',
+ 'glossary_terms#term-some-term'])
+ yield(assert_elem,
+ para2[1],
+ ['LINK TO', 'SAME TYPE LINKS', 'AND',
+ "I18N ROCK'N ROLE XREF", '.'],
+ ['same-type-links', 'i18n-role-xref'])
+ yield (assert_elem,
+ para2[2],
+ ['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'],
+ ['glossary_terms', 'contents'])
+ yield (assert_elem,
+ para2[3],
+ ['LINK TO', '--module', 'AND', '-m', '.'],
+ ['cmdoption--module', 'cmdoption-m'])
+ yield (assert_elem,
+ para2[4],
+ ['LINK TO', 'env2', 'AND', 'env1', '.'],
+ ['envvar-env2', 'envvar-env1'])
+ yield (assert_elem,
+ para2[5],
+ ['LINK TO', 'token2', 'AND', 'token1', '.'],
+ []) # TODO: how do I link token role to productionlist?
+ yield (assert_elem,
+ para2[6],
+ ['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
+ ['same-type-links', 'i18n-role-xref'])
+
+ # warnings
+ warnings = warning.getvalue().replace(os.sep, '/')
+ yield assert_not_in, 'term not in glossary', warnings
+ yield assert_not_in, 'undefined label', warnings
+ yield assert_not_in, 'unknown document', warnings
+
+ # --- label targets: regression test for #1193, #1265
+
+ et = ElementTree.parse(app.outdir / 'label_target.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
+ 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
+ ['implicit-target', 'section-and-label'])
+
+ para1 = secs[1].findall('paragraph')
+ yield (assert_elem,
+ para1[0],
+ ['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
+ 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
+ '.'],
+ ['explicit-target', 'id1'])
+
+ para2 = secs[2].findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['X IMPLICIT SECTION NAME', 'POINT TO',
+ 'implicit-section-name', '.'],
+ ['implicit-section-name'])
+
+ sec2 = secs[2].findall('section')
+
+ para2_0 = sec2[0].findall('paragraph')
+ yield (assert_elem,
+ para2_0[0],
+ ['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
+ [])
+
+ para3 = secs[3].findall('paragraph')
+ yield (assert_elem,
+ para3[0],
+ ['X', 'bridge label',
+ 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
+ 'SECTION TITLE.'],
+ ['label-bridged-target-section'])
+ yield (assert_elem,
+ para3[1],
+ ['X', 'bridge label', 'POINT TO',
+ 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
+ 'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
+ 'bridge label2', 'POINT TO CORRECT TARGET.'],
+ ['label-bridged-target-section',
+ 'section-and-label',
+ 'section-and-label'])
diff --git a/tests/test_linkcode.py b/tests/test_linkcode.py
deleted file mode 100644
index 45847778..00000000
--- a/tests/test_linkcode.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- test_linkcode
- ~~~~~~~~~~~~~
-
- Test the sphinx.ext.linkcode extension.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import os
-from util import with_app
-
-
-@with_app(buildername='html', tags=['test_linkcode'], _copy_to_temp=True)
-def test_html(app):
- app.builder.build_all()
-
- stuff = (app.outdir / 'objects.html').text(encoding='utf-8')
-
- assert 'http://foobar/source/foolib.py' in stuff
- assert 'http://foobar/js/' in stuff
- assert 'http://foobar/c/' in stuff
- assert 'http://foobar/cpp/' in stuff
diff --git a/tests/test_markup.py b/tests/test_markup.py
index 81ade641..88b8a2c7 100644
--- a/tests/test_markup.py
+++ b/tests/test_markup.py
@@ -23,10 +23,11 @@ from util import TestApp
app = settings = parser = None
+
def setup_module():
global app, settings, parser
texescape.init() # otherwise done by the latex builder
- app = TestApp(cleanenv=True)
+ app = TestApp()
optparser = frontend.OptionParser(
components=(rst.Parser, HTMLWriter, LaTeXWriter))
settings = optparser.get_default_values()
@@ -35,6 +36,7 @@ def setup_module():
settings.env.temp_data['docname'] = 'dummy'
parser = rst.Parser()
+
def teardown_module():
app.cleanup()
@@ -42,12 +44,15 @@ def teardown_module():
class ForgivingTranslator:
def visit_pending_xref(self, node):
pass
+
def depart_pending_xref(self, node):
pass
+
class ForgivingHTMLTranslator(SmartyPantsHTMLTranslator, ForgivingTranslator):
pass
+
class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
pass
@@ -108,8 +113,9 @@ def test_inline():
# non-interpolation of dashes in option role
yield (verify_re, ':option:`--with-option`',
- '<p><em( class="xref std std-option")?>--with-option</em></p>$',
- r'\\emph{\\texttt{-{-}with-option}}$')
+ '<p><code( class="xref std std-option docutils literal")?>'
+ '<span class="pre">--with-option</span></code></p>$',
+ r'\\code{-{-}with-option}$')
# verify smarty-pants quotes
yield verify, '"John"', '<p>&#8220;John&#8221;</p>', "``John''"
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index a3fd1358..944aa157 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -12,27 +12,13 @@
# adapted from an example of bibliographic metadata at
# http://docutils.sourceforge.net/docs/user/rst/demo.txt
-from util import TestApp
+from util import with_app
from nose.tools import assert_equal
-app = env = None
-warnings = []
-
-def setup_module():
- # Is there a better way of generating this doctree than manually iterating?
- global app, env
- app = TestApp(_copy_to_temp=True)
- env = app.env
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- for docname in it:
- pass
-
-def teardown_module():
- app.cleanup()
-
-def test_docinfo():
+@with_app('pseudoxml')
+def test_docinfo(app, status, warning):
"""
Inspect the 'docinfo' metadata stored in the first node of the document.
Note this doesn't give us access to data stored in subsequence blocks
@@ -40,6 +26,8 @@ def test_docinfo():
'dedication' blocks, or the 'meta' role. Doing otherwise is probably more
messing with the internals of sphinx than this rare use case merits.
"""
+ app.builder.build(['metadata'])
+ env = app.env
exampledocinfo = env.metadata['metadata']
expecteddocinfo = {
'author': u'David Goodger',
diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py
index 74deb46d..1d3bcd9e 100644
--- a/tests/test_quickstart.py
+++ b/tests/test_quickstart.py
@@ -29,8 +29,10 @@ warnfile = StringIO()
def setup_module():
nocolor()
+
def mock_input(answers, needanswer=False):
called = set()
+
def input_(prompt):
if prompt in called:
raise AssertionError('answer for %r missing and no default '
@@ -50,8 +52,10 @@ def mock_input(answers, needanswer=False):
return ''
return input_
+
real_input = input
+
def teardown_module():
qs.term_input = real_input
qs.TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
@@ -200,7 +204,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['master_doc'] == 'contents'
assert ns['project'] == u'STASIâ„¢'
assert ns['copyright'] == u'%s, Wolfgang Schäuble & G\'Beckstein' % \
- time.strftime('%Y')
+ time.strftime('%Y')
assert ns['version'] == '2.0'
assert ns['release'] == '2.0.1'
assert ns['html_static_path'] == ['.static']
@@ -214,7 +218,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['texinfo_documents'] == [
('contents', 'STASI', u'STASIâ„¢ Documentation',
u'Wolfgang Schäuble & G\'Beckstein', 'STASI',
- 'One line description of project.', 'Miscellaneous'),]
+ 'One line description of project.', 'Miscellaneous')]
assert (tempdir / 'build').isdir()
assert (tempdir / 'source' / '.static').isdir()
@@ -237,7 +241,7 @@ def test_generated_files_eol(tempdir):
def assert_eol(filename, eol):
content = filename.bytes().decode('unicode-escape')
- assert all([l[-len(eol):]==eol for l in content.splitlines(True)])
+ assert all([l[-len(eol):] == eol for l in content.splitlines(True)])
assert_eol(tempdir / 'make.bat', '\r\n')
assert_eol(tempdir / 'Makefile', '\n')
@@ -257,13 +261,13 @@ def test_quickstart_and_build(tempdir):
qs.generate(d)
app = application.Sphinx(
- tempdir, #srcdir
- tempdir, #confdir
- (tempdir / '_build' / 'html'), #outdir
- (tempdir / '_build' / '.doctree'), #doctreedir
- 'html', #buildername
- status=StringIO(),
- warning=warnfile)
+ tempdir, # srcdir
+ tempdir, # confdir
+ (tempdir / '_build' / 'html'), # outdir
+ (tempdir / '_build' / '.doctree'), # doctreedir
+ 'html', # buildername
+ status=StringIO(),
+ warning=warnfile)
app.builder.build_all()
warnings = warnfile.getvalue()
assert not warnings
@@ -273,7 +277,7 @@ def test_quickstart_and_build(tempdir):
def test_default_filename(tempdir):
answers = {
'Root path': tempdir,
- 'Project name': u'\u30c9\u30a4\u30c4', #Fullwidth characters only
+ 'Project name': u'\u30c9\u30a4\u30c4', # Fullwidth characters only
'Author name': 'Georg Brandl',
'Project version': '0.1',
}
diff --git a/tests/test_searchadapters.py b/tests/test_searchadapters.py
index 9a41601d..4a91f96d 100644
--- a/tests/test_searchadapters.py
+++ b/tests/test_searchadapters.py
@@ -9,33 +9,24 @@
:license: BSD, see LICENSE for details.
"""
-import os
-
from six import StringIO
from sphinx.websupport import WebSupport
from test_websupport import sqlalchemy_missing
-from util import test_root, skip_if, skip_unless_importable
-
-
-def clear_builddir():
- (test_root / 'websupport').rmtree(True)
+from util import rootdir, tempdir, skip_if, skip_unless_importable
def teardown_module():
- (test_root / 'generated').rmtree(True)
- clear_builddir()
+ (tempdir / 'websupport').rmtree(True)
def search_adapter_helper(adapter):
- clear_builddir()
-
- settings = {'builddir': os.path.join(test_root, 'websupport'),
+ settings = {'srcdir': rootdir / 'root',
+ 'builddir': tempdir / 'websupport',
'status': StringIO(),
- 'warning': StringIO()}
- settings.update({'srcdir': test_root,
- 'search': adapter})
+ 'warning': StringIO(),
+ 'search': adapter}
support = WebSupport(**settings)
support.build()
@@ -63,7 +54,7 @@ def search_adapter_helper(adapter):
'%s search adapter returned %s search result(s), should have been 1'\
% (adapter, len(results))
# Make sure it works through the WebSupport API
- html = support.get_search_results(u'SomeLongRandomWord')
+ support.get_search_results(u'SomeLongRandomWord')
@skip_unless_importable('xapian', 'needs xapian bindings installed')
diff --git a/tests/test_setup_command.py b/tests/test_setup_command.py
index c165b2d3..70826721 100644
--- a/tests/test_setup_command.py
+++ b/tests/test_setup_command.py
@@ -16,11 +16,16 @@ from functools import wraps
import tempfile
import sphinx
-from util import with_tempdir, test_roots, SkipTest
+from util import rootdir, tempdir, SkipTest
from path import path
from textwrap import dedent
-root = test_roots / 'test-setup'
+root = tempdir / 'test-setup'
+
+
+def setup_module():
+ if not root.exists():
+ (rootdir / 'roots' / 'test-setup').copytree(root)
def with_setup_command(root, *args, **kwds):
diff --git a/tests/test_templating.py b/tests/test_templating.py
index e8fafca2..5f8fcaeb 100644
--- a/tests/test_templating.py
+++ b/tests/test_templating.py
@@ -9,28 +9,23 @@
:license: BSD, see LICENSE for details.
"""
-from util import test_roots, with_app
+from util import with_app
-def teardown_module():
- (test_roots / 'test-templating' / '_build').rmtree(True),
-
-
-@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
-def test_layout_overloading(app):
- app.builder.build_all()
+@with_app('html', testroot='templating')
+def test_layout_overloading(app, status, warning):
+ app.builder.build_update()
result = (app.outdir / 'contents.html').text(encoding='utf-8')
assert '<!-- layout overloading -->' in result
-@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
-def test_autosummary_class_template_overloading(app):
- app.builder.build_all()
+@with_app('html', testroot='templating')
+def test_autosummary_class_template_overloading(app, status, warning):
+ app.builder.build_update()
- result = (app.outdir / 'generated' / 'sphinx.application.Sphinx.html').text(
- encoding='utf-8')
+ result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').text(
+ encoding='utf-8')
assert 'autosummary/class.rst method block overloading' in result
-
diff --git a/tests/test_theming.py b/tests/test_theming.py
index 067c4319..404c3197 100644
--- a/tests/test_theming.py
+++ b/tests/test_theming.py
@@ -19,14 +19,14 @@ from util import with_app, raises
@with_app(confoverrides={'html_theme': 'ziptheme',
'html_theme_options.testopt': 'foo'})
-def test_theme_api(app):
+def test_theme_api(app, status, warning):
cfg = app.config
# test Theme class API
assert set(Theme.themes.keys()) == \
- set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
- 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
- 'pyramid', 'bizstyle'])
+ set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
+ 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
+ 'pyramid', 'bizstyle'])
assert Theme.themes['testtheme'][1] is None
assert isinstance(Theme.themes['ziptheme'][1], zipfile.ZipFile)
@@ -56,14 +56,15 @@ def test_theme_api(app):
theme.cleanup()
assert not os.path.exists(themedir)
-@with_app(buildername='html')
-def test_js_source(app):
+
+@with_app(testroot='tocdepth') # a minimal root
+def test_js_source(app, status, warning):
# Now sphinx provides non-minified JS files for jquery.js and underscore.js
# to clarify the source of the minified files. see also #1434.
# If you update the version of the JS file, please update the source of the
# JS file and version number in this test.
- app.builder.build_all()
+ app.builder.build(['contents'])
v = '1.8.3'
msg = 'jquery.js version does not match to {v}'.format(v=v)
diff --git a/tests/test_util_i18n.py b/tests/test_util_i18n.py
index afc9fb36..d69c2acd 100644
--- a/tests/test_util_i18n.py
+++ b/tests/test_util_i18n.py
@@ -1,163 +1,163 @@
-# -*- coding: utf-8 -*-
-"""
- test_util_i18n
- ~~~~~~~~~~~~~~
-
- Test i18n util.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-from __future__ import print_function
-
-import os
-from os import path
-
-from babel.messages.mofile import read_mo
-from sphinx.util import i18n
-
-from util import with_tempdir
-
-
-def test_catalog_info_for_file_and_path():
- cat = i18n.CatalogInfo('path', 'domain')
- assert cat.po_file == 'domain.po'
- assert cat.mo_file == 'domain.mo'
- assert cat.po_path == path.join('path', 'domain.po')
- assert cat.mo_path == path.join('path', 'domain.mo')
-
-
-def test_catalog_info_for_sub_domain_file_and_path():
- cat = i18n.CatalogInfo('path', 'sub/domain')
- assert cat.po_file == 'sub/domain.po'
- assert cat.mo_file == 'sub/domain.mo'
- assert cat.po_path == path.join('path', 'sub/domain.po')
- assert cat.mo_path == path.join('path', 'sub/domain.mo')
-
-
-@with_tempdir
-def test_catalog_outdated(dir):
- (dir / 'test.po').write_text('#')
- cat = i18n.CatalogInfo(dir, 'test')
- assert cat.is_outdated() # if mo is not exist
-
- mo_file = (dir / 'test.mo')
- mo_file.write_text('#')
- assert not cat.is_outdated() # if mo is exist and newer than po
-
- os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2) # to be outdate
- assert cat.is_outdated() # if mo is exist and older than po
-
-
-@with_tempdir
-def test_catalog_write_mo(dir):
- (dir / 'test.po').write_text('#')
- cat = i18n.CatalogInfo(dir, 'test')
- cat.write_mo('en')
- assert path.exists(cat.mo_path)
- assert read_mo(open(cat.mo_path, 'rb')) is not None
-
-
-@with_tempdir
-def test_get_catalogs_for_xx(dir):
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
- (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_ALL').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
- domains = set(c.domain for c in catalogs)
- assert domains == set([
- 'test1',
- 'test2',
- path.normpath('sub/test4'),
- path.normpath('sub/test5'),
- ])
-
-
-@with_tempdir
-def test_get_catalogs_for_en(dir):
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#')
- (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False)
- domains = set(c.domain for c in catalogs)
- assert domains == set(['en_dom'])
-
-
-@with_tempdir
-def test_get_catalogs_with_non_existent_locale(dir):
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx')
- assert not catalogs
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], None)
- assert not catalogs
-
-
-def test_get_catalogs_with_non_existent_locale_dirs():
- catalogs = i18n.get_catalogs(['dummy'], 'xx')
- assert not catalogs
-
-
-@with_tempdir
-def test_get_catalogs_for_xx_without_outdated(dir):
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#')
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
- assert not catalogs
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True)
- domains = set(c.domain for c in catalogs)
- assert domains == set([
- 'test1',
- 'test2',
- path.normpath('sub/test4'),
- path.normpath('sub/test5'),
- ])
-
-
-@with_tempdir
-def test_get_catalogs_from_multiple_locale_dirs(dir):
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
- (dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
- (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
-
- catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx')
- domains = sorted(c.domain for c in catalogs)
- assert domains == ['test1', 'test1', 'test2']
-
-
-@with_tempdir
-def test_get_catalogs_with_compact(dir):
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#')
- (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
-
- catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True)
- domains = set(c.domain for c in catalogs)
- assert domains == set(['test1', 'test2', 'sub'])
+# -*- coding: utf-8 -*-
+"""
+ test_util_i18n
+ ~~~~~~~~~~~~~~
+
+ Test i18n util.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from __future__ import print_function
+
+import os
+from os import path
+
+from babel.messages.mofile import read_mo
+from sphinx.util import i18n
+
+from util import with_tempdir
+
+
+def test_catalog_info_for_file_and_path():
+ cat = i18n.CatalogInfo('path', 'domain')
+ assert cat.po_file == 'domain.po'
+ assert cat.mo_file == 'domain.mo'
+ assert cat.po_path == path.join('path', 'domain.po')
+ assert cat.mo_path == path.join('path', 'domain.mo')
+
+
+def test_catalog_info_for_sub_domain_file_and_path():
+ cat = i18n.CatalogInfo('path', 'sub/domain')
+ assert cat.po_file == 'sub/domain.po'
+ assert cat.mo_file == 'sub/domain.mo'
+ assert cat.po_path == path.join('path', 'sub/domain.po')
+ assert cat.mo_path == path.join('path', 'sub/domain.mo')
+
+
+@with_tempdir
+def test_catalog_outdated(dir):
+ (dir / 'test.po').write_text('#')
+ cat = i18n.CatalogInfo(dir, 'test')
+ assert cat.is_outdated() # if mo is not exist
+
+ mo_file = (dir / 'test.mo')
+ mo_file.write_text('#')
+ assert not cat.is_outdated() # if mo is exist and newer than po
+
+ os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2) # to be outdate
+ assert cat.is_outdated() # if mo is exist and older than po
+
+
+@with_tempdir
+def test_catalog_write_mo(dir):
+ (dir / 'test.po').write_text('#')
+ cat = i18n.CatalogInfo(dir, 'test')
+ cat.write_mo('en')
+ assert path.exists(cat.mo_path)
+ assert read_mo(open(cat.mo_path, 'rb')) is not None
+
+
+@with_tempdir
+def test_get_catalogs_for_xx(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_ALL').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set([
+ 'test1',
+ 'test2',
+ path.normpath('sub/test4'),
+ path.normpath('sub/test5'),
+ ])
+
+
+@with_tempdir
+def test_get_catalogs_for_en(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#')
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set(['en_dom'])
+
+
+@with_tempdir
+def test_get_catalogs_with_non_existent_locale(dir):
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx')
+ assert not catalogs
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], None)
+ assert not catalogs
+
+
+def test_get_catalogs_with_non_existent_locale_dirs():
+ catalogs = i18n.get_catalogs(['dummy'], 'xx')
+ assert not catalogs
+
+
+@with_tempdir
+def test_get_catalogs_for_xx_without_outdated(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
+ assert not catalogs
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set([
+ 'test1',
+ 'test2',
+ path.normpath('sub/test4'),
+ path.normpath('sub/test5'),
+ ])
+
+
+@with_tempdir
+def test_get_catalogs_from_multiple_locale_dirs(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx')
+ domains = sorted(c.domain for c in catalogs)
+ assert domains == ['test1', 'test1', 'test2']
+
+
+@with_tempdir
+def test_get_catalogs_with_compact(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set(['test1', 'test2', 'sub'])
diff --git a/tests/test_util_nodes.py b/tests/test_util_nodes.py
index 9ddc049d..a385245d 100644
--- a/tests/test_util_nodes.py
+++ b/tests/test_util_nodes.py
@@ -1,121 +1,121 @@
-# -*- coding: utf-8 -*-
-"""
- test_util_nodes
- ~~~~~~~~~~~~~~~
-
- Tests uti.nodes functions.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-from textwrap import dedent
-
-from docutils import nodes
-from docutils.parsers import rst
-from docutils.utils import new_document
-from docutils import frontend
-
-from sphinx.util.nodes import extract_messages
-
-
-def _get_doctree(text):
- settings = frontend.OptionParser(
- components=(rst.Parser,)).get_default_values()
- document = new_document('dummy.txt', settings)
- rst.Parser().parse(text, document)
- return document
-
-
-def assert_node_count(messages, node_type, expect_count):
- count = 0
- node_list = [node for node, msg in messages]
- for node in node_list:
- if isinstance(node, node_type):
- count += 1
-
- assert count == expect_count, (
- "Count of %r in the %r is %d instead of %d"
- % (node_type, node_list, count, expect_count))
-
-
-def test_extract_messages():
- text = dedent(
- """
- .. admonition:: admonition title
-
- admonition body
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.title, 1,
- )
-
- text = dedent(
- """
- .. figure:: foo.jpg
-
- this is title
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.caption, 1,
- )
-
- text = dedent(
- """
- .. rubric:: spam
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.rubric, 1,
- )
-
-
- text = dedent(
- """
- | spam
- | egg
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
-
-
- text = dedent(
- """
- section
- =======
-
- +----------------+
- | | **Title 1** |
- | | Message 1 |
- +----------------+
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
-
-
- text = dedent(
- """
- * | **Title 1**
- | Message 1
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
+# -*- coding: utf-8 -*-
+"""
+ test_util_nodes
+ ~~~~~~~~~~~~~~~
+
+ Tests uti.nodes functions.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from textwrap import dedent
+
+from docutils import nodes
+from docutils.parsers import rst
+from docutils.utils import new_document
+from docutils import frontend
+
+from sphinx.util.nodes import extract_messages
+
+
+def _get_doctree(text):
+ settings = frontend.OptionParser(
+ components=(rst.Parser,)).get_default_values()
+ document = new_document('dummy.txt', settings)
+ rst.Parser().parse(text, document)
+ return document
+
+
+def assert_node_count(messages, node_type, expect_count):
+ count = 0
+ node_list = [node for node, msg in messages]
+ for node in node_list:
+ if isinstance(node, node_type):
+ count += 1
+
+ assert count == expect_count, (
+ "Count of %r in the %r is %d instead of %d"
+ % (node_type, node_list, count, expect_count))
+
+
+def test_extract_messages():
+ text = dedent(
+ """
+ .. admonition:: admonition title
+
+ admonition body
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.title, 1,
+ )
+
+ text = dedent(
+ """
+ .. figure:: foo.jpg
+
+ this is title
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.caption, 1,
+ )
+
+ text = dedent(
+ """
+ .. rubric:: spam
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.rubric, 1,
+ )
+
+
+ text = dedent(
+ """
+ | spam
+ | egg
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
+
+
+ text = dedent(
+ """
+ section
+ =======
+
+ +----------------+
+ | | **Title 1** |
+ | | Message 1 |
+ +----------------+
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
+
+
+ text = dedent(
+ """
+ * | **Title 1**
+ | Message 1
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
diff --git a/tests/test_versioning.py b/tests/test_versioning.py
index d9cbf8eb..bd8c697c 100644
--- a/tests/test_versioning.py
+++ b/tests/test_versioning.py
@@ -16,39 +16,46 @@ from docutils.parsers.rst.directives.html import MetaBody
from sphinx import addnodes
from sphinx.versioning import add_uids, merge_doctrees, get_ratio
-from util import test_root, TestApp
+from util import TestApp
app = original = original_uids = None
+
def setup_module():
global app, original, original_uids
- app = TestApp()
+ app = TestApp(testroot='versioning')
app.builder.env.app = app
app.connect('doctree-resolved', on_doctree_resolved)
app.build()
- original = doctrees['versioning/original']
+ original = doctrees['original']
original_uids = [n.uid for n in add_uids(original, is_paragraph)]
+
def teardown_module():
app.cleanup()
- (test_root / '_build').rmtree(True)
+
doctrees = {}
+
def on_doctree_resolved(app, doctree, docname):
doctrees[docname] = doctree
+
def is_paragraph(node):
return node.__class__.__name__ == 'paragraph'
+
def test_get_ratio():
assert get_ratio('', 'a')
assert get_ratio('a', '')
+
def test_add_uids():
assert len(original_uids) == 3
+
def test_picklablility():
# we have to modify the doctree so we can pickle it
copy = original.copy()
@@ -62,44 +69,50 @@ def test_picklablility():
loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL))
assert all(getattr(n, 'uid', False) for n in loaded.traverse(is_paragraph))
+
def test_modified():
- modified = doctrees['versioning/modified']
+ modified = doctrees['modified']
new_nodes = list(merge_doctrees(original, modified, is_paragraph))
uids = [n.uid for n in modified.traverse(is_paragraph)]
assert not new_nodes
assert original_uids == uids
+
def test_added():
- added = doctrees['versioning/added']
+ added = doctrees['added']
new_nodes = list(merge_doctrees(original, added, is_paragraph))
uids = [n.uid for n in added.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids == uids[:-1]
+
def test_deleted():
- deleted = doctrees['versioning/deleted']
+ deleted = doctrees['deleted']
new_nodes = list(merge_doctrees(original, deleted, is_paragraph))
uids = [n.uid for n in deleted.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[::2] == uids
+
def test_deleted_end():
- deleted_end = doctrees['versioning/deleted_end']
+ deleted_end = doctrees['deleted_end']
new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph))
uids = [n.uid for n in deleted_end.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[:-1] == uids
+
def test_insert():
- insert = doctrees['versioning/insert']
+ insert = doctrees['insert']
new_nodes = list(merge_doctrees(original, insert, is_paragraph))
uids = [n.uid for n in insert.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids[0] == uids[0]
assert original_uids[1:] == uids[2:]
+
def test_insert_beginning():
- insert_beginning = doctrees['versioning/insert_beginning']
+ insert_beginning = doctrees['insert_beginning']
new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph))
uids = [n.uid for n in insert_beginning.traverse(is_paragraph)]
assert len(new_nodes) == 1
@@ -107,8 +120,9 @@ def test_insert_beginning():
assert original_uids == uids[1:]
assert original_uids[0] != uids[0]
+
def test_insert_similar():
- insert_similar = doctrees['versioning/insert_similar']
+ insert_similar = doctrees['insert_similar']
new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph))
uids = [n.uid for n in insert_similar.traverse(is_paragraph)]
assert len(new_nodes) == 1
diff --git a/tests/test_websupport.py b/tests/test_websupport.py
index d355422c..9e88a60f 100644
--- a/tests/test_websupport.py
+++ b/tests/test_websupport.py
@@ -9,34 +9,33 @@
:license: BSD, see LICENSE for details.
"""
-import os
from functools import wraps
from six import StringIO
from sphinx.websupport import WebSupport
from sphinx.websupport.errors import DocumentNotFoundError, \
- CommentNotAllowedError, UserNotAuthorizedError
+ CommentNotAllowedError, UserNotAuthorizedError
from sphinx.websupport.storage import StorageBackend
from sphinx.websupport.storage.differ import CombinedHtmlDiff
try:
from sphinx.websupport.storage.sqlalchemystorage import Session, \
- Comment, CommentVote
+ Comment, CommentVote
from sphinx.websupport.storage.sqlalchemy_db import Node
sqlalchemy_missing = False
except ImportError:
sqlalchemy_missing = True
-from util import test_root, raises, skip_if
+from util import rootdir, tempdir, raises, skip_if
-default_settings = {'builddir': os.path.join(test_root, 'websupport'),
+default_settings = {'builddir': tempdir / 'websupport',
'status': StringIO(),
'warning': StringIO()}
+
def teardown_module():
- (test_root / 'generated').rmtree(True)
- (test_root / 'websupport').rmtree(True)
+ (tempdir / 'websupport').rmtree(True)
def with_support(*args, **kwargs):
@@ -59,12 +58,12 @@ class NullStorage(StorageBackend):
@with_support(storage=NullStorage())
def test_no_srcdir(support):
- """Make sure the correct exception is raised if srcdir is not given."""
+ # make sure the correct exception is raised if srcdir is not given.
raises(RuntimeError, support.build)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
-@with_support(srcdir=test_root)
+@with_support(srcdir=rootdir / 'root')
def test_build(support):
support.build()
@@ -173,9 +172,9 @@ def test_proposals(support):
source = data['source']
proposal = source[:5] + source[10:15] + 'asdf' + source[15:]
- comment = support.add_comment('Proposal comment',
- node_id=node.id,
- proposal=proposal)
+ support.add_comment('Proposal comment',
+ node_id=node.id,
+ proposal=proposal)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
@@ -234,6 +233,8 @@ def test_update_username(support):
called = False
+
+
def moderation_callback(comment):
global called
called = True
@@ -251,7 +252,7 @@ def test_moderation(support):
deleted = support.add_comment('Comment to delete', node_id=node.id,
displayed=False)
# Make sure the moderation_callback is called.
- assert called == True
+ assert called
# Make sure the user must be a moderator.
raises(UserNotAuthorizedError, support.accept_comment, accepted['id'])
raises(UserNotAuthorizedError, support.delete_comment, deleted['id'])
diff --git a/tests/util.py b/tests/util.py
index bbfb2d4b..bf35d279 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -8,22 +8,22 @@
"""
import os
+import re
import sys
import tempfile
-import shutil
-import re
from functools import wraps
from six import StringIO
+from nose import tools, SkipTest
+
from sphinx import application
from sphinx.theming import Theme
from sphinx.ext.autodoc import AutoDirective
+from sphinx.pycode import ModuleAnalyzer
from path import path
-from nose import tools, SkipTest
-
try:
# Python >=3.3
from unittest import mock
@@ -32,7 +32,7 @@ except ImportError:
__all__ = [
- 'test_root', 'test_roots', 'raises', 'raises_msg',
+ 'rootdir', 'tempdir', 'raises', 'raises_msg',
'skip_if', 'skip_unless', 'skip_unless_importable', 'Struct',
'ListOutput', 'TestApp', 'with_app', 'gen_with_app',
'path', 'with_tempdir',
@@ -41,8 +41,8 @@ __all__ = [
]
-test_root = path(__file__).parent.joinpath('root').abspath()
-test_roots = path(__file__).parent.joinpath('roots').abspath()
+rootdir = path(os.path.dirname(__file__) or '.').abspath()
+tempdir = path(os.environ['SPHINX_TEST_TEMPDIR']).abspath()
def _excstr(exc):
@@ -50,11 +50,9 @@ def _excstr(exc):
return str(tuple(map(_excstr, exc)))
return exc.__name__
+
def raises(exc, func, *args, **kwds):
- """
- Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
- raise *exc*.
- """
+ """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*."""
try:
func(*args, **kwds)
except exc:
@@ -63,10 +61,10 @@ def raises(exc, func, *args, **kwds):
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
+
def raises_msg(exc, msg, func, *args, **kwds):
- """
- Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
- raise *exc*, and check if the message contains *msg*.
+ """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*,
+ and check if the message contains *msg*.
"""
try:
func(*args, **kwds)
@@ -76,6 +74,32 @@ def raises_msg(exc, msg, func, *args, **kwds):
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
+
+def assert_re_search(regex, text, flags=0):
+ if not re.search(regex, text, flags):
+ assert False, '%r did not match %r' % (regex, text)
+
+
+def assert_not_re_search(regex, text, flags=0):
+ if re.search(regex, text, flags):
+ assert False, '%r did match %r' % (regex, text)
+
+
+def assert_startswith(thing, prefix):
+ if not thing.startswith(prefix):
+ assert False, '%r does not start with %r' % (thing, prefix)
+
+
+def assert_in(x, thing):
+ if x not in thing:
+ assert False, '%r is not in %r' % (x, thing)
+
+
+def assert_not_in(x, thing):
+ if x in thing:
+ assert False, '%r is in %r' % (x, thing)
+
+
def skip_if(condition, msg=None):
"""Decorator to skip test if condition is true."""
def deco(test):
@@ -87,10 +111,12 @@ def skip_if(condition, msg=None):
return skipper
return deco
+
def skip_unless(condition, msg=None):
"""Decorator to skip test if condition is false."""
return skip_if(not condition, msg)
+
def skip_unless_importable(module, msg=None):
"""Decorator to skip test if module is not importable."""
try:
@@ -127,61 +153,47 @@ class TestApp(application.Sphinx):
better default values for the initialization parameters.
"""
- def __init__(self, srcdir=None, confdir=None, outdir=None, doctreedir=None,
- buildername='html', confoverrides=None,
- status=None, warning=None, freshenv=None,
- warningiserror=None, tags=None,
- confname='conf.py', cleanenv=False,
- _copy_to_temp=False,
- ):
-
- application.CONFIG_FILENAME = confname
-
- self.cleanup_trees = [test_root / 'generated']
-
+ def __init__(self, buildername='html', testroot=None, srcdir=None,
+ freshenv=False, confoverrides=None, status=None, warning=None,
+ tags=None, docutilsconf=None):
+ if testroot is None:
+ defaultsrcdir = 'root'
+ testroot = rootdir / 'root'
+ else:
+ defaultsrcdir = 'test-' + testroot
+ testroot = rootdir / 'roots' / ('test-' + testroot)
if srcdir is None:
- srcdir = test_root
- elif srcdir == '(empty)':
- tempdir = path(tempfile.mkdtemp())
- self.cleanup_trees.append(tempdir)
- temproot = tempdir / 'root'
- temproot.makedirs()
- (temproot / 'conf.py').write_text('')
- srcdir = temproot
+ srcdir = tempdir / defaultsrcdir
else:
- srcdir = path(srcdir)
-
- if _copy_to_temp:
- tempdir = path(tempfile.mkdtemp())
- self.cleanup_trees.append(tempdir)
- temproot = tempdir / srcdir.basename()
- srcdir.copytree(temproot)
- srcdir = temproot
-
- self.builddir = srcdir.joinpath('_build')
- if confdir is None:
- confdir = srcdir
- if outdir is None:
- outdir = srcdir.joinpath(self.builddir, buildername)
- if not outdir.isdir():
- outdir.makedirs()
- self.cleanup_trees.insert(0, outdir)
- if doctreedir is None:
- doctreedir = srcdir.joinpath(srcdir, self.builddir, 'doctrees')
- if not doctreedir.isdir():
- doctreedir.makedirs()
- if cleanenv:
- self.cleanup_trees.insert(0, doctreedir)
+ srcdir = tempdir / srcdir
+
+ if not srcdir.exists():
+ testroot.copytree(srcdir)
+
+ if docutilsconf is not None:
+ (srcdir / 'docutils.conf').write_text(docutilsconf)
+
+ builddir = srcdir / '_build'
+# if confdir is None:
+ confdir = srcdir
+# if outdir is None:
+ outdir = builddir.joinpath(buildername)
+ if not outdir.isdir():
+ outdir.makedirs()
+# if doctreedir is None:
+ doctreedir = builddir.joinpath('doctrees')
+ if not doctreedir.isdir():
+ doctreedir.makedirs()
if confoverrides is None:
confoverrides = {}
if status is None:
status = StringIO()
if warning is None:
warning = ListOutput('stderr')
- if freshenv is None:
- freshenv = False
- if warningiserror is None:
- warningiserror = False
+# if warningiserror is None:
+ warningiserror = False
+
+ self._saved_path = sys.path[:]
application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir,
buildername, confoverrides, status, warning,
@@ -190,8 +202,9 @@ class TestApp(application.Sphinx):
def cleanup(self, doctrees=False):
Theme.themes.clear()
AutoDirective._registry.clear()
- for tree in self.cleanup_trees:
- shutil.rmtree(tree, True)
+ ModuleAnalyzer.cache.clear()
+ sys.path[:] = self._saved_path
+ sys.modules.pop('autodoc_fodder', None)
def __repr__(self):
return '<%s buildername=%r>' % (self.__class__.__name__, self.builder.name)
@@ -205,10 +218,14 @@ def with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
+ status, warning = StringIO(), StringIO()
+ kwargs['status'] = status
+ kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
- func(app, *args2, **kwargs2)
- # don't execute cleanup if test failed
- app.cleanup()
+ try:
+ func(app, status, warning, *args2, **kwargs2)
+ finally:
+ app.cleanup()
return deco
return generator
@@ -221,20 +238,24 @@ def gen_with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
+ status, warning = StringIO(), StringIO()
+ kwargs['status'] = status
+ kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
- for item in func(app, *args2, **kwargs2):
- yield item
- # don't execute cleanup if test failed
- app.cleanup()
+ try:
+ for item in func(app, status, warning, *args2, **kwargs2):
+ yield item
+ finally:
+ app.cleanup()
return deco
return generator
def with_tempdir(func):
def new_func(*args, **kwds):
- tempdir = path(tempfile.mkdtemp())
- func(tempdir, *args, **kwds)
- tempdir.rmtree()
+ new_tempdir = path(tempfile.mkdtemp(dir=tempdir))
+ func(new_tempdir, *args, **kwds)
+ new_tempdir.rmtree() # not when test fails...
new_func.__name__ = func.__name__
return new_func
@@ -242,7 +263,10 @@ def with_tempdir(func):
def sprint(*args):
sys.stderr.write(' '.join(map(str, args)) + '\n')
+
_unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')
+
+
def remove_unicode_literals(s):
return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s)
diff --git a/tox.ini b/tox.ini
index 2c3ddfdf..f308880e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,10 +7,10 @@ deps=
sqlalchemy
whoosh
setenv =
- BUILD_TEST_PATH = {envdir}/tests
+ SPHINX_TEST_TEMPDIR = {envdir}/testbuild
commands=
{envpython} tests/run.py {posargs}
- sphinx-build -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html
+ sphinx-build -q -W -b html -d {envtmpdir}/doctrees doc {envtmpdir}/html
[testenv:py26]
deps=
diff --git a/utils/convert.py b/utils/convert.py
deleted file mode 100755
index c90be8e6..00000000
--- a/utils/convert.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python3
-# coding: utf-8
-"""
- Converts files with 2to3
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Creates a Python 3 version of each file.
-
- The Python3 version of a file foo.py will be called foo3.py.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import os
-import sys
-from glob import iglob
-from optparse import OptionParser
-from shutil import copy
-from distutils.util import run_2to3
-
-def main(argv):
- parser = OptionParser(usage='%prog [path]')
- parser.add_option('-i', '--ignorepath', dest='ignored_paths',
- action='append', default=[])
- options, args = parser.parse_args(argv)
-
- ignored_paths = {os.path.abspath(p) for p in options.ignored_paths}
-
- path = os.path.abspath(args[0]) if args else os.getcwd()
- convertables = []
- for filename in iglob(os.path.join(path, '*.py')):
- if filename in ignored_paths:
- continue
- basename, ext = os.path.splitext(filename)
- if basename.endswith('3'):
- continue
- filename3 = basename + '3' + ext
- copy(filename, filename3)
- convertables.append(filename3)
- run_2to3(convertables)
-
-if __name__ == "__main__":
- main(sys.argv[1:])