summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
Diffstat (limited to 'tests')
-rwxr-xr-xtests/coverage.py88
-rw-r--r--tests/etree13/ElementPath.py12
-rw-r--r--tests/etree13/ElementTree.py27
-rw-r--r--tests/etree13/HTMLTreeBuilder.py7
-rwxr-xr-xtests/path.py23
-rw-r--r--tests/root/autodoc.txt2
-rw-r--r--tests/root/autodoc_missing_imports.py9
-rw-r--r--tests/root/conf.py57
-rw-r--r--tests/root/contents.txt16
-rw-r--r--tests/root/img.pngbin67861 -> 67765 bytes
-rw-r--r--tests/root/includes.txt20
-rw-r--r--tests/root/literal_orig.inc13
-rw-r--r--tests/root/markup.txt29
-rw-r--r--tests/root/metadata.txt4
-rw-r--r--tests/root/objects.txt15
-rw-r--r--tests/root/special/code.py4
-rw-r--r--tests/root/subdir/img.pngbin67861 -> 67765 bytes
-rw-r--r--tests/root/subdir/simg.pngbin67861 -> 67765 bytes
-rw-r--r--tests/root/undecodable.txt3
-rw-r--r--tests/roots/test-api-set-translator/conf.py80
-rw-r--r--tests/roots/test-api-set-translator/index.rst3
-rw-r--r--tests/roots/test-api-set-translator/nonext/conf.py9
-rw-r--r--tests/roots/test-api-set-translator/translator.py6
-rw-r--r--tests/roots/test-autosummary/conf.py4
-rw-r--r--tests/roots/test-autosummary/contents.rst1
-rw-r--r--tests/roots/test-autosummary/sphinx.rst (renamed from tests/root/autosummary.txt)0
-rw-r--r--tests/roots/test-build-text/conf.py2
-rw-r--r--tests/roots/test-build-text/contents.txt8
-rw-r--r--tests/roots/test-build-text/lineblock.txt6
-rw-r--r--tests/roots/test-build-text/maxwidth.txt6
-rw-r--r--tests/roots/test-build-text/nonascii_maxwidth.txt5
-rw-r--r--tests/roots/test-build-text/nonascii_table.txt7
-rw-r--r--tests/roots/test-build-text/nonascii_title.txt2
-rw-r--r--tests/roots/test-build-text/table.txt7
-rw-r--r--tests/roots/test-circular/conf.py0
-rw-r--r--tests/roots/test-circular/contents.rst4
-rw-r--r--tests/roots/test-circular/sub.rst3
-rw-r--r--tests/roots/test-directive-code/caption.rst21
-rw-r--r--tests/roots/test-directive-code/conf.py3
-rw-r--r--tests/roots/test-directive-code/dedent.rst35
-rw-r--r--tests/roots/test-directive-code/dedent_code.rst53
-rw-r--r--tests/roots/test-directive-code/index.rst25
-rw-r--r--tests/roots/test-directive-code/lineno_match.rst17
-rw-r--r--tests/roots/test-directive-code/lineno_start.rst6
-rw-r--r--tests/roots/test-directive-code/linenos.rst6
-rw-r--r--tests/roots/test-directive-code/literal.inc13
-rw-r--r--tests/roots/test-directive-only/conf.py2
-rw-r--r--tests/roots/test-directive-only/contents.rst (renamed from tests/roots/test-only-directive/contents.rst)2
-rw-r--r--tests/roots/test-directive-only/only.rst (renamed from tests/roots/test-only-directive/only.rst)0
-rw-r--r--tests/roots/test-doctest/conf.py5
-rw-r--r--tests/roots/test-doctest/doctest.txt (renamed from tests/root/doctest.txt)4
-rw-r--r--tests/roots/test-docutilsconf/contents.txt30
-rw-r--r--tests/roots/test-ext-viewcode/conf.py24
-rw-r--r--tests/roots/test-ext-viewcode/index.rst34
-rw-r--r--tests/roots/test-ext-viewcode/objects.rst169
-rw-r--r--tests/roots/test-ext-viewcode/spam/__init__.py7
-rw-r--r--tests/roots/test-ext-viewcode/spam/mod1.py15
-rw-r--r--tests/roots/test-ext-viewcode/spam/mod2.py15
-rw-r--r--tests/roots/test-intl/admonitions.po2
-rw-r--r--tests/roots/test-intl/conf.py1
-rw-r--r--tests/roots/test-intl/definition_terms.po2
-rw-r--r--tests/roots/test-intl/docfields.po2
-rw-r--r--tests/roots/test-intl/external_links.po2
-rw-r--r--tests/roots/test-intl/figure_caption.po2
-rw-r--r--tests/roots/test-intl/footnote.po2
-rw-r--r--tests/roots/test-intl/glossary_terms.po2
-rw-r--r--tests/roots/test-intl/glossary_terms_inconsistency.po2
-rw-r--r--tests/roots/test-intl/i18n.pngbin67861 -> 67765 bytes
-rw-r--r--tests/roots/test-intl/index_entries.po2
-rw-r--r--tests/roots/test-intl/literalblock.po2
-rw-r--r--tests/roots/test-intl/refs_inconsistency.po2
-rw-r--r--tests/roots/test-intl/refs_python_domain.txt30
-rw-r--r--tests/roots/test-intl/role_xref.po2
-rw-r--r--tests/roots/test-intl/rubric.po2
-rw-r--r--tests/roots/test-intl/seealso.po2
-rw-r--r--tests/roots/test-intl/sphinx.po2
-rw-r--r--tests/roots/test-intl/subdir/contents.txt4
-rw-r--r--tests/roots/test-intl/versionchange.po2
-rw-r--r--tests/roots/test-intl/warnings.po2
-rw-r--r--tests/roots/test-numbered-circular/conf.py0
-rw-r--r--tests/roots/test-numbered-circular/contents.rst5
-rw-r--r--tests/roots/test-numbered-circular/sub.rst3
-rw-r--r--tests/roots/test-numfig/bar.rst58
-rw-r--r--tests/roots/test-numfig/baz.rst22
-rw-r--r--tests/roots/test-numfig/conf.py3
-rw-r--r--tests/roots/test-numfig/foo.rst71
-rw-r--r--tests/roots/test-numfig/index.rst50
-rw-r--r--tests/roots/test-numfig/rimg.pngbin0 -> 218 bytes
-rw-r--r--tests/roots/test-only-directive/conf.py2
-rw-r--r--tests/roots/test-setup/doc/contents.txt10
-rw-r--r--tests/roots/test-templating/autosummary_templating.txt8
-rw-r--r--tests/roots/test-tocdepth/bar.rst27
-rw-r--r--tests/roots/test-tocdepth/baz.rst5
-rw-r--r--tests/roots/test-tocdepth/conf.py3
-rw-r--r--tests/roots/test-tocdepth/foo.rst26
-rw-r--r--tests/roots/test-tocdepth/index.rst8
-rw-r--r--tests/roots/test-versioning/added.txt (renamed from tests/root/versioning/added.txt)0
-rw-r--r--tests/roots/test-versioning/conf.py3
-rw-r--r--tests/roots/test-versioning/deleted.txt (renamed from tests/root/versioning/deleted.txt)0
-rw-r--r--tests/roots/test-versioning/deleted_end.txt (renamed from tests/root/versioning/deleted_end.txt)0
-rw-r--r--tests/roots/test-versioning/index.txt (renamed from tests/root/versioning/index.txt)0
-rw-r--r--tests/roots/test-versioning/insert.txt (renamed from tests/root/versioning/insert.txt)0
-rw-r--r--tests/roots/test-versioning/insert_beginning.txt (renamed from tests/root/versioning/insert_beginning.txt)0
-rw-r--r--tests/roots/test-versioning/insert_similar.txt (renamed from tests/root/versioning/insert_similar.txt)0
-rw-r--r--tests/roots/test-versioning/modified.txt (renamed from tests/root/versioning/modified.txt)0
-rw-r--r--tests/roots/test-versioning/original.txt (renamed from tests/root/versioning/original.txt)0
-rwxr-xr-xtests/run.py84
-rw-r--r--tests/test_api_translator.py141
-rw-r--r--tests/test_apidoc.py42
-rw-r--r--tests/test_application.py91
-rw-r--r--tests/test_autodoc.py98
-rw-r--r--tests/test_build.py119
-rw-r--r--tests/test_build_gettext.py107
-rw-r--r--tests/test_build_html.py730
-rw-r--r--tests/test_build_latex.py124
-rw-r--r--tests/test_build_texinfo.py33
-rw-r--r--tests/test_build_text.py120
-rw-r--r--tests/test_catalogs.py78
-rw-r--r--tests/test_config.py75
-rw-r--r--tests/test_cpp_domain.py157
-rw-r--r--tests/test_directive_code.py191
-rw-r--r--tests/test_directive_only.py (renamed from tests/test_only_directive.py)13
-rw-r--r--tests/test_docutilsconf.py84
-rw-r--r--tests/test_domain_cpp.py140
-rw-r--r--tests/test_domain_py.py (renamed from tests/test_py_domain.py)16
-rw-r--r--tests/test_domain_rst.py (renamed from tests/test_rst_domain.py)0
-rw-r--r--tests/test_domain_std.py80
-rw-r--r--tests/test_environment.py (renamed from tests/test_env.py)71
-rw-r--r--tests/test_ext_autosummary.py (renamed from tests/test_autosummary.py)70
-rw-r--r--tests/test_ext_coverage.py (renamed from tests/test_coverage.py)4
-rw-r--r--tests/test_ext_doctest.py (renamed from tests/test_doctest.py)14
-rw-r--r--tests/test_ext_intersphinx.py (renamed from tests/test_intersphinx.py)37
-rw-r--r--tests/test_ext_napoleon.py199
-rw-r--r--tests/test_ext_napoleon_docstring.py442
-rw-r--r--tests/test_ext_napoleon_iterators.py346
-rw-r--r--tests/test_ext_viewcode.py44
-rw-r--r--tests/test_footnote.py37
-rw-r--r--tests/test_highlighting.py14
-rw-r--r--tests/test_i18n.py2
-rw-r--r--tests/test_intl.py812
-rw-r--r--tests/test_linkcode.py25
-rw-r--r--tests/test_markup.py33
-rw-r--r--tests/test_metadata.py25
-rw-r--r--tests/test_quickstart.py71
-rw-r--r--tests/test_search.py3
-rw-r--r--tests/test_searchadapters.py24
-rw-r--r--tests/test_setup_command.py9
-rw-r--r--tests/test_templating.py23
-rw-r--r--tests/test_theming.py32
-rw-r--r--tests/test_util_i18n.py163
-rw-r--r--tests/test_util_nodes.py242
-rw-r--r--tests/test_versioning.py37
-rw-r--r--tests/test_websupport.py35
-rw-r--r--tests/util.py208
154 files changed, 5018 insertions, 1868 deletions
diff --git a/tests/coverage.py b/tests/coverage.py
index 95f6f844..f9341d8b 100755
--- a/tests/coverage.py
+++ b/tests/coverage.py
@@ -52,7 +52,9 @@ coverage.py -a [-d dir] [-o dir1,dir2,...] FILE1 FILE2 ...
e.g. python coverage.py -i -r -o c:\python23,lib\enthought\traits
Coverage data is saved in the file .coverage by default. Set the
-COVERAGE_FILE environment variable to save it somewhere else."""
+COVERAGE_FILE environment variable to save it somewhere else.
+"""
+from __future__ import print_function
__version__ = "2.85.20080914" # see detailed history at the end of this file.
@@ -64,17 +66,14 @@ import re
import string
import symbol
import sys
+import atexit
import threading
import token
-import types
import zipimport
from socket import gethostname
-# Python version compatibility
-try:
- strclass = basestring # new to 2.3
-except:
- strclass = str
+from six import string_types
+
# 2. IMPLEMENTATION
#
@@ -187,9 +186,9 @@ class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
return 0
# If this line is excluded, or suite_spots maps this line to
# another line that is exlcuded, then we're excluded.
- elif self.excluded.has_key(lineno) or \
- self.suite_spots.has_key(lineno) and \
- self.excluded.has_key(self.suite_spots[lineno][1]):
+ elif lineno in self.excluded or \
+ lineno in self.suite_spots and \
+ self.suite_spots[lineno][1] in self.excluded:
return 0
# Otherwise, this is an executable line.
else:
@@ -218,8 +217,8 @@ class StatementFindingAstVisitor(compiler.visitor.ASTVisitor):
lastprev = self.getLastLine(prevsuite)
firstelse = self.getFirstLine(suite)
for l in range(lastprev+1, firstelse):
- if self.suite_spots.has_key(l):
- self.doSuite(None, suite, exclude=self.excluded.has_key(l))
+ if l in self.suite_spots:
+ self.doSuite(None, suite, exclude=l in self.excluded)
break
else:
self.doSuite(None, suite)
@@ -328,9 +327,9 @@ class coverage:
def help(self, error=None): #pragma: no cover
if error:
- print error
- print
- print __doc__
+ print(error)
+ print()
+ print(__doc__)
sys.exit(1)
def command_line(self, argv, help_fn=None):
@@ -354,9 +353,9 @@ class coverage:
long_opts = optmap.values()
options, args = getopt.getopt(argv, short_opts, long_opts)
for o, a in options:
- if optmap.has_key(o):
+ if o in optmap:
settings[optmap[o]] = 1
- elif optmap.has_key(o + ':'):
+ elif o + ':' in optmap:
settings[optmap[o + ':']] = a
elif o[2:] in long_opts:
settings[o[2:]] = 1
@@ -398,11 +397,11 @@ class coverage:
self.start()
import __main__
sys.path[0] = os.path.dirname(sys.argv[0])
- execfile(sys.argv[0], __main__.__dict__)
+ exec(compile(open(sys.argv[0]).read(), sys.argv[0], 'exec'), __main__.__dict__)
if settings.get('collect'):
self.collect()
if not args:
- args = self.cexecuted.keys()
+ args = list(self.cexecuted.keys())
ignore_errors = settings.get('ignore-errors')
show_missing = settings.get('show-missing')
@@ -493,7 +492,7 @@ class coverage:
import marshal
cexecuted = marshal.load(cache)
cache.close()
- if isinstance(cexecuted, types.DictType):
+ if isinstance(cexecuted, dict):
return cexecuted
else:
return {}
@@ -514,14 +513,14 @@ class coverage:
def merge_data(self, new_data):
for file_name, file_data in new_data.items():
- if self.cexecuted.has_key(file_name):
+ if file_name in self.cexecuted:
self.merge_file_data(self.cexecuted[file_name], file_data)
else:
self.cexecuted[file_name] = file_data
def merge_file_data(self, cache_data, new_data):
for line_number in new_data.keys():
- if not cache_data.has_key(line_number):
+ if line_number not in cache_data:
cache_data[line_number] = new_data[line_number]
def abs_file(self, filename):
@@ -554,7 +553,7 @@ class coverage:
# normalized case). See [GDR 2001-12-04b, 3.3].
def canonical_filename(self, filename):
- if not self.canonical_filename_cache.has_key(filename):
+ if filename not in self.canonical_filename_cache:
f = filename
if os.path.isabs(f) and not os.path.exists(f):
if not self.get_zip_data(f):
@@ -578,7 +577,7 @@ class coverage:
# Can't do anything useful with exec'd strings, so skip them.
continue
f = self.canonical_filename(filename)
- if not self.cexecuted.has_key(f):
+ if f not in self.cexecuted:
self.cexecuted[f] = {}
self.cexecuted[f][lineno] = 1
self.c = {}
@@ -601,7 +600,7 @@ class coverage:
# statements that cross lines.
def analyze_morf(self, morf):
- if self.analysis_cache.has_key(morf):
+ if morf in self.analysis_cache:
return self.analysis_cache[morf]
filename = self.morf_filename(morf)
ext = os.path.splitext(filename)[1]
@@ -621,7 +620,7 @@ class coverage:
lines, excluded_lines, line_map = self.find_executable_statements(
source, exclude=self.exclude_re
)
- except SyntaxError, synerr:
+ except SyntaxError as synerr:
raise CoverageException(
"Couldn't parse '%s' as Python source: '%s' at line %d" %
(filename, synerr.msg, synerr.lineno)
@@ -744,10 +743,8 @@ class coverage:
visitor = StatementFindingAstVisitor(statements, excluded, suite_spots)
compiler.walk(ast, visitor, walker=visitor)
- lines = statements.keys()
- lines.sort()
- excluded_lines = excluded.keys()
- excluded_lines.sort()
+ lines = sorted(statements.keys())
+ excluded_lines = sorted(excluded.keys())
return lines, excluded_lines, suite_spots
# format_lines(statements, lines). Format a list of line numbers
@@ -792,13 +789,13 @@ class coverage:
def analysis2(self, morf):
filename, statements, excluded, line_map = self.analyze_morf(morf)
self.canonicalize_filenames()
- if not self.cexecuted.has_key(filename):
+ if filename not in self.cexecuted:
self.cexecuted[filename] = {}
missing = []
for line in statements:
lines = line_map.get(line, [line, line])
for l in range(lines[0], lines[1]+1):
- if self.cexecuted[filename].has_key(l):
+ if l in self.cexecuted[filename]:
break
else:
missing.append(line)
@@ -837,12 +834,12 @@ class coverage:
def report(self, morfs, show_missing=1, ignore_errors=0, file=None,
omit_prefixes=[]):
- if not isinstance(morfs, types.ListType):
+ if not isinstance(morfs, list):
morfs = [morfs]
# On windows, the shell doesn't expand wildcards. Do it here.
globbed = []
for morf in morfs:
- if isinstance(morf, strclass):
+ if isinstance(morf, string_types):
globbed.extend(glob.glob(morf))
else:
globbed.append(morf)
@@ -851,7 +848,7 @@ class coverage:
morfs = self.filter_by_prefix(morfs, omit_prefixes)
morfs.sort(self.morf_name_compare)
- max_name = max([5,] + map(len, map(self.morf_name, morfs)))
+ max_name = max(5, *map(len, map(self.morf_name, morfs)))
fmt_name = "%%- %ds " % max_name
fmt_err = fmt_name + "%s: %s"
header = fmt_name % "Name" + " Stmts Exec Cover"
@@ -861,8 +858,8 @@ class coverage:
fmt_coverage = fmt_coverage + " %s"
if not file:
file = sys.stdout
- print >>file, header
- print >>file, "-" * len(header)
+ print(header, file=file)
+ print("-" * len(header), file=file)
total_statements = 0
total_executed = 0
for morf in morfs:
@@ -878,7 +875,7 @@ class coverage:
args = (name, n, m, pc)
if show_missing:
args = args + (readable,)
- print >>file, fmt_coverage % args
+ print(fmt_coverage % args, file=file)
total_statements = total_statements + n
total_executed = total_executed + m
except KeyboardInterrupt: #pragma: no cover
@@ -886,9 +883,9 @@ class coverage:
except:
if not ignore_errors:
typ, msg = sys.exc_info()[:2]
- print >>file, fmt_err % (name, typ, msg)
+ print(fmt_err % (name, typ, msg), file=file)
if len(morfs) > 1:
- print >>file, "-" * len(header)
+ print("-" * len(header), file=file)
if total_statements > 0:
pc = 100.0 * total_executed / total_statements
else:
@@ -896,7 +893,7 @@ class coverage:
args = ("TOTAL", total_statements, total_executed, pc)
if show_missing:
args = args + ("",)
- print >>file, fmt_coverage % args
+ print(fmt_coverage % args, file=file)
# annotate(morfs, ignore_errors).
@@ -1006,14 +1003,7 @@ def annotate(*args, **kw):
def annotate_file(*args, **kw):
return the_coverage.annotate_file(*args, **kw)
-# Save coverage data when Python exits. (The atexit module wasn't
-# introduced until Python 2.0, so use sys.exitfunc when it's not
-# available.)
-try:
- import atexit
- atexit.register(the_coverage.save)
-except ImportError:
- sys.exitfunc = the_coverage.save
+atexit.register(the_coverage.save)
def main():
the_coverage.command_line(sys.argv[1:])
diff --git a/tests/etree13/ElementPath.py b/tests/etree13/ElementPath.py
index b097d816..d26a0d7a 100644
--- a/tests/etree13/ElementPath.py
+++ b/tests/etree13/ElementPath.py
@@ -177,7 +177,7 @@ class _SelectorContext:
def find(elem, path):
try:
- return findall(elem, path).next()
+ return next(findall(elem, path))
except StopIteration:
return None
@@ -194,17 +194,17 @@ def findall(elem, path):
if path[:1] == "/":
raise SyntaxError("cannot use absolute path on element")
stream = iter(xpath_tokenizer(path))
- next = stream.next; token = next()
+ next_ = lambda: next(stream); token = next_()
selector = []
while 1:
try:
- selector.append(ops[token[0]](next, token))
+ selector.append(ops[token[0]](next_, token))
except StopIteration:
raise SyntaxError("invalid path")
try:
- token = next()
+ token = next_()
if token[0] == "/":
- token = next()
+ token = next_()
except StopIteration:
break
_cache[path] = selector
@@ -220,7 +220,7 @@ def findall(elem, path):
def findtext(elem, path, default=None):
try:
- elem = findall(elem, path).next()
+ elem = next(findall(elem, path))
return elem.text
except StopIteration:
return default
diff --git a/tests/etree13/ElementTree.py b/tests/etree13/ElementTree.py
index f459c7f8..0dd12ddb 100644
--- a/tests/etree13/ElementTree.py
+++ b/tests/etree13/ElementTree.py
@@ -79,6 +79,10 @@
# --------------------------------------------------------------------
from __future__ import generators
+from __future__ import absolute_import
+
+from six import string_types
+
__all__ = [
# public symbols
@@ -144,7 +148,7 @@ class _SimpleElementPath(object):
return result
try:
- import ElementPath
+ from . import ElementPath
except ImportError:
# FIXME: issue warning in this case?
ElementPath = _SimpleElementPath()
@@ -242,7 +246,7 @@ class Element(object):
def __len__(self):
return len(self._children)
- def __nonzero__(self):
+ def __bool__(self):
import warnings
warnings.warn(
"The behavior of this method will change in future versions. "
@@ -250,6 +254,7 @@ class Element(object):
FutureWarning
)
return len(self._children) != 0 # emulate old behaviour
+ __nonzero__ = __bool__ # for python2 compatibility
##
# Returns the given subelement.
@@ -827,7 +832,7 @@ def _namespaces(elem, encoding, default_namespace=None):
tag = elem.tag
if isinstance(tag, QName) and tag.text not in qnames:
add_qname(tag.text)
- elif isinstance(tag, basestring):
+ elif isinstance(tag, string_types):
if tag not in qnames:
add_qname(tag)
elif tag is not None and tag is not Comment and tag is not PI:
@@ -862,7 +867,7 @@ def _serialize_xml(write, elem, encoding, qnames, namespaces):
write("<" + tag)
items = elem.items()
if items or namespaces:
- items.sort() # lexical order
+ items = sorted(items) # lexical order
for k, v in items:
if isinstance(k, QName):
k = k.text
@@ -873,7 +878,7 @@ def _serialize_xml(write, elem, encoding, qnames, namespaces):
write(" %s=\"%s\"" % (qnames[k], v))
if namespaces:
items = namespaces.items()
- items.sort(key=lambda x: x[1]) # sort on prefix
+ items = sorted(items, key=lambda x: x[1]) # sort on prefix
for v, k in items:
if k:
k = ":" + k
@@ -919,7 +924,7 @@ def _serialize_html(write, elem, encoding, qnames, namespaces):
write("<" + tag)
items = elem.items()
if items or namespaces:
- items.sort() # lexical order
+ items = sorted(items) # lexical order
for k, v in items:
if isinstance(k, QName):
k = k.text
@@ -931,7 +936,7 @@ def _serialize_html(write, elem, encoding, qnames, namespaces):
write(" %s=\"%s\"" % (qnames[k], v))
if namespaces:
items = namespaces.items()
- items.sort(key=lambda x: x[1]) # sort on prefix
+ items = sorted(items, key=lambda x: x[1]) # sort on prefix
for v, k in items:
if k:
k = ":" + k
@@ -1183,7 +1188,7 @@ class _IterParseIterator(object):
append((event, None))
parser.EndNamespaceDeclHandler = handler
- def next(self):
+ def __next__(self):
while 1:
try:
item = self._events[self._index]
@@ -1204,6 +1209,8 @@ class _IterParseIterator(object):
self._index = self._index + 1
return item
+ next = __next__ # Python 2 compatibility
+
def __iter__(self):
return self
@@ -1524,7 +1531,7 @@ class XMLParser(object):
def feed(self, data):
try:
self._parser.Parse(data, 0)
- except self._error, v:
+ except self._error as v:
self._raiseerror(v)
##
@@ -1536,7 +1543,7 @@ class XMLParser(object):
def close(self):
try:
self._parser.Parse("", 1) # end of data
- except self._error, v:
+ except self._error as v:
self._raiseerror(v)
tree = self.target.close()
del self.target, self._parser # get rid of circular references
diff --git a/tests/etree13/HTMLTreeBuilder.py b/tests/etree13/HTMLTreeBuilder.py
index 4c5a24f6..cf332c75 100644
--- a/tests/etree13/HTMLTreeBuilder.py
+++ b/tests/etree13/HTMLTreeBuilder.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
#
# ElementTree
# $Id$
@@ -53,7 +54,9 @@ import htmlentitydefs
import re, string, sys
import mimetools, StringIO
-import ElementTree
+from six import text_type
+
+from . import ElementTree
AUTOCLOSE = "p", "li", "tr", "th", "td", "head", "body"
IGNOREEND = "img", "hr", "meta", "link", "br"
@@ -198,7 +201,7 @@ class HTMLTreeBuilder(HTMLParser):
def handle_data(self, data):
if isinstance(data, type('')) and is_not_ascii(data):
# convert to unicode, but only if necessary
- data = unicode(data, self.encoding, "ignore")
+ data = text_type(data, self.encoding, "ignore")
self.__builder.data(data)
##
diff --git a/tests/path.py b/tests/path.py
index fa90a6f5..573d3d3c 100755
--- a/tests/path.py
+++ b/tests/path.py
@@ -12,20 +12,22 @@ import sys
import shutil
from codecs import open
+from six import PY2, text_type
+
FILESYSTEMENCODING = sys.getfilesystemencoding() or sys.getdefaultencoding()
-class path(unicode):
+class path(text_type):
"""
Represents a path which behaves like a string.
"""
- if sys.version_info < (3, 0):
+ if PY2:
def __new__(cls, s, encoding=FILESYSTEMENCODING, errors='strict'):
if isinstance(s, str):
s = s.decode(encoding, errors)
- return unicode.__new__(cls, s)
- return unicode.__new__(cls, s)
+ return text_type.__new__(cls, s)
+ return text_type.__new__(cls, s)
@property
def parent(self):
@@ -34,6 +36,9 @@ class path(unicode):
"""
return self.__class__(os.path.dirname(self))
+ def basename(self):
+ return os.path.basename(self)
+
def abspath(self):
"""
Returns the absolute path.
@@ -118,6 +123,9 @@ class path(unicode):
"""
os.unlink(self)
+ def utime(self, arg):
+ os.utime(self, arg)
+
def write_text(self, text, **kwargs):
"""
Writes the given `text` to the file.
@@ -178,7 +186,7 @@ class path(unicode):
"""
return os.path.lexists(self)
- def makedirs(self, mode=0777):
+ def makedirs(self, mode=0o777):
"""
Recursively create directories.
"""
@@ -190,7 +198,10 @@ class path(unicode):
"""
return self.__class__(os.path.join(self, *map(self.__class__, args)))
+ def listdir(self):
+ return os.listdir(self)
+
__div__ = __truediv__ = joinpath
def __repr__(self):
- return '%s(%s)' % (self.__class__.__name__, unicode.__repr__(self))
+ return '%s(%s)' % (self.__class__.__name__, text_type.__repr__(self))
diff --git a/tests/root/autodoc.txt b/tests/root/autodoc.txt
index d4b3404c..aa0dffba 100644
--- a/tests/root/autodoc.txt
+++ b/tests/root/autodoc.txt
@@ -45,3 +45,5 @@ Just testing a few autodoc possibilities...
:members: ca1, ia1
Specific members (2 total)
+
+.. automodule:: autodoc_missing_imports
diff --git a/tests/root/autodoc_missing_imports.py b/tests/root/autodoc_missing_imports.py
new file mode 100644
index 00000000..7a717345
--- /dev/null
+++ b/tests/root/autodoc_missing_imports.py
@@ -0,0 +1,9 @@
+
+import missing_module
+from missing_module import missing_name
+import missing_package1.missing_module1
+from missing_package2 import missing_module2
+from missing_package3.missing_module3 import missing_name
+
+class TestAutodoc(object):
+ """TestAutodoc docstring."""
diff --git a/tests/root/conf.py b/tests/root/conf.py
index 8025ba33..5186f371 100644
--- a/tests/root/conf.py
+++ b/tests/root/conf.py
@@ -3,12 +3,9 @@
import sys, os
sys.path.append(os.path.abspath('.'))
-sys.path.append(os.path.abspath('..'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.jsmath', 'sphinx.ext.todo',
- 'sphinx.ext.coverage', 'sphinx.ext.autosummary',
- 'sphinx.ext.doctest', 'sphinx.ext.extlinks',
- 'sphinx.ext.viewcode', 'sphinx.ext.oldcmarkup', 'ext']
+ 'sphinx.ext.coverage', 'sphinx.ext.extlinks', 'ext']
jsmath_path = 'dummy.js'
@@ -18,16 +15,16 @@ master_doc = 'contents'
source_suffix = '.txt'
project = 'Sphinx <Tests>'
-copyright = '2010, Georg Brandl & Team'
+copyright = '2010-2014, Georg Brandl & Team'
# If this is changed, remember to update the versionchanges!
version = '0.6'
release = '0.6alpha1'
today_fmt = '%B %d, %Y'
-# unused_docs = []
exclude_patterns = ['_build', '**/excluded.*']
keep_warnings = True
pygments_style = 'sphinx'
show_authors = True
+numfig = True
rst_epilog = '.. |subst| replace:: global substitution'
@@ -35,7 +32,8 @@ html_theme = 'testtheme'
html_theme_path = ['.']
html_theme_options = {'testopt': 'testoverride'}
html_sidebars = {'**': 'customsb.html',
- 'contents': ['contentssb.html', 'localtoc.html'] }
+ 'contents': ['contentssb.html', 'localtoc.html',
+ 'globaltoc.html']}
html_style = 'default.css'
html_static_path = ['_static', 'templated.css_t']
html_extra_path = ['robots.txt']
@@ -45,15 +43,15 @@ html_context = {'hckey': 'hcval', 'hckey_co': 'wrong_hcval_co'}
htmlhelp_basename = 'SphinxTestsdoc'
latex_documents = [
- ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation',
- 'Georg Brandl \\and someone else', 'manual'),
+ ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation',
+ 'Georg Brandl \\and someone else', 'manual'),
]
latex_additional_files = ['svgimg.svg']
texinfo_documents = [
- ('contents', 'SphinxTests', 'Sphinx Tests',
- 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'),
+ ('contents', 'SphinxTests', 'Sphinx Tests',
+ 'Georg Brandl \\and someone else', 'Sphinx Testing', 'Miscellaneous'),
]
man_pages = [
@@ -66,43 +64,26 @@ value_from_conf_py = 84
coverage_c_path = ['special/*.h']
coverage_c_regexes = {'function': r'^PyAPI_FUNC\(.*\)\s+([^_][\w_]+)'}
-autosummary_generate = ['autosummary']
-
extlinks = {'issue': ('http://bugs.python.org/issue%s', 'issue '),
'pyurl': ('http://python.org/%s', None)}
+autodoc_mock_imports = [
+ 'missing_module',
+ 'missing_package1.missing_module1',
+ 'missing_package2.missing_module2',
+ 'missing_package3.missing_module3',
+]
+
# modify tags from conf.py
tags.add('confpytag')
-# -- linkcode
-
-if 'test_linkcode' in tags:
- import glob
-
- extensions.remove('sphinx.ext.viewcode')
- extensions.append('sphinx.ext.linkcode')
-
- exclude_patterns.extend(glob.glob('*.txt') + glob.glob('*/*.txt'))
- exclude_patterns.remove('contents.txt')
- exclude_patterns.remove('objects.txt')
-
- def linkcode_resolve(domain, info):
- if domain == 'py':
- fn = info['module'].replace('.', '/')
- return "http://foobar/source/%s.py" % fn
- elif domain == "js":
- return "http://foobar/js/" + info['fullname']
- elif domain in ("c", "cpp"):
- return "http://foobar/%s/%s" % (domain, "".join(info['names']))
- else:
- raise AssertionError()
-
# -- extension API
from docutils import nodes
from sphinx import addnodes
from sphinx.util.compat import Directive
+
def userdesc_parse(env, sig, signode):
x, y = sig.split(':')
signode += addnodes.desc_name(x, x)
@@ -110,15 +91,19 @@ def userdesc_parse(env, sig, signode):
signode[-1] += addnodes.desc_parameter(y, y)
return x
+
def functional_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
return [nodes.strong(text='from function: %s' % options['opt'])]
+
class ClassDirective(Directive):
option_spec = {'opt': lambda x: x}
+
def run(self):
return [nodes.strong(text='from class: %s' % self.options['opt'])]
+
def setup(app):
app.add_config_value('value_from_conf_py', 42, False)
app.add_directive('funcdir', functional_directive, opt=lambda x: x)
diff --git a/tests/root/contents.txt b/tests/root/contents.txt
index 7486750d..d786b914 100644
--- a/tests/root/contents.txt
+++ b/tests/root/contents.txt
@@ -21,15 +21,14 @@ Contents:
bom
math
autodoc
- autosummary
metadata
extensions
- doctest
extensions
- versioning/index
footnote
lists
+ http://sphinx-doc.org/
+ Latest reference <http://sphinx-doc.org/latest/>
Python <http://python.org/>
Indices and tables
@@ -43,3 +42,14 @@ References
==========
.. [Ref1] Reference target.
+.. [Ref_1] Reference target 2.
+
+Test for issue #1157
+====================
+
+This used to crash:
+
+.. toctree::
+
+.. toctree::
+ :hidden:
diff --git a/tests/root/img.png b/tests/root/img.png
index 72c12d13..4c8f8992 100644
--- a/tests/root/img.png
+++ b/tests/root/img.png
Binary files differ
diff --git a/tests/root/includes.txt b/tests/root/includes.txt
index 904f0677..907b81e9 100644
--- a/tests/root/includes.txt
+++ b/tests/root/includes.txt
@@ -40,6 +40,7 @@ Literalinclude options
.. cssclass:: inc-lines
.. literalinclude:: literal.inc
:lines: 6-7,9
+ :lineno-start: 6
.. cssclass:: inc-startend
.. literalinclude:: literal.inc
@@ -57,6 +58,9 @@ Literalinclude options
.. literalinclude:: literal.inc
:end-before: class Foo
+.. literalinclude:: literal.inc
+ :diff: literal_orig.inc
+
.. cssclass:: inc-tab3
.. literalinclude:: tabs.inc
:tab-width: 3
@@ -67,6 +71,22 @@ Literalinclude options
:tab-width: 8
:language: python
+.. cssclass:: inc-pyobj-lines-match
+.. literalinclude:: literal.inc
+ :pyobject: Foo
+ :lineno-match:
+
+.. cssclass:: inc-lines-match
+.. literalinclude:: literal.inc
+ :lines: 6-7,8
+ :lineno-match:
+
+.. cssclass:: inc-startend-match
+.. literalinclude:: literal.inc
+ :start-after: coding: utf-8
+ :end-before: class Foo
+ :lineno-match:
+
Test if dedenting before parsing works.
.. highlight:: python
diff --git a/tests/root/literal_orig.inc b/tests/root/literal_orig.inc
new file mode 100644
index 00000000..14fd214c
--- /dev/null
+++ b/tests/root/literal_orig.inc
@@ -0,0 +1,13 @@
+# Literally included file using Python highlighting
+# -*- coding: utf-8 -*-
+
+foo = "Including Unicode characters: üöä" # This will be changed
+
+class FooOrig:
+ pass
+
+class BarOrig:
+ def baz():
+ pass
+
+def bar(): pass
diff --git a/tests/root/markup.txt b/tests/root/markup.txt
index 34e8fdb8..1ad7a13b 100644
--- a/tests/root/markup.txt
+++ b/tests/root/markup.txt
@@ -132,7 +132,9 @@ Adding \n to test unescaping.
*Linking inline markup*
* :pep:`8`
+* :pep:`Python Enhancement Proposal #8 <8>`
* :rfc:`1`
+* :rfc:`Request for Comments #1 <1>`
* :envvar:`HOME`
* :keyword:`with`
* :token:`try statement <try_stmt>`
@@ -140,6 +142,10 @@ Adding \n to test unescaping.
* :ref:`here <some-label>`
* :ref:`my-figure`
* :ref:`my-table`
+* :ref:`my-code-block`
+* :numref:`my-figure`
+* :numref:`my-table`
+* :numref:`my-code-block`
* :doc:`subdir/includes`
* ``:download:`` is tested in includes.txt
* :option:`Python -c option <python -c>`
@@ -226,8 +232,11 @@ Version markup
Code blocks
-----------
+.. _my-code-block:
+
.. code-block:: ruby
:linenos:
+ :caption: my ruby code
def ruby?
false
@@ -240,6 +249,7 @@ Misc stuff
Stuff [#]_
Reference lookup: [Ref1]_ (defined in another file).
+Reference lookup underscore: [Ref_1]_
.. seealso:: something, something else, something more
@@ -353,6 +363,25 @@ Only directive
Always present, because set through conf.py/command line.
+Any role
+--------
+
+.. default-role:: any
+
+Test referencing to `headings <with>` and `objects <func_without_body>`.
+Also `modules <mod>` and `classes <Time>`.
+
+More domains:
+
+* `JS <bar.baz>`
+* `C <SphinxType>`
+* `myobj` (user markup)
+* `n::Array`
+* `perl -c`
+
+.. default-role::
+
+
.. rubric:: Footnotes
.. [#] Like footnotes.
diff --git a/tests/root/metadata.txt b/tests/root/metadata.txt
index 9b3044ba..821816a9 100644
--- a/tests/root/metadata.txt
+++ b/tests/root/metadata.txt
@@ -32,6 +32,10 @@
language, containing examples of all basic reStructuredText
constructs and many advanced constructs.
+:nocomments:
+:orphan:
+:tocdepth: 1
+
.. meta::
:keywords: reStructuredText, demonstration, demo, parser
:description lang=en: A demonstration of the reStructuredText
diff --git a/tests/root/objects.txt b/tests/root/objects.txt
index 57e82212..cbc6281d 100644
--- a/tests/root/objects.txt
+++ b/tests/root/objects.txt
@@ -92,7 +92,7 @@ Referring to :func:`nothing <>`.
:type isdst: * some complex
* expression
:returns: a new :class:`Time` instance
- :rtype: :class:`Time`
+ :rtype: Time
:raises ValueError: if the values are out of range
:ivar int hour: like *hour*
:ivar minute: like *minute*
@@ -101,6 +101,7 @@ Referring to :func:`nothing <>`.
:type hour: DuplicateType
:param hour: Duplicate param. Should not lead to crashes.
:type hour: DuplicateType
+ :param .Cls extcls: A class from another module.
C items
@@ -117,14 +118,6 @@ C items
.. c:var:: sphinx_global
-Old C items (from oldcmarkup ext)
----------------------------------
-
-.. cfunction:: Sphinx_Func()
-
-Refer to :cfunc:`Sphinx_Func`.
-
-
Javascript items
================
@@ -177,6 +170,10 @@ Others
.. cmdoption:: -c
+.. option:: +p
+
+Link to :option:`perl +p`.
+
User markup
===========
diff --git a/tests/root/special/code.py b/tests/root/special/code.py
index 70c48d2e..b7934b23 100644
--- a/tests/root/special/code.py
+++ b/tests/root/special/code.py
@@ -1,2 +1,2 @@
-print "line 1"
-print "line 2"
+print("line 1")
+print("line 2")
diff --git a/tests/root/subdir/img.png b/tests/root/subdir/img.png
index 72c12d13..4c8f8992 100644
--- a/tests/root/subdir/img.png
+++ b/tests/root/subdir/img.png
Binary files differ
diff --git a/tests/root/subdir/simg.png b/tests/root/subdir/simg.png
index 72c12d13..4c8f8992 100644
--- a/tests/root/subdir/simg.png
+++ b/tests/root/subdir/simg.png
Binary files differ
diff --git a/tests/root/undecodable.txt b/tests/root/undecodable.txt
new file mode 100644
index 00000000..a4cf5c37
--- /dev/null
+++ b/tests/root/undecodable.txt
@@ -0,0 +1,3 @@
+:orphan:
+
+here: »
diff --git a/tests/roots/test-api-set-translator/conf.py b/tests/roots/test-api-set-translator/conf.py
new file mode 100644
index 00000000..3c160664
--- /dev/null
+++ b/tests/roots/test-api-set-translator/conf.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+## set this by test
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+
+from sphinx.writers.html import HTMLTranslator
+from sphinx.writers.latex import LaTeXTranslator
+from sphinx.writers.manpage import ManualPageTranslator
+from sphinx.writers.texinfo import TexinfoTranslator
+from sphinx.writers.text import TextTranslator
+from sphinx.writers.websupport import WebSupportTranslator
+from docutils.writers.docutils_xml import XMLTranslator
+
+
+project = 'test'
+master_doc = 'index'
+
+
+class ConfHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfDirHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfSingleHTMLTranslator(HTMLTranslator):
+ pass
+
+
+class ConfPickleTranslator(HTMLTranslator):
+ pass
+
+
+class ConfJsonTranslator(HTMLTranslator):
+ pass
+
+
+class ConfLaTeXTranslator(LaTeXTranslator):
+ pass
+
+
+class ConfManualPageTranslator(ManualPageTranslator):
+ pass
+
+
+class ConfTexinfoTranslator(TexinfoTranslator):
+ pass
+
+
+class ConfTextTranslator(TextTranslator):
+ pass
+
+
+class ConfWebSupportTranslator(WebSupportTranslator):
+ pass
+
+
+class ConfXMLTranslator(XMLTranslator):
+ pass
+
+
+class ConfPseudoXMLTranslator(XMLTranslator):
+ pass
+
+
+def setup(app):
+ app.set_translator('html', ConfHTMLTranslator)
+ app.set_translator('dirhtml', ConfDirHTMLTranslator)
+ app.set_translator('singlehtml', ConfSingleHTMLTranslator)
+ app.set_translator('pickle', ConfPickleTranslator)
+ app.set_translator('json', ConfJsonTranslator)
+ app.set_translator('latex', ConfLaTeXTranslator)
+ app.set_translator('man', ConfManualPageTranslator)
+ app.set_translator('texinfo', ConfTexinfoTranslator)
+ app.set_translator('text', ConfTextTranslator)
+ app.set_translator('websupport', ConfWebSupportTranslator)
+ app.set_translator('xml', ConfXMLTranslator)
+ app.set_translator('pseudoxml', ConfPseudoXMLTranslator)
diff --git a/tests/roots/test-api-set-translator/index.rst b/tests/roots/test-api-set-translator/index.rst
new file mode 100644
index 00000000..e5a29cf2
--- /dev/null
+++ b/tests/roots/test-api-set-translator/index.rst
@@ -0,0 +1,3 @@
+=======================
+Test API set_translator
+======================= \ No newline at end of file
diff --git a/tests/roots/test-api-set-translator/nonext/conf.py b/tests/roots/test-api-set-translator/nonext/conf.py
new file mode 100644
index 00000000..5a92f736
--- /dev/null
+++ b/tests/roots/test-api-set-translator/nonext/conf.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
+
+project = 'test'
+master_doc = 'index'
diff --git a/tests/roots/test-api-set-translator/translator.py b/tests/roots/test-api-set-translator/translator.py
new file mode 100644
index 00000000..015b4aa2
--- /dev/null
+++ b/tests/roots/test-api-set-translator/translator.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+
+from sphinx.writers.html import HTMLTranslator
+
+class ExtHTMLTranslator(HTMLTranslator):
+ pass
diff --git a/tests/roots/test-autosummary/conf.py b/tests/roots/test-autosummary/conf.py
index 542696e9..d9a44748 100644
--- a/tests/roots/test-autosummary/conf.py
+++ b/tests/roots/test-autosummary/conf.py
@@ -1,3 +1,7 @@
+import sys, os
+
+sys.path.insert(0, os.path.abspath('.'))
+
extensions = ['sphinx.ext.autosummary']
# The suffix of source filenames.
diff --git a/tests/roots/test-autosummary/contents.rst b/tests/roots/test-autosummary/contents.rst
index 3f16af99..0355c95b 100644
--- a/tests/roots/test-autosummary/contents.rst
+++ b/tests/roots/test-autosummary/contents.rst
@@ -4,3 +4,4 @@
:toctree:
dummy_module
+ sphinx
diff --git a/tests/root/autosummary.txt b/tests/roots/test-autosummary/sphinx.rst
index fc1a35a0..fc1a35a0 100644
--- a/tests/root/autosummary.txt
+++ b/tests/roots/test-autosummary/sphinx.rst
diff --git a/tests/roots/test-build-text/conf.py b/tests/roots/test-build-text/conf.py
new file mode 100644
index 00000000..1ba342a6
--- /dev/null
+++ b/tests/roots/test-build-text/conf.py
@@ -0,0 +1,2 @@
+master_doc = 'contents'
+source_suffix = '.txt'
diff --git a/tests/roots/test-build-text/contents.txt b/tests/roots/test-build-text/contents.txt
new file mode 100644
index 00000000..420d1428
--- /dev/null
+++ b/tests/roots/test-build-text/contents.txt
@@ -0,0 +1,8 @@
+.. toctree::
+
+ maxwidth
+ lineblock
+ nonascii_title
+ nonascii_table
+ nonascii_maxwidth
+ table
diff --git a/tests/roots/test-build-text/lineblock.txt b/tests/roots/test-build-text/lineblock.txt
new file mode 100644
index 00000000..b9cd0ed7
--- /dev/null
+++ b/tests/roots/test-build-text/lineblock.txt
@@ -0,0 +1,6 @@
+* one
+
+ | line-block 1
+ | line-block 2
+
+followed paragraph.
diff --git a/tests/roots/test-build-text/maxwidth.txt b/tests/roots/test-build-text/maxwidth.txt
new file mode 100644
index 00000000..c36f8a02
--- /dev/null
+++ b/tests/roots/test-build-text/maxwidth.txt
@@ -0,0 +1,6 @@
+.. seealso:: ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+
+* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+* ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham ham
+
+spam egg
diff --git a/tests/roots/test-build-text/nonascii_maxwidth.txt b/tests/roots/test-build-text/nonascii_maxwidth.txt
new file mode 100644
index 00000000..e9f0fd9b
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_maxwidth.txt
@@ -0,0 +1,5 @@
+abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc abc
+
+日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語 日本語
+
+abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語 abc 日本語
diff --git a/tests/roots/test-build-text/nonascii_table.txt b/tests/roots/test-build-text/nonascii_table.txt
new file mode 100644
index 00000000..709e0f2f
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_table.txt
@@ -0,0 +1,7 @@
+.. list-table::
+
+ - - spam
+ - egg
+
+ - - 日本語
+ - 日本語
diff --git a/tests/roots/test-build-text/nonascii_title.txt b/tests/roots/test-build-text/nonascii_title.txt
new file mode 100644
index 00000000..6d3b1f61
--- /dev/null
+++ b/tests/roots/test-build-text/nonascii_title.txt
@@ -0,0 +1,2 @@
+日本語
+======
diff --git a/tests/roots/test-build-text/table.txt b/tests/roots/test-build-text/table.txt
new file mode 100644
index 00000000..84328940
--- /dev/null
+++ b/tests/roots/test-build-text/table.txt
@@ -0,0 +1,7 @@
+ +-----+-----+
+ | XXX | XXX |
+ +-----+-----+
+ | | XXX |
+ +-----+-----+
+ | XXX | |
+ +-----+-----+
diff --git a/tests/roots/test-circular/conf.py b/tests/roots/test-circular/conf.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/roots/test-circular/conf.py
diff --git a/tests/roots/test-circular/contents.rst b/tests/roots/test-circular/contents.rst
new file mode 100644
index 00000000..294e674d
--- /dev/null
+++ b/tests/roots/test-circular/contents.rst
@@ -0,0 +1,4 @@
+.. toctree::
+
+ sub
+
diff --git a/tests/roots/test-circular/sub.rst b/tests/roots/test-circular/sub.rst
new file mode 100644
index 00000000..070c3974
--- /dev/null
+++ b/tests/roots/test-circular/sub.rst
@@ -0,0 +1,3 @@
+.. toctree::
+
+ contents
diff --git a/tests/roots/test-directive-code/caption.rst b/tests/roots/test-directive-code/caption.rst
new file mode 100644
index 00000000..5a2fe4a1
--- /dev/null
+++ b/tests/roots/test-directive-code/caption.rst
@@ -0,0 +1,21 @@
+Dedent
+======
+
+Code blocks
+-----------
+
+.. code-block:: ruby
+ :caption: caption *test* rb
+
+ def ruby?
+ false
+ end
+
+
+Literal Include
+---------------
+
+.. literalinclude:: literal.inc
+ :language: python
+ :caption: caption **test** py
+ :lines: 10-11
diff --git a/tests/roots/test-directive-code/conf.py b/tests/roots/test-directive-code/conf.py
new file mode 100644
index 00000000..f81c30bc
--- /dev/null
+++ b/tests/roots/test-directive-code/conf.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+master_doc = 'index'
diff --git a/tests/roots/test-directive-code/dedent.rst b/tests/roots/test-directive-code/dedent.rst
new file mode 100644
index 00000000..9ec1c0ee
--- /dev/null
+++ b/tests/roots/test-directive-code/dedent.rst
@@ -0,0 +1,35 @@
+Dedent
+======
+
+Literal Include
+---------------
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 0
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 1
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 2
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 3
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 4
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 10-11
+ :dedent: 1000
diff --git a/tests/roots/test-directive-code/dedent_code.rst b/tests/roots/test-directive-code/dedent_code.rst
new file mode 100644
index 00000000..3e8dacd6
--- /dev/null
+++ b/tests/roots/test-directive-code/dedent_code.rst
@@ -0,0 +1,53 @@
+Dedent
+======
+
+Code blocks
+-----------
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 0
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 1
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 2
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 3
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 4
+
+ def ruby?
+ false
+ end
+
+.. code-block:: ruby
+ :linenos:
+ :dedent: 1000
+
+ def ruby?
+ false
+ end
diff --git a/tests/roots/test-directive-code/index.rst b/tests/roots/test-directive-code/index.rst
new file mode 100644
index 00000000..dab6b708
--- /dev/null
+++ b/tests/roots/test-directive-code/index.rst
@@ -0,0 +1,25 @@
+test-directive-code
+===================
+
+.. toctree::
+ :glob:
+
+ *
+
+
+Code blocks
+-----------
+
+.. code-block:: ruby
+ :linenos:
+
+ def ruby?
+ false
+ end
+
+
+Literal Includes
+----------------
+
+.. literalinclude:: literal.inc
+ :language: python
diff --git a/tests/roots/test-directive-code/lineno_match.rst b/tests/roots/test-directive-code/lineno_match.rst
new file mode 100644
index 00000000..4e3b3835
--- /dev/null
+++ b/tests/roots/test-directive-code/lineno_match.rst
@@ -0,0 +1,17 @@
+Literal Includes with Line Numbers Matching
+===========================================
+
+.. literalinclude:: literal.inc
+ :language: python
+ :pyobject: Bar
+ :lineno-match:
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lines: 5-6,7,8-9
+ :lineno-match:
+
+.. literalinclude:: literal.inc
+ :language: python
+ :start-after: pass
+ :lineno-match:
diff --git a/tests/roots/test-directive-code/lineno_start.rst b/tests/roots/test-directive-code/lineno_start.rst
new file mode 100644
index 00000000..1beaabbf
--- /dev/null
+++ b/tests/roots/test-directive-code/lineno_start.rst
@@ -0,0 +1,6 @@
+Literal Includes with Line Numbers Starting from 200
+====================================================
+
+.. literalinclude:: literal.inc
+ :language: python
+ :lineno-start: 200
diff --git a/tests/roots/test-directive-code/linenos.rst b/tests/roots/test-directive-code/linenos.rst
new file mode 100644
index 00000000..2f64498d
--- /dev/null
+++ b/tests/roots/test-directive-code/linenos.rst
@@ -0,0 +1,6 @@
+Literal Includes with Line Numbers
+==================================
+
+.. literalinclude:: literal.inc
+ :language: python
+ :linenos:
diff --git a/tests/roots/test-directive-code/literal.inc b/tests/roots/test-directive-code/literal.inc
new file mode 100644
index 00000000..694f15ed
--- /dev/null
+++ b/tests/roots/test-directive-code/literal.inc
@@ -0,0 +1,13 @@
+# Literally included file using Python highlighting
+# -*- coding: utf-8 -*-
+
+foo = "Including Unicode characters: üöä"
+
+class Foo:
+ pass
+
+class Bar:
+ def baz():
+ pass
+
+def bar(): pass
diff --git a/tests/roots/test-directive-only/conf.py b/tests/roots/test-directive-only/conf.py
new file mode 100644
index 00000000..eb3a3d0d
--- /dev/null
+++ b/tests/roots/test-directive-only/conf.py
@@ -0,0 +1,2 @@
+
+project = 'test-directive-only'
diff --git a/tests/roots/test-only-directive/contents.rst b/tests/roots/test-directive-only/contents.rst
index 9a93be9e..80ec0031 100644
--- a/tests/roots/test-only-directive/contents.rst
+++ b/tests/roots/test-directive-only/contents.rst
@@ -1,4 +1,4 @@
-test-only-directive
+test-directive-only
===================
.. toctree::
diff --git a/tests/roots/test-only-directive/only.rst b/tests/roots/test-directive-only/only.rst
index 4a3eb48a..4a3eb48a 100644
--- a/tests/roots/test-only-directive/only.rst
+++ b/tests/roots/test-directive-only/only.rst
diff --git a/tests/roots/test-doctest/conf.py b/tests/roots/test-doctest/conf.py
new file mode 100644
index 00000000..f6a12edb
--- /dev/null
+++ b/tests/roots/test-doctest/conf.py
@@ -0,0 +1,5 @@
+extensions = ['sphinx.ext.doctest']
+
+project = 'test project for doctest'
+master_doc = 'doctest.txt'
+source_suffix = '.txt'
diff --git a/tests/root/doctest.txt b/tests/roots/test-doctest/doctest.txt
index d029cd88..ce4d88bd 100644
--- a/tests/root/doctest.txt
+++ b/tests/roots/test-doctest/doctest.txt
@@ -125,5 +125,5 @@ Special directives
.. testcleanup:: *
- import test_doctest
- test_doctest.cleanup_call()
+ import test_ext_doctest
+ test_ext_doctest.cleanup_call()
diff --git a/tests/roots/test-docutilsconf/contents.txt b/tests/roots/test-docutilsconf/contents.txt
index 3d0003b8..b20204e6 100644
--- a/tests/roots/test-docutilsconf/contents.txt
+++ b/tests/roots/test-docutilsconf/contents.txt
@@ -1,15 +1,15 @@
-docutils conf
-=============
-
-field-name-limit
-----------------
-
-:short: desc
-:long long long long: long title
-
-option-limit
-------------
-
---short short desc
---long-long-long-long long desc
-
+docutils conf
+=============
+
+field-name-limit
+----------------
+
+:short: desc
+:long long long long: long title
+
+option-limit
+------------
+
+--short short desc
+--long-long-long-long long desc
+
diff --git a/tests/roots/test-ext-viewcode/conf.py b/tests/roots/test-ext-viewcode/conf.py
new file mode 100644
index 00000000..a99a72bb
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/conf.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+import sys
+import os
+
+sys.path.insert(0, os.path.abspath('.'))
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
+master_doc = 'index'
+
+
+if 'test_linkcode' in tags:
+ extensions.remove('sphinx.ext.viewcode')
+ extensions.append('sphinx.ext.linkcode')
+
+ def linkcode_resolve(domain, info):
+ if domain == 'py':
+ fn = info['module'].replace('.', '/')
+ return "http://foobar/source/%s.py" % fn
+ elif domain == "js":
+ return "http://foobar/js/" + info['fullname']
+ elif domain in ("c", "cpp"):
+ return "http://foobar/%s/%s" % (domain, "".join(info['names']))
+ else:
+ raise AssertionError()
diff --git a/tests/roots/test-ext-viewcode/index.rst b/tests/roots/test-ext-viewcode/index.rst
new file mode 100644
index 00000000..b5776cfa
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/index.rst
@@ -0,0 +1,34 @@
+viewcode
+========
+
+.. py:module:: spam
+
+.. autofunction:: func1
+
+.. autofunction:: func2
+
+.. autofunction:: spam.mod1.func1
+
+.. autofunction:: spam.mod2.func2
+
+.. autofunction:: Class1
+
+.. autofunction:: Class2
+
+.. autofunction:: spam.mod1.Class1
+
+.. autofunction:: spam.mod2.Class2
+
+
+.. literalinclude:: spam/__init__.py
+ :language: python
+ :pyobject: func1
+
+.. literalinclude:: spam/mod1.py
+ :language: python
+ :pyobject: func1
+
+
+.. toctree::
+
+ objects
diff --git a/tests/roots/test-ext-viewcode/objects.rst b/tests/roots/test-ext-viewcode/objects.rst
new file mode 100644
index 00000000..fcea2165
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/objects.rst
@@ -0,0 +1,169 @@
+Testing object descriptions
+===========================
+
+.. function:: func_without_module(a, b, *c[, d])
+
+ Does something.
+
+.. function:: func_without_body()
+
+.. function:: func_noindex
+ :noindex:
+
+.. function:: func_with_module
+ :module: foolib
+
+Referring to :func:`func with no index <func_noindex>`.
+Referring to :func:`nothing <>`.
+
+.. module:: mod
+ :synopsis: Module synopsis.
+ :platform: UNIX
+
+.. function:: func_in_module
+
+.. class:: Cls
+
+ .. method:: meth1
+
+ .. staticmethod:: meths
+
+ .. attribute:: attr
+
+.. explicit class given
+.. method:: Cls.meth2
+
+.. explicit module given
+.. exception:: Error(arg1, arg2)
+ :module: errmod
+
+.. data:: var
+
+
+.. currentmodule:: None
+
+.. function:: func_without_module2() -> annotation
+
+.. object:: long(parameter, \
+ list)
+ another one
+
+.. class:: TimeInt
+
+ Has only one parameter (triggers special behavior...)
+
+ :param moo: |test|
+ :type moo: |test|
+
+.. |test| replace:: Moo
+
+.. class:: Time(hour, minute, isdst)
+
+ :param year: The year.
+ :type year: TimeInt
+ :param TimeInt minute: The minute.
+ :param isdst: whether it's DST
+ :type isdst: * some complex
+ * expression
+ :returns: a new :class:`Time` instance
+ :rtype: :class:`Time`
+ :raises ValueError: if the values are out of range
+ :ivar int hour: like *hour*
+ :ivar minute: like *minute*
+ :vartype minute: int
+ :param hour: Some parameter
+ :type hour: DuplicateType
+ :param hour: Duplicate param. Should not lead to crashes.
+ :type hour: DuplicateType
+ :param .Cls extcls: A class from another module.
+
+
+C items
+=======
+
+.. c:function:: Sphinx_DoSomething()
+
+.. c:member:: SphinxStruct.member
+
+.. c:macro:: SPHINX_USE_PYTHON
+
+.. c:type:: SphinxType
+
+.. c:var:: sphinx_global
+
+
+Javascript items
+================
+
+.. js:function:: foo()
+
+.. js:data:: bar
+
+.. documenting the method of any object
+.. js:function:: bar.baz(href, callback[, errback])
+
+ :param string href: The location of the resource.
+ :param callback: Get's called with the data returned by the resource.
+ :throws InvalidHref: If the `href` is invalid.
+ :returns: `undefined`
+
+.. js:attribute:: bar.spam
+
+References
+==========
+
+Referencing :class:`mod.Cls` or :Class:`mod.Cls` should be the same.
+
+With target: :c:func:`Sphinx_DoSomething()` (parentheses are handled),
+:c:member:`SphinxStruct.member`, :c:macro:`SPHINX_USE_PYTHON`,
+:c:type:`SphinxType *` (pointer is handled), :c:data:`sphinx_global`.
+
+Without target: :c:func:`CFunction`. :c:func:`!malloc`.
+
+:js:func:`foo()`
+:js:func:`foo`
+
+:js:data:`bar`
+:js:func:`bar.baz()`
+:js:func:`bar.baz`
+:js:func:`~bar.baz()`
+
+:js:attr:`bar.baz`
+
+
+Others
+======
+
+.. envvar:: HOME
+
+.. program:: python
+
+.. cmdoption:: -c command
+
+.. program:: perl
+
+.. cmdoption:: -c
+
+.. option:: +p
+
+Link to :option:`perl +p`.
+
+
+User markup
+===========
+
+.. userdesc:: myobj:parameter
+
+ Description of userdesc.
+
+
+Referencing :userdescrole:`myobj`.
+
+
+CPP domain
+==========
+
+.. cpp:class:: n::Array<T,d>
+
+ .. cpp:function:: T& operator[]( unsigned j )
+ const T& operator[]( unsigned j ) const
diff --git a/tests/roots/test-ext-viewcode/spam/__init__.py b/tests/roots/test-ext-viewcode/spam/__init__.py
new file mode 100644
index 00000000..2c8603c1
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/spam/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+
+from .mod1 import func1, Class1
+from .mod2 import (
+ func2,
+ Class2,
+)
diff --git a/tests/roots/test-ext-viewcode/spam/mod1.py b/tests/roots/test-ext-viewcode/spam/mod1.py
new file mode 100644
index 00000000..7133fc82
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/spam/mod1.py
@@ -0,0 +1,15 @@
+"""
+mod1
+"""
+
+def func1(a, b):
+ """
+ this is func1
+ """
+ return a, b
+
+
+class Class1(object):
+ """
+ this is Class1
+ """
diff --git a/tests/roots/test-ext-viewcode/spam/mod2.py b/tests/roots/test-ext-viewcode/spam/mod2.py
new file mode 100644
index 00000000..79834b66
--- /dev/null
+++ b/tests/roots/test-ext-viewcode/spam/mod2.py
@@ -0,0 +1,15 @@
+"""
+mod2
+"""
+
+def func2(a, b):
+ """
+ this is func2
+ """
+ return a, b
+
+
+class Class2(object):
+ """
+ this is Class2
+ """
diff --git a/tests/roots/test-intl/admonitions.po b/tests/roots/test-intl/admonitions.po
index 0dd16376..bc722e58 100644
--- a/tests/roots/test-intl/admonitions.po
+++ b/tests/roots/test-intl/admonitions.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.2\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-07-03 12:00\n"
+"POT-Creation-Date: 2013-07-03 12:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/conf.py b/tests/roots/test-intl/conf.py
index 4c37f771..1b20244c 100644
--- a/tests/roots/test-intl/conf.py
+++ b/tests/roots/test-intl/conf.py
@@ -6,3 +6,4 @@ keep_warnings = True
templates_path = ['_templates']
html_additional_pages = {'index': 'index.html'}
release = version = '2013.120'
+gettext_enables = ['index']
diff --git a/tests/roots/test-intl/definition_terms.po b/tests/roots/test-intl/definition_terms.po
index 2c3a3bca..a147fe5e 100644
--- a/tests/roots/test-intl/definition_terms.po
+++ b/tests/roots/test-intl/definition_terms.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-01-01 05:00\n"
+"POT-Creation-Date: 2013-01-01 05:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/docfields.po b/tests/roots/test-intl/docfields.po
index f906ca19..8c3b8f97 100644
--- a/tests/roots/test-intl/docfields.po
+++ b/tests/roots/test-intl/docfields.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: Sphinx <Tests> 0.6\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-12-16 14:11\n"
+"POT-Creation-Date: 2012-12-16 14:11+0000\n"
"PO-Revision-Date: 2012-12-18 06:14+0900\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/external_links.po b/tests/roots/test-intl/external_links.po
index e4e67643..8c53abbd 100644
--- a/tests/roots/test-intl/external_links.po
+++ b/tests/roots/test-intl/external_links.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-11-22 08:28\n"
+"POT-Creation-Date: 2012-11-22 08:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/figure_caption.po b/tests/roots/test-intl/figure_caption.po
index 2b85aea3..2fb1e5a5 100644
--- a/tests/roots/test-intl/figure_caption.po
+++ b/tests/roots/test-intl/figure_caption.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-01-04 7:00\n"
+"POT-Creation-Date: 2013-01-04 07:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/footnote.po b/tests/roots/test-intl/footnote.po
index b3876f51..3dfd3587 100644
--- a/tests/roots/test-intl/footnote.po
+++ b/tests/roots/test-intl/footnote.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-11-22 08:28\n"
+"POT-Creation-Date: 2012-11-22 08:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/glossary_terms.po b/tests/roots/test-intl/glossary_terms.po
index 1ffcaeb2..2746655e 100644
--- a/tests/roots/test-intl/glossary_terms.po
+++ b/tests/roots/test-intl/glossary_terms.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-01-29 14:10\n"
+"POT-Creation-Date: 2013-01-29 14:10+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/glossary_terms_inconsistency.po b/tests/roots/test-intl/glossary_terms_inconsistency.po
index 5e301657..ef2bf30f 100644
--- a/tests/roots/test-intl/glossary_terms_inconsistency.po
+++ b/tests/roots/test-intl/glossary_terms_inconsistency.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-01-29 14:10\n"
+"POT-Creation-Date: 2013-01-29 14:10+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/i18n.png b/tests/roots/test-intl/i18n.png
index 72c12d13..4c8f8992 100644
--- a/tests/roots/test-intl/i18n.png
+++ b/tests/roots/test-intl/i18n.png
Binary files differ
diff --git a/tests/roots/test-intl/index_entries.po b/tests/roots/test-intl/index_entries.po
index 6da9a813..83619b49 100644
--- a/tests/roots/test-intl/index_entries.po
+++ b/tests/roots/test-intl/index_entries.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: foo foo\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-01-05 18:10\n"
+"POT-Creation-Date: 2013-01-05 18:10+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/literalblock.po b/tests/roots/test-intl/literalblock.po
index 8ea83b3b..5b5f71e0 100644
--- a/tests/roots/test-intl/literalblock.po
+++ b/tests/roots/test-intl/literalblock.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-11-22 08:28\n"
+"POT-Creation-Date: 2012-11-22 08:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/refs_inconsistency.po b/tests/roots/test-intl/refs_inconsistency.po
index 9cab687f..cb2de9ad 100644
--- a/tests/roots/test-intl/refs_inconsistency.po
+++ b/tests/roots/test-intl/refs_inconsistency.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-12-05 08:28\n"
+"POT-Creation-Date: 2012-12-05 08:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/refs_python_domain.txt b/tests/roots/test-intl/refs_python_domain.txt
index 20a8bc50..2b021f2e 100644
--- a/tests/roots/test-intl/refs_python_domain.txt
+++ b/tests/roots/test-intl/refs_python_domain.txt
@@ -1,15 +1,15 @@
-:tocdepth: 2
-
-i18n with python domain refs
-=============================
-
-.. currentmodule:: sensitive
-
-See this decorator: :func:`sensitive_variables`.
-
-.. function:: sensitive_variables(*variables)
-
- Some description
-
-.. currentmodule:: reporting
-
+:tocdepth: 2
+
+i18n with python domain refs
+=============================
+
+.. currentmodule:: sensitive
+
+See this decorator: :func:`sensitive_variables`.
+
+.. function:: sensitive_variables(*variables)
+
+ Some description
+
+.. currentmodule:: reporting
+
diff --git a/tests/roots/test-intl/role_xref.po b/tests/roots/test-intl/role_xref.po
index 8730c499..5b6d114c 100644
--- a/tests/roots/test-intl/role_xref.po
+++ b/tests/roots/test-intl/role_xref.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-02-04 14:00\n"
+"POT-Creation-Date: 2013-02-04 14:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/rubric.po b/tests/roots/test-intl/rubric.po
index 2c1214b7..91376236 100644
--- a/tests/roots/test-intl/rubric.po
+++ b/tests/roots/test-intl/rubric.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-11-12 7:00\n"
+"POT-Creation-Date: 2013-11-12 07:00+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/seealso.po b/tests/roots/test-intl/seealso.po
index d3b27e51..86a1c73c 100644
--- a/tests/roots/test-intl/seealso.po
+++ b/tests/roots/test-intl/seealso.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: Sphinx <Tests> 0.6\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-12-16 06:06\n"
+"POT-Creation-Date: 2012-12-16 06:06+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/sphinx.po b/tests/roots/test-intl/sphinx.po
index cac5d4ad..a236f2f1 100644
--- a/tests/roots/test-intl/sphinx.po
+++ b/tests/roots/test-intl/sphinx.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-11-22 08:28\n"
+"POT-Creation-Date: 2012-11-22 08:28+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/subdir/contents.txt b/tests/roots/test-intl/subdir/contents.txt
index b6509baf..7578ce38 100644
--- a/tests/roots/test-intl/subdir/contents.txt
+++ b/tests/roots/test-intl/subdir/contents.txt
@@ -1,2 +1,2 @@
-subdir contents
-===============
+subdir contents
+===============
diff --git a/tests/roots/test-intl/versionchange.po b/tests/roots/test-intl/versionchange.po
index 911d3d9f..5a8df380 100644
--- a/tests/roots/test-intl/versionchange.po
+++ b/tests/roots/test-intl/versionchange.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: sphinx 1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2012-12-15 03:17\n"
+"POT-Creation-Date: 2012-12-15 03:17+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-intl/warnings.po b/tests/roots/test-intl/warnings.po
index bf82510e..7963a0a8 100644
--- a/tests/roots/test-intl/warnings.po
+++ b/tests/roots/test-intl/warnings.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: Sphinx <Tests> 0.6\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-02-04 13:06\n"
+"POT-Creation-Date: 2013-02-04 13:06+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
diff --git a/tests/roots/test-numbered-circular/conf.py b/tests/roots/test-numbered-circular/conf.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/roots/test-numbered-circular/conf.py
diff --git a/tests/roots/test-numbered-circular/contents.rst b/tests/roots/test-numbered-circular/contents.rst
new file mode 100644
index 00000000..c3129cd4
--- /dev/null
+++ b/tests/roots/test-numbered-circular/contents.rst
@@ -0,0 +1,5 @@
+.. toctree::
+ :numbered:
+
+ sub
+
diff --git a/tests/roots/test-numbered-circular/sub.rst b/tests/roots/test-numbered-circular/sub.rst
new file mode 100644
index 00000000..070c3974
--- /dev/null
+++ b/tests/roots/test-numbered-circular/sub.rst
@@ -0,0 +1,3 @@
+.. toctree::
+
+ contents
diff --git a/tests/roots/test-numfig/bar.rst b/tests/roots/test-numfig/bar.rst
new file mode 100644
index 00000000..f86e7475
--- /dev/null
+++ b/tests/roots/test-numfig/bar.rst
@@ -0,0 +1,58 @@
+===
+Bar
+===
+
+Bar A
+=====
+
+.. figure:: rimg.png
+
+ should be Fig.2.1
+
+.. csv-table:: should be Table 2.1
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 2.1
+
+ print('hello world')
+
+.. toctree::
+
+ baz
+
+.. figure:: rimg.png
+
+ should be Fig.2.3
+
+.. csv-table:: should be Table 2.3
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 2.3
+
+ print('hello world')
+
+Bar B
+=====
+
+Bar B1
+------
+
+.. figure:: rimg.png
+
+ should be Fig.2.4
+
+.. csv-table:: should be Table 2.4
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 2.4
+
+ print('hello world')
diff --git a/tests/roots/test-numfig/baz.rst b/tests/roots/test-numfig/baz.rst
new file mode 100644
index 00000000..da9e0fe0
--- /dev/null
+++ b/tests/roots/test-numfig/baz.rst
@@ -0,0 +1,22 @@
+Baz A
+-----
+
+.. _fig22:
+
+.. figure:: rimg.png
+
+ should be Fig.2.2
+
+.. _table22:
+
+.. csv-table:: should be Table 2.2
+ :header-rows: 0
+
+ hello,world
+
+.. _code22:
+
+.. code-block:: python
+ :caption: should be List 2.2
+
+ print('hello world')
diff --git a/tests/roots/test-numfig/conf.py b/tests/roots/test-numfig/conf.py
new file mode 100644
index 00000000..f81c30bc
--- /dev/null
+++ b/tests/roots/test-numfig/conf.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+master_doc = 'index'
diff --git a/tests/roots/test-numfig/foo.rst b/tests/roots/test-numfig/foo.rst
new file mode 100644
index 00000000..ef713574
--- /dev/null
+++ b/tests/roots/test-numfig/foo.rst
@@ -0,0 +1,71 @@
+===
+Foo
+===
+
+.. figure:: rimg.png
+
+ should be Fig.1.1
+
+.. csv-table:: should be Table 1.1
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 1.1
+
+ print('hello world')
+
+Foo A
+=====
+
+.. figure:: rimg.png
+
+ should be Fig.1.2
+
+.. figure:: rimg.png
+
+ should be Fig.1.3
+
+.. csv-table:: should be Table 1.2
+ :header-rows: 0
+
+ hello,world
+
+.. csv-table:: should be Table 1.3
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 1.2
+
+ print('hello world')
+
+.. code-block:: python
+ :caption: should be List 1.3
+
+ print('hello world')
+
+Foo A1
+------
+
+Foo B
+=====
+
+Foo B1
+------
+
+.. figure:: rimg.png
+
+ should be Fig.1.4
+
+.. csv-table:: should be Table 1.4
+ :header-rows: 0
+
+ hello,world
+
+.. code-block:: python
+ :caption: should be List 1.4
+
+ print('hello world')
diff --git a/tests/roots/test-numfig/index.rst b/tests/roots/test-numfig/index.rst
new file mode 100644
index 00000000..7779fe91
--- /dev/null
+++ b/tests/roots/test-numfig/index.rst
@@ -0,0 +1,50 @@
+test-tocdepth
+=============
+
+.. toctree::
+ :numbered:
+
+ foo
+ bar
+
+.. _fig1:
+
+.. figure:: rimg.png
+
+ should be Fig.1
+
+.. figure:: rimg.png
+
+ should be Fig.2
+
+.. _table-1:
+
+.. csv-table:: should be Table 1
+ :header-rows: 0
+
+ hello,world
+
+.. csv-table:: should be Table 2
+ :header-rows: 0
+
+ hello,world
+
+.. _code_1:
+
+.. code-block:: python
+ :caption: should be List 1
+
+ print('hello world')
+
+.. code-block:: python
+ :caption: should be List 2
+
+ print('hello world')
+
+
+* Fig.1 is :numref:`fig1`
+* Fig.2.2 is :numref:`Figure# <fig22>`
+* Table.1 is :numref:`table-1`
+* Table.2.2 is :numref:`Table:# <table22>`
+* List.1 is :numref:`code_1`
+* List.2.2 is :numref:`Code-# <code22>`
diff --git a/tests/roots/test-numfig/rimg.png b/tests/roots/test-numfig/rimg.png
new file mode 100644
index 00000000..1081dc14
--- /dev/null
+++ b/tests/roots/test-numfig/rimg.png
Binary files differ
diff --git a/tests/roots/test-only-directive/conf.py b/tests/roots/test-only-directive/conf.py
deleted file mode 100644
index bcb4305d..00000000
--- a/tests/roots/test-only-directive/conf.py
+++ /dev/null
@@ -1,2 +0,0 @@
-
-project = 'test-only-directive'
diff --git a/tests/roots/test-setup/doc/contents.txt b/tests/roots/test-setup/doc/contents.txt
index cb52405f..56960f53 100644
--- a/tests/roots/test-setup/doc/contents.txt
+++ b/tests/roots/test-setup/doc/contents.txt
@@ -1,5 +1,5 @@
-contents
-=========
-
-spam egg ham
-
+contents
+=========
+
+spam egg ham
+
diff --git a/tests/roots/test-templating/autosummary_templating.txt b/tests/roots/test-templating/autosummary_templating.txt
index 05643a02..6b396a3f 100644
--- a/tests/roots/test-templating/autosummary_templating.txt
+++ b/tests/roots/test-templating/autosummary_templating.txt
@@ -4,10 +4,4 @@ Autosummary templating test
.. autosummary::
:toctree: generated
- sphinx.application.Sphinx
-
-.. currentmodule:: sphinx.application
-
-.. autoclass:: TemplateBridge
-
- .. automethod:: render
+ sphinx.application.TemplateBridge
diff --git a/tests/roots/test-tocdepth/bar.rst b/tests/roots/test-tocdepth/bar.rst
new file mode 100644
index 00000000..d70dec90
--- /dev/null
+++ b/tests/roots/test-tocdepth/bar.rst
@@ -0,0 +1,27 @@
+:tocdepth: 2
+
+===
+Bar
+===
+
+should be 2
+
+Bar A
+=====
+
+should be 2.1
+
+.. toctree::
+
+ baz
+
+Bar B
+=====
+
+should be 2.2
+
+Bar B1
+------
+
+should be 2.2.1
+
diff --git a/tests/roots/test-tocdepth/baz.rst b/tests/roots/test-tocdepth/baz.rst
new file mode 100644
index 00000000..b07fa050
--- /dev/null
+++ b/tests/roots/test-tocdepth/baz.rst
@@ -0,0 +1,5 @@
+Baz A
+-----
+
+should be 2.1.1
+
diff --git a/tests/roots/test-tocdepth/conf.py b/tests/roots/test-tocdepth/conf.py
new file mode 100644
index 00000000..f81c30bc
--- /dev/null
+++ b/tests/roots/test-tocdepth/conf.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+master_doc = 'index'
diff --git a/tests/roots/test-tocdepth/foo.rst b/tests/roots/test-tocdepth/foo.rst
new file mode 100644
index 00000000..61fd539f
--- /dev/null
+++ b/tests/roots/test-tocdepth/foo.rst
@@ -0,0 +1,26 @@
+===
+Foo
+===
+
+should be 1
+
+Foo A
+=====
+
+should be 1.1
+
+Foo A1
+------
+
+should be 1.1.1
+
+Foo B
+=====
+
+should be 1.2
+
+Foo B1
+------
+
+should be 1.2.1
+
diff --git a/tests/roots/test-tocdepth/index.rst b/tests/roots/test-tocdepth/index.rst
new file mode 100644
index 00000000..0b651d48
--- /dev/null
+++ b/tests/roots/test-tocdepth/index.rst
@@ -0,0 +1,8 @@
+test-tocdepth
+=============
+
+.. toctree::
+ :numbered:
+
+ foo
+ bar
diff --git a/tests/root/versioning/added.txt b/tests/roots/test-versioning/added.txt
index 22a70739..22a70739 100644
--- a/tests/root/versioning/added.txt
+++ b/tests/roots/test-versioning/added.txt
diff --git a/tests/roots/test-versioning/conf.py b/tests/roots/test-versioning/conf.py
new file mode 100644
index 00000000..edcf9295
--- /dev/null
+++ b/tests/roots/test-versioning/conf.py
@@ -0,0 +1,3 @@
+project = 'versioning test root'
+master_doc = 'index'
+source_suffix = '.txt'
diff --git a/tests/root/versioning/deleted.txt b/tests/roots/test-versioning/deleted.txt
index a1a9c4c9..a1a9c4c9 100644
--- a/tests/root/versioning/deleted.txt
+++ b/tests/roots/test-versioning/deleted.txt
diff --git a/tests/root/versioning/deleted_end.txt b/tests/roots/test-versioning/deleted_end.txt
index f30e6300..f30e6300 100644
--- a/tests/root/versioning/deleted_end.txt
+++ b/tests/roots/test-versioning/deleted_end.txt
diff --git a/tests/root/versioning/index.txt b/tests/roots/test-versioning/index.txt
index 9d098f75..9d098f75 100644
--- a/tests/root/versioning/index.txt
+++ b/tests/roots/test-versioning/index.txt
diff --git a/tests/root/versioning/insert.txt b/tests/roots/test-versioning/insert.txt
index 1c157cc9..1c157cc9 100644
--- a/tests/root/versioning/insert.txt
+++ b/tests/roots/test-versioning/insert.txt
diff --git a/tests/root/versioning/insert_beginning.txt b/tests/roots/test-versioning/insert_beginning.txt
index 57102a76..57102a76 100644
--- a/tests/root/versioning/insert_beginning.txt
+++ b/tests/roots/test-versioning/insert_beginning.txt
diff --git a/tests/root/versioning/insert_similar.txt b/tests/roots/test-versioning/insert_similar.txt
index ee9b5305..ee9b5305 100644
--- a/tests/root/versioning/insert_similar.txt
+++ b/tests/roots/test-versioning/insert_similar.txt
diff --git a/tests/root/versioning/modified.txt b/tests/roots/test-versioning/modified.txt
index 49cdad93..49cdad93 100644
--- a/tests/root/versioning/modified.txt
+++ b/tests/roots/test-versioning/modified.txt
diff --git a/tests/root/versioning/original.txt b/tests/roots/test-versioning/original.txt
index b3fe0609..b3fe0609 100644
--- a/tests/root/versioning/original.txt
+++ b/tests/roots/test-versioning/original.txt
diff --git a/tests/run.py b/tests/run.py
index 37922f3b..95308212 100755
--- a/tests/run.py
+++ b/tests/run.py
@@ -9,53 +9,43 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
+import os
import sys
-from os import path, chdir, listdir, environ
-import shutil
-
-testroot = path.dirname(__file__) or '.'
-if 'BUILD_TEST_PATH' in environ:
- # for tox testing
- newroot = environ['BUILD_TEST_PATH']
- # tox installs the sphinx package, no need for sys.path.insert
-else:
- newroot = path.join(testroot, path.pardir, 'build')
- newroot = path.join(newroot, listdir(newroot)[0], 'tests')
-
-shutil.rmtree(newroot, ignore_errors=True)
-
-if sys.version_info >= (3, 0):
- print('Copying and converting sources to build/lib/tests...')
- from distutils.util import copydir_run_2to3
- copydir_run_2to3(testroot, newroot)
-else:
- # just copying test directory to parallel testing
- print('Copying sources to build/lib/tests...')
- shutil.copytree(testroot, newroot)
-
-# always test the sphinx package from build/lib/
-sys.path.insert(0, path.abspath(path.join(newroot, path.pardir)))
-# switch to the copy/converted dir so nose tests the right tests
-chdir(newroot)
-
-try:
- import nose
-except ImportError:
- print('The nose package is needed to run the Sphinx test suite.')
- sys.exit(1)
-
-try:
- import docutils
-except ImportError:
- print('Sphinx requires the docutils package to be installed.')
- sys.exit(1)
-
-try:
- import jinja2
-except ImportError:
- print('Sphinx requires the jinja2 package to be installed.')
- sys.exit(1)
-
-print('Running Sphinx test suite...')
+import traceback
+
+from path import path
+
+testroot = os.path.dirname(__file__) or '.'
+sys.path.insert(0, os.path.abspath(os.path.join(testroot, os.path.pardir)))
+
+# check dependencies before testing
+print('Checking dependencies...')
+for modname in ('nose', 'mock', 'six', 'docutils', 'jinja2', 'pygments',
+ 'snowballstemmer', 'babel'):
+ try:
+ __import__(modname)
+ except ImportError as err:
+ if modname == 'mock' and sys.version_info[0] == 3:
+ continue
+ traceback.print_exc()
+ print('The %r package is needed to run the Sphinx test suite.' % modname)
+ sys.exit(1)
+
+# find a temp dir for testing and clean it up now
+os.environ['SPHINX_TEST_TEMPDIR'] = \
+ os.path.abspath(os.path.join(testroot, 'build')) \
+ if 'SPHINX_TEST_TEMPDIR' not in os.environ \
+ else os.path.abspath(os.environ['SPHINX_TEST_TEMPDIR'])
+tempdir = path(os.environ['SPHINX_TEST_TEMPDIR'])
+print('Temporary files will be placed in %s.' % tempdir)
+if tempdir.exists():
+ tempdir.rmtree()
+tempdir.makedirs()
+
+print('Running Sphinx test suite (with Python %s)...' % sys.version.split()[0])
+sys.stdout.flush()
+
+import nose
nose.main()
diff --git a/tests/test_api_translator.py b/tests/test_api_translator.py
new file mode 100644
index 00000000..4dd70a2c
--- /dev/null
+++ b/tests/test_api_translator.py
@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+"""
+ test_api_translator
+ ~~~~~~~~~~~~~~~~~~~
+
+ Test the Sphinx API for translator.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import sys
+
+from util import with_app, rootdir
+
+
+def setup_module():
+ sys.path.insert(0, rootdir / 'roots' / 'test-api-set-translator')
+
+
+def teardown_module():
+ sys.path.remove(rootdir / 'roots' / 'test-api-set-translator')
+
+
+@with_app('html')
+def test_html_translator(app, status, warning):
+ # no set_translator(), no html_translator_class
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'SmartyPantsHTMLTranslator'
+
+
+@with_app('html', confoverrides={
+ 'html_translator_class': 'translator.ExtHTMLTranslator'})
+def test_html_with_html_translator_class(app, status, warning):
+ # no set_translator(), but html_translator_class
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ExtHTMLTranslator'
+
+
+@with_app('html',
+ confoverrides={'html_use_smartypants': False})
+def test_html_with_smartypants(app, status, warning):
+ # no set_translator(), html_use_smartypants=False
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'HTMLTranslator'
+
+
+@with_app('html', testroot='api-set-translator')
+def test_html_with_set_translator_for_html_(app, status, warning):
+ # use set_translator(), no html_translator_class
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfHTMLTranslator'
+
+
+@with_app('html', testroot='api-set-translator',
+ confoverrides={'html_translator_class': 'ext.ExtHTMLTranslator'})
+def test_html_with_set_translator_for_html_and_html_translator_class(
+ app, status, warning):
+ # use set_translator() and html_translator_class.
+ # set_translator() is given priority over html_translator_clas.
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfHTMLTranslator'
+
+
+## this test break test_websupport.test_comments test. why?
+# @with_app(
+# buildername='dirhtml',
+# srcdir=(test_roots / 'test-api-set-translator'),
+# )
+# def test_dirhtml_set_translator_for_dirhtml(app, status, warning):
+# translator_class = app.builder.translator_class
+# assert translator_class
+# assert translator_class.__name__ == 'ConfDirHTMLTranslator'
+
+
+@with_app('singlehtml', testroot='api-set-translator')
+def test_singlehtml_set_translator_for_singlehtml(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfSingleHTMLTranslator'
+
+
+@with_app('pickle', testroot='api-set-translator')
+def test_pickle_set_translator_for_pickle(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfPickleTranslator'
+
+
+@with_app('json', testroot='api-set-translator')
+def test_json_set_translator_for_json(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfJsonTranslator'
+
+
+@with_app('latex', testroot='api-set-translator')
+def test_html_with_set_translator_for_latex(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfLaTeXTranslator'
+
+
+@with_app('man', testroot='api-set-translator')
+def test_html_with_set_translator_for_man(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfManualPageTranslator'
+
+
+@with_app('texinfo', testroot='api-set-translator')
+def test_html_with_set_translator_for_texinfo(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfTexinfoTranslator'
+
+
+@with_app('text', testroot='api-set-translator')
+def test_html_with_set_translator_for_text(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfTextTranslator'
+
+
+@with_app('xml', testroot='api-set-translator')
+def test_html_with_set_translator_for_xml(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfXMLTranslator'
+
+
+@with_app('pseudoxml', testroot='api-set-translator')
+def test_html_with_set_translator_for_pseudoxml(app, status, warning):
+ translator_class = app.builder.translator_class
+ assert translator_class
+ assert translator_class.__name__ == 'ConfPseudoXMLTranslator'
diff --git a/tests/test_apidoc.py b/tests/test_apidoc.py
new file mode 100644
index 00000000..3bc16f5c
--- /dev/null
+++ b/tests/test_apidoc.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+"""
+ test_apidoc
+ ~~~~~~~~~~~
+
+ Test the sphinx.apidoc module.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from __future__ import print_function
+
+import sys
+
+from sphinx import apidoc
+
+from util import with_tempdir, with_app, rootdir
+
+
+@with_tempdir
+def test_simple(tempdir):
+ codedir = rootdir / 'root'
+ outdir = tempdir / 'out'
+ args = ['sphinx-apidoc', '-o', outdir, '-F', codedir]
+ apidoc.main(args)
+
+ assert (outdir / 'conf.py').isfile()
+ assert (outdir / 'autodoc_fodder.rst').isfile()
+ assert (outdir / 'index.rst').isfile()
+
+ @with_app('text', srcdir=outdir)
+ def assert_build(app, status, warning):
+ app.build()
+ print(status.getvalue())
+ print(warning.getvalue())
+
+ sys.path.append(codedir)
+ try:
+ assert_build()
+ finally:
+ sys.path.remove(codedir)
diff --git a/tests/test_application.py b/tests/test_application.py
index 3d464eb5..1f188de1 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -9,22 +9,21 @@
:license: BSD, see LICENSE for details.
"""
-from StringIO import StringIO
-
from docutils import nodes
+
from sphinx.application import ExtensionError
from sphinx.domains import Domain
-from util import with_app, raises_msg, TestApp
+from util import with_app, raises_msg
@with_app()
-def test_events(app):
- def empty(): pass
+def test_events(app, status, warning):
+ def empty():
+ pass
raises_msg(ExtensionError, "Unknown event name: invalid",
app.connect, "invalid", empty)
-
app.add_event("my_event")
raises_msg(ExtensionError, "Event 'my_event' already present",
app.add_event, "my_event")
@@ -43,57 +42,49 @@ def test_events(app):
@with_app()
-def test_emit_with_nonascii_name_node(app):
+def test_emit_with_nonascii_name_node(app, status, warning):
node = nodes.section(names=[u'\u65e5\u672c\u8a9e'])
app.emit('my_event', node)
-def test_output():
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- status.truncate(0) # __init__ writes to status
- status.seek(0)
- app.info("Nothing here...")
- assert status.getvalue() == "Nothing here...\n"
- status.truncate(0)
- status.seek(0)
- app.info("Nothing here...", True)
- assert status.getvalue() == "Nothing here..."
-
- old_count = app._warncount
- app.warn("Bad news!")
- assert warnings.getvalue() == "WARNING: Bad news!\n"
- assert app._warncount == old_count + 1
- finally:
- app.cleanup()
-
-
-def test_extensions():
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- app.setup_extension('shutil')
- assert warnings.getvalue().startswith("WARNING: extension 'shutil'")
- finally:
- app.cleanup()
-
-def test_domain_override():
+@with_app()
+def test_output(app, status, warning):
+ status.truncate(0) # __init__ writes to status
+ status.seek(0)
+ app.info("Nothing here...")
+ assert status.getvalue() == "Nothing here...\n"
+ status.truncate(0)
+ status.seek(0)
+ app.info("Nothing here...", True)
+ assert status.getvalue() == "Nothing here..."
+
+ old_count = app._warncount
+ app.warn("Bad news!")
+ assert warning.getvalue() == "WARNING: Bad news!\n"
+ assert app._warncount == old_count + 1
+
+
+@with_app()
+def test_extensions(app, status, warning):
+ app.setup_extension('shutil')
+ assert warning.getvalue().startswith("WARNING: extension 'shutil'")
+
+
+@with_app()
+def test_domain_override(app, status, warning):
class A(Domain):
name = 'foo'
+
class B(A):
name = 'foo'
+
class C(Domain):
name = 'foo'
- status, warnings = StringIO(), StringIO()
- app = TestApp(status=status, warning=warnings)
- try:
- # No domain know named foo.
- raises_msg(ExtensionError, 'domain foo not yet registered',
- app.override_domain, A)
- assert app.add_domain(A) is None
- assert app.override_domain(B) is None
- raises_msg(ExtensionError, 'new domain not a subclass of registered '
- 'foo domain', app.override_domain, C)
- finally:
- app.cleanup()
+
+ # No domain know named foo.
+ raises_msg(ExtensionError, 'domain foo not yet registered',
+ app.override_domain, A)
+ assert app.add_domain(A) is None
+ assert app.override_domain(B) is None
+ raises_msg(ExtensionError, 'new domain not a subclass of registered '
+ 'foo domain', app.override_domain, C)
diff --git a/tests/test_autodoc.py b/tests/test_autodoc.py
index ed906d1c..0025170f 100644
--- a/tests/test_autodoc.py
+++ b/tests/test_autodoc.py
@@ -10,17 +10,15 @@
:license: BSD, see LICENSE for details.
"""
-import sys
-from StringIO import StringIO
-
# "raises" imported for usage by autodoc
from util import TestApp, Struct, raises
from nose.tools import with_setup
+from six import StringIO
from docutils.statemachine import ViewList
from sphinx.ext.autodoc import AutoDirective, add_documenter, \
- ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
+ ModuleLevelDocumenter, FunctionDocumenter, cut_lines, between, ALL
app = None
@@ -125,24 +123,24 @@ def test_parse_name():
directive.env.temp_data['autodoc:module'] = 'util'
verify('function', 'raises', ('util', ['raises'], None, None))
del directive.env.temp_data['autodoc:module']
- directive.env.temp_data['py:module'] = 'util'
+ directive.env.ref_context['py:module'] = 'util'
verify('function', 'raises', ('util', ['raises'], None, None))
verify('class', 'TestApp', ('util', ['TestApp'], None, None))
# for members
- directive.env.temp_data['py:module'] = 'foo'
+ directive.env.ref_context['py:module'] = 'foo'
verify('method', 'util.TestApp.cleanup',
('util', ['TestApp', 'cleanup'], None, None))
- directive.env.temp_data['py:module'] = 'util'
- directive.env.temp_data['py:class'] = 'Foo'
+ directive.env.ref_context['py:module'] = 'util'
+ directive.env.ref_context['py:class'] = 'Foo'
directive.env.temp_data['autodoc:class'] = 'TestApp'
verify('method', 'cleanup', ('util', ['TestApp', 'cleanup'], None, None))
verify('method', 'TestApp.cleanup',
('util', ['TestApp', 'cleanup'], None, None))
# and clean up
- del directive.env.temp_data['py:module']
- del directive.env.temp_data['py:class']
+ del directive.env.ref_context['py:module']
+ del directive.env.ref_context['py:class']
del directive.env.temp_data['autodoc:class']
@@ -157,7 +155,7 @@ def test_format_signature():
inst.args = args
inst.retann = retann
res = inst.format_signature()
- print res
+ print(res)
return res
# no signatures for modules
@@ -257,7 +255,7 @@ def test_get_doc():
ds = inst.get_doc(encoding)
# for testing purposes, concat them and strip the empty line at the end
res = sum(ds, [])[:-1]
- print res
+ print(res)
return res
# objects without docstring
@@ -436,6 +434,43 @@ def test_docstring_processing():
@with_setup(setup_test)
+def test_docstring_property_processing():
+ def genarate_docstring(objtype, name, **kw):
+ del processed_docstrings[:]
+ del processed_signatures[:]
+ inst = AutoDirective._registry[objtype](directive, name)
+ inst.generate(**kw)
+ results = list(directive.result)
+ docstrings = inst.get_doc()[0]
+ del directive.result[:]
+ return results, docstrings
+
+ directive.env.config.autodoc_docstring_signature = False
+ results, docstrings = \
+ genarate_docstring('attribute', 'test_autodoc.DocstringSig.prop1')
+ assert '.. py:attribute:: DocstringSig.prop1' in results
+ assert 'First line of docstring' in docstrings
+ assert 'DocstringSig.prop1(self)' in docstrings
+ results, docstrings = \
+ genarate_docstring('attribute', 'test_autodoc.DocstringSig.prop2')
+ assert '.. py:attribute:: DocstringSig.prop2' in results
+ assert 'First line of docstring' in docstrings
+ assert 'Second line of docstring' in docstrings
+
+ directive.env.config.autodoc_docstring_signature = True
+ results, docstrings = \
+ genarate_docstring('attribute', 'test_autodoc.DocstringSig.prop1')
+ assert '.. py:attribute:: DocstringSig.prop1' in results
+ assert 'First line of docstring' in docstrings
+ assert 'DocstringSig.prop1(self)' not in docstrings
+ results, docstrings = \
+ genarate_docstring('attribute', 'test_autodoc.DocstringSig.prop2')
+ assert '.. py:attribute:: DocstringSig.prop2' in results
+ assert 'First line of docstring' in docstrings
+ assert 'Second line of docstring' in docstrings
+
+
+@with_setup(setup_test)
def test_new_documenter():
class MyDocumenter(ModuleLevelDocumenter):
objtype = 'integer'
@@ -565,7 +600,7 @@ def test_generate():
'method', 'test_autodoc.Class.foobar', more_content=None)
# test auto and given content mixing
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
assert_result_contains(' Function.', 'method', 'Class.meth')
add_content = ViewList()
add_content.append('Content.', '', 0)
@@ -663,12 +698,12 @@ def test_generate():
'attribute', 'test_autodoc.Class.descr')
# test generation for C modules (which have no source file)
- directive.env.temp_data['py:module'] = 'time'
+ directive.env.ref_context['py:module'] = 'time'
assert_processes([('function', 'time.asctime')], 'function', 'asctime')
assert_processes([('function', 'time.asctime')], 'function', 'asctime')
# test autodoc_member_order == 'source'
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
assert_order(['.. py:class:: Class(arg)',
' .. py:attribute:: Class.descr',
' .. py:method:: Class.meth()',
@@ -685,7 +720,7 @@ def test_generate():
' .. py:method:: Class.inheritedmeth()',
],
'class', 'Class', member_order='bysource', all_members=True)
- del directive.env.temp_data['py:module']
+ del directive.env.ref_context['py:module']
# test attribute initialized to class instance from other module
directive.env.temp_data['autodoc:class'] = 'test_autodoc.Class'
@@ -710,7 +745,7 @@ def test_generate():
'test_autodoc.Class.moore')
# test new attribute documenter behavior
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
options.undoc_members = True
assert_processes([('class', 'test_autodoc.AttCls'),
('attribute', 'test_autodoc.AttCls.a1'),
@@ -724,7 +759,7 @@ def test_generate():
# test explicit members with instance attributes
del directive.env.temp_data['autodoc:class']
del directive.env.temp_data['autodoc:module']
- directive.env.temp_data['py:module'] = 'test_autodoc'
+ directive.env.ref_context['py:module'] = 'test_autodoc'
options.inherited_members = False
options.undoc_members = False
options.members = ALL
@@ -746,7 +781,7 @@ def test_generate():
], 'class', 'InstAttCls')
del directive.env.temp_data['autodoc:class']
del directive.env.temp_data['autodoc:module']
- del directive.env.temp_data['py:module']
+ del directive.env.ref_context['py:module']
# test descriptor class documentation
options.members = ['CustomDataDescriptor']
@@ -788,12 +823,8 @@ def _funky_classmethod(name, b, c, d, docstring=None):
some arguments."""
def template(cls, a, b, c, d=4, e=5, f=6):
return a, b, c, d, e, f
- if sys.version_info >= (2, 5):
- from functools import partial
- function = partial(template, b=b, c=c, d=d)
- else:
- def function(cls, a, e=5, f=6):
- return template(a, b, c, d, e, f)
+ from functools import partial
+ function = partial(template, b=b, c=c, d=d)
function.__name__ = name
function.__doc__ = docstring
return classmethod(function)
@@ -825,10 +856,9 @@ class Class(Base):
#: should be documented -- süß
attr = 'bar'
+ @property
def prop(self):
"""Property."""
- # stay 2.4 compatible (docstring!)
- prop = property(prop, doc="Property.")
docattr = 'baz'
"""should likewise be documented -- süß"""
@@ -898,6 +928,20 @@ First line of docstring
indented line
"""
+ @property
+ def prop1(self):
+ """DocstringSig.prop1(self)
+ First line of docstring
+ """
+ return 123
+
+ @property
+ def prop2(self):
+ """First line of docstring
+ Second line of docstring
+ """
+ return 456
+
class StrRepr(str):
def __repr__(self):
return self
diff --git a/tests/test_build.py b/tests/test_build.py
index c355b162..117bb84c 100644
--- a/tests/test_build.py
+++ b/tests/test_build.py
@@ -3,79 +3,94 @@
test_build
~~~~~~~~~~
- Test all builders that have no special checks.
+ Test all builders.
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-from util import with_app, test_root, path, SkipTest, TestApp
+from six import BytesIO
+
from textwrap import dedent
+from util import with_app, rootdir, tempdir, SkipTest, TestApp
+
try:
from docutils.writers.manpage import Writer as ManWriter
except ImportError:
ManWriter = None
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
+class MockOpener(object):
+ def open(self, req, **kwargs):
+ class result(BytesIO):
+ headers = None
+ url = req.url
+ return result()
-def test_build():
- for buildername in ('pickle', 'json', 'linkcheck', 'text', 'htmlhelp',
- 'qthelp', 'epub', 'changes', 'singlehtml', 'xml',
- 'pseudoxml'):
- app = TestApp(buildername=buildername)
- yield lambda app: app.builder.build_all(), app
- app.cleanup()
+import sphinx.builders.linkcheck
+sphinx.builders.linkcheck.opener = MockOpener()
-@with_app(buildername='man')
-def test_man(app):
- if ManWriter is None:
+def verify_build(buildername, srcdir):
+ if buildername == 'man' and ManWriter is None:
raise SkipTest('man writer is not available')
- app.builder.build_all()
- assert (app.outdir / 'SphinxTests.1').exists()
+ app = TestApp(buildername=buildername, srcdir=srcdir)
+ try:
+ app.builder.build_all()
+ finally:
+ app.cleanup()
-def _test_nonascii_path(app):
- srcdir = path(app.srcdir)
- mb_name = u'\u65e5\u672c\u8a9e'
+def test_build_all():
+ # If supported, build in a non-ASCII source dir
+ test_name = u'\u65e5\u672c\u8a9e'
try:
- (srcdir / mb_name).makedirs()
+ srcdir = tempdir / test_name
+ (rootdir / 'root').copytree(tempdir / test_name)
except UnicodeEncodeError:
- from path import FILESYSTEMENCODING
- raise SkipTest(
- 'nonascii filename not supported on this filesystem encoding: '
- '%s', FILESYSTEMENCODING)
-
- (srcdir / mb_name / (mb_name + '.txt')).write_text(dedent("""
- multi byte file name page
- ==========================
- """))
-
- master_doc = srcdir / 'contents.txt'
- master_doc.write_bytes((master_doc.text() + dedent("""
- .. toctree::
-
- %(mb_name)s/%(mb_name)s
- """ % {'mb_name': mb_name})
- ).encode('utf-8'))
+ srcdir = tempdir / 'all'
+ else:
+ # add a doc with a non-ASCII file name to the source dir
+ (srcdir / (test_name + '.txt')).write_text(dedent("""
+ nonascii file name page
+ =======================
+ """))
+
+ master_doc = srcdir / 'contents.txt'
+ master_doc.write_bytes((master_doc.text() + dedent("""
+ .. toctree::
+
+ %(test_name)s/%(test_name)s
+ """ % {'test_name': test_name})
+ ).encode('utf-8'))
+
+ # note: no 'html' - if it's ok with dirhtml it's ok with html
+ for buildername in ['dirhtml', 'singlehtml', 'latex', 'texinfo',
+ 'pickle', 'json', 'text', 'htmlhelp', 'qthelp', 'epub',
+ 'changes', 'xml', 'pseudoxml', 'man', 'linkcheck']:
+ yield verify_build, buildername, srcdir
+
+
+@with_app(buildername='text', testroot='circular')
+def test_circular_toctree(app, status, warning):
app.builder.build_all()
+ warnings = warning.getvalue()
+ assert (
+ 'circular toctree references detected, ignoring: '
+ 'sub <- contents <- sub') in warnings
+ assert (
+ 'circular toctree references detected, ignoring: '
+ 'contents <- sub <- contents') in warnings
-def test_nonascii_path():
- (test_root / '_build').rmtree(True) #keep this to build first gettext
-
- builder_names = ['gettext', 'html', 'dirhtml', 'singlehtml', 'latex',
- 'texinfo', 'pickle', 'json', 'linkcheck', 'text',
- 'htmlhelp', 'qthelp', 'epub', 'changes', 'xml',
- 'pseudoxml']
- if ManWriter is not None:
- builder_names.append('man')
-
- for buildername in builder_names:
- app = TestApp(buildername=buildername, srcdir='(temp)')
- yield _test_nonascii_path, app
- app.cleanup()
+@with_app(buildername='text', testroot='numbered-circular')
+def test_numbered_circular_toctree(app, status, warning):
+ app.builder.build_all()
+ warnings = warning.getvalue()
+ assert (
+ 'circular toctree references detected, ignoring: '
+ 'sub <- contents <- sub') in warnings
+ assert (
+ 'circular toctree references detected, ignoring: '
+ 'contents <- sub <- contents') in warnings
diff --git a/tests/test_build_gettext.py b/tests/test_build_gettext.py
index 9bde44b5..677f3505 100644
--- a/tests/test_build_gettext.py
+++ b/tests/test_build_gettext.py
@@ -8,47 +8,32 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
-import gettext
import os
import re
+import gettext
from subprocess import Popen, PIPE
-from util import test_root, test_roots, with_app, SkipTest
-
+from nose.tools import assert_true, assert_equal
-def teardown_module():
- (test_root / '_build').rmtree(True)
- (test_roots / 'test-intl' / '_build').rmtree(True),
+from util import with_app, gen_with_app, SkipTest, assert_in
-@with_app(buildername='gettext')
-def test_all(app):
+@gen_with_app('gettext', srcdir='root-gettext')
+def test_all(app, status, warning):
# Generic build; should fail only when the builder is horribly broken.
app.builder.build_all()
-
-@with_app(buildername='gettext')
-def test_build(app):
# Do messages end up in the correct location?
- app.builder.build(['extapi', 'subdir/includes'])
# top-level documents end up in a message catalog
- assert (app.outdir / 'extapi.pot').isfile()
+ yield assert_true, (app.outdir / 'extapi.pot').isfile()
# directory items are grouped into sections
- assert (app.outdir / 'subdir.pot').isfile()
-
+ yield assert_true, (app.outdir / 'subdir.pot').isfile()
-@with_app(buildername='gettext')
-def test_seealso(app):
# regression test for issue #960
- app.builder.build(['markup'])
catalog = (app.outdir / 'markup.pot').text(encoding='utf-8')
- assert 'msgid "something, something else, something more"' in catalog
-
-
-@with_app(buildername='gettext')
-def test_gettext(app):
- app.builder.build(['markup'])
+ yield assert_in, 'msgid "something, something else, something more"', catalog
(app.outdir / 'en' / 'LC_MESSAGES').makedirs()
cwd = os.getcwd()
@@ -57,48 +42,48 @@ def test_gettext(app):
try:
p = Popen(['msginit', '--no-translator', '-i', 'markup.pot',
'--locale', 'en_US'],
- stdout=PIPE, stderr=PIPE)
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msginit was not found
else:
stdout, stderr = p.communicate()
if p.returncode != 0:
- print stdout
- print stderr
+ print(stdout)
+ print(stderr)
assert False, 'msginit exited with return code %s' % \
- p.returncode
- assert (app.outdir / 'en_US.po').isfile(), 'msginit failed'
+ p.returncode
+ yield assert_true, (app.outdir / 'en_US.po').isfile(), 'msginit failed'
try:
p = Popen(['msgfmt', 'en_US.po', '-o',
- os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
- stdout=PIPE, stderr=PIPE)
+ os.path.join('en', 'LC_MESSAGES', 'test_root.mo')],
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
stdout, stderr = p.communicate()
if p.returncode != 0:
- print stdout
- print stderr
+ print(stdout)
+ print(stderr)
assert False, 'msgfmt exited with return code %s' % \
- p.returncode
- assert (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(), \
- 'msgfmt failed'
+ p.returncode
+ yield (assert_true,
+ (app.outdir / 'en' / 'LC_MESSAGES' / 'test_root.mo').isfile(),
+ 'msgfmt failed')
finally:
os.chdir(cwd)
_ = gettext.translation('test_root', app.outdir, languages=['en']).gettext
- assert _("Testing various markup") == u"Testing various markup"
+ yield assert_equal, _("Testing various markup"), u"Testing various markup"
-@with_app(buildername='gettext',
- srcdir=(test_roots / 'test-intl'),
- doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'),
+@with_app('gettext', testroot='intl',
confoverrides={'gettext_compact': False})
-def test_gettext_index_entries(app):
+def test_gettext_index_entries(app, status, warning):
# regression test for #976
app.builder.build(['index_entries'])
_msgid_getter = re.compile(r'msgid "(.*)"').search
+
def msgid_getter(msgid):
m = _msgid_getter(msgid)
if m:
@@ -106,7 +91,7 @@ def test_gettext_index_entries(app):
return None
pot = (app.outdir / 'index_entries.pot').text(encoding='utf-8')
- msgids = filter(None, map(msgid_getter, pot.splitlines()))
+ msgids = [_f for _f in map(msgid_getter, pot.splitlines()) if _f]
expected_msgids = [
"i18n with index entries",
@@ -129,6 +114,38 @@ def test_gettext_index_entries(app):
"Exception",
"Statement",
"Builtin",
+ ]
+ for expect in expected_msgids:
+ assert expect in msgids
+ msgids.remove(expect)
+
+ # unexpected msgid existent
+ assert msgids == []
+
+
+@with_app('gettext', testroot='intl',
+ confoverrides={'gettext_compact': False, 'gettext_enables': []})
+def test_gettext_disable_index_entries(app, status, warning):
+ # regression test for #976
+ app.builder.build(['index_entries'])
+
+ _msgid_getter = re.compile(r'msgid "(.*)"').search
+
+ def msgid_getter(msgid):
+ m = _msgid_getter(msgid)
+ if m:
+ return m.groups()[0]
+ return None
+
+ pot = (app.outdir / 'index_entries.pot').text(encoding='utf-8')
+ msgids = [_f for _f in map(msgid_getter, pot.splitlines()) if _f]
+
+ expected_msgids = [
+ "i18n with index entries",
+ "index target section",
+ "this is :index:`Newsletter` target paragraph.",
+ "various index entries",
+ "That's all.",
]
for expect in expected_msgids:
assert expect in msgids
@@ -138,10 +155,8 @@ def test_gettext_index_entries(app):
assert msgids == []
-@with_app(buildername='gettext',
- srcdir=(test_roots / 'test-intl'),
- doctreedir=(test_roots / 'test-intl' / '_build' / 'doctree'))
-def test_gettext_template(app):
+@with_app(buildername='gettext', testroot='intl')
+def test_gettext_template(app, status, warning):
app.builder.build_all()
assert (app.outdir / 'sphinx.pot').isfile()
diff --git a/tests/test_build_html.py b/tests/test_build_html.py
index c361f778..f0512aed 100644
--- a/tests/test_build_html.py
+++ b/tests/test_build_html.py
@@ -11,43 +11,31 @@
import os
import re
-import sys
-import htmlentitydefs
-from StringIO import StringIO
-try:
- import pygments
-except ImportError:
- pygments = None
+from six import PY3, iteritems
+from six.moves import html_entities
from sphinx import __version__
-from util import test_root, remove_unicode_literals, gen_with_app, with_app
+from util import remove_unicode_literals, gen_with_app
from etree13 import ElementTree as ET
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-html_warnfile = StringIO()
-
ENV_WARNINGS = """\
-%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
+(%(root)s/autodoc_fodder.py:docstring of autodoc_fodder\\.MarkupError:2: \
WARNING: Explicit markup ends without a blank line; unexpected \
unindent\\.\\n?
-%(root)s/images.txt:9: WARNING: image file not readable: foo.png
+)?%(root)s/images.txt:9: WARNING: image file not readable: foo.png
%(root)s/images.txt:23: WARNING: nonlocal image URI found: \
http://www.python.org/logo.png
%(root)s/includes.txt:\\d*: WARNING: Encoding 'utf-8-sig' used for \
reading included file u'.*?wrongenc.inc' seems to be wrong, try giving an \
:encoding: option\\n?
%(root)s/includes.txt:4: WARNING: download file not readable: .*?nonexisting.png
-%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \
-not contain option marker - or -- or /
-%(root)s/objects.txt:\\d*: WARNING: using old C markup; please migrate to \
-new-style markup \(e.g. c:function instead of cfunction\), see \
-http://sphinx-doc.org/domains.html
-"""
+(%(root)s/markup.txt:\\d+: WARNING: Malformed :option: u'Python c option', does \
+not contain option marker - or -- or / or \\+
+%(root)s/undecodable.txt:3: WARNING: undecodable source characters, replacing \
+with "\\?": b?'here: >>>(\\\\|/)xbb<<<'
+)?"""
HTML_WARNINGS = ENV_WARNINGS + """\
%(root)s/images.txt:20: WARNING: no matching candidate for image URI u'foo.\\*'
@@ -57,13 +45,14 @@ None:\\d+: WARNING: citation not found: missing
%(root)s/markup.txt:: WARNING: invalid pair index entry u'keyword; '
"""
-if sys.version_info >= (3, 0):
+if PY3:
ENV_WARNINGS = remove_unicode_literals(ENV_WARNINGS)
HTML_WARNINGS = remove_unicode_literals(HTML_WARNINGS)
def tail_check(check):
rex = re.compile(check)
+
def checker(nodes):
for node in nodes:
if node.tail and rex.search(node.tail):
@@ -87,6 +76,8 @@ HTML_XPATH = {
(".//a[@href='../_downloads/img.png']", ''),
(".//img[@src='../_images/img.png']", ''),
(".//p", 'This is an include file.'),
+ (".//pre/span", 'line 1'),
+ (".//pre/span", 'line 2'),
],
'includes.html': [
(".//pre", u'Max Strauß'),
@@ -94,6 +85,23 @@ HTML_XPATH = {
(".//a[@href='_downloads/img1.png']", ''),
(".//pre", u'"quotes"'),
(".//pre", u"'included'"),
+ (".//pre/span[@class='s']", u'üöä'),
+ (".//div[@class='inc-pyobj1 highlight-text']//pre",
+ r'^class Foo:\n pass\n\s*$'),
+ (".//div[@class='inc-pyobj2 highlight-text']//pre",
+ r'^ def baz\(\):\n pass\n\s*$'),
+ (".//div[@class='inc-lines highlight-text']//pre",
+ r'^class Foo:\n pass\nclass Bar:\n$'),
+ (".//div[@class='inc-startend highlight-text']//pre",
+ u'^foo = "Including Unicode characters: üöä"\\n$'),
+ (".//div[@class='inc-preappend highlight-text']//pre",
+ r'(?m)^START CODE$'),
+ (".//div[@class='inc-pyobj-dedent highlight-python']//span",
+ r'def'),
+ (".//div[@class='inc-tab3 highlight-text']//pre",
+ r'-| |-'),
+ (".//div[@class='inc-tab8 highlight-python']//pre/span",
+ r'-| |-'),
],
'autodoc.html': [
(".//dt[@id='test_autodoc.Class']", ''),
@@ -125,25 +133,30 @@ HTML_XPATH = {
(".//li/strong", r'^command\\n$'),
(".//li/strong", r'^program\\n$'),
(".//li/em", r'^dfn\\n$'),
- (".//li/tt/span[@class='pre']", r'^kbd\\n$'),
+ (".//li/code/span[@class='pre']", r'^kbd\\n$'),
(".//li/em", u'File \N{TRIANGULAR BULLET} Close'),
- (".//li/tt/span[@class='pre']", '^a/$'),
- (".//li/tt/em/span[@class='pre']", '^varpart$'),
- (".//li/tt/em/span[@class='pre']", '^i$'),
+ (".//li/code/span[@class='pre']", '^a/$'),
+ (".//li/code/em/span[@class='pre']", '^varpart$'),
+ (".//li/code/em/span[@class='pre']", '^i$'),
(".//a[@href='https://www.python.org/dev/peps/pep-0008']"
"[@class='pep reference external']/strong", 'PEP 8'),
+ (".//a[@href='https://www.python.org/dev/peps/pep-0008']"
+ "[@class='pep reference external']/strong",
+ 'Python Enhancement Proposal #8'),
(".//a[@href='https://tools.ietf.org/html/rfc1.html']"
"[@class='rfc reference external']/strong", 'RFC 1'),
+ (".//a[@href='https://tools.ietf.org/html/rfc1.html']"
+ "[@class='rfc reference external']/strong", 'Request for Comments #1'),
(".//a[@href='objects.html#envvar-HOME']"
- "[@class='reference internal']/tt/span[@class='pre']", 'HOME'),
+ "[@class='reference internal']/code/span[@class='pre']", 'HOME'),
(".//a[@href='#with']"
- "[@class='reference internal']/tt/span[@class='pre']", '^with$'),
+ "[@class='reference internal']/code/span[@class='pre']", '^with$'),
(".//a[@href='#grammar-token-try_stmt']"
- "[@class='reference internal']/tt/span", '^statement$'),
+ "[@class='reference internal']/code/span", '^statement$'),
(".//a[@href='subdir/includes.html']"
"[@class='reference internal']/em", 'Including in subdir'),
(".//a[@href='objects.html#cmdoption-python-c']"
- "[@class='reference internal']/em", 'Python -c option'),
+ "[@class='reference internal']/code/span[@class='pre']", '-c'),
# abbreviations
(".//abbr[@title='abbreviation']", '^abbr$'),
# version stuff
@@ -168,22 +181,28 @@ HTML_XPATH = {
(".//dl/dt[@id='term-boson']", 'boson'),
# a production list
(".//pre/strong", 'try_stmt'),
- (".//pre/a[@href='#grammar-token-try1_stmt']/tt/span", 'try1_stmt'),
+ (".//pre/a[@href='#grammar-token-try1_stmt']/code/span", 'try1_stmt'),
# tests for ``only`` directive
(".//p", 'A global substitution.'),
(".//p", 'In HTML.'),
(".//p", 'In both.'),
(".//p", 'Always present'),
+ # tests for ``any`` role
+ (".//a[@href='#with']/em", 'headings'),
+ (".//a[@href='objects.html#func_without_body']/code/span", 'objects'),
],
'objects.html': [
(".//dt[@id='mod.Cls.meth1']", ''),
(".//dt[@id='errmod.Error']", ''),
- (".//dt/tt", r'long\(parameter,\s* list\)'),
- (".//dt/tt", 'another one'),
+ (".//dt/code", r'long\(parameter,\s* list\)'),
+ (".//dt/code", 'another one'),
(".//a[@href='#mod.Cls'][@class='reference internal']", ''),
(".//dl[@class='userdesc']", ''),
(".//dt[@id='userdesc-myobj']", ''),
(".//a[@href='#userdesc-myobj'][@class='reference internal']", ''),
+ # docfields
+ (".//a[@class='reference internal'][@href='#TimeInt']/em", 'TimeInt'),
+ (".//a[@class='reference internal'][@href='#Time']", 'Time'),
# C references
(".//span[@class='pre']", 'CFunction()'),
(".//a[@href='#c.Sphinx_DoSomething']", ''),
@@ -191,8 +210,6 @@ HTML_XPATH = {
(".//a[@href='#c.SPHINX_USE_PYTHON']", ''),
(".//a[@href='#c.SphinxType']", ''),
(".//a[@href='#c.sphinx_global']", ''),
- # reference from old C markup extension
- (".//a[@href='#c.Sphinx_Func']", ''),
# test global TOC created by toctree()
(".//ul[@class='current']/li[@class='toctree-l1 current']/a[@href='']",
'Testing object descriptions'),
@@ -213,12 +230,10 @@ HTML_XPATH = {
(".//h4", 'Custom sidebar'),
# docfields
(".//td[@class='field-body']/strong", '^moo$'),
- (".//td[@class='field-body']/strong",
- tail_check(r'\(Moo\) .* Moo')),
+ (".//td[@class='field-body']/strong", tail_check(r'\(Moo\) .* Moo')),
(".//td[@class='field-body']/ul/li/strong", '^hour$'),
(".//td[@class='field-body']/ul/li/em", '^DuplicateType$'),
- (".//td[@class='field-body']/ul/li/em",
- tail_check(r'.* Some parameter')),
+ (".//td[@class='field-body']/ul/li/em", tail_check(r'.* Some parameter')),
],
'contents.html': [
(".//meta[@name='hc'][@content='hcval']", ''),
@@ -239,6 +254,11 @@ HTML_XPATH = {
(".//h4", 'Contents sidebar'),
# custom JavaScript
(".//script[@src='file://moo.js']", ''),
+ # URL in contents
+ (".//a[@class='reference external'][@href='http://sphinx-doc.org/']",
+ 'http://sphinx-doc.org/'),
+ (".//a[@class='reference external'][@href='http://sphinx-doc.org/latest/']",
+ 'Latest reference'),
],
'bom.html': [
(".//title", " File with UTF-8 BOM"),
@@ -258,33 +278,19 @@ HTML_XPATH = {
(".//a/strong", "Other"),
(".//a", "entry"),
(".//dt/a", "double"),
- ]
+ ],
+ 'footnote.html': [
+ (".//a[@class='footnote-reference'][@href='#id5'][@id='id1']", r"\[1\]"),
+ (".//a[@class='footnote-reference'][@href='#id6'][@id='id2']", r"\[2\]"),
+ (".//a[@class='footnote-reference'][@href='#foo'][@id='id3']", r"\[3\]"),
+ (".//a[@class='reference internal'][@href='#bar'][@id='id4']", r"\[bar\]"),
+ (".//a[@class='fn-backref'][@href='#id1']", r"\[1\]"),
+ (".//a[@class='fn-backref'][@href='#id2']", r"\[2\]"),
+ (".//a[@class='fn-backref'][@href='#id3']", r"\[3\]"),
+ (".//a[@class='fn-backref'][@href='#id4']", r"\[bar\]"),
+ ],
}
-if pygments:
- HTML_XPATH['includes.html'].extend([
- (".//pre/span[@class='s']", u'üöä'),
- (".//div[@class='inc-pyobj1 highlight-text']//pre",
- r'^class Foo:\n pass\n\s*$'),
- (".//div[@class='inc-pyobj2 highlight-text']//pre",
- r'^ def baz\(\):\n pass\n\s*$'),
- (".//div[@class='inc-lines highlight-text']//pre",
- r'^class Foo:\n pass\nclass Bar:\n$'),
- (".//div[@class='inc-startend highlight-text']//pre",
- ur'^foo = "Including Unicode characters: üöä"\n$'),
- (".//div[@class='inc-preappend highlight-text']//pre",
- r'(?m)^START CODE$'),
- (".//div[@class='inc-pyobj-dedent highlight-python']//span",
- r'def'),
- (".//div[@class='inc-tab3 highlight-text']//pre",
- r'-| |-'),
- (".//div[@class='inc-tab8 highlight-python']//pre/span",
- r'-| |-'),
- ])
- HTML_XPATH['subdir/includes.html'].extend([
- (".//pre/span", 'line 1'),
- (".//pre/span", 'line 2'),
- ])
class NslessParser(ET.XMLParser):
"""XMLParser that throws away namespaces in tag names."""
@@ -301,10 +307,15 @@ class NslessParser(ET.XMLParser):
return name
-def check_xpath(etree, fname, path, check):
+def check_xpath(etree, fname, path, check, be_found=True):
nodes = list(etree.findall(path))
- assert nodes != [], ('did not find any node matching xpath '
- '%r in file %s' % (path, fname))
+ if check is None:
+ assert nodes == [], ('found any nodes matching xpath '
+ '%r in file %s' % (path, fname))
+ return
+ else:
+ assert nodes != [], ('did not find any node matching xpath '
+ '%r in file %s' % (path, fname))
if hasattr(check, '__call__'):
check(nodes)
elif not check:
@@ -312,13 +323,17 @@ def check_xpath(etree, fname, path, check):
pass
else:
rex = re.compile(check)
- for node in nodes:
- if node.text and rex.search(node.text):
- break
+ if be_found:
+ if any(node.text and rex.search(node.text) for node in nodes):
+ return
else:
- assert False, ('%r not found in any node matching '
- 'path %s in %s: %r' % (check, path, fname,
- [node.text for node in nodes]))
+ if all(node.text and not rex.search(node.text) for node in nodes):
+ return
+
+ assert False, ('%r not found in any node matching '
+ 'path %s in %s: %r' % (check, path, fname,
+ [node.text for node in nodes]))
+
def check_static_entries(outdir):
staticdir = outdir / '_static'
@@ -333,25 +348,27 @@ def check_static_entries(outdir):
# a file from _static, but matches exclude_patterns
assert not (staticdir / 'excluded.css').exists()
+
def check_extra_entries(outdir):
assert (outdir / 'robots.txt').isfile()
-@gen_with_app(buildername='html', warning=html_warnfile, cleanenv=True,
+
+@gen_with_app(buildername='html',
confoverrides={'html_context.hckey_co': 'hcval_co'},
tags=['testtag'])
-def test_html(app):
+def test_html_output(app, status, warning):
app.builder.build_all()
- html_warnings = html_warnfile.getvalue().replace(os.sep, '/')
+ html_warnings = warning.getvalue().replace(os.sep, '/')
html_warnings_exp = HTML_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(html_warnings_exp + '$', html_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + html_warnings_exp + \
- '--- Got:\n' + html_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + html_warnings_exp + \
+ '--- Got:\n' + html_warnings
- for fname, paths in HTML_XPATH.iteritems():
+ for fname, paths in iteritems(HTML_XPATH):
parser = NslessParser()
- parser.entity.update(htmlentitydefs.entitydefs)
+ parser.entity.update(html_entities.entitydefs)
fp = open(os.path.join(app.outdir, fname), 'rb')
try:
etree = ET.parse(fp, parser)
@@ -363,16 +380,539 @@ def test_html(app):
check_static_entries(app.builder.outdir)
check_extra_entries(app.builder.outdir)
-@with_app(buildername='html', srcdir='(empty)',
- confoverrides={'html_sidebars': {'*': ['globaltoc.html']}},
- )
-def test_html_with_globaltoc_and_hidden_toctree(app):
- # issue #1157: combination of 'globaltoc.html' and hidden toctree cause
- # exception.
- (app.srcdir / 'contents.rst').write_text(
- '\n.. toctree::'
- '\n'
- '\n.. toctree::'
- '\n :hidden:'
- '\n')
+
+@gen_with_app(buildername='html', testroot='tocdepth')
+def test_tocdepth(app, status, warning):
+ # issue #1251
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
+ (".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
+ (".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False),
+ (".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False),
+ ],
+ 'foo.html': [
+ (".//h1", '1. Foo', True),
+ (".//h2", '1.1. Foo A', True),
+ (".//h3", '1.1.1. Foo A1', True),
+ (".//h2", '1.2. Foo B', True),
+ (".//h3", '1.2.1. Foo B1', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '1.1. Foo A', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '1.1.1. Foo A1', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '1.2. Foo B', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '1.2.1. Foo B1', True),
+ ],
+ 'bar.html': [
+ (".//h1", '2. Bar', True),
+ (".//h2", '2.1. Bar A', True),
+ (".//h2", '2.2. Bar B', True),
+ (".//h3", '2.2.1. Bar B1', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '2. Bar', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '2.1. Bar A', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '2.2. Bar B', True),
+ (".//div[@class='sphinxsidebarwrapper']//li/a", '2.2.1. Bar B1', False),
+ ],
+ 'baz.html': [
+ (".//h1", '2.1.1. Baz A', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='singlehtml', testroot='tocdepth')
+def test_tocdepth_singlehtml(app, status, warning):
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//li[@class='toctree-l3']/a", '1.1.1. Foo A1', True),
+ (".//li[@class='toctree-l3']/a", '1.2.1. Foo B1', True),
+ (".//li[@class='toctree-l3']/a", '2.1.1. Bar A1', False),
+ (".//li[@class='toctree-l3']/a", '2.2.1. Bar B1', False),
+
+ # index.rst
+ (".//h1", 'test-tocdepth', True),
+
+ # foo.rst
+ (".//h2", '1. Foo', True),
+ (".//h3", '1.1. Foo A', True),
+ (".//h4", '1.1.1. Foo A1', True),
+ (".//h3", '1.2. Foo B', True),
+ (".//h4", '1.2.1. Foo B1', True),
+
+ # bar.rst
+ (".//h2", '2. Bar', True),
+ (".//h3", '2.1. Bar A', True),
+ (".//h3", '2.2. Bar B', True),
+ (".//h4", '2.2.1. Bar B1', True),
+
+ # baz.rst
+ (".//h4", '2.1.1. Baz A', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='html', testroot='numfig')
+def test_numfig_disabled(app, status, warning):
app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", None, True),
+ (".//table/caption/span[@class='caption-number']", None, True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", None, True),
+ (".//li/code/span", '^fig1$', True),
+ (".//li/code/span", '^Figure#$', True),
+ (".//li/code/span", '^table-1$', True),
+ (".//li/code/span", '^Table:#$', True),
+ (".//li/code/span", '^code_1$', True),
+ (".//li/code/span", '^Code-#$', True),
+ ],
+ 'foo.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", None, True),
+ (".//table/caption/span[@class='caption-number']", None, True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", None, True),
+ ],
+ 'bar.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", None, True),
+ (".//table/caption/span[@class='caption-number']", None, True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", None, True),
+ ],
+ 'baz.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", None, True),
+ (".//table/caption/span[@class='caption-number']", None, True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", None, True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='html', testroot='numfig',
+ confoverrides={'numfig': True})
+def test_numfig_without_numbered_toctree(app, status, warning):
+ # remove :numbered: option
+ index = (app.srcdir / 'index.rst').text()
+ index = re.sub(':numbered:.*', '', index, re.MULTILINE)
+ (app.srcdir / 'index.rst').write_text(index, encoding='utf-8')
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 9 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 10 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 9 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 10 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 9 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 10 $', True),
+ (".//li/a/em", '^Fig. 9$', True),
+ (".//li/a/em", '^Figure6$', True),
+ (".//li/a/em", '^Table 9$', True),
+ (".//li/a/em", '^Table:6$', True),
+ (".//li/a/em", '^Listing 9$', True),
+ (".//li/a/em", '^Code-6$', True),
+ ],
+ 'foo.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 4 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 4 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 4 $', True),
+ ],
+ 'bar.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 5 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 7 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 8 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 5 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 7 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 8 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 5 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 7 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 8 $', True),
+ ],
+ 'baz.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 6 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 6 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 6 $', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='html', testroot='numfig',
+ confoverrides={'numfig': True})
+def test_numfig_with_numbered_toctree(app, status, warning):
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2 $', True),
+ (".//li/a/em", '^Fig. 1$', True),
+ (".//li/a/em", '^Figure2.2$', True),
+ (".//li/a/em", '^Table 1$', True),
+ (".//li/a/em", '^Table:2.2$', True),
+ (".//li/a/em", '^Listing 1$', True),
+ (".//li/a/em", '^Code-2.2$', True),
+ ],
+ 'foo.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.2 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.4 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.4 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.4 $', True),
+ ],
+ 'bar.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.4 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.4 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.4 $', True),
+ ],
+ 'baz.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.2 $', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='html', testroot='numfig',
+ confoverrides={'numfig': True,
+ 'numfig_format': {'figure': 'Figure:%s',
+ 'table': 'Tab_%s',
+ 'code-block': 'Code-%s'}})
+def test_numfig_with_prefix(app, status, warning):
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-2 $', True),
+ (".//li/a/em", '^Figure:1$', True),
+ (".//li/a/em", '^Figure2.2$', True),
+ (".//li/a/em", '^Tab_1$', True),
+ (".//li/a/em", '^Table:2.2$', True),
+ (".//li/a/em", '^Code-1$', True),
+ (".//li/a/em", '^Code-2.2$', True),
+ ],
+ 'foo.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:1.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:1.2 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:1.3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:1.4 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_1.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_1.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_1.3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_1.4 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-1.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-1.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-1.3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-1.4 $', True),
+ ],
+ 'bar.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:2.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:2.3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:2.4 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_2.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_2.3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_2.4 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-2.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-2.3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-2.4 $', True),
+ ],
+ 'baz.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Figure:2.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Tab_2.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Code-2.2 $', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
+
+
+@gen_with_app(buildername='html', testroot='numfig',
+ confoverrides={'numfig': True, 'numfig_secnum_depth': 2})
+def test_numfig_with_secnum_depth(app, status, warning):
+ app.builder.build_all()
+
+ expects = {
+ 'index.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2 $', True),
+ (".//li/a/em", '^Fig. 1$', True),
+ (".//li/a/em", '^Figure2.1.2$', True),
+ (".//li/a/em", '^Table 1$', True),
+ (".//li/a/em", '^Table:2.1.2$', True),
+ (".//li/a/em", '^Listing 1$', True),
+ (".//li/a/em", '^Code-2.1.2$', True),
+ ],
+ 'foo.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.1.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.1.2 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 1.2.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.1.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.1.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 1.2.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.1.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.1.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 1.2.1 $', True),
+ ],
+ 'bar.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.1.1 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.1.3 $', True),
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.2.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.1.1 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.1.3 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.2.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.1.1 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.1.3 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.2.1 $', True),
+ ],
+ 'baz.html': [
+ (".//div[@class='figure']/p[@class='caption']/"
+ "span[@class='caption-number']", '^Fig. 2.1.2 $', True),
+ (".//table/caption/span[@class='caption-number']",
+ '^Table 2.1.2 $', True),
+ (".//div[@class='code-block-caption']/"
+ "span[@class='caption-number']", '^Listing 2.1.2 $', True),
+ ],
+ }
+
+ for fname, paths in iteritems(expects):
+ parser = NslessParser()
+ parser.entity.update(html_entities.entitydefs)
+ fp = open(os.path.join(app.outdir, fname), 'rb')
+ try:
+ etree = ET.parse(fp, parser)
+ finally:
+ fp.close()
+
+ for xpath, check, be_found in paths:
+ yield check_xpath, etree, fname, xpath, check, be_found
diff --git a/tests/test_build_latex.py b/tests/test_build_latex.py
index 78aa71cc..005f99af 100644
--- a/tests/test_build_latex.py
+++ b/tests/test_build_latex.py
@@ -8,25 +8,20 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
import os
import re
-import sys
-from StringIO import StringIO
from subprocess import Popen, PIPE
+from six import PY3
+
from sphinx.writers.latex import LaTeXTranslator
-from util import test_root, SkipTest, remove_unicode_literals, with_app
+from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-latex_warnfile = StringIO()
-
LATEX_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
@@ -34,21 +29,21 @@ WARNING: invalid pair index entry u''
WARNING: invalid pair index entry u'keyword; '
"""
-if sys.version_info >= (3, 0):
+if PY3:
LATEX_WARNINGS = remove_unicode_literals(LATEX_WARNINGS)
-@with_app(buildername='latex', warning=latex_warnfile, cleanenv=True)
-def test_latex(app):
+@with_app(buildername='latex')
+def test_latex(app, status, warning):
LaTeXTranslator.ignore_missing_images = True
app.builder.build_all()
- latex_warnings = latex_warnfile.getvalue().replace(os.sep, '/')
+ latex_warnings = warning.getvalue().replace(os.sep, '/')
latex_warnings_exp = LATEX_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(latex_warnings_exp + '$', latex_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + latex_warnings_exp + \
- '--- Got:\n' + latex_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + latex_warnings_exp + \
+ '--- Got:\n' + latex_warnings
# file from latex_additional_files
assert (app.outdir / 'svgimg.svg').isfile()
@@ -90,9 +85,100 @@ def test_latex(app):
else:
stdout, stderr = p.communicate()
if p.returncode != 0:
- print stdout
- print stderr
+ print(stdout)
+ print(stderr)
del app.cleanup_trees[:]
assert False, 'latex exited with return code %s' % p.returncode
finally:
os.chdir(cwd)
+
+
+@with_app(buildername='latex',
+ confoverrides={'latex_documents': [
+ ('contents', 'SphinxTests.tex', 'Sphinx Tests Documentation',
+ 'Georg Brandl \\and someone else', 'howto'),
+ ]},
+ srcdir='latex_howto')
+def test_latex_howto(app, status, warning):
+ LaTeXTranslator.ignore_missing_images = True
+ app.builder.build_all()
+ latex_warnings = warning.getvalue().replace(os.sep, '/')
+ latex_warnings_exp = LATEX_WARNINGS % {
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ assert re.match(latex_warnings_exp + '$', latex_warnings), \
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + latex_warnings_exp + \
+ '--- Got:\n' + latex_warnings
+
+ # file from latex_additional_files
+ assert (app.outdir / 'svgimg.svg').isfile()
+
+ # only run latex if all needed packages are there
+ def kpsetest(filename):
+ try:
+ p = Popen(['kpsewhich', filename], stdout=PIPE)
+ except OSError:
+ # no kpsewhich... either no tex distribution is installed or it is
+ # a "strange" one -- don't bother running latex
+ return None
+ else:
+ p.communicate()
+ if p.returncode != 0:
+ # not found
+ return False
+ # found
+ return True
+
+ if kpsetest('article.sty') is None:
+ raise SkipTest('not running latex, it doesn\'t seem to be installed')
+ for filename in ['fancyhdr.sty', 'fancybox.sty', 'titlesec.sty',
+ 'amsmath.sty', 'framed.sty', 'color.sty', 'fancyvrb.sty',
+ 'threeparttable.sty']:
+ if not kpsetest(filename):
+ raise SkipTest('not running latex, the %s package doesn\'t '
+ 'seem to be installed' % filename)
+
+ # now, try to run latex over it
+ cwd = os.getcwd()
+ os.chdir(app.outdir)
+ try:
+ try:
+ p = Popen(['pdflatex', '--interaction=nonstopmode',
+ 'SphinxTests.tex'], stdout=PIPE, stderr=PIPE)
+ except OSError:
+ raise SkipTest # most likely pdflatex was not found
+ else:
+ stdout, stderr = p.communicate()
+ if p.returncode != 0:
+ print(stdout)
+ print(stderr)
+ app.cleanup()
+ assert False, 'latex exited with return code %s' % p.returncode
+ finally:
+ os.chdir(cwd)
+
+
+@with_app(buildername='latex', testroot='numfig',
+ confoverrides={'numfig': True})
+def test_numref(app, status, warning):
+ app.builder.build_all()
+ result = (app.outdir / 'Python.tex').text(encoding='utf8')
+ print(result)
+ print(status.getvalue())
+ print(warning.getvalue())
+ assert '\\ref{index:fig1}' in result
+ assert '\\ref{baz:fig22}' in result
+ assert '\\ref{index:table-1}' in result
+ assert '\\ref{baz:table22}' in result
+ assert '\\ref{index:code-1}' in result
+ assert '\\ref{baz:code22}' in result
+
+
+@with_app(buildername='latex')
+def test_latex_add_latex_package(app, status, warning):
+ app.add_latex_package('foo')
+ app.add_latex_package('bar', 'baz')
+ app.builder.build_all()
+ result = (app.outdir / 'SphinxTests.tex').text(encoding='utf8')
+ assert '\\usepackage{foo}' in result
+ assert '\\usepackage[baz]{bar}' in result
diff --git a/tests/test_build_texinfo.py b/tests/test_build_texinfo.py
index 2f519435..bb10f8fa 100644
--- a/tests/test_build_texinfo.py
+++ b/tests/test_build_texinfo.py
@@ -8,46 +8,41 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
import os
import re
-import sys
-from StringIO import StringIO
from subprocess import Popen, PIPE
+from six import PY3
+
from sphinx.writers.texinfo import TexinfoTranslator
-from util import test_root, SkipTest, remove_unicode_literals, with_app
+from util import SkipTest, remove_unicode_literals, with_app
from test_build_html import ENV_WARNINGS
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-texinfo_warnfile = StringIO()
-
TEXINFO_WARNINGS = ENV_WARNINGS + """\
None:None: WARNING: citation not found: missing
None:None: WARNING: no matching candidate for image URI u'foo.\\*'
None:None: WARNING: no matching candidate for image URI u'svgimg.\\*'
"""
-if sys.version_info >= (3, 0):
+if PY3:
TEXINFO_WARNINGS = remove_unicode_literals(TEXINFO_WARNINGS)
-@with_app(buildername='texinfo', warning=texinfo_warnfile, cleanenv=True)
-def test_texinfo(app):
+@with_app('texinfo')
+def test_texinfo(app, status, warning):
TexinfoTranslator.ignore_missing_images = True
app.builder.build_all()
- texinfo_warnings = texinfo_warnfile.getvalue().replace(os.sep, '/')
+ texinfo_warnings = warning.getvalue().replace(os.sep, '/')
texinfo_warnings_exp = TEXINFO_WARNINGS % {
- 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
+ 'root': re.escape(app.srcdir.replace(os.sep, '/'))}
assert re.match(texinfo_warnings_exp + '$', texinfo_warnings), \
- 'Warnings don\'t match:\n' + \
- '--- Expected (regex):\n' + texinfo_warnings_exp + \
- '--- Got:\n' + texinfo_warnings
+ 'Warnings don\'t match:\n' + \
+ '--- Expected (regex):\n' + texinfo_warnings_exp + \
+ '--- Got:\n' + texinfo_warnings
# now, try to run makeinfo over it
cwd = os.getcwd()
os.chdir(app.outdir)
@@ -61,8 +56,8 @@ def test_texinfo(app):
stdout, stderr = p.communicate()
retcode = p.returncode
if retcode != 0:
- print stdout
- print stderr
+ print(stdout)
+ print(stderr)
del app.cleanup_trees[:]
assert False, 'makeinfo exited with return code %s' % retcode
finally:
diff --git a/tests/test_build_text.py b/tests/test_build_text.py
index e6e4d5be..d486bed2 100644
--- a/tests/test_build_text.py
+++ b/tests/test_build_text.py
@@ -18,29 +18,16 @@ from util import with_app
def with_text_app(*args, **kw):
default_kw = {
'buildername': 'text',
- 'srcdir': '(empty)',
- 'confoverrides': {
- 'project': 'text',
- 'master_doc': 'contents',
- },
+ 'testroot': 'build-text',
}
default_kw.update(kw)
return with_app(*args, **default_kw)
@with_text_app()
-def test_maxwitdh_with_prefix(app):
- long_string = u' '.join([u"ham"] * 30)
- contents = (
- u".. seealso:: %(long_string)s\n\n"
- u"* %(long_string)s\n"
- u"* %(long_string)s\n"
- u"\nspam egg\n"
- ) % locals()
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
+def test_maxwitdh_with_prefix(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'maxwidth.txt').text(encoding='utf-8')
lines = result.splitlines()
line_widths = [column_width(line) for line in lines]
@@ -58,105 +45,52 @@ def test_maxwitdh_with_prefix(app):
@with_text_app()
-def test_lineblock(app):
+def test_lineblock(app, status, warning):
# regression test for #1109: need empty line after line block
- contents = (
- u"* one\n"
- u"\n"
- u" | line-block 1\n"
- u" | line-block 2\n"
- u"\n"
- u"followed paragraph.\n"
- )
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+ app.builder.build_update()
+ result = (app.outdir / 'lineblock.txt').text(encoding='utf-8')
expect = (
- u"* one\n"
- u"\n"
- u" line-block 1\n"
- u" line-block 2\n"
- u"\n"
- u"followed paragraph.\n"
- )
-
+ u"* one\n"
+ u"\n"
+ u" line-block 1\n"
+ u" line-block 2\n"
+ u"\n"
+ u"followed paragraph.\n"
+ )
assert result == expect
@with_text_app()
-def test_nonascii_title_line(app):
- title = u'\u65e5\u672c\u8a9e'
- underline = u'=' * column_width(title)
- content = u'\n'.join((title, underline, u''))
-
- (app.srcdir / 'contents.rst').write_text(content, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
- expect_underline = underline.replace('=', '*')
+def test_nonascii_title_line(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_title.txt').text(encoding='utf-8')
+ expect_underline = '******'
result_underline = result.splitlines()[2].strip()
assert expect_underline == result_underline
@with_text_app()
-def test_nonascii_table(app):
- text = u'\u65e5\u672c\u8a9e'
- contents = (u"\n.. list-table::"
- "\n"
- "\n - - spam"
- "\n - egg"
- "\n"
- "\n - - %(text)s"
- "\n - %(text)s"
- "\n" % locals())
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_nonascii_table(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert len(set(line_widths)) == 1 # same widths
@with_text_app()
-def test_nonascii_maxwidth(app):
- sb_text = u'abc' #length=3
- mb_text = u'\u65e5\u672c\u8a9e' #length=3
-
- sb_line = ' '.join([sb_text] * int(MAXWIDTH / 3))
- mb_line = ' '.join([mb_text] * int(MAXWIDTH / 3))
- mix_line = ' '.join([sb_text, mb_text] * int(MAXWIDTH / 6))
-
- contents = u'\n\n'.join((sb_line, mb_line, mix_line))
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_nonascii_maxwidth(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'nonascii_maxwidth.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
line_widths = [column_width(line) for line in lines]
assert max(line_widths) < MAXWIDTH
@with_text_app()
-def test_table_with_empty_cell(app):
- contents = (u"""
- +-----+-----+
- | XXX | XXX |
- +-----+-----+
- | | XXX |
- +-----+-----+
- | XXX | |
- +-----+-----+
- """)
-
- (app.srcdir / 'contents.rst').write_text(contents, encoding='utf-8')
- app.builder.build_all()
- result = (app.outdir / 'contents.txt').text(encoding='utf-8')
-
+def test_table_with_empty_cell(app, status, warning):
+ app.builder.build_update()
+ result = (app.outdir / 'table.txt').text(encoding='utf-8')
lines = [line.strip() for line in result.splitlines() if line.strip()]
assert lines[0] == "+-------+-------+"
assert lines[1] == "| XXX | XXX |"
diff --git a/tests/test_catalogs.py b/tests/test_catalogs.py
new file mode 100644
index 00000000..c4f5c08f
--- /dev/null
+++ b/tests/test_catalogs.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+"""
+ test_build_base
+ ~~~~~~~~~~~~~~~
+
+ Test the base build process.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+import shutil
+
+from nose.tools import with_setup
+
+from util import with_app, find_files, rootdir, tempdir
+
+root = tempdir / 'test-intl'
+build_dir = root / '_build'
+locale_dir = build_dir / 'locale'
+
+
+def setup_test():
+ # delete remnants left over after failed build
+ root.rmtree(True)
+ (rootdir / 'roots' / 'test-intl').copytree(root)
+ # copy all catalogs into locale layout directory
+ for po in find_files(root, '.po'):
+ copy_po = (locale_dir / 'en' / 'LC_MESSAGES' / po)
+ if not copy_po.parent.exists():
+ copy_po.parent.makedirs()
+ shutil.copy(root / po, copy_po)
+
+
+def teardown_test():
+ build_dir.rmtree(True)
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_all_catalogs(app, status, warning):
+ app.builder.compile_all_catalogs()
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ expect = set([
+ x.replace('.po', '.mo')
+ for x in find_files(catalog_dir, '.po')
+ ])
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual # not empty
+ assert actual == expect
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_specific_catalogs(app, status, warning):
+ app.builder.compile_specific_catalogs(['admonitions'])
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual == set(['admonitions.mo'])
+
+
+@with_setup(setup_test, teardown_test)
+@with_app(buildername='html', testroot='intl',
+ confoverrides={'language': 'en', 'locale_dirs': [locale_dir]})
+def test_compile_update_catalogs(app, status, warning):
+ app.builder.compile_update_catalogs()
+
+ catalog_dir = locale_dir / app.config.language / 'LC_MESSAGES'
+ expect = set([
+ x.replace('.po', '.mo')
+ for x in find_files(catalog_dir, '.po')
+ ])
+ actual = set(find_files(catalog_dir, '.mo'))
+ assert actual # not empty
+ assert actual == expect
diff --git a/tests/test_config.py b/tests/test_config.py
index 1e00091d..db5a9560 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -9,18 +9,18 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys
+from six import PY2, PY3, StringIO
-from util import TestApp, with_app, with_tempdir, raises, raises_msg, write_file
+from util import TestApp, with_app, with_tempdir, raises, raises_msg
from sphinx.config import Config
from sphinx.errors import ExtensionError, ConfigError, VersionRequirementError
-from sphinx.util.pycompat import b
@with_app(confoverrides={'master_doc': 'master', 'nonexisting_value': 'True',
- 'latex_elements.docclass': 'scrartcl'})
-def test_core_config(app):
+ 'latex_elements.docclass': 'scrartcl',
+ 'modindex_common_prefix': 'path1,path2'})
+def test_core_config(app, status, warning):
cfg = app.config
# simple values
@@ -31,11 +31,12 @@ def test_core_config(app):
# overrides
assert cfg.master_doc == 'master'
assert cfg.latex_elements['docclass'] == 'scrartcl'
+ assert cfg.modindex_common_prefix == ['path1', 'path2']
# simple default values
assert 'locale_dirs' not in cfg.__dict__
assert cfg.locale_dirs == []
- assert cfg.trim_footnote_reference_space == False
+ assert cfg.trim_footnote_reference_space is False
# complex default values
assert 'html_title' not in cfg.__dict__
@@ -67,7 +68,7 @@ def test_core_config(app):
@with_app()
-def test_extension_values(app):
+def test_extension_values(app, status, warning):
cfg = app.config
# default value
@@ -85,23 +86,24 @@ def test_extension_values(app):
@with_tempdir
def test_errors_warnings(dir):
# test the error for syntax errors in the config file
- write_file(dir / 'conf.py', u'project = \n', 'ascii')
+ (dir / 'conf.py').write_text(u'project = \n', encoding='ascii')
raises_msg(ConfigError, 'conf.py', Config, dir, 'conf.py', {}, None)
# test the automatic conversion of 2.x only code in configs
- write_file(dir / 'conf.py', u'# -*- coding: utf-8\n\n'
- u'project = u"Jägermeister"\n', 'utf-8')
+ (dir / 'conf.py').write_text(
+ u'# -*- coding: utf-8\n\nproject = u"Jägermeister"\n',
+ encoding='utf-8')
cfg = Config(dir, 'conf.py', {}, None)
- cfg.init_values()
+ cfg.init_values(lambda warning: 1/0)
assert cfg.project == u'Jägermeister'
# test the warning for bytestrings with non-ascii content
# bytestrings with non-ascii content are a syntax error in python3 so we
# skip the test there
- if sys.version_info >= (3, 0):
+ if PY3:
return
- write_file(dir / 'conf.py', u'# -*- coding: latin-1\nproject = "fooä"\n',
- 'latin-1')
+ (dir / 'conf.py').write_text(
+ u'# -*- coding: latin-1\nproject = "fooä"\n', encoding='latin-1')
cfg = Config(dir, 'conf.py', {}, None)
warned = [False]
def warn(msg):
@@ -126,8 +128,47 @@ def test_needs_sphinx():
def test_config_eol(tmpdir):
# test config file's eol patterns: LF, CRLF
configfile = tmpdir / 'conf.py'
- for eol in ('\n', '\r\n'):
- configfile.write_bytes(b('project = "spam"' + eol))
+ for eol in (b'\n', b'\r\n'):
+ configfile.write_bytes(b'project = "spam"' + eol)
cfg = Config(tmpdir, 'conf.py', {}, None)
- cfg.init_values()
+ cfg.init_values(lambda warning: 1/0)
assert cfg.project == u'spam'
+
+
+TYPECHECK_OVERRIDES = [
+ # configuration key, override value, should warn, default type
+ ('master_doc', 123, True, str),
+ ('man_pages', 123, True, list), # lambda
+ ('man_pages', [], False, list),
+ ('epub_tocdepth', True, True, int), # child type
+ ('nitpicky', 3, False, bool), # parent type
+ ('templates_path', (), True, list), # other sequence, also raises
+]
+if PY2:
+ # Run a check for proper sibling detection in Python 2. Under py3k, the
+ # default types do not have any siblings.
+ TYPECHECK_OVERRIDES.append(
+ ('html_add_permalinks', 'bar', False, unicode))
+
+def test_gen_check_types():
+ for key, value, should, deftype in TYPECHECK_OVERRIDES:
+ warning = StringIO()
+ try:
+ app = TestApp(confoverrides={key: value}, warning=warning)
+ except:
+ pass
+ else:
+ app.cleanup()
+
+ real = type(value).__name__
+ msg = ("WARNING: the config value %r has type `%s',"
+ " defaults to `%s.'\n" % (key, real, deftype.__name__))
+ def test():
+ warning_list = warning.getvalue()
+ assert (msg in warning_list) == should, \
+ "Setting %s to %r should%s raise: %s" % \
+ (key, value, " not" if should else "", msg)
+ test.description = "test_check_type_%s_on_%s" % \
+ (real, type(Config.config_values[key][0]).__name__)
+
+ yield test
diff --git a/tests/test_cpp_domain.py b/tests/test_cpp_domain.py
deleted file mode 100644
index 8e1cb22b..00000000
--- a/tests/test_cpp_domain.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- test_cpp_domain
- ~~~~~~~~~~~~~~~
-
- Tests the C++ Domain
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from util import raises
-
-from sphinx.domains.cpp import DefinitionParser, DefinitionError
-
-
-def parse(name, string):
- return getattr(DefinitionParser(string), 'parse_' + name)()
-
-
-def test_type_definitions():
- rv = parse('member_object', ' const std::string & name = 42')
- assert unicode(rv) == 'const std::string& name = 42'
-
- rv = parse('member_object', ' const std::string & name leftover')
- assert unicode(rv) == 'const std::string& name'
-
- rv = parse('member_object', ' const std::string & name [n] leftover')
- assert unicode(rv) == 'const std::string& name[n]'
-
- rv = parse('member_object', 'const std::vector< unsigned int, long> &name')
- assert unicode(rv) == 'const std::vector<unsigned int, long>& name'
-
- x = 'std::vector<std::pair<std::string, int>>& module::test(register ' \
- 'foo, bar, std::string baz="foobar, blah, bleh") const = 0'
- assert unicode(parse('function', x)) == x
-
- x = 'module::myclass::operator std::vector<std::string>()'
- assert unicode(parse('function', x)) == x
- x = 'explicit module::myclass::foo::foo()'
- assert unicode(parse('function', x)) == x
-
- x = 'int printf(const char* fmt, ...)'
- assert unicode(parse('function', x)) == x
-
- x = 'int foo(const unsigned int j)'
- assert unicode(parse('function', x)) == x
-
- x = 'int foo(const unsigned int const j)'
- assert unicode(parse('function', x)) == x
-
- x = 'int foo(const int* const ptr)'
- assert unicode(parse('function', x)) == x
-
- x = 'std::vector<std::pair<std::string, long long>> module::blah'
- assert unicode(parse('type_object', x)) == x
-
- assert unicode(parse('type_object', 'long long int foo')) == 'long long foo'
-
- x = 'void operator()(const boost::array<VertexID, 2>& v) const'
- assert unicode(parse('function', x)) == x
-
- x = 'void operator()(const boost::array<VertexID, 2, "foo, bar">& v) const'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::MyClass(MyClass::MyClass&&)'
- assert unicode(parse('function', x)) == x
-
- x = 'constexpr int get_value()'
- assert unicode(parse('function', x)) == x
-
- x = 'static constexpr int get_value()'
- assert unicode(parse('function', x)) == x
-
- x = 'int get_value() const noexcept'
- assert unicode(parse('function', x)) == x
-
- x = 'int get_value() const noexcept = delete'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::MyClass(MyClass::MyClass&&) = default'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_virtual_function() const override'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_member_function() volatile'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_member_function() const volatile'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_member_function() &&'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_member_function() &'
- assert unicode(parse('function', x)) == x
-
- x = 'MyClass::a_member_function() const &'
- assert unicode(parse('function', x)) == x
-
- x = 'int main(int argc, char* argv[][])'
- assert unicode(parse('function', x)) == x
-
- x = 'std::vector<std::pair<std::string, int>>& module::test(register ' \
- 'foo, bar[n], std::string baz="foobar, blah, bleh") const = 0'
- assert unicode(parse('function', x)) == x
-
- x = 'module::myclass foo[n]'
- assert unicode(parse('member_object', x)) == x
-
- x = 'int foo(Foo f=Foo(double(), std::make_pair(int(2), double(3.4))))'
- assert unicode(parse('function', x)) == x
-
- x = 'int foo(A a=x(a))'
- assert unicode(parse('function', x)) == x
-
- x = 'int foo(B b=x(a)'
- raises(DefinitionError, parse, 'function', x)
-
- x = 'int foo)C c=x(a))'
- raises(DefinitionError, parse, 'function', x)
-
- x = 'int foo(D d=x(a'
- raises(DefinitionError, parse, 'function', x)
-
-
-def test_bases():
- x = 'A'
- assert unicode(parse('class', x)) == x
-
- x = 'A : B'
- assert unicode(parse('class', x)) == x
-
- x = 'A : private B'
- assert unicode(parse('class', x)) == 'A : B'
-
- x = 'A : public B'
- assert unicode(parse('class', x)) == x
-
- x = 'A : B, C'
- assert unicode(parse('class', x)) == x
-
- x = 'A : B, protected C, D'
- assert unicode(parse('class', x)) == x
-
-
-def test_operators():
- x = parse('function', 'void operator new [ ] ()')
- assert unicode(x) == 'void operator new[]()'
-
- x = parse('function', 'void operator delete ()')
- assert unicode(x) == 'void operator delete()'
-
- for op in '*-+=/%!':
- x = parse('function', 'void operator %s ()' % op)
- assert unicode(x) == 'void operator%s()' % op
diff --git a/tests/test_directive_code.py b/tests/test_directive_code.py
new file mode 100644
index 00000000..651527c7
--- /dev/null
+++ b/tests/test_directive_code.py
@@ -0,0 +1,191 @@
+# -*- coding: utf-8 -*-
+"""
+ test_directive_code
+ ~~~~~~~~~~~~~~~~~~~
+
+ Test the code-block directive.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from xml.etree import ElementTree
+
+from util import with_app
+
+
+@with_app('xml', testroot='directive-code')
+def test_code_block(app, status, warning):
+ app.builder.build('index')
+ et = ElementTree.parse(app.outdir / 'index.xml')
+ secs = et.findall('./section/section')
+ code_block = secs[0].findall('literal_block')
+ assert len(code_block) > 0
+ actual = code_block[0].text
+ expect = (
+ " def ruby?\n" +
+ " false\n" +
+ " end"
+ )
+ assert actual == expect
+
+
+@with_app('xml', testroot='directive-code')
+def test_code_block_dedent(app, status, warning):
+ app.builder.build(['dedent_code'])
+ et = ElementTree.parse(app.outdir / 'dedent_code.xml')
+ blocks = et.findall('./section/section/literal_block')
+
+ for i in range(5): # 0-4
+ actual = blocks[i].text
+ indent = " " * (4 - i)
+ expect = (
+ indent + "def ruby?\n" +
+ indent + " false\n" +
+ indent + "end"
+ )
+ assert (i, actual) == (i, expect)
+
+ assert blocks[5].text == '\n\n' # dedent: 1000
+
+
+@with_app('html', testroot='directive-code')
+def test_code_block_caption_html(app, status, warning):
+ app.builder.build(['caption'])
+ html = (app.outdir / 'caption.html').text(encoding='utf-8')
+ caption = (u'<div class="code-block-caption">'
+ u'<span class="caption-text">caption <em>test</em> rb'
+ u'</span><a class="headerlink" href="#id1" '
+ u'title="Permalink to this code">\xb6</a></div>')
+ assert caption in html
+
+
+@with_app('latex', testroot='directive-code')
+def test_code_block_caption_latex(app, status, warning):
+ app.builder.build_all()
+ latex = (app.outdir / 'Python.tex').text()
+ caption = '\\caption{caption \\emph{test} rb}'
+ assert caption in latex
+
+
+@with_app('xml', testroot='directive-code')
+def test_literal_include(app, status, warning):
+ app.builder.build(['index'])
+ et = ElementTree.parse(app.outdir / 'index.xml')
+ secs = et.findall('./section/section')
+ literal_include = secs[1].findall('literal_block')
+ literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
+ assert len(literal_include) > 0
+ actual = literal_include[0].text
+ assert actual == literal_src
+
+
+@with_app('xml', testroot='directive-code')
+def test_literal_include_dedent(app, status, warning):
+ literal_src = (app.srcdir / 'literal.inc').text(encoding='utf-8')
+ literal_lines = [l[4:] for l in literal_src.split('\n')[9:11]]
+
+ app.builder.build(['dedent'])
+ et = ElementTree.parse(app.outdir / 'dedent.xml')
+ blocks = et.findall('./section/section/literal_block')
+
+ for i in range(5): # 0-4
+ actual = blocks[i].text
+ indent = ' ' * (4 - i)
+ expect = '\n'.join(indent + l for l in literal_lines) + '\n'
+ assert (i, actual) == (i, expect)
+
+ assert blocks[5].text == '\n\n' # dedent: 1000
+
+
+@with_app('html', testroot='directive-code')
+def test_literal_include_linenos(app, status, warning):
+ app.builder.build(['linenos'])
+ html = (app.outdir / 'linenos.html').text()
+ linenos = (
+ '<td class="linenos"><div class="linenodiv"><pre>'
+ ' 1\n'
+ ' 2\n'
+ ' 3\n'
+ ' 4\n'
+ ' 5\n'
+ ' 6\n'
+ ' 7\n'
+ ' 8\n'
+ ' 9\n'
+ '10\n'
+ '11\n'
+ '12\n'
+ '13</pre></div></td>')
+ assert linenos in html
+
+
+@with_app('html', testroot='directive-code')
+def test_literal_include_lineno_start(app, status, warning):
+ app.builder.build(['lineno_start'])
+ html = (app.outdir / 'lineno_start.html').text()
+ linenos = (
+ '<td class="linenos"><div class="linenodiv"><pre>'
+ '200\n'
+ '201\n'
+ '202\n'
+ '203\n'
+ '204\n'
+ '205\n'
+ '206\n'
+ '207\n'
+ '208\n'
+ '209\n'
+ '210\n'
+ '211\n'
+ '212</pre></div></td>')
+ assert linenos in html
+
+
+@with_app('html', testroot='directive-code')
+def test_literal_include_lineno_match(app, status, warning):
+ app.builder.build(['lineno_match'])
+ html = (app.outdir / 'lineno_match.html').text()
+ pyobject = (
+ '<td class="linenos"><div class="linenodiv"><pre>'
+ ' 9\n'
+ '10\n'
+ '11</pre></div></td>')
+
+ assert pyobject in html
+
+ lines = (
+ '<td class="linenos"><div class="linenodiv"><pre>'
+ '6\n'
+ '7\n'
+ '8\n'
+ '9</pre></div></td>')
+ assert lines in html
+
+ start_after = (
+ '<td class="linenos"><div class="linenodiv"><pre>'
+ ' 9\n'
+ '10\n'
+ '11\n'
+ '12\n'
+ '13</pre></div></td>')
+ assert start_after in html
+
+
+@with_app('html', testroot='directive-code')
+def test_literalinclude_caption_html(app, status, warning):
+ app.builder.build('index')
+ html = (app.outdir / 'caption.html').text(encoding='utf-8')
+ caption = (u'<div class="code-block-caption">'
+ u'<span class="caption-text">caption <strong>test</strong> py'
+ u'</span><a class="headerlink" href="#id2" '
+ u'title="Permalink to this code">\xb6</a></div>')
+ assert caption in html
+
+
+@with_app('latex', testroot='directive-code')
+def test_literalinclude_caption_latex(app, status, warning):
+ app.builder.build('index')
+ latex = (app.outdir / 'Python.tex').text()
+ caption = '\\caption{caption \\textbf{test} py}'
+ assert caption in latex
diff --git a/tests/test_only_directive.py b/tests/test_directive_only.py
index 4717ff9f..0cf44663 100644
--- a/tests/test_only_directive.py
+++ b/tests/test_directive_only.py
@@ -13,15 +13,11 @@ import re
from docutils import nodes
-from util import with_app, test_roots
+from util import with_app
-def teardown_module():
- (test_roots / 'test-only-directive' / '_build').rmtree(True)
-
-
-@with_app(buildername='text', srcdir=(test_roots / 'test-only-directive'))
-def test_sectioning(app):
+@with_app('text', testroot='directive-only')
+def test_sectioning(app, status, warning):
def getsects(section):
if not isinstance(section, nodes.section):
@@ -53,8 +49,7 @@ def test_sectioning(app):
app.env.process_only_nodes(doctree, app.builder)
parts = [getsects(n)
- for n in filter(lambda n: isinstance(n, nodes.section),
- doctree.children)]
+ for n in [_n for _n in doctree.children if isinstance(_n, nodes.section)]]
for i, s in enumerate(parts):
testsects(str(i+1) + '.', s, 4)
assert len(parts) == 4, 'Expected 4 document level headings, got:\n%s' % \
diff --git a/tests/test_docutilsconf.py b/tests/test_docutilsconf.py
index 4aeaa56a..90fa5db3 100644
--- a/tests/test_docutilsconf.py
+++ b/tests/test_docutilsconf.py
@@ -9,49 +9,17 @@
:license: BSD, see LICENSE for details.
"""
-import os
import re
-from StringIO import StringIO
-from functools import wraps
-
-from util import test_roots, TestApp, path, SkipTest
-
-
-html_warnfile = StringIO()
-root = test_roots / 'test-docutilsconf'
-
-
-# need cleanenv to rebuild everytime.
-# docutils.conf change did not effect to rebuild.
-def with_conf_app(docutilsconf='', *args, **kwargs):
- default_kw = {
- 'srcdir': root,
- 'cleanenv': True,
- }
- default_kw.update(kwargs)
- def generator(func):
- @wraps(func)
- def deco(*args2, **kwargs2):
- app = TestApp(*args, **default_kw)
- (app.srcdir / 'docutils.conf').write_text(docutilsconf)
- try:
- cwd = os.getcwd()
- os.chdir(app.srcdir)
- func(app, *args2, **kwargs2)
- finally:
- os.chdir(cwd)
- # don't execute cleanup if test failed
- app.cleanup()
- return deco
- return generator
+
+from util import with_app, path, SkipTest
def regex_count(expr, result):
return len(re.findall(expr, result))
-@with_conf_app(buildername='html')
-def test_html_with_default_docutilsconf(app):
+@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf='')
+def test_html_with_default_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@@ -61,13 +29,13 @@ def test_html_with_default_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 1
-@with_conf_app(buildername='html', docutilsconf=(
+@with_app('html', testroot='docutilsconf', freshenv=True, docutilsconf=(
'\n[html4css1 writer]'
'\noption-limit:1'
'\nfield-name-limit:1'
'\n')
)
-def test_html_with_docutilsconf(app):
+def test_html_with_docutilsconf(app, status, warning):
app.builder.build(['contents'])
result = (app.outdir / 'contents.html').text(encoding='utf-8')
@@ -77,41 +45,32 @@ def test_html_with_docutilsconf(app):
assert regex_count(r'<td class="option-group" colspan="2">', result) == 2
-@with_conf_app(buildername='html', warning=html_warnfile)
-def test_html(app):
+@with_app('html', testroot='docutilsconf')
+def test_html(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='latex', warning=html_warnfile)
-def test_latex(app):
+@with_app('latex', testroot='docutilsconf')
+def test_latex(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='man', warning=html_warnfile)
-def test_man(app):
+@with_app('man', testroot='docutilsconf')
+def test_man(app, status, warning):
app.builder.build(['contents'])
- assert html_warnfile.getvalue() == ''
+ assert warning.getvalue() == ''
-@with_conf_app(buildername='texinfo', warning=html_warnfile)
-def test_texinfo(app):
+@with_app('texinfo', testroot='docutilsconf')
+def test_texinfo(app, status, warning):
app.builder.build(['contents'])
-@with_conf_app(buildername='html', srcdir='(empty)',
- docutilsconf='[general]\nsource_link=true\n')
-def test_docutils_source_link(app):
- srcdir = path(app.srcdir)
- (srcdir / 'conf.py').write_text('')
- (srcdir / 'contents.rst').write_text('')
- app.builder.build_all()
-
-
-@with_conf_app(buildername='html', srcdir='(empty)',
- docutilsconf='[general]\nsource_link=true\n')
-def test_docutils_source_link_with_nonascii_file(app):
+@with_app('html', testroot='docutilsconf',
+ docutilsconf='[general]\nsource_link=true\n')
+def test_docutils_source_link_with_nonascii_file(app, status, warning):
srcdir = path(app.srcdir)
mb_name = u'\u65e5\u672c\u8a9e'
try:
@@ -122,7 +81,4 @@ def test_docutils_source_link_with_nonascii_file(app):
'nonascii filename not supported on this filesystem encoding: '
'%s', FILESYSTEMENCODING)
- (srcdir / 'conf.py').write_text('')
- (srcdir / 'contents.rst').write_text('')
-
app.builder.build_all()
diff --git a/tests/test_domain_cpp.py b/tests/test_domain_cpp.py
new file mode 100644
index 00000000..5b839fad
--- /dev/null
+++ b/tests/test_domain_cpp.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+"""
+ test_domain_cpp
+ ~~~~~~~~~~~~~~~
+
+ Tests the C++ Domain
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from six import text_type
+
+from util import raises
+
+from sphinx.domains.cpp import DefinitionParser, DefinitionError
+
+def parse(name, string):
+ parser = DefinitionParser(string)
+ res = getattr(parser, "parse_" + name + "_object")()
+ if not parser.eof:
+ print("Parsing stopped at", parser.pos)
+ print(string)
+ print('-'*parser.pos + '^')
+ raise DefinitionError("")
+ return res
+
+def check(name, input, output=None):
+ # first a simple check of the AST
+ if output is None:
+ output = input
+ ast = parse(name, input)
+ res = text_type(ast)
+ if res != output:
+ print("Input: ", text_type(input))
+ print("Result: ", res)
+ print("Expected: ", output)
+ raise DefinitionError("")
+ ast.describe_signature([], 'lastIsName', None)
+ ast.prefixedName = ast.name # otherwise the get_id fails, it would be set
+                                # in handle_signature
+ ast.get_id()
+ #print ".. %s:: %s" % (name, input)
+
+def test_type_definitions():
+ check("type", "public bool b", "bool b")
+ check("type", "bool A::b")
+ check("type", "bool *b")
+ check("type", "bool *const b")
+ check("type", "bool *volatile const b")
+ check("type", "bool *volatile const b")
+ check("type", "bool *volatile const *b")
+ check("type", "bool &b")
+ check("type", "bool b[]")
+ check("type", "std::pair<int, int> coord")
+ check("type", "long long int foo")
+ check("type", 'std::vector<std::pair<std::string, long long>> module::blah')
+ check("type", "std::function<void()> F")
+ check("type", "std::function<R(A1, A2, A3)> F")
+ check("type", "std::function<R(A1, A2, A3, As...)> F")
+ check("type", "MyContainer::const_iterator")
+ check("type",
+ "public MyContainer::const_iterator",
+ "MyContainer::const_iterator")
+
+ check('member',
+ ' const std::string & name = 42',
+ 'const std::string &name = 42')
+ check('member', ' const std::string & name', 'const std::string &name')
+ check('member',
+ ' const std::string & name [ n ]',
+ 'const std::string &name[n]')
+ check('member',
+ 'const std::vector< unsigned int, long> &name',
+ 'const std::vector<unsigned int, long> &name')
+ check('member', 'module::myclass foo[n]')
+
+ check('function', 'operator bool() const')
+ check('function', 'bool namespaced::theclass::method(arg1, arg2)')
+ x = 'std::vector<std::pair<std::string, int>> &module::test(register ' \
+ 'foo, bar, std::string baz = "foobar, blah, bleh") const = 0'
+ check('function', x)
+ check('function', 'explicit module::myclass::foo::foo()')
+ check('function', 'module::myclass::foo::~foo()')
+ check('function', 'int printf(const char *fmt, ...)')
+ check('function', 'int foo(const unsigned int j)')
+ check('function', 'int foo(const int *const ptr)')
+ check('function', 'module::myclass::operator std::vector<std::string>()')
+ check('function',
+ 'void operator()(const boost::array<VertexID, 2> &v) const')
+ check('function',
+ 'void operator()(const boost::array<VertexID, 2, "foo, bar"> &v) const')
+ check('function', 'MyClass::MyClass(MyClass::MyClass&&)')
+ check('function', 'constexpr int get_value()')
+ check('function', 'static constexpr int get_value()')
+ check('function', 'int get_value() const noexcept')
+ check('function', 'int get_value() const noexcept = delete')
+ check('function', 'MyClass::MyClass(MyClass::MyClass&&) = default')
+ check('function', 'virtual MyClass::a_virtual_function() const override')
+ check('function', 'A B() override')
+ check('function', 'A B() final')
+ check('function', 'A B() final override')
+ check('function', 'A B() override final', 'A B() final override')
+ check('function', 'MyClass::a_member_function() volatile')
+ check('function', 'MyClass::a_member_function() volatile const')
+ check('function', 'MyClass::a_member_function() &&')
+ check('function', 'MyClass::a_member_function() &')
+ check('function', 'MyClass::a_member_function() const &')
+ check('function', 'int main(int argc, char *argv[])')
+ check('function', 'MyClass &MyClass::operator++()')
+ check('function', 'MyClass::pointer MyClass::operator->()')
+
+ x = 'std::vector<std::pair<std::string, int>> &module::test(register ' \
+ 'foo, bar[n], std::string baz = "foobar, blah, bleh") const = 0'
+ check('function', x)
+ check('function',
+ 'int foo(Foo f = Foo(double(), std::make_pair(int(2), double(3.4))))')
+ check('function', 'int foo(A a = x(a))')
+ raises(DefinitionError, parse, 'function', 'int foo(B b=x(a)')
+ raises(DefinitionError, parse, 'function', 'int foo)C c=x(a))')
+ raises(DefinitionError, parse, 'function', 'int foo(D d=x(a')
+ check('function', 'int foo(const A&... a)')
+ check('function', 'virtual void f()')
+
+def test_bases():
+ check('class', 'A')
+ check('class', 'A::B::C')
+ check('class', 'A : B')
+ check('class', 'A : private B', 'A : B')
+ check('class', 'A : public B')
+ check('class', 'A : B, C')
+ check('class', 'A : B, protected C, D')
+
+
+def test_operators():
+ check('function', 'void operator new [ ] ()', 'void operator new[]()')
+ check('function', 'void operator delete ()', 'void operator delete()')
+ check('function', 'void operator bool() const', 'void operator bool() const')
+ for op in '*-+=/%!':
+ check('function', 'void operator %s ()' % op, 'void operator%s()' % op)
diff --git a/tests/test_py_domain.py b/tests/test_domain_py.py
index 68634d83..1d0fcc5f 100644
--- a/tests/test_py_domain.py
+++ b/tests/test_domain_py.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
- test_py_domain
+ test_domain_py
~~~~~~~~~~~~~~
Tests the Python Domain
@@ -9,6 +9,8 @@
:license: BSD, see LICENSE for details.
"""
+from six import text_type
+
from sphinx import addnodes
from sphinx.domains.python import py_sig_re, _pseudo_parse_arglist
@@ -26,19 +28,19 @@ def parse(sig):
def test_function_signatures():
rv = parse('func(a=1) -> int object')
- assert unicode(rv) == u'a=1'
+ assert text_type(rv) == u'a=1'
rv = parse('func(a=1, [b=None])')
- assert unicode(rv) == u'a=1, [b=None]'
+ assert text_type(rv) == u'a=1, [b=None]'
rv = parse('func(a=1[, b=None])')
- assert unicode(rv) == u'a=1, [b=None]'
+ assert text_type(rv) == u'a=1, [b=None]'
rv = parse("compile(source : string, filename, symbol='file')")
- assert unicode(rv) == u"source : string, filename, symbol='file'"
+ assert text_type(rv) == u"source : string, filename, symbol='file'"
rv = parse('func(a=[], [b=None])')
- assert unicode(rv) == u'a=[], [b=None]'
+ assert text_type(rv) == u'a=[], [b=None]'
rv = parse('func(a=[][, b=None])')
- assert unicode(rv) == u'a=[], [b=None]'
+ assert text_type(rv) == u'a=[], [b=None]'
diff --git a/tests/test_rst_domain.py b/tests/test_domain_rst.py
index 9f70f5b1..9f70f5b1 100644
--- a/tests/test_rst_domain.py
+++ b/tests/test_domain_rst.py
diff --git a/tests/test_domain_std.py b/tests/test_domain_std.py
new file mode 100644
index 00000000..a1e5bdc1
--- /dev/null
+++ b/tests/test_domain_std.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+"""
+ test_domain_std
+ ~~~~~~~~~~~~~~~
+
+ Tests the std domain
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from docutils import nodes
+
+from sphinx.domains.std import StandardDomain
+from util import mock
+
+
+def test_process_doc_handle_figure_caption():
+ env = mock.Mock(domaindata={})
+ figure_node = nodes.figure(
+ '',
+ nodes.caption('caption text', 'caption text'),
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': figure_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'caption text')
+
+
+def test_process_doc_handle_image_parent_figure_caption():
+ env = mock.Mock(domaindata={})
+ img_node = nodes.image('', alt='image alt')
+ figure_node = nodes.figure(
+ '',
+ nodes.caption('caption text', 'caption text'),
+ img_node,
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': img_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'caption text')
+
+
+def test_process_doc_handle_table_title():
+ env = mock.Mock(domaindata={})
+ table_node = nodes.table(
+ '',
+ nodes.title('title text', 'title text'),
+ )
+ document = mock.Mock(
+ nametypes={'testname': True},
+ nameids={'testname': 'testid'},
+ ids={'testid': table_node},
+ )
+
+ domain = StandardDomain(env)
+ if 'testname' in domain.data['labels']:
+ del domain.data['labels']['testname']
+ domain.process_doc(env, 'testdoc', document)
+ assert 'testname' in domain.data['labels']
+ assert domain.data['labels']['testname'] == (
+ 'testdoc', 'testid', 'title text')
diff --git a/tests/test_env.py b/tests/test_environment.py
index c5a091cd..b5da325f 100644
--- a/tests/test_env.py
+++ b/tests/test_environment.py
@@ -8,11 +8,10 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import os
-import sys
-import tempfile
-from util import TestApp, test_root, remove_unicode_literals, path
+from six import PY3
+
+from util import TestApp, remove_unicode_literals, path
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.builders.latex import LaTeXBuilder
@@ -20,34 +19,35 @@ from sphinx.builders.latex import LaTeXBuilder
app = env = None
warnings = []
+
def setup_module():
global app, env
- app = TestApp(srcdir='(temp)', freshenv=True)
+ app = TestApp(srcdir='root-envtest')
env = app.env
env.set_warnfunc(lambda *args: warnings.append(args))
+
def teardown_module():
app.cleanup()
+
def warning_emitted(file, text):
for warning in warnings:
if len(warning) == 2 and file in warning[1] and text in warning[0]:
return True
return False
+
# Tests are run in the order they appear in the file, therefore we can
# afford to not run update() in the setup but in its own test
def test_first_update():
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- assert msg.endswith('%d added, 0 changed, 0 removed' % len(env.found_docs))
- docnames = set()
- for docname in it: # the generator does all the work
- docnames.add(docname)
- assert docnames == env.found_docs == set(env.all_docs)
+ updated = env.update(app.config, app.srcdir, app.doctreedir, app)
+ assert set(updated) == env.found_docs == set(env.all_docs)
# test if exclude_patterns works ok
assert 'subdir/excluded' not in env.found_docs
+
def test_images():
assert warning_emitted('images', 'image file not readable: foo.png')
assert warning_emitted('images', 'nonlocal image URI found: '
@@ -59,7 +59,7 @@ def test_images():
htmlbuilder.imgpath = 'dummy'
htmlbuilder.post_process_images(tree)
image_uri_message = "no matching candidate for image URI u'foo.*'"
- if sys.version_info >= (3, 0):
+ if PY3:
image_uri_message = remove_unicode_literals(image_uri_message)
assert image_uri_message in app._warning.content[-1]
assert set(htmlbuilder.images.keys()) == \
@@ -77,6 +77,7 @@ def test_images():
assert set(latexbuilder.images.values()) == \
set(['img.pdf', 'img.png', 'img1.png', 'simg.png', 'svgimg.pdf'])
+
def test_second_update():
# delete, add and "edit" (change saved mtime) some files and update again
env.all_docs['contents'] = 0
@@ -85,39 +86,33 @@ def test_second_update():
# the contents.txt toctree; otherwise section numbers would shift
(root / 'autodoc.txt').unlink()
(root / 'new.txt').write_text('New file\n========\n')
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- assert '1 added, 3 changed, 1 removed' in msg
- docnames = set()
- for docname in it:
- docnames.add(docname)
+ updated = env.update(app.config, app.srcdir, app.doctreedir, app)
# "includes" and "images" are in there because they contain references
# to nonexisting downloadable or image files, which are given another
# chance to exist
- assert docnames == set(['contents', 'new', 'includes', 'images'])
+ assert set(updated) == set(['contents', 'new', 'includes', 'images'])
assert 'autodoc' not in env.all_docs
assert 'autodoc' not in env.found_docs
-def test_record_dependency_on_multibyte_directory():
- app = None
- olddir = os.getcwd()
- try:
- tmproot = path(os.path.abspath(tempfile.mkdtemp()))
- tmpdir = tmproot / u'テスト'
- test_root.copytree(tmpdir)
- os.chdir(tmpdir)
- tmpdir = path(os.getcwd()) # for MacOSX; tmpdir is based on symlinks
-
- app = TestApp(srcdir=tmpdir, freshenv=True)
- (app.srcdir / 'test.txt').write_text('.. include:: test.inc')
- (app.srcdir / 'test.inc').write_text('hello sphinx')
- _, _, it = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- list(it) # take all from iterator
- finally:
- tmproot.rmtree(ignore_errors=True)
- os.chdir(olddir)
- if app:
- app.cleanup()
+def test_env_read_docs():
+ """By default, docnames are read in alphanumeric order"""
+ def on_env_read_docs_1(app, env, docnames):
+ pass
+
+ app.connect('env-before-read-docs', on_env_read_docs_1)
+
+ read_docnames = env.update(app.config, app.srcdir, app.doctreedir, app)
+ assert len(read_docnames) > 2 and read_docnames == sorted(read_docnames)
+
+ def on_env_read_docs_2(app, env, docnames):
+ docnames.reverse()
+
+ app.connect('env-before-read-docs', on_env_read_docs_2)
+
+ read_docnames = env.update(app.config, app.srcdir, app.doctreedir, app)
+ reversed_read_docnames = sorted(read_docnames, reverse=True)
+ assert len(read_docnames) > 2 and read_docnames == reversed_read_docnames
def test_object_inventory():
diff --git a/tests/test_autosummary.py b/tests/test_ext_autosummary.py
index de26a054..363c11e9 100644
--- a/tests/test_autosummary.py
+++ b/tests/test_ext_autosummary.py
@@ -8,48 +8,24 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import sys
-from functools import wraps
-from StringIO import StringIO
+
+from six import iteritems, StringIO
from sphinx.ext.autosummary import mangle_signature
-from util import test_roots, TestApp
+from util import with_app
html_warnfile = StringIO()
-def with_autosummary_app(*args, **kw):
- default_kw = {
- 'srcdir': (test_roots / 'test-autosummary'),
- 'confoverrides': {
- 'extensions': ['sphinx.ext.autosummary'],
- 'autosummary_generate': True,
- 'source_suffix': '.rst'
- }
+default_kw = {
+ 'testroot': 'autosummary',
+ 'confoverrides': {
+ 'extensions': ['sphinx.ext.autosummary'],
+ 'autosummary_generate': True,
+ 'source_suffix': '.rst'
}
- default_kw.update(kw)
- def generator(func):
- @wraps(func)
- def deco(*args2, **kwargs2):
- # Now, modify the python path...
- srcdir = default_kw['srcdir']
- sys.path.insert(0, srcdir)
- try:
- app = TestApp(*args, **default_kw)
- func(app, *args2, **kwargs2)
- finally:
- if srcdir in sys.path:
- sys.path.remove(srcdir)
- # remove the auto-generated dummy_module.rst
- dummy_rst = srcdir / 'dummy_module.rst'
- if dummy_rst.isfile():
- dummy_rst.unlink()
-
- # don't execute cleanup if test failed
- app.cleanup()
- return deco
- return generator
+}
def test_mangle_signature():
@@ -71,17 +47,15 @@ def test_mangle_signature():
(a=1, b=<SomeClass: a, b, c>, c=3) :: ([a, b, c])
"""
- TEST = [map(lambda x: x.strip(), x.split("::")) for x in TEST.split("\n")
+ TEST = [[y.strip() for y in x.split("::")] for x in TEST.split("\n")
if '::' in x]
for inp, outp in TEST:
res = mangle_signature(inp).strip().replace(u"\u00a0", " ")
assert res == outp, (u"'%s' -> '%s' != '%s'" % (inp, res, outp))
-@with_autosummary_app(buildername='html', warning=html_warnfile)
-def test_get_items_summary(app):
- app.builddir.rmtree(True)
-
+@with_app(buildername='html', **default_kw)
+def test_get_items_summary(app, status, warning):
+    # monkey-patch Autosummary.get_items so we can easily get access to its
# results..
import sphinx.ext.autosummary
@@ -95,13 +69,17 @@ def test_get_items_summary(app):
autosummary_items[name] = result
return results
+ def handler(app, what, name, obj, options, lines):
+ assert isinstance(lines, list)
+ app.connect('autodoc-process-docstring', handler)
+
sphinx.ext.autosummary.Autosummary.get_items = new_get_items
try:
app.builder.build_all()
finally:
sphinx.ext.autosummary.Autosummary.get_items = orig_get_items
- html_warnings = html_warnfile.getvalue()
+ html_warnings = warning.getvalue()
assert html_warnings == ''
expected_values = {
@@ -114,16 +92,6 @@ def test_get_items_summary(app):
'C.prop_attr2': 'This is a attribute docstring',
'C.C2': 'This is a nested inner class docstring',
}
- for key, expected in expected_values.iteritems():
+ for key, expected in iteritems(expected_values):
assert autosummary_items[key][2] == expected, 'Summary for %s was %r -'\
' expected %r' % (key, autosummary_items[key], expected)
-
-
-@with_autosummary_app(buildername='html')
-def test_process_doc_event(app):
- app.builddir.rmtree(True)
-
- def handler(app, what, name, obj, options, lines):
- assert isinstance(lines, list)
- app.connect('autodoc-process-docstring', handler)
- app.builder.build_all()
diff --git a/tests/test_coverage.py b/tests/test_ext_coverage.py
index e6747b0c..e59d42bc 100644
--- a/tests/test_coverage.py
+++ b/tests/test_ext_coverage.py
@@ -15,7 +15,7 @@ from util import with_app
@with_app(buildername='coverage')
-def test_build(app):
+def test_build(app, status, warning):
app.builder.build_all()
py_undoc = (app.outdir / 'python.txt').text()
@@ -38,7 +38,7 @@ def test_build(app):
undoc_py, undoc_c = pickle.loads((app.outdir / 'undoc.pickle').bytes())
assert len(undoc_c) == 1
# the key is the full path to the header file, which isn't testable
- assert undoc_c.values()[0] == [('function', 'Py_SphinxTest')]
+ assert list(undoc_c.values())[0] == set([('function', 'Py_SphinxTest')])
assert 'test_autodoc' in undoc_py
assert 'funcs' in undoc_py['test_autodoc']
diff --git a/tests/test_doctest.py b/tests/test_ext_doctest.py
index 2aab541b..002afff6 100644
--- a/tests/test_doctest.py
+++ b/tests/test_ext_doctest.py
@@ -9,27 +9,23 @@
:license: BSD, see LICENSE for details.
"""
-import sys
-import StringIO
-
from util import with_app
-
-status = StringIO.StringIO()
cleanup_called = 0
-@with_app(buildername='doctest', status=status)
-def test_build(app):
+
+@with_app(buildername='doctest', testroot='doctest')
+def test_build(app, status, warning):
global cleanup_called
cleanup_called = 0
app.builder.build_all()
if app.statuscode != 0:
- print >>sys.stderr, status.getvalue()
- assert False, 'failures in doctests'
+ assert False, 'failures in doctests:' + status.getvalue()
# in doctest.txt, there are two named groups and the default group,
# so the cleanup function must be called three times
assert cleanup_called == 3, 'testcleanup did not get executed enough times'
+
def cleanup_call():
global cleanup_called
cleanup_called += 1
diff --git a/tests/test_intersphinx.py b/tests/test_ext_intersphinx.py
index 8f0263b9..fb540668 100644
--- a/tests/test_intersphinx.py
+++ b/tests/test_ext_intersphinx.py
@@ -11,18 +11,15 @@
import zlib
import posixpath
-try:
- from io import BytesIO
-except ImportError:
- from cStringIO import StringIO as BytesIO
+from six import BytesIO
from docutils import nodes
from sphinx import addnodes
from sphinx.ext.intersphinx import read_inventory_v1, read_inventory_v2, \
- load_mappings, missing_reference
+ load_mappings, missing_reference
-from util import with_app, with_tempdir, write_file
+from util import with_app, with_tempdir
inventory_v1 = '''\
@@ -52,9 +49,9 @@ def test_read_inventory_v1():
f.readline()
invdata = read_inventory_v1(f, '/util', posixpath.join)
assert invdata['py:module']['module'] == \
- ('foo', '1.0', '/util/foo.html#module-module', '-')
+ ('foo', '1.0', '/util/foo.html#module-module', '-')
assert invdata['py:class']['module.cls'] == \
- ('foo', '1.0', '/util/foo.html#module.cls', '-')
+ ('foo', '1.0', '/util/foo.html#module.cls', '-')
def test_read_inventory_v2():
@@ -71,21 +68,21 @@ def test_read_inventory_v2():
assert len(invdata1['py:module']) == 2
assert invdata1['py:module']['module1'] == \
- ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
+ ('foo', '2.0', '/util/foo.html#module-module1', 'Long Module desc')
assert invdata1['py:module']['module2'] == \
- ('foo', '2.0', '/util/foo.html#module-module2', '-')
+ ('foo', '2.0', '/util/foo.html#module-module2', '-')
assert invdata1['py:function']['module1.func'][2] == \
- '/util/sub/foo.html#module1.func'
+ '/util/sub/foo.html#module1.func'
assert invdata1['c:function']['CFunc'][2] == '/util/cfunc.html#CFunc'
assert invdata1['std:term']['a term'][2] == \
- '/util/glossary.html#term-a-term'
+ '/util/glossary.html#term-a-term'
-@with_app(confoverrides={'extensions': 'sphinx.ext.intersphinx'})
+@with_app()
@with_tempdir
-def test_missing_reference(tempdir, app):
+def test_missing_reference(tempdir, app, status, warning):
inv_file = tempdir / 'inventory'
- write_file(inv_file, inventory_v2)
+ inv_file.write_bytes(inventory_v2)
app.config.intersphinx_mapping = {
'http://docs.python.org/': inv_file,
'py3k': ('http://docs.python.org/py3k/', inv_file),
@@ -97,7 +94,7 @@ def test_missing_reference(tempdir, app):
inv = app.env.intersphinx_inventory
assert inv['py:module']['module2'] == \
- ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
+ ('foo', '2.0', 'http://docs.python.org/foo.html#module-module2', '-')
# create fake nodes and check referencing
@@ -157,15 +154,15 @@ def test_missing_reference(tempdir, app):
assert contnode[0].astext() == 'py3k:unknown'
-@with_app(confoverrides={'extensions': 'sphinx.ext.intersphinx'})
+@with_app()
@with_tempdir
-def test_load_mappings_warnings(tempdir, app):
+def test_load_mappings_warnings(tempdir, app, status, warning):
"""
load_mappings issues a warning if new-style mapping
identifiers are not alphanumeric
"""
inv_file = tempdir / 'inventory'
- write_file(inv_file, inventory_v2)
+ inv_file.write_bytes(inventory_v2)
app.config.intersphinx_mapping = {
'http://docs.python.org/': inv_file,
'py3k': ('http://docs.python.org/py3k/', inv_file),
@@ -177,4 +174,4 @@ def test_load_mappings_warnings(tempdir, app):
app.config.intersphinx_cache_limit = 0
# load the inventory and check if it's done correctly
load_mappings(app)
- assert len(app._warning.content) == 2
+ assert warning.getvalue().count('\n') == 2
diff --git a/tests/test_ext_napoleon.py b/tests/test_ext_napoleon.py
new file mode 100644
index 00000000..e2790d38
--- /dev/null
+++ b/tests/test_ext_napoleon.py
@@ -0,0 +1,199 @@
+# -*- coding: utf-8 -*-
+"""
+    test_ext_napoleon
+    ~~~~~~~~~~~~~~~~~
+
+ Tests for :mod:`sphinx.ext.napoleon.__init__` module.
+
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from collections import namedtuple
+from unittest import TestCase
+
+from sphinx.application import Sphinx
+from sphinx.ext.napoleon import (_process_docstring, _skip_member, Config,
+ setup)
+from util import mock
+
+
+def _private_doc():
+ """module._private_doc.DOCSTRING"""
+ pass
+
+
+def _private_undoc():
+ pass
+
+
+def __special_doc__():
+ """module.__special_doc__.DOCSTRING"""
+ pass
+
+
+def __special_undoc__():
+ pass
+
+
+class SampleClass(object):
+ def _private_doc(self):
+ """SampleClass._private_doc.DOCSTRING"""
+ pass
+
+ def _private_undoc(self):
+ pass
+
+ def __special_doc__(self):
+ """SampleClass.__special_doc__.DOCSTRING"""
+ pass
+
+ def __special_undoc__(self):
+ pass
+
+
+class SampleError(Exception):
+ def _private_doc(self):
+ """SampleError._private_doc.DOCSTRING"""
+ pass
+
+ def _private_undoc(self):
+ pass
+
+ def __special_doc__(self):
+ """SampleError.__special_doc__.DOCSTRING"""
+ pass
+
+ def __special_undoc__(self):
+ pass
+
+SampleNamedTuple = namedtuple('SampleNamedTuple', 'user_id block_type def_id')
+
+
+class ProcessDocstringTest(TestCase):
+ def test_modify_in_place(self):
+ lines = ['Summary line.',
+ '',
+ 'Args:',
+ ' arg1: arg1 description']
+ app = mock.Mock()
+ app.config = Config()
+ _process_docstring(app, 'class', 'SampleClass', SampleClass,
+ mock.Mock(), lines)
+
+ expected = ['Summary line.',
+ '',
+ ':param arg1: arg1 description',
+ '']
+ self.assertEqual(expected, lines)
+
+
+class SetupTest(TestCase):
+ def test_unknown_app_type(self):
+ setup(object())
+
+ def test_add_config_values(self):
+ app = mock.Mock(Sphinx)
+ setup(app)
+ for name, (default, rebuild) in Config._config_values.items():
+ has_config = False
+ for method_name, args, kwargs in app.method_calls:
+ if(method_name == 'add_config_value' and
+ args[0] == name):
+ has_config = True
+ if not has_config:
+ self.fail('Config value was not added to app %s' % name)
+
+ has_process_docstring = False
+ has_skip_member = False
+ for method_name, args, kwargs in app.method_calls:
+ if method_name == 'connect':
+ if(args[0] == 'autodoc-process-docstring' and
+ args[1] == _process_docstring):
+ has_process_docstring = True
+ elif(args[0] == 'autodoc-skip-member' and
+ args[1] == _skip_member):
+ has_skip_member = True
+ if not has_process_docstring:
+ self.fail('autodoc-process-docstring never connected')
+ if not has_skip_member:
+ self.fail('autodoc-skip-member never connected')
+
+
+class SkipMemberTest(TestCase):
+ def assertSkip(self, what, member, obj, expect_skip, config_name):
+ skip = 'default skip'
+ app = mock.Mock()
+ app.config = Config()
+ setattr(app.config, config_name, True)
+ if expect_skip:
+ self.assertEqual(skip, _skip_member(app, what, member, obj, skip,
+ mock.Mock()))
+ else:
+ self.assertFalse(_skip_member(app, what, member, obj, skip,
+ mock.Mock()))
+ setattr(app.config, config_name, False)
+ self.assertEqual(skip, _skip_member(app, what, member, obj, skip,
+ mock.Mock()))
+
+ def test_namedtuple(self):
+ self.assertSkip('class', '_asdict',
+ SampleNamedTuple._asdict, False,
+ 'napoleon_include_private_with_doc')
+
+ def test_class_private_doc(self):
+ self.assertSkip('class', '_private_doc',
+ SampleClass._private_doc, False,
+ 'napoleon_include_private_with_doc')
+
+ def test_class_private_undoc(self):
+ self.assertSkip('class', '_private_undoc',
+ SampleClass._private_undoc, True,
+ 'napoleon_include_private_with_doc')
+
+ def test_class_special_doc(self):
+ self.assertSkip('class', '__special_doc__',
+ SampleClass.__special_doc__, False,
+ 'napoleon_include_special_with_doc')
+
+ def test_class_special_undoc(self):
+ self.assertSkip('class', '__special_undoc__',
+ SampleClass.__special_undoc__, True,
+ 'napoleon_include_special_with_doc')
+
+ def test_exception_private_doc(self):
+ self.assertSkip('exception', '_private_doc',
+ SampleError._private_doc, False,
+ 'napoleon_include_private_with_doc')
+
+ def test_exception_private_undoc(self):
+ self.assertSkip('exception', '_private_undoc',
+ SampleError._private_undoc, True,
+ 'napoleon_include_private_with_doc')
+
+ def test_exception_special_doc(self):
+ self.assertSkip('exception', '__special_doc__',
+ SampleError.__special_doc__, False,
+ 'napoleon_include_special_with_doc')
+
+ def test_exception_special_undoc(self):
+ self.assertSkip('exception', '__special_undoc__',
+ SampleError.__special_undoc__, True,
+ 'napoleon_include_special_with_doc')
+
+ def test_module_private_doc(self):
+ self.assertSkip('module', '_private_doc', _private_doc, False,
+ 'napoleon_include_private_with_doc')
+
+ def test_module_private_undoc(self):
+ self.assertSkip('module', '_private_undoc', _private_undoc, True,
+ 'napoleon_include_private_with_doc')
+
+ def test_module_special_doc(self):
+ self.assertSkip('module', '__special_doc__', __special_doc__, False,
+ 'napoleon_include_special_with_doc')
+
+ def test_module_special_undoc(self):
+ self.assertSkip('module', '__special_undoc__', __special_undoc__, True,
+ 'napoleon_include_special_with_doc')
diff --git a/tests/test_ext_napoleon_docstring.py b/tests/test_ext_napoleon_docstring.py
new file mode 100644
index 00000000..3b5c4fc7
--- /dev/null
+++ b/tests/test_ext_napoleon_docstring.py
@@ -0,0 +1,442 @@
+# -*- coding: utf-8 -*-
+"""
+    test_ext_napoleon_docstring
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Tests for :mod:`sphinx.ext.napoleon.docstring` module.
+
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import textwrap
+from unittest import TestCase
+
+from sphinx.ext.napoleon import Config
+from sphinx.ext.napoleon.docstring import GoogleDocstring, NumpyDocstring
+from util import mock
+
+
+class BaseDocstringTest(TestCase):
+ pass
+
+
+class GoogleDocstringTest(BaseDocstringTest):
+ docstrings = [(
+ """Single line summary""",
+ """Single line summary"""
+ ), (
+ """
+ Single line summary
+
+ Extended description
+
+ """,
+ """
+ Single line summary
+
+ Extended description
+ """
+ ), (
+ """
+ Single line summary
+
+ Args:
+ arg1(str):Extended
+ description of arg1
+ """,
+ """
+ Single line summary
+
+ :Parameters: **arg1** (*str*) --
+ Extended
+ description of arg1"""
+ ), (
+ """
+ Single line summary
+
+ Args:
+ arg1(str):Extended
+ description of arg1
+ arg2 ( int ) : Extended
+ description of arg2
+
+ Keyword Args:
+ kwarg1(str):Extended
+ description of kwarg1
+ kwarg2 ( int ) : Extended
+ description of kwarg2""",
+ """
+ Single line summary
+
+ :Parameters: * **arg1** (*str*) --
+ Extended
+ description of arg1
+ * **arg2** (*int*) --
+ Extended
+ description of arg2
+
+ :Keyword Arguments: * **kwarg1** (*str*) --
+ Extended
+ description of kwarg1
+ * **kwarg2** (*int*) --
+ Extended
+ description of kwarg2"""
+ ), (
+ """
+ Single line summary
+
+ Arguments:
+ arg1(str):Extended
+ description of arg1
+ arg2 ( int ) : Extended
+ description of arg2
+
+ Keyword Arguments:
+ kwarg1(str):Extended
+ description of kwarg1
+ kwarg2 ( int ) : Extended
+ description of kwarg2""",
+ """
+ Single line summary
+
+ :Parameters: * **arg1** (*str*) --
+ Extended
+ description of arg1
+ * **arg2** (*int*) --
+ Extended
+ description of arg2
+
+ :Keyword Arguments: * **kwarg1** (*str*) --
+ Extended
+ description of kwarg1
+ * **kwarg2** (*int*) --
+ Extended
+ description of kwarg2"""
+ ), (
+ """
+ Single line summary
+
+ Return:
+ str:Extended
+ description of return value
+ """,
+ """
+ Single line summary
+
+ :returns: *str* --
+ Extended
+ description of return value"""
+ ), (
+ """
+ Single line summary
+
+ Returns:
+ str:Extended
+ description of return value
+ """,
+ """
+ Single line summary
+
+ :returns: *str* --
+ Extended
+ description of return value"""
+ ), (
+ """
+ Single line summary
+
+ Returns:
+ Extended
+ description of return value
+ """,
+ """
+ Single line summary
+
+ :returns: Extended
+ description of return value"""
+ )]
+
+ def test_docstrings(self):
+ config = Config(napoleon_use_param=False, napoleon_use_rtype=False)
+ for docstring, expected in self.docstrings:
+ actual = str(GoogleDocstring(textwrap.dedent(docstring), config))
+ expected = textwrap.dedent(expected)
+ self.assertEqual(expected, actual)
+
+ def test_parameters_with_class_reference(self):
+ docstring = """\
+Construct a new XBlock.
+
+This class should only be used by runtimes.
+
+Arguments:
+ runtime (:class:`Runtime`): Use it to access the environment.
+ It is available in XBlock code as ``self.runtime``.
+
+ field_data (:class:`FieldData`): Interface used by the XBlock
+ fields to access their data from wherever it is persisted.
+
+ scope_ids (:class:`ScopeIds`): Identifiers needed to resolve scopes.
+
+"""
+
+ actual = str(GoogleDocstring(docstring))
+ expected = """\
+Construct a new XBlock.
+
+This class should only be used by runtimes.
+
+:param runtime: Use it to access the environment.
+ It is available in XBlock code as ``self.runtime``.
+
+:type runtime: :class:`Runtime`
+:param field_data: Interface used by the XBlock
+ fields to access their data from wherever it is persisted.
+
+:type field_data: :class:`FieldData`
+:param scope_ids: Identifiers needed to resolve scopes.
+
+:type scope_ids: :class:`ScopeIds`
+"""
+ self.assertEqual(expected, actual)
+
+ def test_attributes_with_class_reference(self):
+ docstring = """\
+Attributes:
+ in_attr(:class:`numpy.ndarray`): super-dooper attribute
+"""
+
+ actual = str(GoogleDocstring(docstring))
+ expected = """\
+.. attribute:: in_attr
+
+ :class:`numpy.ndarray`
+
+ super-dooper attribute
+"""
+ self.assertEqual(expected, actual)
+
+ docstring = """\
+Attributes:
+ in_attr(numpy.ndarray): super-dooper attribute
+"""
+
+ actual = str(GoogleDocstring(docstring))
+ expected = """\
+.. attribute:: in_attr
+
+ *numpy.ndarray*
+
+ super-dooper attribute
+"""
+ self.assertEqual(expected, actual)
+
+
+class NumpyDocstringTest(BaseDocstringTest):
+ docstrings = [(
+ """Single line summary""",
+ """Single line summary"""
+ ), (
+ """
+ Single line summary
+
+ Extended description
+
+ """,
+ """
+ Single line summary
+
+ Extended description
+ """
+ ), (
+ """
+ Single line summary
+
+ Parameters
+ ----------
+ arg1:str
+ Extended
+ description of arg1
+ """,
+ """
+ Single line summary
+
+ :Parameters: **arg1** (*str*) --
+ Extended
+ description of arg1"""
+ ), (
+ """
+ Single line summary
+
+ Parameters
+ ----------
+ arg1:str
+ Extended
+ description of arg1
+ arg2 : int
+ Extended
+ description of arg2
+
+ Keyword Arguments
+ -----------------
+ kwarg1:str
+ Extended
+ description of kwarg1
+ kwarg2 : int
+ Extended
+ description of kwarg2
+ """,
+ """
+ Single line summary
+
+ :Parameters: * **arg1** (*str*) --
+ Extended
+ description of arg1
+ * **arg2** (*int*) --
+ Extended
+ description of arg2
+
+ :Keyword Arguments: * **kwarg1** (*str*) --
+ Extended
+ description of kwarg1
+ * **kwarg2** (*int*) --
+ Extended
+ description of kwarg2"""
+ ), (
+ """
+ Single line summary
+
+ Return
+ ------
+ str
+ Extended
+ description of return value
+ """,
+ """
+ Single line summary
+
+ :returns: *str* --
+ Extended
+ description of return value"""
+ ), (
+ """
+ Single line summary
+
+ Returns
+ -------
+ str
+ Extended
+ description of return value
+ """,
+ """
+ Single line summary
+
+ :returns: *str* --
+ Extended
+ description of return value"""
+ )]
+
+ def test_docstrings(self):
+ config = Config(napoleon_use_param=False, napoleon_use_rtype=False)
+ for docstring, expected in self.docstrings:
+ actual = str(NumpyDocstring(textwrap.dedent(docstring), config))
+ expected = textwrap.dedent(expected)
+ self.assertEqual(expected, actual)
+
+ def test_parameters_with_class_reference(self):
+ docstring = """\
+Parameters
+----------
+param1 : :class:`MyClass <name.space.MyClass>` instance
+
+"""
+
+ config = Config(napoleon_use_param=False)
+ actual = str(NumpyDocstring(docstring, config))
+ expected = """\
+:Parameters: **param1** (:class:`MyClass <name.space.MyClass>` instance)
+"""
+ self.assertEqual(expected, actual)
+
+ config = Config(napoleon_use_param=True)
+ actual = str(NumpyDocstring(docstring, config))
+ expected = """\
+
+:type param1: :class:`MyClass <name.space.MyClass>` instance
+"""
+ self.assertEqual(expected, actual)
+
+ def test_parameters_without_class_reference(self):
+ docstring = """\
+Parameters
+----------
+param1 : MyClass instance
+
+"""
+
+ config = Config(napoleon_use_param=False)
+ actual = str(NumpyDocstring(docstring, config))
+ expected = """\
+:Parameters: **param1** (*MyClass instance*)
+"""
+ self.assertEqual(expected, actual)
+
+ config = Config(napoleon_use_param=True)
+ actual = str(NumpyDocstring(textwrap.dedent(docstring), config))
+ expected = """\
+
+:type param1: MyClass instance
+"""
+ self.assertEqual(expected, actual)
+
+ def test_see_also_refs(self):
+ docstring = """\
+numpy.multivariate_normal(mean, cov, shape=None, spam=None)
+
+See Also
+--------
+some, other, funcs
+otherfunc : relationship
+
+"""
+
+ actual = str(NumpyDocstring(docstring))
+
+ expected = """\
+numpy.multivariate_normal(mean, cov, shape=None, spam=None)
+
+.. seealso::
+
+ :obj:`some`, :obj:`other`, :obj:`funcs`
+ \n\
+ :obj:`otherfunc`
+ relationship
+"""
+ self.assertEqual(expected, actual)
+
+ docstring = """\
+numpy.multivariate_normal(mean, cov, shape=None, spam=None)
+
+See Also
+--------
+some, other, funcs
+otherfunc : relationship
+
+"""
+
+ config = Config()
+ app = mock.Mock()
+ actual = str(NumpyDocstring(docstring, config, app, "method"))
+
+ expected = """\
+numpy.multivariate_normal(mean, cov, shape=None, spam=None)
+
+.. seealso::
+
+ :meth:`some`, :meth:`other`, :meth:`funcs`
+ \n\
+ :meth:`otherfunc`
+ relationship
+"""
+ self.assertEqual(expected, actual)
diff --git a/tests/test_ext_napoleon_iterators.py b/tests/test_ext_napoleon_iterators.py
new file mode 100644
index 00000000..320047e5
--- /dev/null
+++ b/tests/test_ext_napoleon_iterators.py
@@ -0,0 +1,346 @@
+# -*- coding: utf-8 -*-
+"""
+    test_ext_napoleon_iterators
+    ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ Tests for :mod:`sphinx.ext.napoleon.iterators` module.
+
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+from sphinx.ext.napoleon.iterators import peek_iter, modify_iter
+from unittest import TestCase
+
+
+class BaseIteratorsTest(TestCase):
+ def assertEqualTwice(self, expected, func, *args):
+ self.assertEqual(expected, func(*args))
+ self.assertEqual(expected, func(*args))
+
+ def assertFalseTwice(self, func, *args):
+ self.assertFalse(func(*args))
+ self.assertFalse(func(*args))
+
+ def assertNext(self, it, expected, is_last):
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(expected, it.peek)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(expected, it.peek)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(expected, next(it))
+ if is_last:
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next)
+ else:
+ self.assertTrueTwice(it.has_next)
+
+ def assertRaisesTwice(self, exc, func, *args):
+ self.assertRaises(exc, func, *args)
+ self.assertRaises(exc, func, *args)
+
+ def assertTrueTwice(self, func, *args):
+ self.assertTrue(func(*args))
+ self.assertTrue(func(*args))
+
+
+class PeekIterTest(BaseIteratorsTest):
+ def test_init_with_sentinel(self):
+ a = iter(['1', '2', 'DONE'])
+ sentinel = 'DONE'
+ self.assertRaises(TypeError, peek_iter, a, sentinel)
+
+ def get_next():
+ return next(a)
+ it = peek_iter(get_next, sentinel)
+ self.assertEqual(it.sentinel, sentinel)
+ self.assertNext(it, '1', is_last=False)
+ self.assertNext(it, '2', is_last=True)
+
+ def test_iter(self):
+ a = ['1', '2', '3']
+ it = peek_iter(a)
+ self.assertTrue(it is it.__iter__())
+
+ a = []
+ b = [i for i in peek_iter(a)]
+ self.assertEqual([], b)
+
+ a = ['1']
+ b = [i for i in peek_iter(a)]
+ self.assertEqual(['1'], b)
+
+ a = ['1', '2']
+ b = [i for i in peek_iter(a)]
+ self.assertEqual(['1', '2'], b)
+
+ a = ['1', '2', '3']
+ b = [i for i in peek_iter(a)]
+ self.assertEqual(['1', '2', '3'], b)
+
+ def test_next_with_multi(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 2)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 2)
+ self.assertTrueTwice(it.has_next)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['1', '2'], it.next(2))
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1', '2', '3']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['1', '2'], it.next(2))
+ self.assertTrueTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 2)
+ self.assertTrueTwice(it.has_next)
+
+ a = ['1', '2', '3', '4']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['1', '2'], it.next(2))
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['3', '4'], it.next(2))
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 2)
+ self.assertFalseTwice(it.has_next)
+
+ def test_next_with_none(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertEqual('1', it.__next__())
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertNext(it, '1', is_last=True)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertNext(it, '1', is_last=False)
+ self.assertNext(it, '2', is_last=True)
+
+ a = ['1', '2', '3']
+ it = peek_iter(a)
+ self.assertNext(it, '1', is_last=False)
+ self.assertNext(it, '2', is_last=False)
+ self.assertNext(it, '3', is_last=True)
+
+ def test_next_with_one(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 1)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['1'], it.next(1))
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 1)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['1'], it.next(1))
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual(['2'], it.next(1))
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 1)
+
+ def test_next_with_zero(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertRaisesTwice(StopIteration, it.next, 0)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.next, 0)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.next, 0)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.next, 0)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.next, 0)
+
+ def test_peek_with_multi(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice([it.sentinel, it.sentinel], it.peek, 2)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', it.sentinel], it.peek, 2)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', it.sentinel, it.sentinel], it.peek, 3)
+ self.assertTrueTwice(it.has_next)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2'], it.peek, 2)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2', it.sentinel], it.peek, 3)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2', it.sentinel, it.sentinel], it.peek, 4)
+ self.assertTrueTwice(it.has_next)
+
+ a = ['1', '2', '3']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2'], it.peek, 2)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2', '3'], it.peek, 3)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1', '2', '3', it.sentinel], it.peek, 4)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqual('1', next(it))
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['2', '3'], it.peek, 2)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['2', '3', it.sentinel], it.peek, 3)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['2', '3', it.sentinel, it.sentinel], it.peek, 4)
+ self.assertTrueTwice(it.has_next)
+
+ def test_peek_with_none(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice(it.sentinel, it.peek)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice('1', it.peek)
+ self.assertEqual('1', next(it))
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice(it.sentinel, it.peek)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice('1', it.peek)
+ self.assertEqual('1', next(it))
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice('2', it.peek)
+ self.assertEqual('2', next(it))
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice(it.sentinel, it.peek)
+ self.assertFalseTwice(it.has_next)
+
+ def test_peek_with_one(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice([it.sentinel], it.peek, 1)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1'], it.peek, 1)
+ self.assertEqual('1', next(it))
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice([it.sentinel], it.peek, 1)
+ self.assertFalseTwice(it.has_next)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['1'], it.peek, 1)
+ self.assertEqual('1', next(it))
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice(['2'], it.peek, 1)
+ self.assertEqual('2', next(it))
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice([it.sentinel], it.peek, 1)
+ self.assertFalseTwice(it.has_next)
+
+ def test_peek_with_zero(self):
+ a = []
+ it = peek_iter(a)
+ self.assertFalseTwice(it.has_next)
+ self.assertEqualTwice([], it.peek, 0)
+
+ a = ['1']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.peek, 0)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.peek, 0)
+
+ a = ['1', '2']
+ it = peek_iter(a)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.peek, 0)
+ self.assertTrueTwice(it.has_next)
+ self.assertEqualTwice([], it.peek, 0)
+
+
+class ModifyIterTest(BaseIteratorsTest):
+ def test_init_with_sentinel_args(self):
+ a = iter(['1', '2', '3', 'DONE'])
+ sentinel = 'DONE'
+
+ def get_next():
+ return next(a)
+ it = modify_iter(get_next, sentinel, int)
+ expected = [1, 2, 3]
+ self.assertEqual(expected, [i for i in it])
+
+ def test_init_with_sentinel_kwargs(self):
+ a = iter([1, 2, 3, 4])
+ sentinel = 4
+
+ def get_next():
+ return next(a)
+ it = modify_iter(get_next, sentinel, modifier=str)
+ expected = ['1', '2', '3']
+ self.assertEqual(expected, [i for i in it])
+
+ def test_modifier_default(self):
+ a = ['', ' ', ' a ', 'b ', ' c', ' ', '']
+ it = modify_iter(a)
+ expected = ['', ' ', ' a ', 'b ', ' c', ' ', '']
+ self.assertEqual(expected, [i for i in it])
+
+ def test_modifier_not_callable(self):
+ self.assertRaises(TypeError, modify_iter, [1], modifier='not_callable')
+
+ def test_modifier_rstrip(self):
+ a = ['', ' ', ' a ', 'b ', ' c', ' ', '']
+ it = modify_iter(a, modifier=lambda s: s.rstrip())
+ expected = ['', '', ' a', 'b', ' c', '', '']
+ self.assertEqual(expected, [i for i in it])
+
+ def test_modifier_rstrip_unicode(self):
+ a = [u'', u' ', u' a ', u'b ', u' c', u' ', u'']
+ it = modify_iter(a, modifier=lambda s: s.rstrip())
+ expected = [u'', u'', u' a', u'b', u' c', u'', u'']
+ self.assertEqual(expected, [i for i in it])
diff --git a/tests/test_ext_viewcode.py b/tests/test_ext_viewcode.py
new file mode 100644
index 00000000..fb24f765
--- /dev/null
+++ b/tests/test_ext_viewcode.py
@@ -0,0 +1,44 @@
+# -*- coding: utf-8 -*-
+"""
+ test_ext_viewcode
+ ~~~~~~~~~~~~~~~~~
+
+ Test sphinx.ext.viewcode extension.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from util import with_app
+
+
+@with_app(testroot='ext-viewcode')
+def test_viewcode(app, status, warning):
+ app.builder.build_all()
+
+ warnings = re.sub(r'\\+', '/', warning.getvalue())
+ assert re.findall(
+ r"index.rst:\d+: WARNING: Object named 'func1' not found in include " +
+ r"file .*/spam/__init__.py'",
+ warnings
+ )
+
+ result = (app.outdir / 'index.html').text(encoding='utf-8')
+ assert result.count('href="_modules/spam/mod1.html#func1"') == 2
+ assert result.count('href="_modules/spam/mod2.html#func2"') == 2
+ assert result.count('href="_modules/spam/mod1.html#Class1"') == 2
+ assert result.count('href="_modules/spam/mod2.html#Class2"') == 2
+
+
+@with_app(testroot='ext-viewcode', tags=['test_linkcode'])
+def test_linkcode(app, status, warning):
+ app.builder.build(['objects'])
+
+ stuff = (app.outdir / 'objects.html').text(encoding='utf-8')
+
+ assert 'http://foobar/source/foolib.py' in stuff
+ assert 'http://foobar/js/' in stuff
+ assert 'http://foobar/c/' in stuff
+ assert 'http://foobar/cpp/' in stuff
diff --git a/tests/test_footnote.py b/tests/test_footnote.py
deleted file mode 100644
index 964bb3e7..00000000
--- a/tests/test_footnote.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- test_footnote
- ~~~~~~~~~~~~~
-
- Test for footnote and citation.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from util import test_root, with_app
-
-
-def teardown_module():
- (test_root / '_build').rmtree(True)
-
-
-@with_app(buildername='html')
-def test_html(app):
- app.builder.build(['footnote'])
- result = (app.outdir / 'footnote.html').text(encoding='utf-8')
- expects = [
- '<a class="footnote-reference" href="#id5" id="id1">[1]</a>',
- '<a class="footnote-reference" href="#id6" id="id2">[2]</a>',
- '<a class="footnote-reference" href="#foo" id="id3">[3]</a>',
- '<a class="reference internal" href="#bar" id="id4">[bar]</a>',
- '<a class="fn-backref" href="#id1">[1]</a>',
- '<a class="fn-backref" href="#id2">[2]</a>',
- '<a class="fn-backref" href="#id3">[3]</a>',
- '<a class="fn-backref" href="#id4">[bar]</a>',
- ]
- for expect in expects:
- matches = re.findall(re.escape(expect), result)
- assert len(matches) == 1
diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py
index b4e5149f..5044ab0e 100644
--- a/tests/test_highlighting.py
+++ b/tests/test_highlighting.py
@@ -15,12 +15,7 @@ from pygments.formatters.html import HtmlFormatter
from sphinx.highlighting import PygmentsBridge
-from util import with_app, SkipTest
-
-try:
- import pygments
-except ImportError:
- raise SkipTest('pygments not available')
+from util import with_app
class MyLexer(RegexLexer):
@@ -46,13 +41,14 @@ class ComplainOnUnhighlighted(PygmentsBridge):
@with_app()
-def test_add_lexer(app):
+def test_add_lexer(app, status, warning):
app.add_lexer('test', MyLexer())
bridge = PygmentsBridge('html')
ret = bridge.highlight_block('ab', 'test')
assert '<span class="n">a</span>b' in ret
+
def test_detect_interactive():
bridge = ComplainOnUnhighlighted('html')
blocks = [
@@ -60,11 +56,12 @@ def test_detect_interactive():
>>> testing()
True
""",
- ]
+ ]
for block in blocks:
ret = bridge.highlight_block(block.lstrip(), 'python')
assert ret.startswith("<div class=\"highlight\">")
+
def test_set_formatter():
PygmentsBridge.html_formatter = MyFormatter
try:
@@ -74,6 +71,7 @@ def test_set_formatter():
finally:
PygmentsBridge.html_formatter = HtmlFormatter
+
def test_trim_doctest_flags():
PygmentsBridge.html_formatter = MyFormatter
try:
diff --git a/tests/test_i18n.py b/tests/test_i18n.py
index 06f6b28b..8144663c 100644
--- a/tests/test_i18n.py
+++ b/tests/test_i18n.py
@@ -13,5 +13,5 @@ from util import with_app
@with_app(confoverrides={'language': 'de'})
-def test_i18n(app):
+def test_i18n(app, status, warning):
app.builder.build_all()
diff --git a/tests/test_intl.py b/tests/test_intl.py
index ca0273c8..69437c1a 100644
--- a/tests/test_intl.py
+++ b/tests/test_intl.py
@@ -9,75 +9,70 @@
:copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from __future__ import print_function
import os
import re
-from StringIO import StringIO
from subprocess import Popen, PIPE
from xml.etree import ElementTree
-from sphinx.util.pycompat import relpath
+from nose.tools import assert_equal
+from six import string_types
-from util import test_roots, path, with_app, SkipTest
+from util import tempdir, rootdir, path, gen_with_app, SkipTest, \
+ assert_re_search, assert_not_re_search, assert_in, assert_not_in, \
+ assert_startswith
-warnfile = StringIO()
-root = test_roots / 'test-intl'
-doctreedir = root / '_build' / 'doctree'
+root = tempdir / 'test-intl'
-def with_intl_app(*args, **kw):
+def gen_with_intl_app(*args, **kw):
default_kw = {
- 'srcdir': root,
- 'doctreedir': doctreedir,
+ 'testroot': 'intl',
'confoverrides': {
'language': 'xx', 'locale_dirs': ['.'],
'gettext_compact': False,
},
}
default_kw.update(kw)
- return with_app(*args, **default_kw)
+ return gen_with_app(*args, **default_kw)
def setup_module():
+ if not root.exists():
+ (rootdir / 'roots' / 'test-intl').copytree(root)
# Delete remnants left over after failed build
- (root / 'xx').rmtree(True)
- (root / 'xx' / 'LC_MESSAGES').makedirs()
# Compile all required catalogs into binary format (*.mo).
for dirpath, dirs, files in os.walk(root):
dirpath = path(dirpath)
for f in [f for f in files if f.endswith('.po')]:
po = dirpath / f
mo = root / 'xx' / 'LC_MESSAGES' / (
- relpath(po[:-3], root) + '.mo')
+ os.path.relpath(po[:-3], root) + '.mo')
if not mo.parent.exists():
mo.parent.makedirs()
try:
p = Popen(['msgfmt', po, '-o', mo],
- stdout=PIPE, stderr=PIPE)
+ stdout=PIPE, stderr=PIPE)
except OSError:
raise SkipTest # most likely msgfmt was not found
else:
stdout, stderr = p.communicate()
if p.returncode != 0:
- print stdout
- print stderr
+ print(stdout)
+ print(stderr)
assert False, \
'msgfmt exited with return code %s' % p.returncode
assert mo.isfile(), 'msgfmt failed'
-def teardown_module():
- (root / '_build').rmtree(True)
- (root / 'xx').rmtree(True)
-
-
def elem_gettexts(elem):
def itertext(self):
# this function copied from Python-2.7 'ElementTree.itertext'.
- # for compatibility to Python-2.5, 2.6, 3.1
+ # for compatibility to Python-2.6
tag = self.tag
- if not isinstance(tag, basestring) and tag is not None:
+ if not isinstance(tag, string_types) and tag is not None:
return
if self.text:
yield self.text
@@ -86,7 +81,7 @@ def elem_gettexts(elem):
yield s
if e.tail:
yield e.tail
- return filter(None, [s.strip() for s in itertext(elem)])
+ return [_f for _f in [s.strip() for s in itertext(elem)] if _f]
def elem_getref(elem):
@@ -98,132 +93,45 @@ def assert_elem(elem, texts=None, refs=None, names=None):
_texts = elem_gettexts(elem)
assert _texts == texts
if refs is not None:
- _refs = map(elem_getref, elem.findall('reference'))
+ _refs = [elem_getref(x) for x in elem.findall('reference')]
assert _refs == refs
if names is not None:
_names = elem.attrib.get('names').split()
assert _names == names
-@with_intl_app(buildername='text')
-def test_simple(app):
- app.builder.build(['bom'])
- result = (app.outdir / 'bom.txt').text(encoding='utf-8')
- expect = (u"\nDatei mit UTF-8"
- u"\n***************\n" # underline matches new translation
- u"\nThis file has umlauts: äöü.\n")
- assert result == expect
-
+@gen_with_intl_app('text', freshenv=True)
+def test_text_builder(app, status, warning):
+ app.builder.build_all()
-@with_intl_app(buildername='text')
-def test_subdir(app):
- app.builder.build(['subdir/contents'])
- result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
- assert result.startswith(u"\nsubdir contents\n***************\n")
+ # --- warnings in translation
+ warnings = warning.getvalue().replace(os.sep, '/')
+ warning_expr = u'.*/warnings.txt:4: ' \
+ u'WARNING: Inline literal start-string without end-string.\n'
+ yield assert_re_search, warning_expr, warnings
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_warnings_in_translation(app):
- app.builddir.rmtree(True)
- app.builder.build(['warnings'])
result = (app.outdir / 'warnings.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REST WARNINGS"
u"\n***********************\n"
u"\nLINE OF >>``<<BROKEN LITERAL MARKUP.\n")
+ yield assert_equal, result, expect
- assert result == expect
-
- warnings = warnfile.getvalue().replace(os.sep, '/')
- warning_expr = u'.*/warnings.txt:4: ' \
- u'WARNING: Inline literal start-string without end-string.\n'
- assert re.search(warning_expr, warnings)
-
-
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_footnote_break_refid(app):
- """test for #955 cant-build-html-with-footnotes-when-using"""
- app.builder.build(['footnote'])
- result = (app.outdir / 'footnote.html').text(encoding='utf-8')
- # expect no error by build
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_footnote_regression(app):
- # regression test for fix #955, #1176
- app.builddir.rmtree(True)
- app.builder.build(['footnote'])
- et = ElementTree.parse(app.outdir / 'footnote.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
- '2', '[ref]', '1', '100', '.'],
- refs=['i18n-with-footnote', 'ref'])
-
- footnote0 = secs[0].findall('footnote')
- assert_elem(
- footnote0[0],
- texts=['1','THIS IS A AUTO NUMBERED FOOTNOTE.'],
- names=['1'])
- assert_elem(
- footnote0[1],
- texts=['100','THIS IS A NUMBERED FOOTNOTE.'],
- names=['100'])
- assert_elem(
- footnote0[2],
- texts=['2','THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
- names=['named'])
-
- citation0 = secs[0].findall('citation')
- assert_elem(
- citation0[0],
- texts=['ref','THIS IS A NAMED FOOTNOTE.'],
- names=['ref'])
-
- warnings = warnfile.getvalue().replace(os.sep, '/')
- warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
- assert not re.search(warning_expr, warnings)
-
-
-@with_intl_app(buildername='xml', cleanenv=True)
-def test_i18n_footnote_backlink(app):
- # i18n test for #1058
- app.builder.build(['footnote'])
- et = ElementTree.parse(app.outdir / 'footnote.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- refs0 = para0[0].findall('footnote_reference')
- refid2id = dict([
- (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
-
- footnote0 = secs[0].findall('footnote')
- for footnote in footnote0:
- ids = footnote.attrib.get('ids')
- backrefs = footnote.attrib.get('backrefs')
- assert refid2id[ids] == backrefs
+ # --- simple translation; check title underlines
+ result = (app.outdir / 'bom.txt').text(encoding='utf-8')
+ expect = (u"\nDatei mit UTF-8"
+ u"\n***************\n" # underline matches new translation
+ u"\nThis file has umlauts: äöü.\n")
+ yield assert_equal, result, expect
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_refs_python_domain(app):
- app.builder.build(['refs_python_domain'])
- et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
- secs = et.findall('section')
+ # --- check translation in subdirs
- # regression test for fix #1363
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
- refs=['sensitive.sensitive_variables'])
+ result = (app.outdir / 'subdir' / 'contents.txt').text(encoding='utf-8')
+ yield assert_startswith, result, u"\nsubdir contents\n***************\n"
+ # --- check warnings for inconsistency in number of references
-@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
-def test_i18n_warn_for_number_of_references_inconsistency(app):
- app.builddir.rmtree(True)
- app.builder.build(['refs_inconsistency'])
result = (app.outdir / 'refs_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH REFS INCONSISTENCY"
u"\n****************************\n"
@@ -233,98 +141,19 @@ def test_i18n_warn_for_number_of_references_inconsistency(app):
u"\n[1] THIS IS A AUTO NUMBERED FOOTNOTE.\n"
u"\n[ref2] THIS IS A NAMED FOOTNOTE.\n"
u"\n[100] THIS IS A NUMBERED FOOTNOTE.\n")
- assert result == expect
+ yield assert_equal, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
warning_fmt = u'.*/refs_inconsistency.txt:\\d+: ' \
- u'WARNING: inconsistent %s in translated message\n'
+ u'WARNING: inconsistent %s in translated message\n'
expected_warning_expr = (
warning_fmt % 'footnote references' +
warning_fmt % 'references' +
warning_fmt % 'references')
- assert re.search(expected_warning_expr, warnings)
-
-
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_link_to_undefined_reference(app):
- app.builder.build(['refs_inconsistency'])
- result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')
-
- expected_expr = ('<a class="reference external" '
- 'href="http://www.example.com">reference</a>')
- assert len(re.findall(expected_expr, result)) == 2
-
- expected_expr = ('<a class="reference internal" '
- 'href="#reference">reference</a>')
- assert len(re.findall(expected_expr, result)) == 0
-
- expected_expr = ('<a class="reference internal" '
- 'href="#i18n-with-refs-inconsistency">I18N WITH '
- 'REFS INCONSISTENCY</a>')
- assert len(re.findall(expected_expr, result)) == 1
-
-
-@with_intl_app(buildername='xml', cleanenv=True)
-def test_i18n_keep_external_links(app):
- # regression test for #1044
- app.builder.build(['external_links'])
- et = ElementTree.parse(app.outdir / 'external_links.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- # external link check
- assert_elem(
- para0[0],
- texts=['EXTERNAL LINK TO', 'Python', '.'],
- refs=['http://python.org/index.html'])
-
- # internal link check
- assert_elem(
- para0[1],
- texts=['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
- refs=['i18n-with-external-links'])
-
- # inline link check
- assert_elem(
- para0[2],
- texts=['INLINE LINK BY', 'THE SPHINX SITE', '.'],
- refs=['http://sphinx-doc.org'])
-
- # unnamed link check
- assert_elem(
- para0[3],
- texts=['UNNAMED', 'LINK', '.'],
- refs=['http://google.com'])
+ yield assert_re_search, expected_warning_expr, warnings
- # link target swapped translation
- para1 = secs[1].findall('paragraph')
- assert_elem(
- para1[0],
- texts=['LINK TO', 'external2', 'AND', 'external1', '.'],
- refs=['http://example.com/external2',
- 'http://example.com/external1'])
- assert_elem(
- para1[1],
- texts=['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE',
- '.'],
- refs=['http://python.org', 'http://sphinx-doc.org'])
+ # --- check warning for literal block
- # multiple references in the same line
- para2 = secs[2].findall('paragraph')
- assert_elem(
- para2[0],
- texts=['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
- 'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
- 'THE PYTHON SITE', '.'],
- refs=['i18n-with-external-links', 'http://python.org/index.html',
- 'http://sphinx-doc.org', 'http://google.com',
- 'http://python.org'])
-
-
-@with_intl_app(buildername='text', warning=warnfile, cleanenv=True)
-def test_i18n_literalblock_warning(app):
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['literalblock'])
result = (app.outdir / 'literalblock.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH LITERAL BLOCK"
u"\n***********************\n"
@@ -333,18 +162,15 @@ def test_i18n_literalblock_warning(app):
u"\n literal block\n"
u"\nMISSING LITERAL BLOCK:\n"
u"\n<SYSTEM MESSAGE:")
- assert result.startswith(expect)
+ yield assert_startswith, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = u'.*/literalblock.txt:\\d+: ' \
- u'WARNING: Literal block expected; none found.'
- assert re.search(expected_warning_expr, warnings)
+ u'WARNING: Literal block expected; none found.'
+ yield assert_re_search, expected_warning_expr, warnings
+ # --- definition terms: regression test for #975
-@with_intl_app(buildername='text')
-def test_i18n_definition_terms(app):
- # regression test for #975
- app.builder.build(['definition_terms'])
result = (app.outdir / 'definition_terms.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH DEFINITION TERMS"
u"\n**************************\n"
@@ -352,15 +178,10 @@ def test_i18n_definition_terms(app):
u"\n THE CORRESPONDING DEFINITION\n"
u"\nSOME OTHER TERM"
u"\n THE CORRESPONDING DEFINITION #2\n")
+ yield assert_equal, result, expect
- assert result == expect
-
+ # --- glossary terms: regression test for #1090
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_glossary_terms(app):
- # regression test for #1090
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['glossary_terms'])
result = (app.outdir / 'glossary_terms.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS"
u"\n************************\n"
@@ -369,146 +190,26 @@ def test_i18n_glossary_terms(app):
u"\nSOME OTHER NEW TERM"
u"\n THE CORRESPONDING GLOSSARY #2\n"
u"\nLINK TO *SOME NEW TERM*.\n")
- assert result == expect
+ yield assert_equal, result, expect
+ warnings = warning.getvalue().replace(os.sep, '/')
+ yield assert_not_in, 'term not in glossary', warnings
- warnings = warnfile.getvalue().replace(os.sep, '/')
- assert 'term not in glossary' not in warnings
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_role_xref(app):
- # regression test for #1090, #1193
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['role_xref'])
- et = ElementTree.parse(app.outdir / 'role_xref.xml')
- sec1, sec2 = et.findall('section')
-
- para1, = sec1.findall('paragraph')
- assert_elem(
- para1,
- texts=['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
- 'SOME NEW TERM', '.'],
- refs=['i18n-role-xref', 'contents',
- 'glossary_terms#term-some-term'])
-
- para2 = sec2.findall('paragraph')
- assert_elem(
- para2[0],
- texts=['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM',
- '.'],
- refs=['glossary_terms#term-some-other-term',
- 'glossary_terms#term-some-term'])
- assert_elem(
- para2[1],
- texts=['LINK TO', 'SAME TYPE LINKS', 'AND',
- "I18N ROCK'N ROLE XREF", '.'],
- refs=['same-type-links', 'i18n-role-xref'])
- assert_elem(
- para2[2],
- texts=['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS',
- '.'],
- refs=['glossary_terms', 'contents'])
- assert_elem(
- para2[3],
- texts=['LINK TO', '--module', 'AND', '-m', '.'],
- refs=['cmdoption--module', 'cmdoption-m'])
- assert_elem(
- para2[4],
- texts=['LINK TO', 'env2', 'AND', 'env1', '.'],
- refs=['envvar-env2', 'envvar-env1'])
- assert_elem(
- para2[5],
- texts=['LINK TO', 'token2', 'AND', 'token1', '.'],
- refs=[]) #TODO: how do I link token role to productionlist?
- assert_elem(
- para2[6],
- texts=['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
- refs=['same-type-links', 'i18n-role-xref'])
-
- #warnings
- warnings = warnfile.getvalue().replace(os.sep, '/')
- assert 'term not in glossary' not in warnings
- assert 'undefined label' not in warnings
- assert 'unknown document' not in warnings
-
-
-@with_intl_app(buildername='xml', warning=warnfile)
-def test_i18n_label_target(app):
- # regression test for #1193, #1265
- app.builder.build(['label_target'])
- et = ElementTree.parse(app.outdir / 'label_target.xml')
- secs = et.findall('section')
-
- para0 = secs[0].findall('paragraph')
- assert_elem(
- para0[0],
- texts=['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
- 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
- refs=['implicit-target', 'section-and-label'])
-
- para1 = secs[1].findall('paragraph')
- assert_elem(
- para1[0],
- texts=['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
- 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
- '.'],
- refs=['explicit-target', 'id1'])
-
- para2 = secs[2].findall('paragraph')
- assert_elem(
- para2[0],
- texts=['X IMPLICIT SECTION NAME', 'POINT TO',
- 'implicit-section-name', '.'],
- refs=['implicit-section-name'])
+ # --- glossary term inconsistencies: regression test for #1090
- sec2 = secs[2].findall('section')
-
- para2_0 = sec2[0].findall('paragraph')
- assert_elem(
- para2_0[0],
- texts=['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
- refs=[])
-
- para3 = secs[3].findall('paragraph')
- assert_elem(
- para3[0],
- texts=['X', 'bridge label',
- 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
- 'SECTION TITLE.'],
- refs=['label-bridged-target-section'])
- assert_elem(
- para3[1],
- texts=['X', 'bridge label', 'POINT TO',
- 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
- 'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
- 'bridge label2', 'POINT TO CORRECT TARGET.'],
- refs=['label-bridged-target-section',
- 'section-and-label',
- 'section-and-label'])
-
-
-@with_intl_app(buildername='text', warning=warnfile)
-def test_i18n_glossary_terms_inconsistency(app):
- # regression test for #1090
- app.builddir.rmtree(True) #for warnings acceleration
- app.builder.build(['glossary_terms_inconsistency'])
- result = (app.outdir / 'glossary_terms_inconsistency.txt'
- ).text(encoding='utf-8')
+ result = (app.outdir / 'glossary_terms_inconsistency.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH GLOSSARY TERMS INCONSISTENCY"
u"\n**************************************\n"
u"\n1. LINK TO *SOME NEW TERM*.\n")
- assert result == expect
+ yield assert_equal, result, expect
- warnings = warnfile.getvalue().replace(os.sep, '/')
+ warnings = warning.getvalue().replace(os.sep, '/')
expected_warning_expr = (
- u'.*/glossary_terms_inconsistency.txt:\\d+: '
- u'WARNING: inconsistent term references in translated message\n')
- assert re.search(expected_warning_expr, warnings)
+ u'.*/glossary_terms_inconsistency.txt:\\d+: '
+ u'WARNING: inconsistent term references in translated message\n')
+ yield assert_re_search, expected_warning_expr, warnings
+ # --- seealso
-@with_intl_app(buildername='text')
-def test_seealso(app):
- app.builder.build(['seealso'])
result = (app.outdir / 'seealso.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH SEEALSO"
u"\n*****************\n"
@@ -516,13 +217,10 @@ def test_seealso(app):
u"\nSee also: LONG TEXT 1\n"
u"\nSee also: SHORT TEXT 2\n"
u"\n LONG TEXT 2\n")
- assert result == expect
+ yield assert_equal, result, expect
+ # --- figure captions: regression test for #940
-@with_intl_app(buildername='text')
-def test_i18n_figure_caption(app):
- # regression test for #940
- app.builder.build(['figure_caption'])
result = (app.outdir / 'figure_caption.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH FIGURE CAPTION"
u"\n************************\n"
@@ -536,14 +234,10 @@ def test_i18n_figure_caption(app):
u"\n [image]MY CAPTION OF THE FIGURE\n"
u"\n MY DESCRIPTION PARAGRAPH1 OF THE FIGURE.\n"
u"\n MY DESCRIPTION PARAGRAPH2 OF THE FIGURE.\n")
+ yield assert_equal, result, expect
- assert result == expect
-
+ # --- rubric: regression test for pull request #190
-@with_intl_app(buildername='text')
-def test_i18n_rubric(app):
- # regression test for pull request #190
- app.builder.build(['rubric'])
result = (app.outdir / 'rubric.txt').text(encoding='utf-8')
expect = (u"\nI18N WITH RUBRIC"
u"\n****************\n"
@@ -553,14 +247,73 @@ def test_i18n_rubric(app):
u"\n===================\n"
u"\nBLOCK\n"
u"\n -[ RUBRIC TITLE ]-\n")
+ yield assert_equal, result, expect
+
+ # --- docfields
+
+ result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
+ expect = (u"\nI18N WITH DOCFIELDS"
+ u"\n*******************\n"
+ u"\nclass class Cls1\n"
+ u"\n Parameters:"
+ u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
+ u"\nclass class Cls2\n"
+ u"\n Parameters:"
+ u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
+ u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
+ u"\nclass class Cls3(values)\n"
+ u"\n Raises ValueError:"
+ u"\n IF THE VALUES ARE OUT OF RANGE\n"
+ u"\nclass class Cls4(values)\n"
+ u"\n Raises:"
+ u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
+ u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
+ u"\nclass class Cls5\n"
+ u"\n Returns:"
+ u'\n A NEW "Cls3" INSTANCE\n')
+ yield assert_equal, result, expect
+
+ # --- admonitions
+ # #1206: gettext did not translate admonition directive's title
+ # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
- assert result == expect
+ result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
+ directives = (
+ "attention", "caution", "danger", "error", "hint",
+ "important", "note", "tip", "warning", "admonition")
+ for d in directives:
+ yield assert_in, d.upper() + " TITLE", result
+ yield assert_in, d.upper() + " BODY", result
+
+
+@gen_with_intl_app('html', freshenv=True)
+def test_html_builder(app, status, warning):
+ app.builder.build_all()
+
+ # --- test for #955 cant-build-html-with-footnotes-when-using
+
+ # expect no error by build
+ (app.outdir / 'footnote.html').text(encoding='utf-8')
+
+ # --- links to undefined reference
+
+ result = (app.outdir / 'refs_inconsistency.html').text(encoding='utf-8')
+
+ expected_expr = ('<a class="reference external" '
+ 'href="http://www.example.com">reference</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 2
+
+ expected_expr = ('<a class="reference internal" '
+ 'href="#reference">reference</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 0
+
+ expected_expr = ('<a class="reference internal" '
+ 'href="#i18n-with-refs-inconsistency">I18N WITH '
+ 'REFS INCONSISTENCY</a>')
+ yield assert_equal, len(re.findall(expected_expr, result)), 1
+ # --- index entries: regression test for #976
-@with_intl_app(buildername='html')
-def test_i18n_index_entries(app):
- # regression test for #976
- app.builder.build(['index_entries'])
result = (app.outdir / 'genindex.html').text(encoding='utf-8')
def wrap(tag, keyword):
@@ -586,12 +339,10 @@ def test_i18n_index_entries(app):
wrap('a', 'BUILTIN'),
]
for expr in expected_exprs:
- assert re.search(expr, result, re.M)
+ yield assert_re_search, expr, result, re.M
+ # --- versionchanges
-@with_intl_app(buildername='html', cleanenv=True)
-def test_versionchange(app):
- app.builder.build(['versionchange'])
result = (app.outdir / 'versionchange.html').text(encoding='utf-8')
def get_content(result, name):
@@ -607,83 +358,266 @@ def test_versionchange(app):
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF DEPRECATED.</p>\n"""
u"""<p>THIS IS THE <em>SECOND</em> PARAGRAPH OF DEPRECATED.</p>\n""")
matched_content = get_content(result, "deprecated")
- assert expect1 == matched_content
+ yield assert_equal, expect1, matched_content
expect2 = (
u"""<p><span class="versionmodified">New in version 1.0: </span>"""
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONADDED.</p>\n""")
matched_content = get_content(result, "versionadded")
- assert expect2 == matched_content
+ yield assert_equal, expect2, matched_content
expect3 = (
u"""<p><span class="versionmodified">Changed in version 1.0: </span>"""
u"""THIS IS THE <em>FIRST</em> PARAGRAPH OF VERSIONCHANGED.</p>\n""")
matched_content = get_content(result, "versionchanged")
- assert expect3 == matched_content
+ yield assert_equal, expect3, matched_content
+ # --- docfields
-@with_intl_app(buildername='text', cleanenv=True)
-def test_i18n_docfields(app):
- app.builder.build(['docfields'])
- result = (app.outdir / 'docfields.txt').text(encoding='utf-8')
- expect = (u"\nI18N WITH DOCFIELDS"
- u"\n*******************\n"
- u"\nclass class Cls1\n"
- u"\n Parameters:"
- u"\n **param** -- DESCRIPTION OF PARAMETER param\n"
- u"\nclass class Cls2\n"
- u"\n Parameters:"
- u"\n * **foo** -- DESCRIPTION OF PARAMETER foo\n"
- u"\n * **bar** -- DESCRIPTION OF PARAMETER bar\n"
- u"\nclass class Cls3(values)\n"
- u"\n Raises ValueError:"
- u"\n IF THE VALUES ARE OUT OF RANGE\n"
- u"\nclass class Cls4(values)\n"
- u"\n Raises:"
- u"\n * **TypeError** -- IF THE VALUES ARE NOT VALID\n"
- u"\n * **ValueError** -- IF THE VALUES ARE OUT OF RANGE\n"
- u"\nclass class Cls5\n"
- u"\n Returns:"
- u'\n A NEW "Cls3" INSTANCE\n')
- assert result == expect
+ # expect no error by build
+ (app.outdir / 'docfields.html').text(encoding='utf-8')
+ # --- gettext template
-@with_intl_app(buildername='text', cleanenv=True)
-def test_i18n_admonitions(app):
- # #1206: gettext did not translate admonition directive's title
- # seealso: http://docutils.sourceforge.net/docs/ref/rst/directives.html#admonitions
- app.builder.build(['admonitions'])
- result = (app.outdir / 'admonitions.txt').text(encoding='utf-8')
- directives = (
- "attention", "caution", "danger", "error", "hint",
- "important", "note", "tip", "warning", "admonition",)
- for d in directives:
- assert d.upper() + " TITLE" in result
- assert d.upper() + " BODY" in result
+ result = (app.outdir / 'index.html').text(encoding='utf-8')
+ yield assert_in, "WELCOME", result
+ yield assert_in, "SPHINX 2013.120", result
+ # --- rebuild by .mo mtime
-@with_intl_app(buildername='html', cleanenv=True)
-def test_i18n_docfields_html(app):
- app.builder.build(['docfields'])
- result = (app.outdir / 'docfields.html').text(encoding='utf-8')
- # expect no error by build
+ app.builder.build_update()
+ updated = app.env.update(app.config, app.srcdir, app.doctreedir, app)
+ yield assert_equal, len(updated), 0
+
+ (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').utime(None)
+ updated = app.env.update(app.config, app.srcdir, app.doctreedir, app)
+ yield assert_equal, len(updated), 1
-@with_intl_app(buildername='html')
-def test_gettext_template(app):
+@gen_with_intl_app('xml', freshenv=True)
+def test_xml_builder(app, status, warning):
app.builder.build_all()
- result = (app.outdir / 'index.html').text(encoding='utf-8')
- assert "WELCOME" in result
- assert "SPHINX 2013.120" in result
+ # --- footnotes: regression test for fix #955, #1176
-@with_intl_app(buildername='html')
-def test_rebuild_by_mo_mtime(app):
- app.builder.build_update()
- _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- assert count == 0
+ et = ElementTree.parse(app.outdir / 'footnote.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['I18N WITH FOOTNOTE', 'INCLUDE THIS CONTENTS',
+ '2', '[ref]', '1', '100', '.'],
+ ['i18n-with-footnote', 'ref'])
+
+ footnote0 = secs[0].findall('footnote')
+ yield (assert_elem,
+ footnote0[0],
+ ['1', 'THIS IS A AUTO NUMBERED FOOTNOTE.'],
+ None,
+ ['1'])
+ yield (assert_elem,
+ footnote0[1],
+ ['100', 'THIS IS A NUMBERED FOOTNOTE.'],
+ None,
+ ['100'])
+ yield (assert_elem,
+ footnote0[2],
+ ['2', 'THIS IS A AUTO NUMBERED NAMED FOOTNOTE.'],
+ None,
+ ['named'])
+
+ citation0 = secs[0].findall('citation')
+ yield (assert_elem,
+ citation0[0],
+ ['ref', 'THIS IS A NAMED FOOTNOTE.'],
+ None,
+ ['ref'])
+
+ warnings = warning.getvalue().replace(os.sep, '/')
+ warning_expr = u'.*/footnote.xml:\\d*: SEVERE: Duplicate ID: ".*".\n'
+ yield assert_not_re_search, warning_expr, warnings
+
+ # --- footnote backlinks: i18n test for #1058
+
+ et = ElementTree.parse(app.outdir / 'footnote.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ refs0 = para0[0].findall('footnote_reference')
+ refid2id = dict([
+ (r.attrib.get('refid'), r.attrib.get('ids')) for r in refs0])
+
+ footnote0 = secs[0].findall('footnote')
+ for footnote in footnote0:
+ ids = footnote.attrib.get('ids')
+ backrefs = footnote.attrib.get('backrefs')
+ yield assert_equal, refid2id[ids], backrefs
+
+ # --- refs in the Python domain
- mo = (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').bytes()
- (app.srcdir / 'xx' / 'LC_MESSAGES' / 'bom.mo').write_bytes(mo)
- _, count, _ = app.env.update(app.config, app.srcdir, app.doctreedir, app)
- assert count == 1
+ et = ElementTree.parse(app.outdir / 'refs_python_domain.xml')
+ secs = et.findall('section')
+
+ # regression test for fix #1363
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['SEE THIS DECORATOR:', 'sensitive_variables()', '.'],
+ ['sensitive.sensitive_variables'])
+
+ # --- keep external links: regression test for #1044
+
+ et = ElementTree.parse(app.outdir / 'external_links.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ # external link check
+ yield (assert_elem,
+ para0[0],
+ ['EXTERNAL LINK TO', 'Python', '.'],
+ ['http://python.org/index.html'])
+
+ # internal link check
+ yield (assert_elem,
+ para0[1],
+ ['EXTERNAL LINKS', 'IS INTERNAL LINK.'],
+ ['i18n-with-external-links'])
+
+ # inline link check
+ yield (assert_elem,
+ para0[2],
+ ['INLINE LINK BY', 'THE SPHINX SITE', '.'],
+ ['http://sphinx-doc.org'])
+
+ # unnamed link check
+ yield (assert_elem,
+ para0[3],
+ ['UNNAMED', 'LINK', '.'],
+ ['http://google.com'])
+
+ # link target swapped translation
+ para1 = secs[1].findall('paragraph')
+ yield (assert_elem,
+ para1[0],
+ ['LINK TO', 'external2', 'AND', 'external1', '.'],
+ ['http://example.com/external2',
+ 'http://example.com/external1'])
+ yield (assert_elem,
+ para1[1],
+ ['LINK TO', 'THE PYTHON SITE', 'AND', 'THE SPHINX SITE', '.'],
+ ['http://python.org', 'http://sphinx-doc.org'])
+
+ # multiple references in the same line
+ para2 = secs[2].findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['LINK TO', 'EXTERNAL LINKS', ',', 'Python', ',',
+ 'THE SPHINX SITE', ',', 'UNNAMED', 'AND',
+ 'THE PYTHON SITE', '.'],
+ ['i18n-with-external-links', 'http://python.org/index.html',
+ 'http://sphinx-doc.org', 'http://google.com',
+ 'http://python.org'])
+
+ # --- role xref: regression test for #1090, #1193
+
+ et = ElementTree.parse(app.outdir / 'role_xref.xml')
+ sec1, sec2 = et.findall('section')
+
+ para1, = sec1.findall('paragraph')
+ yield (assert_elem,
+ para1,
+ ['LINK TO', "I18N ROCK'N ROLE XREF", ',', 'CONTENTS', ',',
+ 'SOME NEW TERM', '.'],
+ ['i18n-role-xref', 'contents',
+ 'glossary_terms#term-some-term'])
+
+ para2 = sec2.findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['LINK TO', 'SOME OTHER NEW TERM', 'AND', 'SOME NEW TERM', '.'],
+ ['glossary_terms#term-some-other-term',
+ 'glossary_terms#term-some-term'])
+ yield(assert_elem,
+ para2[1],
+ ['LINK TO', 'SAME TYPE LINKS', 'AND',
+ "I18N ROCK'N ROLE XREF", '.'],
+ ['same-type-links', 'i18n-role-xref'])
+ yield (assert_elem,
+ para2[2],
+ ['LINK TO', 'I18N WITH GLOSSARY TERMS', 'AND', 'CONTENTS', '.'],
+ ['glossary_terms', 'contents'])
+ yield (assert_elem,
+ para2[3],
+ ['LINK TO', '--module', 'AND', '-m', '.'],
+ ['cmdoption--module', 'cmdoption-m'])
+ yield (assert_elem,
+ para2[4],
+ ['LINK TO', 'env2', 'AND', 'env1', '.'],
+ ['envvar-env2', 'envvar-env1'])
+ yield (assert_elem,
+ para2[5],
+ ['LINK TO', 'token2', 'AND', 'token1', '.'],
+ []) # TODO: how do I link token role to productionlist?
+ yield (assert_elem,
+ para2[6],
+ ['LINK TO', 'same-type-links', 'AND', "i18n-role-xref", '.'],
+ ['same-type-links', 'i18n-role-xref'])
+
+ # warnings
+ warnings = warning.getvalue().replace(os.sep, '/')
+ yield assert_not_in, 'term not in glossary', warnings
+ yield assert_not_in, 'undefined label', warnings
+ yield assert_not_in, 'unknown document', warnings
+
+ # --- label targets: regression test for #1193, #1265
+
+ et = ElementTree.parse(app.outdir / 'label_target.xml')
+ secs = et.findall('section')
+
+ para0 = secs[0].findall('paragraph')
+ yield (assert_elem,
+ para0[0],
+ ['X SECTION AND LABEL', 'POINT TO', 'implicit-target', 'AND',
+ 'X SECTION AND LABEL', 'POINT TO', 'section-and-label', '.'],
+ ['implicit-target', 'section-and-label'])
+
+ para1 = secs[1].findall('paragraph')
+ yield (assert_elem,
+ para1[0],
+ ['X EXPLICIT-TARGET', 'POINT TO', 'explicit-target', 'AND',
+ 'X EXPLICIT-TARGET', 'POINT TO DUPLICATED ID LIKE', 'id1',
+ '.'],
+ ['explicit-target', 'id1'])
+
+ para2 = secs[2].findall('paragraph')
+ yield (assert_elem,
+ para2[0],
+ ['X IMPLICIT SECTION NAME', 'POINT TO',
+ 'implicit-section-name', '.'],
+ ['implicit-section-name'])
+
+ sec2 = secs[2].findall('section')
+
+ para2_0 = sec2[0].findall('paragraph')
+ yield (assert_elem,
+ para2_0[0],
+ ['`X DUPLICATED SUB SECTION`_', 'IS BROKEN LINK.'],
+ [])
+
+ para3 = secs[3].findall('paragraph')
+ yield (assert_elem,
+ para3[0],
+ ['X', 'bridge label',
+ 'IS NOT TRANSLATABLE BUT LINKED TO TRANSLATED ' +
+ 'SECTION TITLE.'],
+ ['label-bridged-target-section'])
+ yield (assert_elem,
+ para3[1],
+ ['X', 'bridge label', 'POINT TO',
+ 'LABEL BRIDGED TARGET SECTION', 'AND', 'bridge label2',
+ 'POINT TO', 'SECTION AND LABEL', '. THE SECOND APPEARED',
+ 'bridge label2', 'POINT TO CORRECT TARGET.'],
+ ['label-bridged-target-section',
+ 'section-and-label',
+ 'section-and-label'])
diff --git a/tests/test_linkcode.py b/tests/test_linkcode.py
deleted file mode 100644
index 83b72098..00000000
--- a/tests/test_linkcode.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- test_linkcode
- ~~~~~~~~~~~~~
-
- Test the sphinx.ext.linkcode extension.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import os
-from util import with_app
-
-
-@with_app(srcdir='(temp)', buildername='html', tags=['test_linkcode'])
-def test_html(app):
- app.builder.build_all()
-
- stuff = (app.outdir / 'objects.html').text(encoding='utf-8')
-
- assert 'http://foobar/source/foolib.py' in stuff
- assert 'http://foobar/js/' in stuff
- assert 'http://foobar/c/' in stuff
- assert 'http://foobar/cpp/' in stuff
diff --git a/tests/test_markup.py b/tests/test_markup.py
index 4f0b0de8..8bac6324 100644
--- a/tests/test_markup.py
+++ b/tests/test_markup.py
@@ -15,7 +15,6 @@ from docutils import frontend, utils, nodes
from docutils.parsers import rst
from sphinx.util import texescape
-from sphinx.util.pycompat import b
from sphinx.writers.html import HTMLWriter, SmartyPantsHTMLTranslator
from sphinx.writers.latex import LaTeXWriter, LaTeXTranslator
@@ -24,10 +23,11 @@ from util import TestApp
app = settings = parser = None
+
def setup_module():
global app, settings, parser
texescape.init() # otherwise done by the latex builder
- app = TestApp(cleanenv=True)
+ app = TestApp()
optparser = frontend.OptionParser(
components=(rst.Parser, HTMLWriter, LaTeXWriter))
settings = optparser.get_default_values()
@@ -36,6 +36,7 @@ def setup_module():
settings.env.temp_data['docname'] = 'dummy'
parser = rst.Parser()
+
def teardown_module():
app.cleanup()
@@ -43,18 +44,21 @@ def teardown_module():
class ForgivingTranslator:
def visit_pending_xref(self, node):
pass
+
def depart_pending_xref(self, node):
pass
+
class ForgivingHTMLTranslator(SmartyPantsHTMLTranslator, ForgivingTranslator):
pass
+
class ForgivingLaTeXTranslator(LaTeXTranslator, ForgivingTranslator):
pass
def verify_re(rst, html_expected, latex_expected):
- document = utils.new_document(b('test data'), settings)
+ document = utils.new_document(b'test data', settings)
document['file'] = 'dummy'
parser.parse(rst, document)
for msg in document.traverse(nodes.system_message):
@@ -84,16 +88,16 @@ def verify(rst, html_expected, latex_expected):
def test_inline():
# correct interpretation of code with whitespace
- _html = ('<p><tt class="(samp )?docutils literal"><span class="pre">'
- 'code</span>&nbsp;&nbsp; <span class="pre">sample</span></tt></p>')
+ _html = ('<p><code class="(samp )?docutils literal"><span class="pre">'
+ 'code</span>&nbsp;&nbsp; <span class="pre">sample</span></code></p>')
yield verify_re, '``code sample``', _html, r'\\code{code sample}'
yield verify_re, ':samp:`code sample`', _html, r'\\code{code sample}'
# interpolation of braces in samp and file roles (HTML only)
yield (verify, ':samp:`a{b}c`',
- '<p><tt class="samp docutils literal"><span class="pre">a</span>'
+ '<p><code class="samp docutils literal"><span class="pre">a</span>'
'<em><span class="pre">b</span></em>'
- '<span class="pre">c</span></tt></p>',
+ '<span class="pre">c</span></code></p>',
'\\code{a\\emph{b}c}')
# interpolation of arrows in menuselection
@@ -109,15 +113,16 @@ def test_inline():
# non-interpolation of dashes in option role
yield (verify_re, ':option:`--with-option`',
- '<p><em( class="xref std std-option")?>--with-option</em></p>$',
- r'\\emph{\\texttt{-{-}with-option}}$')
+ '<p><code( class="xref std std-option docutils literal")?>'
+ '<span class="pre">--with-option</span></code></p>$',
+ r'\\code{-{-}with-option}$')
# verify smarty-pants quotes
yield verify, '"John"', '<p>&#8220;John&#8221;</p>', "``John''"
# ... but not in literal text
yield (verify, '``"John"``',
- '<p><tt class="docutils literal"><span class="pre">'
- '&quot;John&quot;</span></tt></p>',
+ '<p><code class="docutils literal"><span class="pre">'
+ '&quot;John&quot;</span></code></p>',
'\\code{"John"}')
# verify classes for inline roles
@@ -128,12 +133,12 @@ def test_inline():
def test_latex_escaping():
# correct escaping in normal mode
yield (verify, u'Γ\\\\∞$', None,
- ur'\(\Gamma\)\textbackslash{}\(\infty\)\$')
+ r'\(\Gamma\)\textbackslash{}\(\infty\)\$')
# in verbatim code fragments
yield (verify, u'::\n\n @Γ\\∞${}', None,
- u'\\begin{Verbatim}[commandchars=\\\\\\{\\}]\n'
+ u'\\begin{Verbatim}[frame=single,commandchars=\\\\\\{\\}]\n'
u'@\\(\\Gamma\\)\\PYGZbs{}\\(\\infty\\)\\PYGZdl{}\\PYGZob{}\\PYGZcb{}\n'
u'\\end{Verbatim}')
# in URIs
yield (verify_re, u'`test <http://example.com/~me/>`_', None,
- ur'\\href{http://example.com/~me/}{test}.*')
+ r'\\href{http://example.com/~me/}{test}.*')
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index a0d44457..944aa157 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -12,27 +12,13 @@
# adapted from an example of bibliographic metadata at
# http://docutils.sourceforge.net/docs/user/rst/demo.txt
-from util import TestApp
+from util import with_app
from nose.tools import assert_equal
-app = env = None
-warnings = []
-
-def setup_module():
- # Is there a better way of generating this doctree than manually iterating?
- global app, env
- app = TestApp(srcdir='(temp)')
- env = app.env
- msg, num, it = env.update(app.config, app.srcdir, app.doctreedir, app)
- for docname in it:
- pass
-
-def teardown_module():
- app.cleanup()
-
-def test_docinfo():
+@with_app('pseudoxml')
+def test_docinfo(app, status, warning):
"""
Inspect the 'docinfo' metadata stored in the first node of the document.
Note this doesn't give us access to data stored in subsequence blocks
@@ -40,6 +26,8 @@ def test_docinfo():
'dedication' blocks, or the 'meta' role. Doing otherwise is probably more
messing with the internals of sphinx than this rare use case merits.
"""
+ app.builder.build(['metadata'])
+ env = app.env
exampledocinfo = env.metadata['metadata']
expecteddocinfo = {
'author': u'David Goodger',
@@ -61,6 +49,9 @@ def test_docinfo():
'date': u'2006-05-21',
'organization': u'humankind',
'revision': u'4564',
+ 'tocdepth': 1,
+ 'orphan': u'',
+ 'nocomments': u'',
}
# I like this way of comparing dicts - easier to see the error.
for key in exampledocinfo:
diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py
index 8f630700..1d3bcd9e 100644
--- a/tests/test_quickstart.py
+++ b/tests/test_quickstart.py
@@ -11,10 +11,11 @@
import sys
import time
-from StringIO import StringIO
-import tempfile
-from util import raises, with_tempdir, with_app, SkipTest
+from six import PY2, text_type, StringIO
+from six.moves import input
+
+from util import raises, with_tempdir, SkipTest
from sphinx import application
from sphinx import quickstart as qs
@@ -28,18 +29,20 @@ warnfile = StringIO()
def setup_module():
nocolor()
-def mock_raw_input(answers, needanswer=False):
+
+def mock_input(answers, needanswer=False):
called = set()
- def raw_input(prompt):
+
+ def input_(prompt):
if prompt in called:
raise AssertionError('answer for %r missing and no default '
'present' % prompt)
called.add(prompt)
- if sys.version_info < (3, 0):
+ if PY2:
prompt = str(prompt) # Python2.x raw_input emulation
# `raw_input` encode `prompt` by default encoding to print.
else:
- prompt = unicode(prompt) # Python3.x input emulation
+ prompt = text_type(prompt) # Python3.x input emulation
# `input` decode prompt by default encoding before print.
for question in answers:
if prompt.startswith(qs.PROMPT_PREFIX + question):
@@ -47,15 +50,14 @@ def mock_raw_input(answers, needanswer=False):
if needanswer:
raise AssertionError('answer for %r missing' % prompt)
return ''
- return raw_input
+ return input_
+
+
+real_input = input
-try:
- real_raw_input = raw_input
-except NameError:
- real_raw_input = input
def teardown_module():
- qs.term_input = real_raw_input
+ qs.term_input = real_input
qs.TERM_ENCODING = getattr(sys.stdin, 'encoding', None)
coloron()
@@ -63,12 +65,12 @@ def teardown_module():
def test_quickstart_inputstrip():
d = {}
answers = {
- 'Q1': 'Y\r', # input() return with '\r' on Python-3.2.0 for Windows
- 'Q2': ' Yes \r',
+ 'Q1': 'Y',
+ 'Q2': ' Yes ',
'Q3': 'N',
'Q4': 'N ',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
qs.do_prompt(d, 'k1', 'Q1')
assert d['k1'] == 'Y'
qs.do_prompt(d, 'k2', 'Q2')
@@ -88,7 +90,7 @@ def test_do_prompt():
'Q5': 'no',
'Q6': 'foo',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
try:
qs.do_prompt(d, 'k1', 'Q1')
except AssertionError:
@@ -113,7 +115,7 @@ def test_do_prompt_with_nonascii():
answers = {
'Q1': u'\u30c9\u30a4\u30c4',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
try:
qs.do_prompt(d, 'k1', 'Q1', default=u'\u65e5\u672c')
except UnicodeEncodeError:
@@ -131,7 +133,7 @@ def test_quickstart_defaults(tempdir):
'Author name': 'Georg Brandl',
'Project version': '0.1',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
d = {}
qs.ask_user(d)
qs.generate(d)
@@ -170,6 +172,7 @@ def test_quickstart_all_answers(tempdir):
'Author name': u'Wolfgang Schäuble & G\'Beckstein'.encode('utf-8'),
'Project version': '2.0',
'Project release': '2.0.1',
+ 'Project language': 'de',
'Source file suffix': '.txt',
'Name of your master document': 'contents',
'autodoc': 'y',
@@ -185,7 +188,7 @@ def test_quickstart_all_answers(tempdir):
'Create Windows command file': 'no',
'Do you want to use the epub builder': 'yes',
}
- qs.term_input = mock_raw_input(answers, needanswer=True)
+ qs.term_input = mock_input(answers, needanswer=True)
qs.TERM_ENCODING = 'utf-8'
d = {}
qs.ask_user(d)
@@ -201,7 +204,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['master_doc'] == 'contents'
assert ns['project'] == u'STASIâ„¢'
assert ns['copyright'] == u'%s, Wolfgang Schäuble & G\'Beckstein' % \
- time.strftime('%Y')
+ time.strftime('%Y')
assert ns['version'] == '2.0'
assert ns['release'] == '2.0.1'
assert ns['html_static_path'] == ['.static']
@@ -215,7 +218,7 @@ def test_quickstart_all_answers(tempdir):
assert ns['texinfo_documents'] == [
('contents', 'STASI', u'STASIâ„¢ Documentation',
u'Wolfgang Schäuble & G\'Beckstein', 'STASI',
- 'One line description of project.', 'Miscellaneous'),]
+ 'One line description of project.', 'Miscellaneous')]
assert (tempdir / 'build').isdir()
assert (tempdir / 'source' / '.static').isdir()
@@ -231,14 +234,14 @@ def test_generated_files_eol(tempdir):
'Author name': 'Georg Brandl',
'Project version': '0.1',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
d = {}
qs.ask_user(d)
qs.generate(d)
def assert_eol(filename, eol):
content = filename.bytes().decode('unicode-escape')
- assert all([l[-len(eol):]==eol for l in content.splitlines(True)])
+ assert all([l[-len(eol):] == eol for l in content.splitlines(True)])
assert_eol(tempdir / 'make.bat', '\r\n')
assert_eol(tempdir / 'Makefile', '\n')
@@ -252,19 +255,19 @@ def test_quickstart_and_build(tempdir):
'Author name': 'Georg Brandl',
'Project version': '0.1',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
d = {}
qs.ask_user(d)
qs.generate(d)
app = application.Sphinx(
- tempdir, #srcdir
- tempdir, #confdir
- (tempdir / '_build' / 'html'), #outdir
- (tempdir / '_build' / '.doctree'), #doctreedir
- 'html', #buildername
- status=StringIO(),
- warning=warnfile)
+ tempdir, # srcdir
+ tempdir, # confdir
+ (tempdir / '_build' / 'html'), # outdir
+ (tempdir / '_build' / '.doctree'), # doctreedir
+ 'html', # buildername
+ status=StringIO(),
+ warning=warnfile)
app.builder.build_all()
warnings = warnfile.getvalue()
assert not warnings
@@ -274,11 +277,11 @@ def test_quickstart_and_build(tempdir):
def test_default_filename(tempdir):
answers = {
'Root path': tempdir,
- 'Project name': u'\u30c9\u30a4\u30c4', #Fullwidth characters only
+ 'Project name': u'\u30c9\u30a4\u30c4', # Fullwidth characters only
'Author name': 'Georg Brandl',
'Project version': '0.1',
}
- qs.term_input = mock_raw_input(answers)
+ qs.term_input = mock_input(answers)
d = {}
qs.ask_user(d)
qs.generate(d)
diff --git a/tests/test_search.py b/tests/test_search.py
index 2efd753c..a7e99e04 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -13,7 +13,6 @@ from docutils import frontend, utils
from docutils.parsers import rst
from sphinx.search import IndexBuilder
-from sphinx.util.pycompat import b
settings = parser = None
@@ -32,7 +31,7 @@ test that non-comments are indexed: fermion
'''
def test_wordcollector():
- doc = utils.new_document(b('test data'), settings)
+ doc = utils.new_document(b'test data', settings)
doc['file'] = 'dummy'
parser.parse(FILE_CONTENTS, doc)
diff --git a/tests/test_searchadapters.py b/tests/test_searchadapters.py
index 81d7c178..4a91f96d 100644
--- a/tests/test_searchadapters.py
+++ b/tests/test_searchadapters.py
@@ -9,32 +9,24 @@
:license: BSD, see LICENSE for details.
"""
-import os
-from StringIO import StringIO
+from six import StringIO
from sphinx.websupport import WebSupport
from test_websupport import sqlalchemy_missing
-from util import test_root, skip_if, skip_unless_importable
-
-
-def clear_builddir():
- (test_root / 'websupport').rmtree(True)
+from util import rootdir, tempdir, skip_if, skip_unless_importable
def teardown_module():
- (test_root / 'generated').rmtree(True)
- clear_builddir()
+ (tempdir / 'websupport').rmtree(True)
def search_adapter_helper(adapter):
- clear_builddir()
-
- settings = {'builddir': os.path.join(test_root, 'websupport'),
+ settings = {'srcdir': rootdir / 'root',
+ 'builddir': tempdir / 'websupport',
'status': StringIO(),
- 'warning': StringIO()}
- settings.update({'srcdir': test_root,
- 'search': adapter})
+ 'warning': StringIO(),
+ 'search': adapter}
support = WebSupport(**settings)
support.build()
@@ -62,7 +54,7 @@ def search_adapter_helper(adapter):
'%s search adapter returned %s search result(s), should have been 1'\
% (adapter, len(results))
# Make sure it works through the WebSupport API
- html = support.get_search_results(u'SomeLongRandomWord')
+ support.get_search_results(u'SomeLongRandomWord')
@skip_unless_importable('xapian', 'needs xapian bindings installed')
diff --git a/tests/test_setup_command.py b/tests/test_setup_command.py
index c165b2d3..70826721 100644
--- a/tests/test_setup_command.py
+++ b/tests/test_setup_command.py
@@ -16,11 +16,16 @@ from functools import wraps
import tempfile
import sphinx
-from util import with_tempdir, test_roots, SkipTest
+from util import rootdir, tempdir, SkipTest
from path import path
from textwrap import dedent
-root = test_roots / 'test-setup'
+root = tempdir / 'test-setup'
+
+
+def setup_module():
+ if not root.exists():
+ (rootdir / 'roots' / 'test-setup').copytree(root)
def with_setup_command(root, *args, **kwds):
diff --git a/tests/test_templating.py b/tests/test_templating.py
index e8fafca2..5f8fcaeb 100644
--- a/tests/test_templating.py
+++ b/tests/test_templating.py
@@ -9,28 +9,23 @@
:license: BSD, see LICENSE for details.
"""
-from util import test_roots, with_app
+from util import with_app
-def teardown_module():
- (test_roots / 'test-templating' / '_build').rmtree(True),
-
-
-@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
-def test_layout_overloading(app):
- app.builder.build_all()
+@with_app('html', testroot='templating')
+def test_layout_overloading(app, status, warning):
+ app.builder.build_update()
result = (app.outdir / 'contents.html').text(encoding='utf-8')
assert '<!-- layout overloading -->' in result
-@with_app(buildername='html', srcdir=(test_roots / 'test-templating'))
-def test_autosummary_class_template_overloading(app):
- app.builder.build_all()
+@with_app('html', testroot='templating')
+def test_autosummary_class_template_overloading(app, status, warning):
+ app.builder.build_update()
- result = (app.outdir / 'generated' / 'sphinx.application.Sphinx.html').text(
- encoding='utf-8')
+ result = (app.outdir / 'generated' / 'sphinx.application.TemplateBridge.html').text(
+ encoding='utf-8')
assert 'autosummary/class.rst method block overloading' in result
-
diff --git a/tests/test_theming.py b/tests/test_theming.py
index 7f91a32a..85138b97 100644
--- a/tests/test_theming.py
+++ b/tests/test_theming.py
@@ -19,14 +19,14 @@ from util import with_app, raises
@with_app(confoverrides={'html_theme': 'ziptheme',
'html_theme_options.testopt': 'foo'})
-def test_theme_api(app):
+def test_theme_api(app, status, warning):
cfg = app.config
# test Theme class API
assert set(Theme.themes.keys()) == \
- set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
- 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
- 'pyramid'])
+ set(['basic', 'default', 'scrolls', 'agogo', 'sphinxdoc', 'haiku',
+ 'traditional', 'testtheme', 'ziptheme', 'epub', 'nature',
+ 'pyramid', 'bizstyle'])
assert Theme.themes['testtheme'][1] is None
assert isinstance(Theme.themes['ziptheme'][1], zipfile.ZipFile)
@@ -55,3 +55,27 @@ def test_theme_api(app):
# cleanup temp directories
theme.cleanup()
assert not os.path.exists(themedir)
+
+
+@with_app(testroot='tocdepth') # a minimal root
+def test_js_source(app, status, warning):
+ # Now sphinx provides non-minified JS files for jquery.js and underscore.js
+ # to clarify the source of the minified files. see also #1434.
+ # If you update the version of the JS file, please update the source of the
+ # JS file and version number in this test.
+
+ app.builder.build(['contents'])
+
+ v = '1.11.1'
+ msg = 'jquery.js version does not match to {v}'.format(v=v)
+ jquery_min = (app.outdir / '_static' / 'jquery.js').text()
+ assert 'jQuery v{v}'.format(v=v) in jquery_min, msg
+ jquery_src = (app.outdir / '_static' / 'jquery-{v}.js'.format(v=v)).text()
+ assert 'jQuery JavaScript Library v{v}'.format(v=v) in jquery_src, msg
+
+ v = '1.3.1'
+ msg = 'underscore.js version does not match to {v}'.format(v=v)
+ underscore_min = (app.outdir / '_static' / 'underscore.js').text()
+ assert 'Underscore.js {v}'.format(v=v) in underscore_min, msg
+ underscore_src = (app.outdir / '_static' / 'underscore-{v}.js'.format(v=v)).text()
+ assert 'Underscore.js {v}'.format(v=v) in underscore_src, msg
diff --git a/tests/test_util_i18n.py b/tests/test_util_i18n.py
new file mode 100644
index 00000000..d69c2acd
--- /dev/null
+++ b/tests/test_util_i18n.py
@@ -0,0 +1,163 @@
+# -*- coding: utf-8 -*-
+"""
+ test_util_i18n
+ ~~~~~~~~~~~~~~
+
+ Test i18n util.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from __future__ import print_function
+
+import os
+from os import path
+
+from babel.messages.mofile import read_mo
+from sphinx.util import i18n
+
+from util import with_tempdir
+
+
+def test_catalog_info_for_file_and_path():
+ cat = i18n.CatalogInfo('path', 'domain')
+ assert cat.po_file == 'domain.po'
+ assert cat.mo_file == 'domain.mo'
+ assert cat.po_path == path.join('path', 'domain.po')
+ assert cat.mo_path == path.join('path', 'domain.mo')
+
+
+def test_catalog_info_for_sub_domain_file_and_path():
+ cat = i18n.CatalogInfo('path', 'sub/domain')
+ assert cat.po_file == 'sub/domain.po'
+ assert cat.mo_file == 'sub/domain.mo'
+ assert cat.po_path == path.join('path', 'sub/domain.po')
+ assert cat.mo_path == path.join('path', 'sub/domain.mo')
+
+
+@with_tempdir
+def test_catalog_outdated(dir):
+ (dir / 'test.po').write_text('#')
+ cat = i18n.CatalogInfo(dir, 'test')
+    assert cat.is_outdated()  # if mo does not exist
+
+ mo_file = (dir / 'test.mo')
+ mo_file.write_text('#')
+    assert not cat.is_outdated()  # if mo exists and is newer than po
+
+    os.utime(mo_file, (os.stat(mo_file).st_mtime - 10,) * 2)  # to make it outdated
+    assert cat.is_outdated()  # if mo exists and is older than po
+
+
+@with_tempdir
+def test_catalog_write_mo(dir):
+ (dir / 'test.po').write_text('#')
+ cat = i18n.CatalogInfo(dir, 'test')
+ cat.write_mo('en')
+ assert path.exists(cat.mo_path)
+ assert read_mo(open(cat.mo_path, 'rb')) is not None
+
+
+@with_tempdir
+def test_get_catalogs_for_xx(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'test6.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_ALL').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_ALL' / 'test7.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set([
+ 'test1',
+ 'test2',
+ path.normpath('sub/test4'),
+ path.normpath('sub/test5'),
+ ])
+
+
+@with_tempdir
+def test_get_catalogs_for_en(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'xx_dom.po').write_text('#')
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'en' / 'LC_MESSAGES' / 'en_dom.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'en', force_all=False)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set(['en_dom'])
+
+
+@with_tempdir
+def test_get_catalogs_with_non_existent_locale(dir):
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx')
+ assert not catalogs
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], None)
+ assert not catalogs
+
+
+def test_get_catalogs_with_non_existent_locale_dirs():
+ catalogs = i18n.get_catalogs(['dummy'], 'xx')
+ assert not catalogs
+
+
+@with_tempdir
+def test_get_catalogs_for_xx_without_outdated(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.pot').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test3.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.mo').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test5.mo').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=False)
+ assert not catalogs
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', force_all=True)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set([
+ 'test1',
+ 'test2',
+ path.normpath('sub/test4'),
+ path.normpath('sub/test5'),
+ ])
+
+
+@with_tempdir
+def test_get_catalogs_from_multiple_locale_dirs(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc2' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1', dir / 'loc2'], 'xx')
+ domains = sorted(c.domain for c in catalogs)
+ assert domains == ['test1', 'test1', 'test2']
+
+
+@with_tempdir
+def test_get_catalogs_with_compact(dir):
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test1.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'test2.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub').makedirs()
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test3.po').write_text('#')
+ (dir / 'loc1' / 'xx' / 'LC_MESSAGES' / 'sub' / 'test4.po').write_text('#')
+
+ catalogs = i18n.get_catalogs([dir / 'loc1'], 'xx', gettext_compact=True)
+ domains = set(c.domain for c in catalogs)
+ assert domains == set(['test1', 'test2', 'sub'])
diff --git a/tests/test_util_nodes.py b/tests/test_util_nodes.py
index 9ddc049d..a385245d 100644
--- a/tests/test_util_nodes.py
+++ b/tests/test_util_nodes.py
@@ -1,121 +1,121 @@
-# -*- coding: utf-8 -*-
-"""
- test_util_nodes
- ~~~~~~~~~~~~~~~
-
- Tests uti.nodes functions.
-
- :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-from textwrap import dedent
-
-from docutils import nodes
-from docutils.parsers import rst
-from docutils.utils import new_document
-from docutils import frontend
-
-from sphinx.util.nodes import extract_messages
-
-
-def _get_doctree(text):
- settings = frontend.OptionParser(
- components=(rst.Parser,)).get_default_values()
- document = new_document('dummy.txt', settings)
- rst.Parser().parse(text, document)
- return document
-
-
-def assert_node_count(messages, node_type, expect_count):
- count = 0
- node_list = [node for node, msg in messages]
- for node in node_list:
- if isinstance(node, node_type):
- count += 1
-
- assert count == expect_count, (
- "Count of %r in the %r is %d instead of %d"
- % (node_type, node_list, count, expect_count))
-
-
-def test_extract_messages():
- text = dedent(
- """
- .. admonition:: admonition title
-
- admonition body
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.title, 1,
- )
-
- text = dedent(
- """
- .. figure:: foo.jpg
-
- this is title
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.caption, 1,
- )
-
- text = dedent(
- """
- .. rubric:: spam
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.rubric, 1,
- )
-
-
- text = dedent(
- """
- | spam
- | egg
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
-
-
- text = dedent(
- """
- section
- =======
-
- +----------------+
- | | **Title 1** |
- | | Message 1 |
- +----------------+
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
-
-
- text = dedent(
- """
- * | **Title 1**
- | Message 1
- """
- )
- yield (
- assert_node_count,
- extract_messages(_get_doctree(text)),
- nodes.line, 2,
- )
+# -*- coding: utf-8 -*-
+"""
+ test_util_nodes
+ ~~~~~~~~~~~~~~~
+
+    Tests util.nodes functions.
+
+ :copyright: Copyright 2007-2014 by the Sphinx team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from textwrap import dedent
+
+from docutils import nodes
+from docutils.parsers import rst
+from docutils.utils import new_document
+from docutils import frontend
+
+from sphinx.util.nodes import extract_messages
+
+
+def _get_doctree(text):
+ settings = frontend.OptionParser(
+ components=(rst.Parser,)).get_default_values()
+ document = new_document('dummy.txt', settings)
+ rst.Parser().parse(text, document)
+ return document
+
+
+def assert_node_count(messages, node_type, expect_count):
+ count = 0
+ node_list = [node for node, msg in messages]
+ for node in node_list:
+ if isinstance(node, node_type):
+ count += 1
+
+ assert count == expect_count, (
+ "Count of %r in the %r is %d instead of %d"
+ % (node_type, node_list, count, expect_count))
+
+
+def test_extract_messages():
+ text = dedent(
+ """
+ .. admonition:: admonition title
+
+ admonition body
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.title, 1,
+ )
+
+ text = dedent(
+ """
+ .. figure:: foo.jpg
+
+ this is title
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.caption, 1,
+ )
+
+ text = dedent(
+ """
+ .. rubric:: spam
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.rubric, 1,
+ )
+
+
+ text = dedent(
+ """
+ | spam
+ | egg
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
+
+
+ text = dedent(
+ """
+ section
+ =======
+
+ +----------------+
+ | | **Title 1** |
+ | | Message 1 |
+ +----------------+
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
+
+
+ text = dedent(
+ """
+ * | **Title 1**
+ | Message 1
+ """
+ )
+ yield (
+ assert_node_count,
+ extract_messages(_get_doctree(text)),
+ nodes.line, 2,
+ )
diff --git a/tests/test_versioning.py b/tests/test_versioning.py
index bf2f65ee..bd8c697c 100644
--- a/tests/test_versioning.py
+++ b/tests/test_versioning.py
@@ -15,41 +15,47 @@ from docutils.parsers.rst.directives.html import MetaBody
from sphinx import addnodes
from sphinx.versioning import add_uids, merge_doctrees, get_ratio
-from sphinx.util.pycompat import all
-from util import test_root, TestApp
+from util import TestApp
app = original = original_uids = None
+
def setup_module():
global app, original, original_uids
- app = TestApp()
+ app = TestApp(testroot='versioning')
app.builder.env.app = app
app.connect('doctree-resolved', on_doctree_resolved)
app.build()
- original = doctrees['versioning/original']
+ original = doctrees['original']
original_uids = [n.uid for n in add_uids(original, is_paragraph)]
+
def teardown_module():
app.cleanup()
- (test_root / '_build').rmtree(True)
+
doctrees = {}
+
def on_doctree_resolved(app, doctree, docname):
doctrees[docname] = doctree
+
def is_paragraph(node):
return node.__class__.__name__ == 'paragraph'
+
def test_get_ratio():
assert get_ratio('', 'a')
assert get_ratio('a', '')
+
def test_add_uids():
assert len(original_uids) == 3
+
def test_picklablility():
# we have to modify the doctree so we can pickle it
copy = original.copy()
@@ -63,44 +69,50 @@ def test_picklablility():
loaded = pickle.loads(pickle.dumps(copy, pickle.HIGHEST_PROTOCOL))
assert all(getattr(n, 'uid', False) for n in loaded.traverse(is_paragraph))
+
def test_modified():
- modified = doctrees['versioning/modified']
+ modified = doctrees['modified']
new_nodes = list(merge_doctrees(original, modified, is_paragraph))
uids = [n.uid for n in modified.traverse(is_paragraph)]
assert not new_nodes
assert original_uids == uids
+
def test_added():
- added = doctrees['versioning/added']
+ added = doctrees['added']
new_nodes = list(merge_doctrees(original, added, is_paragraph))
uids = [n.uid for n in added.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids == uids[:-1]
+
def test_deleted():
- deleted = doctrees['versioning/deleted']
+ deleted = doctrees['deleted']
new_nodes = list(merge_doctrees(original, deleted, is_paragraph))
uids = [n.uid for n in deleted.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[::2] == uids
+
def test_deleted_end():
- deleted_end = doctrees['versioning/deleted_end']
+ deleted_end = doctrees['deleted_end']
new_nodes = list(merge_doctrees(original, deleted_end, is_paragraph))
uids = [n.uid for n in deleted_end.traverse(is_paragraph)]
assert not new_nodes
assert original_uids[:-1] == uids
+
def test_insert():
- insert = doctrees['versioning/insert']
+ insert = doctrees['insert']
new_nodes = list(merge_doctrees(original, insert, is_paragraph))
uids = [n.uid for n in insert.traverse(is_paragraph)]
assert len(new_nodes) == 1
assert original_uids[0] == uids[0]
assert original_uids[1:] == uids[2:]
+
def test_insert_beginning():
- insert_beginning = doctrees['versioning/insert_beginning']
+ insert_beginning = doctrees['insert_beginning']
new_nodes = list(merge_doctrees(original, insert_beginning, is_paragraph))
uids = [n.uid for n in insert_beginning.traverse(is_paragraph)]
assert len(new_nodes) == 1
@@ -108,8 +120,9 @@ def test_insert_beginning():
assert original_uids == uids[1:]
assert original_uids[0] != uids[0]
+
def test_insert_similar():
- insert_similar = doctrees['versioning/insert_similar']
+ insert_similar = doctrees['insert_similar']
new_nodes = list(merge_doctrees(original, insert_similar, is_paragraph))
uids = [n.uid for n in insert_similar.traverse(is_paragraph)]
assert len(new_nodes) == 1
diff --git a/tests/test_websupport.py b/tests/test_websupport.py
index 9376a2a2..7126e7d2 100644
--- a/tests/test_websupport.py
+++ b/tests/test_websupport.py
@@ -9,39 +9,30 @@
:license: BSD, see LICENSE for details.
"""
-import os
-from StringIO import StringIO
+from functools import wraps
-try:
- from functools import wraps
-except ImportError:
- # functools is new in 2.5
- wraps = lambda f: (lambda w: w)
+from six import StringIO
from sphinx.websupport import WebSupport
from sphinx.websupport.errors import DocumentNotFoundError, \
- CommentNotAllowedError, UserNotAuthorizedError
+ CommentNotAllowedError, UserNotAuthorizedError
from sphinx.websupport.storage import StorageBackend
from sphinx.websupport.storage.differ import CombinedHtmlDiff
try:
from sphinx.websupport.storage.sqlalchemystorage import Session, \
- Comment, CommentVote
+ Comment, CommentVote
from sphinx.websupport.storage.sqlalchemy_db import Node
sqlalchemy_missing = False
except ImportError:
sqlalchemy_missing = True
-from util import test_root, raises, skip_if
+from util import rootdir, tempdir, raises, skip_if
-default_settings = {'builddir': os.path.join(test_root, 'websupport'),
+default_settings = {'builddir': tempdir / 'websupport',
'status': StringIO(),
'warning': StringIO()}
-def teardown_module():
- (test_root / 'generated').rmtree(True)
- (test_root / 'websupport').rmtree(True)
-
def with_support(*args, **kwargs):
"""Make a WebSupport object and pass it the test."""
@@ -63,12 +54,12 @@ class NullStorage(StorageBackend):
@with_support(storage=NullStorage())
def test_no_srcdir(support):
- """Make sure the correct exception is raised if srcdir is not given."""
+ # make sure the correct exception is raised if srcdir is not given.
raises(RuntimeError, support.build)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
-@with_support(srcdir=test_root)
+@with_support(srcdir=rootdir / 'root')
def test_build(support):
support.build()
@@ -177,9 +168,9 @@ def test_proposals(support):
source = data['source']
proposal = source[:5] + source[10:15] + 'asdf' + source[15:]
- comment = support.add_comment('Proposal comment',
- node_id=node.id,
- proposal=proposal)
+ support.add_comment('Proposal comment',
+ node_id=node.id,
+ proposal=proposal)
@skip_if(sqlalchemy_missing, 'needs sqlalchemy')
@@ -238,6 +229,8 @@ def test_update_username(support):
called = False
+
+
def moderation_callback(comment):
global called
called = True
@@ -255,7 +248,7 @@ def test_moderation(support):
deleted = support.add_comment('Comment to delete', node_id=node.id,
displayed=False)
# Make sure the moderation_callback is called.
- assert called == True
+ assert called
# Make sure the user must be a moderator.
raises(UserNotAuthorizedError, support.accept_comment, accepted['id'])
raises(UserNotAuthorizedError, support.delete_comment, deleted['id'])
diff --git a/tests/util.py b/tests/util.py
index 61c9bd0d..e184c28d 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -7,39 +7,43 @@
:license: BSD, see LICENSE for details.
"""
+import os
+import re
import sys
-import StringIO
import tempfile
-import shutil
-import re
-from codecs import open
+from functools import wraps
-try:
- from functools import wraps
-except ImportError:
- # functools is new in 2.4
- wraps = lambda f: (lambda w: w)
+from six import StringIO
+
+from nose import tools, SkipTest
from sphinx import application
+from sphinx.builders.latex import LaTeXBuilder
from sphinx.theming import Theme
from sphinx.ext.autodoc import AutoDirective
+from sphinx.pycode import ModuleAnalyzer
from path import path
-from nose import tools, SkipTest
+try:
+ # Python >=3.3
+ from unittest import mock
+except ImportError:
+ import mock
__all__ = [
- 'test_root', 'test_roots', 'raises', 'raises_msg',
+ 'rootdir', 'tempdir', 'raises', 'raises_msg',
'skip_if', 'skip_unless', 'skip_unless_importable', 'Struct',
'ListOutput', 'TestApp', 'with_app', 'gen_with_app',
- 'path', 'with_tempdir', 'write_file',
+ 'path', 'with_tempdir',
'sprint', 'remove_unicode_literals',
+ 'mock',
]
-test_root = path(__file__).parent.joinpath('root').abspath()
-test_roots = path(__file__).parent.joinpath('roots').abspath()
+rootdir = path(os.path.dirname(__file__) or '.').abspath()
+tempdir = path(os.environ['SPHINX_TEST_TEMPDIR']).abspath()
def _excstr(exc):
@@ -47,11 +51,9 @@ def _excstr(exc):
return str(tuple(map(_excstr, exc)))
return exc.__name__
+
def raises(exc, func, *args, **kwds):
- """
- Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
- raise *exc*.
- """
+ """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*."""
try:
func(*args, **kwds)
except exc:
@@ -60,19 +62,45 @@ def raises(exc, func, *args, **kwds):
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
+
def raises_msg(exc, msg, func, *args, **kwds):
- """
- Raise :exc:`AssertionError` if ``func(*args, **kwds)`` does not
- raise *exc*, and check if the message contains *msg*.
+ """Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*,
+ and check if the message contains *msg*.
"""
try:
func(*args, **kwds)
- except exc, err:
+ except exc as err:
assert msg in str(err), "\"%s\" not in \"%s\"" % (msg, err)
else:
raise AssertionError('%s did not raise %s' %
(func.__name__, _excstr(exc)))
+
+def assert_re_search(regex, text, flags=0):
+ if not re.search(regex, text, flags):
+ assert False, '%r did not match %r' % (regex, text)
+
+
+def assert_not_re_search(regex, text, flags=0):
+ if re.search(regex, text, flags):
+ assert False, '%r did match %r' % (regex, text)
+
+
+def assert_startswith(thing, prefix):
+ if not thing.startswith(prefix):
+ assert False, '%r does not start with %r' % (thing, prefix)
+
+
+def assert_in(x, thing):
+ if x not in thing:
+ assert False, '%r is not in %r' % (x, thing)
+
+
+def assert_not_in(x, thing):
+ if x in thing:
+ assert False, '%r is in %r' % (x, thing)
+
+
def skip_if(condition, msg=None):
"""Decorator to skip test if condition is true."""
def deco(test):
@@ -84,10 +112,12 @@ def skip_if(condition, msg=None):
return skipper
return deco
+
def skip_unless(condition, msg=None):
"""Decorator to skip test if condition is false."""
return skip_if(not condition, msg)
+
def skip_unless_importable(module, msg=None):
"""Decorator to skip test if module is not importable."""
try:
@@ -124,57 +154,47 @@ class TestApp(application.Sphinx):
better default values for the initialization parameters.
"""
- def __init__(self, srcdir=None, confdir=None, outdir=None, doctreedir=None,
- buildername='html', confoverrides=None,
- status=None, warning=None, freshenv=None,
- warningiserror=None, tags=None,
- confname='conf.py', cleanenv=False):
-
- application.CONFIG_FILENAME = confname
-
- self.cleanup_trees = [test_root / 'generated']
-
+ def __init__(self, buildername='html', testroot=None, srcdir=None,
+ freshenv=False, confoverrides=None, status=None, warning=None,
+ tags=None, docutilsconf=None):
+ if testroot is None:
+ defaultsrcdir = 'root'
+ testroot = rootdir / 'root'
+ else:
+ defaultsrcdir = 'test-' + testroot
+ testroot = rootdir / 'roots' / ('test-' + testroot)
if srcdir is None:
- srcdir = test_root
- if srcdir == '(temp)':
- tempdir = path(tempfile.mkdtemp())
- self.cleanup_trees.append(tempdir)
- temproot = tempdir / 'root'
- test_root.copytree(temproot)
- srcdir = temproot
- elif srcdir == '(empty)':
- tempdir = path(tempfile.mkdtemp())
- self.cleanup_trees.append(tempdir)
- temproot = tempdir / 'root'
- temproot.makedirs()
- (temproot / 'conf.py').write_text('')
- srcdir = temproot
+ srcdir = tempdir / defaultsrcdir
else:
- srcdir = path(srcdir)
- self.builddir = srcdir.joinpath('_build')
- if confdir is None:
- confdir = srcdir
- if outdir is None:
- outdir = srcdir.joinpath(self.builddir, buildername)
- if not outdir.isdir():
- outdir.makedirs()
- self.cleanup_trees.insert(0, outdir)
- if doctreedir is None:
- doctreedir = srcdir.joinpath(srcdir, self.builddir, 'doctrees')
- if not doctreedir.isdir():
- doctreedir.makedirs()
- if cleanenv:
- self.cleanup_trees.insert(0, doctreedir)
+ srcdir = tempdir / srcdir
+
+ if not srcdir.exists():
+ testroot.copytree(srcdir)
+
+ if docutilsconf is not None:
+ (srcdir / 'docutils.conf').write_text(docutilsconf)
+
+ builddir = srcdir / '_build'
+# if confdir is None:
+ confdir = srcdir
+# if outdir is None:
+ outdir = builddir.joinpath(buildername)
+ if not outdir.isdir():
+ outdir.makedirs()
+# if doctreedir is None:
+ doctreedir = builddir.joinpath('doctrees')
+ if not doctreedir.isdir():
+ doctreedir.makedirs()
if confoverrides is None:
confoverrides = {}
if status is None:
- status = StringIO.StringIO()
+ status = StringIO()
if warning is None:
warning = ListOutput('stderr')
- if freshenv is None:
- freshenv = False
- if warningiserror is None:
- warningiserror = False
+# if warningiserror is None:
+ warningiserror = False
+
+ self._saved_path = sys.path[:]
application.Sphinx.__init__(self, srcdir, confdir, outdir, doctreedir,
buildername, confoverrides, status, warning,
@@ -183,8 +203,10 @@ class TestApp(application.Sphinx):
def cleanup(self, doctrees=False):
Theme.themes.clear()
AutoDirective._registry.clear()
- for tree in self.cleanup_trees:
- shutil.rmtree(tree, True)
+ ModuleAnalyzer.cache.clear()
+ LaTeXBuilder.usepackages = []
+ sys.path[:] = self._saved_path
+ sys.modules.pop('autodoc_fodder', None)
def __repr__(self):
return '<%s buildername=%r>' % (self.__class__.__name__, self.builder.name)
@@ -198,10 +220,14 @@ def with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
+ status, warning = StringIO(), StringIO()
+ kwargs['status'] = status
+ kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
- func(app, *args2, **kwargs2)
- # don't execute cleanup if test failed
- app.cleanup()
+ try:
+ func(app, status, warning, *args2, **kwargs2)
+ finally:
+ app.cleanup()
return deco
return generator
@@ -214,39 +240,41 @@ def gen_with_app(*args, **kwargs):
def generator(func):
@wraps(func)
def deco(*args2, **kwargs2):
+ status, warning = StringIO(), StringIO()
+ kwargs['status'] = status
+ kwargs['warning'] = warning
app = TestApp(*args, **kwargs)
- for item in func(app, *args2, **kwargs2):
- yield item
- # don't execute cleanup if test failed
- app.cleanup()
+ try:
+ for item in func(app, status, warning, *args2, **kwargs2):
+ yield item
+ finally:
+ app.cleanup()
return deco
return generator
def with_tempdir(func):
def new_func(*args, **kwds):
- tempdir = path(tempfile.mkdtemp())
- func(tempdir, *args, **kwds)
- tempdir.rmtree()
+ new_tempdir = path(tempfile.mkdtemp(dir=tempdir))
+ func(new_tempdir, *args, **kwds)
new_func.__name__ = func.__name__
return new_func
-def write_file(name, contents, encoding=None):
- if encoding is None:
- mode = 'wb'
- if isinstance(contents, unicode):
- contents = contents.encode('ascii')
- else:
- mode = 'w'
- f = open(str(name), mode, encoding=encoding)
- f.write(contents)
- f.close()
-
-
def sprint(*args):
sys.stderr.write(' '.join(map(str, args)) + '\n')
+
_unicode_literals_re = re.compile(r'u(".*?")|u(\'.*?\')')
+
+
def remove_unicode_literals(s):
return _unicode_literals_re.sub(lambda x: x.group(1) or x.group(2), s)
+
+
+def find_files(root, suffix=None):
+ for dirpath, dirs, files in os.walk(root, followlinks=True):
+ dirpath = path(dirpath)
+ for f in [f for f in files if not suffix or f.endswith(suffix)]:
+ fpath = dirpath / f
+ yield os.path.relpath(fpath, root)