# tests/test_pycco.py
from __future__ import absolute_import

import copy
import os
import os.path
import tempfile
import time

import pytest

import pycco.generate_index as generate_index
import pycco.main as p
from hypothesis import assume, example, given
from hypothesis.strategies import booleans, lists, none, text, sampled_from, data
from pycco.languages import supported_languages

try:
    from unittest.mock import patch
except ImportError:
    from mock import patch


PYTHON = supported_languages['.py']
PYCCO_SOURCE = 'pycco/main.py'
FOO_FUNCTION = """def foo():\n    return True"""


def get_language(data):
    return data.draw(sampled_from(list(supported_languages.values())))


@given(lists(text()), text())
def test_shift(fragments, default):
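    """shift() should pop and return the first fragment, or return the
    default when the list is empty."""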
    if fragments == []:
        assert p.shift(fragments, default) == default
    else:
        fragments2 = copy.copy(fragments)
        head = p.shift(fragments, default)
        assert [head] + fragments == fragments2


@given(text(), booleans(), text(min_size=1))
@example("/foo", True, "0")
def test_destination(filepath, preserve_paths, outdir):
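    """destination() should map any source path to an .html file under outdir."""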
    dest = p.destination(
        filepath, preserve_paths=preserve_paths, outdir=outdir)
    assert dest.startswith(outdir)
    assert dest.endswith(".html")


@given(data(), text())
def test_parse(data, source):
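    """Every section returned by parse() should have exactly the keys
    'code_text' and 'docs_text'."""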
    lang = get_language(data)
    parsed = p.parse(source, lang)
    for s in parsed:
        assert {"code_text", "docs_text"} == set(s.keys())


def test_skip_coding_directive():
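    """The PEP 263 coding directive should not appear in the generated code sections."""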
    source = "# -*- coding: utf-8 -*-\n" + FOO_FUNCTION
    parsed = p.parse(source, PYTHON)
    for section in parsed:
        assert "coding" not in section['code_text']


def test_multi_line_leading_spaces():
    source = "# This is a\n# comment that\n# is indented\n"
    source += FOO_FUNCTION
    parsed = p.parse(source, PYTHON)
    # The resulting comment has leading spaces stripped out.
    assert parsed[0]["docs_text"] == "This is a\ncomment that\nis indented\n"


def test_comment_with_only_cross_ref():
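    """A [[file#anchor]] cross-reference in a docstring should render as an HTML link."""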
    source = (
        '''# ==Link Target==\n\ndef test_link():\n    """[[testing.py#link-target]]"""\n    pass'''
    )
    sections = p.parse(source, PYTHON)
    p.highlight(sections, PYTHON, outdir=tempfile.gettempdir())
    expected = '<p><a href="testing.html#link-target">testing.py</a></p>'
    assert sections[1]['docs_html'] == expected


@given(text(), text())
def test_get_language_specify_language(source, code):
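    """An explicit language_name should override detection; an unknown
    name should raise ValueError."""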
    assert p.get_language(
        source, code, language_name="python") == supported_languages['.py']

    with pytest.raises(ValueError):
        p.get_language(source, code, language_name="non-existent")


@given(text() | none())
def test_get_language_bad_source(source):
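    """The language should be inferred from the code when the source name
    is unhelpful; undetectable code should raise ValueError."""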
    code = "#!/usr/bin/python\n"
    code += FOO_FUNCTION
    assert p.get_language(source, code) == PYTHON
    with pytest.raises(ValueError) as e:
        p.get_language(source, "badlang")

    msg = "Can't figure out the language!"
    try:
        assert e.value.message == msg
    except AttributeError:
        assert e.value.args[0] == msg


@given(text() | none())
def test_get_language_bad_code(code):
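    """A .py filename should map to Python regardless of the code contents."""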
    source = "test.py"
    assert p.get_language(source, code) == PYTHON


@given(text(max_size=64))
def test_ensure_directory(dir_name):
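    """ensure_directory() should create the requested directory tree."""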
    tempdir = os.path.join(tempfile.gettempdir(),
                           str(int(time.time())), dir_name)

    # Use sanitization from function, but only for housekeeping. We
    # pass in the unsanitized string to the function.
    safe_name = p.remove_control_chars(dir_name)

    if not os.path.isdir(safe_name) and os.access(safe_name, os.W_OK):
        p.ensure_directory(tempdir)
        assert os.path.isdir(safe_name)


def test_ensure_multiline_string_support():
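    """Multi-line strings should not be treated as comments or documentation."""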
    code = '''x = """
multi-line-string
"""

y = z  # comment

# *comment with formatting*

def x():
    """multi-line-string
    """'''

    docs_code_tuple_list = p.parse(code, PYTHON)

    assert docs_code_tuple_list[0]['docs_text'] == ''
    assert "#" not in docs_code_tuple_list[1]['docs_text']


def test_indented_block():
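    """An indented block inside a doc comment should render as a <pre> block."""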

    code = '''"""To install Pycco, simply

    pip install pycco
"""
'''
    parsed = p.parse(code, PYTHON)
    highlighted = p.highlight(parsed, PYTHON, outdir=tempfile.gettempdir())
    pre_block = highlighted[0]['docs_html']
    assert '<pre>' in pre_block
    assert '</pre>' in pre_block


def test_generate_documentation():
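    """Smoke test: documenting pycco's own source should not raise."""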
    p.generate_documentation(PYCCO_SOURCE, outdir=tempfile.gettempdir())


@given(booleans(), booleans(), data())
def test_process(preserve_paths, index, data):
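    """process() should run end to end for any supported language, with or
    without preserved paths and index generation."""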
    lang_name = data.draw(
        sampled_from([lang["name"] for lang in supported_languages.values()]))
    p.process([PYCCO_SOURCE], preserve_paths=preserve_paths,
              index=index,
              outdir=tempfile.gettempdir(),
              language=lang_name)


@patch('pygments.lexers.guess_lexer')
def test_process_skips_unknown_languages(mock_guess_lexer):
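    """Sources in an unrecognized language should raise ValueError unless skip=True."""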
    class Name:
        name = 'this language does not exist'
    mock_guess_lexer.return_value = Name()

    with pytest.raises(ValueError):
        p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=False)

    p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=True)


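# Hypothesis strategies for building fake file paths: non-empty path
# components and non-empty lists of them.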
one_or_more_chars = text(min_size=1, max_size=255)
paths = lists(one_or_more_chars, min_size=1, max_size=30)


@given(
    lists(paths, min_size=1, max_size=255),
    lists(one_or_more_chars, min_size=1, max_size=255)
)
def test_generate_index(path_lists, outdir_list):
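    """generate_index() should handle arbitrary non-empty file paths and
    output directories."""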
    file_paths = [os.path.join(*path_list) for path_list in path_lists]
    outdir = os.path.join(*outdir_list)
    generate_index.generate_index(file_paths, outdir=outdir)


def test_flatten_sources(tmpdir):
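    """_flatten_sources() should expand a directory into the files it
    contains, including files in subdirectories."""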
    sources = [str(tmpdir)]
    expected_sources = []

    # Setup the base dir
    td = tmpdir.join("test.py")
    td.write("#!/bin/env python")
    expected_sources.append(str(td))

    # Make some more directories, each with a file present
    for d in ["foo", "bar", "buzz"]:
        dd = tmpdir.mkdir(d)
        dummy_file = dd.join("test.py")
        dummy_file.write("#!/bin/env python")
        expected_sources.append(str(dummy_file))

    # Get the flattened version of the base directory
    flattened = p._flatten_sources(sources)

    # Make sure that the lists are the same
    assert sorted(expected_sources) == sorted(flattened)