"""
Test runner for the JSON Schema official test suite

Tests comprehensive correctness of each draft's validator.

See https://github.com/json-schema/JSON-Schema-Test-Suite for details.

"""

from decimal import Decimal
import glob
import io
import itertools
import json
import os
import re
import subprocess
import sys

try:
    from sys import pypy_version_info
except ImportError:
    pypy_version_info = None

from jsonschema import (
    FormatError, SchemaError, ValidationError, Draft3Validator,
    Draft4Validator, FormatChecker, draft3_format_checker,
    draft4_format_checker, validate,
)
from jsonschema.compat import PY3
from jsonschema.tests.compat import mock, unittest
import jsonschema


REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir)
SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json"))

if not os.path.isdir(SUITE):
    raise ValueError(
        "Can't find the JSON-Schema-Test-Suite directory. Set the "
        "'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from "
        "alongside a checkout of the suite."
    )

TESTS_DIR = os.path.join(SUITE, "tests")
JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite")

REMOTES = subprocess.Popen(
    [sys.executable, JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE,
).stdout
if PY3:
    REMOTES = io.TextIOWrapper(REMOTES)
REMOTES = json.load(REMOTES)
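# ``REMOTES`` maps the relative paths requested by the suite's refRemote tests
# (everything after http://localhost:1234/) to the schemas expected to live
# there; ``RemoteRefResolutionMixin`` below serves them from a mocked
# ``requests``.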


def make_case(schema, data, valid, name):
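    """
    Build a test method asserting that ``data`` does (or does not) validate
    against ``schema``, using the test class's ``validator_class`` and any
    ``validator_kwargs`` it defines.
    """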
    if valid:
        def test_case(self):
            kwargs = getattr(self, "validator_kwargs", {})
            validate(data, schema, cls=self.validator_class, **kwargs)
    else:
        def test_case(self):
            kwargs = getattr(self, "validator_kwargs", {})
            with self.assertRaises(ValidationError):
                validate(data, schema, cls=self.validator_class, **kwargs)

    if not PY3:
        name = name.encode("utf-8")
    test_case.__name__ = name

    return test_case


def maybe_skip(skip, test, case):
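    """
    Wrap ``test`` with ``unittest.skip`` if ``skip`` returns a reason for
    skipping this ``case``.
    """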
    if skip is not None:
        reason = skip(case)
        if reason is not None:
            test = unittest.skip(reason)(test)
    return test


def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None):
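    """
    Build a class decorator which attaches one test method per test found in
    the suite files matching ``tests_glob`` (minus ``ignore_glob``), named
    after the file, a running counter, and the test's description. ``skip``
    may return a reason for skipping a given case.
    """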
    if ignore_glob:
        ignore_glob = os.path.join(basedir, ignore_glob)

    def add_test_methods(test_class):
        ignored = set(glob.iglob(ignore_glob))

        for filename in glob.iglob(os.path.join(basedir, tests_glob)):
            if filename in ignored:
                continue

            validating, _ = os.path.splitext(os.path.basename(filename))
            id = itertools.count(1)

            with open(filename) as test_file:
                for case in json.load(test_file):
                    for test in case["tests"]:
                        name = "test_%s_%s_%s" % (
                            validating,
                            next(id),
                            re.sub(r"[\W ]+", "_", test["description"]),
                        )
                        assert not hasattr(test_class, name), name

                        test_case = make_case(
                            data=test["data"],
                            schema=case["schema"],
                            valid=test["valid"],
                            name=name,
                        )
                        test_case = maybe_skip(skip, test_case, case)
                        setattr(test_class, name, test_case)

        return test_class
    return add_test_methods


class TypesMixin(object):
    @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
    def test_string_a_bytestring_is_a_string(self):
        self.validator_class({"type" : "string"}).validate(b"foo")


class DecimalMixin(object):
    def test_it_can_validate_with_decimals(self):
        schema = {"type" : "number"}
        validator = self.validator_class(
            schema, types={"number" : (int, float, Decimal)}
        )

        for valid in [1, 1.1, Decimal(1) / Decimal(8)]:
            validator.validate(valid)

        for invalid in ["foo", {}, [], True, None]:
            with self.assertRaises(ValidationError):
                validator.validate(invalid)


def missing_format(checker):
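    """
    Build a ``skip`` callable for ``load_json_cases`` which skips any case
    whose ``format`` has no checker registered on ``checker`` (plus a
    workaround for broken date-time handling on old PyPy versions).
    """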
    def missing_format(case):
        format = case["schema"].get("format")
        if format not in checker.checkers:
            return "Format checker {0!r} not found.".format(format)
        elif (
            format == "date-time" and
            pypy_version_info is not None and
            pypy_version_info[:2] <= (1, 9)
        ):
            # datetime.datetime is overzealous about typechecking in <=1.9
            return "datetime.datetime is broken on this version of PyPy."
    return missing_format


class FormatMixin(object):
    def test_it_returns_true_for_formats_it_does_not_know_about(self):
        validator = self.validator_class(
            {"format" : "carrot"}, format_checker=FormatChecker(),
        )
        validator.validate("bugs")

    def test_it_does_not_validate_formats_by_default(self):
        validator = self.validator_class({})
        self.assertIsNone(validator.format_checker)

    def test_it_validates_formats_if_a_checker_is_provided(self):
        checker = mock.Mock(spec=FormatChecker)
        validator = self.validator_class(
            {"format" : "foo"}, format_checker=checker,
        )

        validator.validate("bar")

        checker.check.assert_called_once_with("bar", "foo")

        cause = ValueError()
        checker.check.side_effect = FormatError('aoeu', cause=cause)

        with self.assertRaises(ValidationError) as cm:
            validator.validate("bar")
        # Make sure original cause is attached
        self.assertIs(cm.exception.cause, cause)

    def test_it_validates_formats_of_any_type(self):
        checker = mock.Mock(spec=FormatChecker)
        validator = self.validator_class(
            {"format" : "foo"}, format_checker=checker,
        )

        validator.validate([1, 2, 3])

        checker.check.assert_called_once_with([1, 2, 3], "foo")

        cause = ValueError()
        checker.check.side_effect = FormatError('aoeu', cause=cause)

        with self.assertRaises(ValidationError) as cm:
            validator.validate([1, 2, 3])
        # Make sure original cause is attached
        self.assertIs(cm.exception.cause, cause)


@load_json_cases("draft3/*.json", ignore_glob="draft3/refRemote.json")
@load_json_cases(
    "draft3/optional/format.json", skip=missing_format(draft3_format_checker)
)
@load_json_cases("draft3/optional/bignum.json")
@load_json_cases("draft3/optional/zeroTerminatedFloats.json")
class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
    validator_class = Draft3Validator
    validator_kwargs = {"format_checker" : draft3_format_checker}

    def test_any_type_is_valid_for_type_any(self):
        validator = self.validator_class({"type" : "any"})
        validator.validate(mock.Mock())

    # TODO: we're in need of more meta schema tests
    def test_invalid_properties(self):
        with self.assertRaises(SchemaError):
            validate({}, {"properties": {"test": True}},
                     cls=self.validator_class)

    def test_minItems_invalid_string(self):
        with self.assertRaises(SchemaError):
            # needs to be an integer
            validate([1], {"minItems" : "1"}, cls=self.validator_class)


@load_json_cases("draft4/*.json", ignore_glob="draft4/refRemote.json")
@load_json_cases(
    "draft4/optional/format.json", skip=missing_format(draft4_format_checker)
)
@load_json_cases("draft4/optional/bignum.json")
@load_json_cases("draft4/optional/zeroTerminatedFloats.json")
class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
    validator_class = Draft4Validator
    validator_kwargs = {"format_checker" : draft4_format_checker}

    # TODO: we're in need of more meta schema tests
    def test_invalid_properties(self):
        with self.assertRaises(SchemaError):
            validate({}, {"properties": {"test": True}},
                     cls=self.validator_class)

    def test_minItems_invalid_string(self):
        with self.assertRaises(SchemaError):
            # needs to be an integer
            validate([1], {"minItems" : "1"}, cls=self.validator_class)


class RemoteRefResolutionMixin(object):
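    """
    Patch ``requests`` so that remote ``$ref`` lookups against
    http://localhost:1234/ are answered from the ``REMOTES`` mapping instead
    of the network.
    """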
    def setUp(self):
        patch = mock.patch("jsonschema.validators.requests")
        requests = patch.start()
        requests.get.side_effect = self.resolve
        self.addCleanup(patch.stop)

    def resolve(self, reference):
        _, _, reference = reference.partition("http://localhost:1234/")
        return mock.Mock(**{"json.return_value" : REMOTES.get(reference)})


@load_json_cases("draft3/refRemote.json")
class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
    validator_class = Draft3Validator


@load_json_cases("draft4/refRemote.json")
class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
    validator_class = Draft4Validator