summaryrefslogtreecommitdiff
path: root/docs
diff options
context:
space:
mode:
Diffstat (limited to 'docs')
-rw-r--r--docs/Makefile227
-rw-r--r--docs/api/index.rst24
-rw-r--r--docs/api/jsonschema/exceptions/index.rst6
-rw-r--r--docs/api/jsonschema/protocols/index.rst6
-rw-r--r--docs/api/jsonschema/validators/index.rst7
-rw-r--r--docs/conf.py136
-rw-r--r--docs/creating.rst38
-rw-r--r--docs/errors.rst412
-rw-r--r--docs/faq.rst263
-rw-r--r--docs/index.rst24
-rw-r--r--docs/make.bat190
-rw-r--r--docs/referencing.rst375
-rw-r--r--docs/requirements.in10
-rw-r--r--docs/requirements.txt137
-rw-r--r--docs/spelling-wordlist.txt59
-rw-r--r--docs/validate.rst306
16 files changed, 2220 insertions, 0 deletions
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..f6315df
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,227 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+PYTHON = python
+PAPER =
+BUILDDIR = _build
+SOURCEDIR = $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SOURCEDIR)
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " applehelp to make an Apple Help Book"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " epub3 to make an epub3"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation"
+ @echo " coverage to run coverage check of the documentation (if enabled)"
+ @echo " spelling to run a spell check of the documentation"
+ @echo " dummy to check syntax errors of document sources"
+
+.PHONY: clean
+clean:
+ rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+ $(PYTHON) -m sphinx -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+ $(PYTHON) -m sphinx -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+ $(PYTHON) -m sphinx -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: json
+json:
+ $(PYTHON) -m sphinx -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+ $(PYTHON) -m sphinx -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+ $(PYTHON) -m sphinx -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/jsonschema.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/jsonschema.qhc"
+
+.PHONY: applehelp
+applehelp:
+ $(PYTHON) -m sphinx -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+ @echo
+ @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+ @echo "N.B. You won't be able to view it unless you put it in" \
+ "~/Library/Documentation/Help or install it in your application" \
+ "bundle."
+
+.PHONY: devhelp
+devhelp:
+ $(PYTHON) -m sphinx -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/jsonschema"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/jsonschema"
+ @echo "# devhelp"
+
+.PHONY: epub
+epub:
+ $(PYTHON) -m sphinx -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: epub3
+epub3:
+ $(PYTHON) -m sphinx -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
+ @echo
+ @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
+
+.PHONY: latex
+latex:
+ $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+ $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+latexpdfja:
+ $(PYTHON) -m sphinx -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "dvipdfmx finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+ $(PYTHON) -m sphinx -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+ $(PYTHON) -m sphinx -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+ $(PYTHON) -m sphinx -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+info:
+ $(PYTHON) -m sphinx -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+	$(MAKE) -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+ $(PYTHON) -m sphinx -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+ $(PYTHON) -m sphinx -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+ $(PYTHON) -m sphinx -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+ $(PYTHON) -m sphinx -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+ $(PYTHON) -m sphinx -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+ @echo "Testing of coverage in the sources finished, look at the " \
+ "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+ $(PYTHON) -m sphinx -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+ $(PYTHON) -m sphinx -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: spelling
+spelling:
+ $(PYTHON) -m sphinx -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling
+ @echo
+ @echo "Build finished. The spelling files are in $(BUILDDIR)/spelling."
+
+.PHONY: dummy
+dummy:
+ $(PYTHON) -m sphinx -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
+ @echo
+ @echo "Build finished. Dummy builder generates no files."
diff --git a/docs/api/index.rst b/docs/api/index.rst
new file mode 100644
index 0000000..6624e83
--- /dev/null
+++ b/docs/api/index.rst
@@ -0,0 +1,24 @@
+API Reference
+=============
+
+Submodules
+----------
+
+.. toctree::
+ :titlesonly:
+
+ /api/jsonschema/validators/index
+ /api/jsonschema/exceptions/index
+ /api/jsonschema/protocols/index
+
+:mod:`jsonschema`
+-----------------
+
+.. automodule:: jsonschema
+ :members:
+ :imported-members:
+ :exclude-members: Validator
+
+.. autodata:: jsonschema._format._F
+
+.. autodata:: jsonschema._typing.id_of
diff --git a/docs/api/jsonschema/exceptions/index.rst b/docs/api/jsonschema/exceptions/index.rst
new file mode 100644
index 0000000..8fb1f4f
--- /dev/null
+++ b/docs/api/jsonschema/exceptions/index.rst
@@ -0,0 +1,6 @@
+:py:mod:`jsonschema.exceptions`
+===============================
+
+.. automodule:: jsonschema.exceptions
+ :members:
+ :undoc-members:
diff --git a/docs/api/jsonschema/protocols/index.rst b/docs/api/jsonschema/protocols/index.rst
new file mode 100644
index 0000000..195dbee
--- /dev/null
+++ b/docs/api/jsonschema/protocols/index.rst
@@ -0,0 +1,6 @@
+:py:mod:`jsonschema.protocols`
+==============================
+
+.. automodule:: jsonschema.protocols
+ :members:
+ :undoc-members:
diff --git a/docs/api/jsonschema/validators/index.rst b/docs/api/jsonschema/validators/index.rst
new file mode 100644
index 0000000..13a9991
--- /dev/null
+++ b/docs/api/jsonschema/validators/index.rst
@@ -0,0 +1,7 @@
+:py:mod:`jsonschema.validators`
+===============================
+
+.. automodule:: jsonschema.validators
+ :members:
+ :undoc-members:
+ :private-members: _RefResolver
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..19d734f
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,136 @@
+from pathlib import Path
+import importlib.metadata
+import re
+
+ROOT = Path(__file__).parent.parent
+PACKAGE_SRC = ROOT / "jsonschema"
+
+project = "jsonschema"
+author = "Julian Berman"
+copyright = "2013, " + author
+
+release = importlib.metadata.version("jsonschema")
+version = release.partition("-")[0]
+
+language = "en"
+default_role = "any"
+
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosectionlabel",
+ "sphinx.ext.coverage",
+ "sphinx.ext.doctest",
+ "sphinx.ext.imgconverter",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.viewcode",
+ "sphinx_copybutton",
+ "sphinx_json_schema_spec",
+ "sphinxcontrib.spelling",
+ "sphinxext.opengraph",
+]
+
+cache_path = "_cache"
+
+pygments_style = "lovelace"
+pygments_dark_style = "one-dark"
+
+html_theme = "furo"
+
+# See sphinx-doc/sphinx#10785
+_TYPE_ALIASES = {
+ "jsonschema._format._F": ("data", "_F"),
+ "_typing.id_of": ("data", "jsonschema._typing.id_of"),
+}
+
+
+def _resolve_broken_refs(app, env, node, contnode):
+ if node["refdomain"] != "py":
+ return
+
+ if node["reftarget"].startswith("referencing."): # :( :( :( :( :(
+ node["reftype"] = "data"
+ from sphinx.ext import intersphinx
+ return intersphinx.resolve_reference_in_inventory(
+ env, "referencing", node, contnode,
+ )
+
+ kind, target = _TYPE_ALIASES.get(node["reftarget"], (None, None))
+ if kind is not None:
+ return app.env.get_domain("py").resolve_xref(
+ env,
+ node["refdoc"],
+ app.builder,
+ kind,
+ target,
+ node,
+ contnode,
+ )
+
+
+def setup(app):
+ app.connect("missing-reference", _resolve_broken_refs)
+
+
+# = Builders =
+
+doctest_global_setup = """
+from jsonschema import *
+from jsonschema import exceptions
+import jsonschema.validators
+"""
+
+
+def entire_domain(host):
+ return r"http.?://" + re.escape(host) + r"($|/.*)"
+
+
+linkcheck_ignore = [
+ entire_domain("img.shields.io"),
+ "https://github.com/python-jsonschema/jsonschema/actions",
+ "https://github.com/python-jsonschema/jsonschema/workflows/CI/badge.svg",
+]
+
+# = Extensions =
+
+# -- autoapi --
+
+suppress_warnings = [
+ "autoapi.python_import_resolution",
+ "autoapi.toc_reference",
+ "epub.duplicated_toc_entry",
+]
+autoapi_root = "api"
+autoapi_ignore = [
+ "*/_[a-z]*.py",
+ "*/__main__.py",
+ "*/benchmarks/*",
+ "*/cli.py",
+ "*/tests/*",
+]
+autoapi_options = [
+ "members",
+ "undoc-members",
+ "show-module-summary",
+ "imported-members",
+]
+
+autoapi_type = "python"
+autoapi_dirs = [PACKAGE_SRC]
+
+# -- autosectionlabel --
+
+autosectionlabel_prefix_document = True
+
+# -- intersphinx --
+
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "referencing": ("https://referencing.readthedocs.io/en/stable/", None),
+ "ujs": ("https://json-schema.org/understanding-json-schema/", None),
+}
+
+# -- sphinxcontrib-spelling --
+
+spelling_word_list_filename = "spelling-wordlist.txt"
+spelling_show_suggestions = True
diff --git a/docs/creating.rst b/docs/creating.rst
new file mode 100644
index 0000000..8405f34
--- /dev/null
+++ b/docs/creating.rst
@@ -0,0 +1,38 @@
+.. currentmodule:: jsonschema.validators
+
+.. _creating-validators:
+
+=======================================
+Creating or Extending Validator Classes
+=======================================
+
+.. autofunction:: create
+ :noindex:
+
+.. autofunction:: extend
+ :noindex:
+
+.. autofunction:: validator_for
+ :noindex:
+
+.. autofunction:: validates
+ :noindex:
+
+
+Creating Validation Errors
+--------------------------
+
+Any validating function that validates against a subschema should call
+``descend``, rather than ``iter_errors``. If it recurses into the
+instance, or schema, it should pass one or both of the ``path`` or
+``schema_path`` arguments to ``descend`` in order to properly maintain
+where in the instance or schema respectively the error occurred.
+
+The Validator Protocol
+----------------------
+
+``jsonschema`` defines a `protocol <typing.Protocol>`,
+`jsonschema.protocols.Validator` which can be used in type annotations to
+describe the type of a validator object.
+
+For full details, see `validator-protocol`.
diff --git a/docs/errors.rst b/docs/errors.rst
new file mode 100644
index 0000000..5b0230f
--- /dev/null
+++ b/docs/errors.rst
@@ -0,0 +1,412 @@
+==========================
+Handling Validation Errors
+==========================
+
+.. currentmodule:: jsonschema.exceptions
+
+When an invalid instance is encountered, a `ValidationError` will be
+raised or returned, depending on which method or function is used.
+
+.. autoexception:: ValidationError
+ :noindex:
+
+ The information carried by an error roughly breaks down into:
+
+ =============== ================= ========================
+ What Happened Why Did It Happen What Was Being Validated
+ =============== ================= ========================
+ `message` `context` `instance`
+
+ `cause` `json_path`
+
+ `path`
+
+ `schema`
+
+ `schema_path`
+
+ `validator`
+
+ `validator_value`
+ =============== ================= ========================
+
+
+ .. attribute:: message
+
+ A human readable message explaining the error.
+
+ .. attribute:: validator
+
+ The name of the failed `keyword
+ <https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-structural>`_.
+
+ .. attribute:: validator_value
+
+ The associated value for the failed keyword in the schema.
+
+ .. attribute:: schema
+
+ The full schema that this error came from. This is potentially a
+ subschema from within the schema that was passed in originally,
+ or even an entirely different schema if a :kw:`$ref` was
+ followed.
+
+ .. attribute:: relative_schema_path
+
+ A `collections.deque` containing the path to the failed keyword
+ within the schema.
+
+ .. attribute:: absolute_schema_path
+
+ A `collections.deque` containing the path to the failed
+ keyword within the schema, but always relative to the
+ *original* schema as opposed to any subschema (i.e. the one
+ originally passed into a validator class, *not* `schema`\).
+
+ .. attribute:: schema_path
+
+ Same as `relative_schema_path`.
+
+ .. attribute:: relative_path
+
+ A `collections.deque` containing the path to the
+ offending element within the instance. The deque can be empty if
+ the error happened at the root of the instance.
+
+ .. attribute:: absolute_path
+
+ A `collections.deque` containing the path to the
+ offending element within the instance. The absolute path
+ is always relative to the *original* instance that was
+ validated (i.e. the one passed into a validation method, *not*
+ `instance`\). The deque can be empty if the error happened
+ at the root of the instance.
+
+ .. attribute:: json_path
+
+ A `JSON path <https://goessner.net/articles/JsonPath/index.html>`_
+ to the offending element within the instance.
+
+ .. attribute:: path
+
+ Same as `relative_path`.
+
+ .. attribute:: instance
+
+ The instance that was being validated. This will differ from
+ the instance originally passed into ``validate`` if the
+ validator object was in the process of validating a (possibly
+ nested) element within the top-level instance. The path within
+ the top-level instance (i.e. `ValidationError.path`) could
+ be used to find this object, but it is provided for convenience.
+
+ .. attribute:: context
+
+ If the error was caused by errors in subschemas, the list of errors
+ from the subschemas will be available on this property. The
+ `schema_path` and `path` of these errors will be relative
+ to the parent error.
+
+ .. attribute:: cause
+
+ If the error was caused by a *non*-validation error, the
+ exception object will be here. Currently this is only used
+ for the exception raised by a failed format checker in
+ `jsonschema.FormatChecker.check`.
+
+ .. attribute:: parent
+
+ A validation error which this error is the `context` of.
+ ``None`` if there wasn't one.
+
+
+In case an invalid schema itself is encountered, a `SchemaError` is
+raised.
+
+.. autoexception:: SchemaError
+ :noindex:
+
+ The same attributes are present as for `ValidationError`\s.
+
+
+These attributes can be clarified with a short example:
+
+.. testcode::
+
+ schema = {
+ "items": {
+ "anyOf": [
+ {"type": "string", "maxLength": 2},
+ {"type": "integer", "minimum": 5}
+ ]
+ }
+ }
+ instance = [{}, 3, "foo"]
+ v = Draft202012Validator(schema)
+ errors = sorted(v.iter_errors(instance), key=lambda e: e.path)
+
+The error messages in this situation are not very helpful on their own.
+
+.. testcode::
+
+ for error in errors:
+ print(error.message)
+
+outputs:
+
+.. testoutput::
+
+ {} is not valid under any of the given schemas
+ 3 is not valid under any of the given schemas
+ 'foo' is not valid under any of the given schemas
+
+If we look at `ValidationError.path` on each of the errors, we can find
+out which elements in the instance correspond to each of the errors. In
+this example, `ValidationError.path` will have only one element, which
+will be the index in our list.
+
+.. testcode::
+
+ for error in errors:
+ print(list(error.path))
+
+.. testoutput::
+
+ [0]
+ [1]
+ [2]
+
+Since our schema contained nested subschemas, it can be helpful to look at
+the specific part of the instance and subschema that caused each of the errors.
+This can be seen with the `ValidationError.instance` and
+`ValidationError.schema` attributes.
+
+With keywords like :kw:`anyOf`, the `ValidationError.context`
+attribute can be used to see the sub-errors which caused the failure. Since
+these errors actually came from two separate subschemas, it can be helpful to
+look at the `ValidationError.schema_path` attribute as well to see where
+exactly in the schema each of these errors come from. In the case of sub-errors
+from the `ValidationError.context` attribute, this path will be relative
+to the `ValidationError.schema_path` of the parent error.
+
+.. testcode::
+
+ for error in errors:
+ for suberror in sorted(error.context, key=lambda e: e.schema_path):
+ print(list(suberror.schema_path), suberror.message, sep=", ")
+
+.. testoutput::
+
+ [0, 'type'], {} is not of type 'string'
+ [1, 'type'], {} is not of type 'integer'
+ [0, 'type'], 3 is not of type 'string'
+ [1, 'minimum'], 3 is less than the minimum of 5
+ [0, 'maxLength'], 'foo' is too long
+ [1, 'type'], 'foo' is not of type 'integer'
+
+The string representation of an error combines some of these attributes for
+easier debugging.
+
+.. testcode::
+
+ print(errors[1])
+
+.. testoutput::
+
+ 3 is not valid under any of the given schemas
+
+ Failed validating 'anyOf' in schema['items']:
+ {'anyOf': [{'maxLength': 2, 'type': 'string'},
+ {'minimum': 5, 'type': 'integer'}]}
+
+ On instance[1]:
+ 3
+
+
+ErrorTrees
+----------
+
+If you want to programmatically query which validation keywords
+failed when validating a given instance, you may want to do so using
+`jsonschema.exceptions.ErrorTree` objects.
+
+.. autoclass:: jsonschema.exceptions.ErrorTree
+ :noindex:
+ :members:
+ :special-members:
+ :exclude-members: __dict__,__weakref__
+
+ .. attribute:: errors
+
+ The mapping of validator keywords to the error objects (usually
+ `jsonschema.exceptions.ValidationError`\s) at this level
+ of the tree.
+
+Consider the following example:
+
+.. testcode::
+
+ schema = {
+ "type" : "array",
+ "items" : {"type" : "number", "enum" : [1, 2, 3]},
+ "minItems" : 3,
+ }
+ instance = ["spam", 2]
+
+For clarity's sake, the given instance has three errors under this schema:
+
+.. testcode::
+
+ v = Draft202012Validator(schema)
+ for error in sorted(v.iter_errors(["spam", 2]), key=str):
+ print(error.message)
+
+.. testoutput::
+
+ 'spam' is not of type 'number'
+ 'spam' is not one of [1, 2, 3]
+ ['spam', 2] is too short
+
+Let's construct an `jsonschema.exceptions.ErrorTree` so that we
+can query the errors a bit more easily than by just iterating over the
+error objects.
+
+.. testcode::
+
+ from jsonschema.exceptions import ErrorTree
+ tree = ErrorTree(v.iter_errors(instance))
+
+As you can see, `jsonschema.exceptions.ErrorTree` takes an
+iterable of `ValidationError`\s when constructing a tree so
+you can directly pass it the return value of a validator object's
+`jsonschema.protocols.Validator.iter_errors` method.
+
+`ErrorTree`\s support a number of useful operations. The first one we
+might want to perform is to check whether a given element in our instance
+failed validation. We do so using the :keyword:`in` operator:
+
+.. doctest::
+
+ >>> 0 in tree
+ True
+
+ >>> 1 in tree
+ False
+
+The interpretation here is that the 0th index into the instance (``"spam"``)
+did have an error (in fact it had 2), while the 1th index (``2``) did not (i.e.
+it was valid).
+
+If we want to see which errors a child had, we index into the tree and look at
+the `ErrorTree.errors` attribute.
+
+.. doctest::
+
+ >>> sorted(tree[0].errors)
+ ['enum', 'type']
+
+Here we see that the :kw:`enum` and :kw:`type` keywords failed for
+index ``0``. In fact `ErrorTree.errors` is a dict, whose values are the
+`ValidationError`\s, so we can get at those directly if we want them.
+
+.. doctest::
+
+ >>> print(tree[0].errors["type"].message)
+ 'spam' is not of type 'number'
+
+Of course this means that if we want to know if a given validation
+keyword failed for a given index, we check for its presence in
+`ErrorTree.errors`:
+
+.. doctest::
+
+ >>> "enum" in tree[0].errors
+ True
+
+ >>> "minimum" in tree[0].errors
+ False
+
+Finally, if you were paying close enough attention, you'll notice that
+we haven't seen our :kw:`minItems` error appear anywhere yet. This is
+because :kw:`minItems` is an error that applies globally to the instance
+itself. So it appears in the root node of the tree.
+
+.. doctest::
+
+ >>> "minItems" in tree.errors
+ True
+
+That's all you need to know to use error trees.
+
+To summarize, each tree contains child trees that can be accessed by
+indexing the tree to get the corresponding child tree for a given
+index into the instance. Each tree and child has a `ErrorTree.errors`
+attribute, a dict, that maps the failed validation keyword to the
+corresponding validation error.
+
+
+best_match and relevance
+------------------------
+
+The `best_match` function is a simple but useful function for attempting
+to guess the most relevant error in a given bunch.
+
+.. doctest::
+
+ >>> from jsonschema import Draft202012Validator
+ >>> from jsonschema.exceptions import best_match
+
+ >>> schema = {
+ ... "type": "array",
+ ... "minItems": 3,
+ ... }
+ >>> print(best_match(Draft202012Validator(schema).iter_errors(11)).message)
+ 11 is not of type 'array'
+
+
+.. autofunction:: best_match
+ :noindex:
+
+
+.. function:: relevance(validation_error)
+ :noindex:
+
+ A key function that sorts errors based on heuristic relevance.
+
+ If you want to sort a bunch of errors entirely, you can use
+ this function to do so. Using this function as a key to e.g.
+ `sorted` or `max` will cause more relevant errors to be
+ considered greater than less relevant ones.
+
+ Within the different validation keywords that can fail, this
+ function considers :kw:`anyOf` and :kw:`oneOf` to be *weak*
+ validation errors, and will sort them lower than other errors at the
+ same level in the instance.
+
+ If you want to change the set of weak [or strong] validation
+ keywords you can create a custom version of this function with
+ `by_relevance` and provide a different set of each.
+
+.. doctest::
+
+ >>> schema = {
+ ... "properties": {
+ ... "name": {"type": "string"},
+ ... "phones": {
+ ... "properties": {
+ ... "home": {"type": "string"}
+ ... },
+ ... },
+ ... },
+ ... }
+ >>> instance = {"name": 123, "phones": {"home": [123]}}
+ >>> errors = Draft202012Validator(schema).iter_errors(instance)
+ >>> [
+ ... e.path[-1]
+ ... for e in sorted(errors, key=exceptions.relevance)
+ ... ]
+ ['home', 'name']
+
+
+.. autofunction:: by_relevance
+ :noindex:
diff --git a/docs/faq.rst b/docs/faq.rst
new file mode 100644
index 0000000..5ae3e62
--- /dev/null
+++ b/docs/faq.rst
@@ -0,0 +1,263 @@
+==========================
+Frequently Asked Questions
+==========================
+
+My schema specifies format validation. Why do invalid instances seem valid?
+---------------------------------------------------------------------------
+
+The :kw:`format` keyword can be a bit of a stumbling block for new
+users working with JSON Schema.
+
+In a schema such as:
+
+.. code-block:: json
+
+ {"type": "string", "format": "date"}
+
+JSON Schema specifications have historically differentiated between the
+:kw:`format` keyword and other keywords. In particular, the
+:kw:`format` keyword was specified to be *informational* as much
+as it may be used for validation.
+
+In other words, for many use cases, schema authors may wish to use
+values for the :kw:`format` keyword but have no expectation
+they be validated alongside other required assertions in a schema.
+
+Of course this does not represent all or even most use cases -- many
+schema authors *do* wish to assert that instances conform fully, even to
+the specific format mentioned.
+
+In drafts prior to ``draft2019-09``, the decision on whether to
+automatically enable :kw:`format` validation was left up to
+validation implementations such as this one.
+
+This library made the choice to leave it off by default, for two reasons:
+
+ * for forward compatibility and implementation complexity reasons
+ -- if :kw:`format` validation were on by default, and a
+ future draft of JSON Schema introduced a hard-to-implement format,
+ either the implementation of that format would block releases of
+ this library until it were implemented, or the behavior surrounding
+ :kw:`format` would need to be even more complex than simply
+ defaulting to be on. It therefore was safer to start with it off,
+ and defend against the expectation that a given format would always
+ automatically work.
+
+ * given that a common use of JSON Schema is for portability across
+ languages (and therefore implementations of JSON Schema), so that
+    users would be aware of this point regarding :kw:`format`
+ validation, and therefore remember to check any *other*
+ implementations they were using to ensure they too were explicitly
+ enabled for :kw:`format` validation.
+
+As of ``draft2019-09`` however, the opt-out by default behavior
+mentioned here is now *required* for all validators.
+
+Difficult as this may sound for new users, at this point it at least
+means they should expect the same behavior that has always been
+implemented here, across any other implementation they encounter.
+
+.. seealso::
+
+ `Draft 2019-09's release notes on format <https://json-schema.org/draft/2019-09/release-notes.html#format-vocabulary>`_
+
+ for upstream details on the behavior of format and how it has changed
+ in ``draft2019-09``
+
+ `validating formats`
+
+ for details on how to enable format validation
+
+ `jsonschema.FormatChecker`
+
+ the object which implements format validation
+
+
+Can jsonschema be used to validate YAML, TOML, etc.?
+----------------------------------------------------
+
+Like most JSON Schema implementations, `jsonschema` doesn't actually deal directly with JSON at all (other than in relation to the :kw:`$ref` keyword, elaborated on below).
+
+In other words as far as this library is concerned, schemas and instances are simply runtime Python objects.
+The JSON object ``{}`` is simply the Python `dict` ``{}``, and a JSON Schema like ``{"type": "object", "properties": {}}`` is really an assertion about particular Python objects and their keys.
+
+.. note::
+
+ The :kw:`$ref` keyword is a single notable exception.
+
+ Specifically, in the case where `jsonschema` is asked to resolve a remote reference, it has no choice but to assume that the remote reference is serialized as JSON, and to deserialize it using the `json` module.
+
+ One cannot today therefore reference some remote piece of YAML and have it deserialized into Python objects by this library without doing some additional work.
+ See `Resolving References to Schemas Written in YAML <referencing:Resolving References to Schemas Written in YAML>` for details.
+
+In practice what this means for JSON-like formats like YAML and TOML is that indeed one can generally schematize and then validate them exactly as if they were JSON by simply first deserializing them using libraries like ``PyYAML`` or the like, and passing the resulting Python objects into functions within this library.
+
+Beware however that there are cases where the behavior of the JSON Schema specification itself is only well-defined within the data model of JSON itself, and therefore only for Python objects that could have "in theory" come from JSON.
+As an example, JSON supports only string-valued keys, whereas YAML supports additional types.
+The JSON Schema specification does not deal with how to apply the :kw:`patternProperties` keyword to non-string properties.
+The behavior of this library is therefore similarly not defined when presented with Python objects of this form, which could never have come from JSON.
+In such cases one is recommended to first pre-process the data such that the resulting behavior is well-defined.
+In the previous example, if the desired behavior is to transparently coerce numeric properties to strings, as Javascript might, then do the conversion explicitly before passing data to this library.
+
+
+Why doesn't my schema's default property set the default on my instance?
+------------------------------------------------------------------------
+
+The basic answer is that the specification does not require that
+:kw:`default` actually do anything.
+
+For an inkling as to *why* it doesn't actually do anything, consider
+that none of the other keywords modify the instance either. More
+importantly, having :kw:`default` modify the instance can produce
+quite peculiar things. It's perfectly valid (and perhaps even useful)
+to have a default that is not valid under the schema it lives in! So an
+instance modified by the default would pass validation the first time,
+but fail the second!
+
+Still, filling in defaults is a thing that is useful. `jsonschema`
+allows you to `define your own validator classes and callables
+<creating>`, so you can easily create a `jsonschema.protocols.Validator`
+that does do default setting. Here's some code to get you started. (In
+this code, we add the default properties to each object *before* the
+properties are validated, so the default values themselves will need to
+be valid under the schema.)
+
+ .. testcode::
+
+ from jsonschema import Draft202012Validator, validators
+
+
+ def extend_with_default(validator_class):
+ validate_properties = validator_class.VALIDATORS["properties"]
+
+ def set_defaults(validator, properties, instance, schema):
+ for property, subschema in properties.items():
+ if "default" in subschema:
+ instance.setdefault(property, subschema["default"])
+
+ for error in validate_properties(
+ validator, properties, instance, schema,
+ ):
+ yield error
+
+ return validators.extend(
+ validator_class, {"properties" : set_defaults},
+ )
+
+
+ DefaultValidatingValidator = extend_with_default(Draft202012Validator)
+
+
+ # Example usage:
+ obj = {}
+ schema = {'properties': {'foo': {'default': 'bar'}}}
+ # Note jsonschema.validate(obj, schema, cls=DefaultValidatingValidator)
+ # will not work because the metaschema contains `default` keywords.
+ DefaultValidatingValidator(schema).validate(obj)
+ assert obj == {'foo': 'bar'}
+
+
+See the above-linked document for more info on how this works,
+but basically, it just extends the :kw:`properties` keyword on a
+`jsonschema.validators.Draft202012Validator` to then go ahead and update
+all the defaults.
+
+.. note::
+
+ If you're interested in a more interesting solution to a larger
+ class of these types of transformations, keep an eye on `Seep
+ <https://github.com/Julian/Seep>`_, which is an experimental
+ data transformation and extraction library written on top of
+ `jsonschema`.
+
+
+.. hint::
+
+ The above code can provide default values for an entire object and
+ all of its properties, but only if your schema provides a default
+ value for the object itself, like so:
+
+ .. testcode::
+
+ schema = {
+ "type": "object",
+ "properties": {
+ "outer-object": {
+ "type": "object",
+ "properties" : {
+ "inner-object": {
+ "type": "string",
+ "default": "INNER-DEFAULT"
+ }
+ },
+ "default": {} # <-- MUST PROVIDE DEFAULT OBJECT
+ }
+ }
+ }
+
+ obj = {}
+ DefaultValidatingValidator(schema).validate(obj)
+ assert obj == {'outer-object': {'inner-object': 'INNER-DEFAULT'}}
+
+ ...but if you don't provide a default value for your object, then
+ it won't be instantiated at all, much less populated with default
+ properties.
+
+ .. testcode::
+
+ del schema["properties"]["outer-object"]["default"]
+ obj2 = {}
+ DefaultValidatingValidator(schema).validate(obj2)
+ assert obj2 == {} # whoops
+
+
+How do jsonschema version numbers work?
+---------------------------------------
+
+``jsonschema`` tries to follow the `Semantic Versioning
+<https://semver.org/>`_ specification.
+
+This means broadly that no backwards-incompatible changes should be made
+in minor releases (and certainly not in dot releases).
+
+The full picture requires defining what constitutes a
+backwards-incompatible change.
+
+The following are simple examples of things considered public API,
+and therefore should *not* be changed without bumping a major version
+number:
+
+ * module names and contents, when not marked private by Python
+ convention (a single leading underscore)
+
+ * function and object signature (parameter order and name)
+
+The following are *not* considered public API and may change without
+notice:
+
+ * the exact wording and contents of error messages; typical reasons
+ to rely on this seem to involve downstream tests in packages using
+ `jsonschema`. These use cases are encouraged to use the extensive
+ introspection provided in `jsonschema.exceptions.ValidationError`\s
+ instead to make meaningful assertions about what failed rather than
+ relying on *how* what failed is explained to a human.
+
+ * the order in which validation errors are returned or raised
+
+ * the contents of the ``jsonschema.tests`` package
+
+ * the contents of the ``jsonschema.benchmarks`` package
+
+ * the specific non-zero error codes presented by the command line
+ interface
+
+ * the exact representation of errors presented by the command line
+ interface, other than that errors represented by the plain outputter
+ will be reported one per line
+
+ * anything marked private
+
+With the exception of the last two of those, flippant changes are
+avoided, but changes can and will be made if there is improvement to be
+had. Feel free to open an issue ticket if there is a specific issue or
+question worth raising.
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..949ab44
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,24 @@
+.. module:: jsonschema
+ :noindex:
+
+.. include:: ../README.rst
+
+
+Contents
+--------
+
+.. toctree::
+ :maxdepth: 2
+
+ validate
+ errors
+ referencing
+ creating
+ faq
+ api/index
+
+
+Indices and tables
+==================
+
+* `genindex`
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..fcb914f
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,190 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+ :help
+ echo.Please use `make ^<target^>` where ^<target^> is one of
+ echo. html to make standalone HTML files
+ echo. dirhtml to make HTML files named index.html in directories
+ echo. singlehtml to make a single large HTML file
+ echo. pickle to make pickle files
+ echo. json to make JSON files
+ echo. htmlhelp to make HTML files and a HTML help project
+ echo. qthelp to make HTML files and a qthelp project
+ echo. devhelp to make HTML files and a Devhelp project
+ echo. epub to make an epub
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+ echo. text to make text files
+ echo. man to make manual pages
+ echo. texinfo to make Texinfo files
+ echo. gettext to make PO message catalogs
+ echo. changes to make an overview over all changed/added/deprecated items
+ echo. linkcheck to check all external links for integrity
+ echo. doctest to run all doctests embedded in the documentation if enabled
+ goto end
+)
+
+if "%1" == "clean" (
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+ del /q /s %BUILDDIR%\*
+ goto end
+)
+
+if "%1" == "html" (
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+ goto end
+)
+
+if "%1" == "dirhtml" (
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+ goto end
+)
+
+if "%1" == "singlehtml" (
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+ goto end
+)
+
+if "%1" == "pickle" (
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the pickle files.
+ goto end
+)
+
+if "%1" == "json" (
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can process the JSON files.
+ goto end
+)
+
+if "%1" == "htmlhelp" (
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+ goto end
+)
+
+if "%1" == "qthelp" (
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\jsonschema.qhcp
+ echo.To view the help file:
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\jsonschema.qhc
+ goto end
+)
+
+if "%1" == "devhelp" (
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished.
+ goto end
+)
+
+if "%1" == "epub" (
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.
+ goto end
+)
+
+if "%1" == "latex" (
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+ goto end
+)
+
+if "%1" == "text" (
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The text files are in %BUILDDIR%/text.
+ goto end
+)
+
+if "%1" == "man" (
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.
+ goto end
+)
+
+if "%1" == "texinfo" (
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+ goto end
+)
+
+if "%1" == "gettext" (
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+ goto end
+)
+
+if "%1" == "changes" (
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.The overview file is in %BUILDDIR%/changes.
+ goto end
+)
+
+if "%1" == "linkcheck" (
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+ goto end
+)
+
+if "%1" == "doctest" (
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+ if errorlevel 1 exit /b 1
+ echo.
+ echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+ goto end
+)
+
+:end
diff --git a/docs/referencing.rst b/docs/referencing.rst
new file mode 100644
index 0000000..e3f0e7f
--- /dev/null
+++ b/docs/referencing.rst
@@ -0,0 +1,375 @@
+=========================
+JSON (Schema) Referencing
+=========================
+
+The JSON Schema :kw:`$ref` and :kw:`$dynamicRef` keywords allow schema authors to combine multiple schemas (or subschemas) together for reuse or deduplication.
+
+The `referencing <referencing:index>` library was written in order to provide a simple, well-behaved and well-tested implementation of this kind of reference resolution [1]_.
+It has its `own documentation which is worth reviewing <referencing:intro>`, but this page serves as an introduction which is tailored specifically to JSON Schema, and even more specifically to how to configure `referencing <referencing:index>` for use with `Validator` objects in order to customize the behavior of the :kw:`$ref` keyword and friends in your schemas.
+
+Configuring `jsonschema` for custom referencing behavior is essentially a two step process:
+
+ * Create a `referencing.Registry` object that behaves the way you wish
+
+ * Pass the `referencing.Registry` to your `Validator` when instantiating it
+
+The examples below essentially follow these two steps.
+
+.. [1] One that in fact is independent of this `jsonschema` library itself, and may some day be used by other tools or implementations.
+
+
+Introduction to the `referencing <referencing:index>` API
+---------------------------------------------------------
+
+There are 3 main objects to be aware of in the `referencing` API:
+
+ * `referencing.Registry`, which represents a specific immutable set of JSON Schemas (either in-memory or retrievable)
+ * `referencing.Specification`, which represents a specific *version* of the JSON Schema specification, which can have differing referencing behavior.
+ JSON Schema-specific specifications live in the `referencing.jsonschema` module and are named like `referencing.jsonschema.DRAFT202012`.
+ * `referencing.Resource`, which represents a specific JSON Schema (often a Python `dict`) *along* with a specific `referencing.Specification` it is to be interpreted under.
+
+As a concrete example, the simple schema ``{"type": "integer"}`` may be interpreted as a schema under either Draft 2020-12 or Draft 4 of the JSON Schema specification (amongst others); in draft 2020-12, the float ``2.0`` must be considered an integer, whereas in draft 4, it potentially is not.
+If you mean the former (i.e. to associate this schema with draft 2020-12), you'd use ``referencing.Resource(contents={"type": "integer"}, specification=referencing.jsonschema.DRAFT202012)``, whereas for the latter you'd use `referencing.jsonschema.DRAFT4`.
+
+.. seealso:: the JSON Schema :kw:`$schema` keyword
+
+ Which should generally be used to remove all ambiguity and identify *internally* to the schema what version it is written for.
+
+A schema may be identified via one or more URIs, either because they contain an :kw:`$id` keyword (in suitable versions of the JSON Schema specification) which indicates their canonical URI, or simply because you wish to externally associate a URI with the schema, regardless of whether it contains an ``$id`` keyword.
+You could add the aforementioned simple schema to a `referencing.Registry` by creating an empty registry and then identifying it via some URI:
+
+.. testcode::
+
+ from referencing import Registry, Resource
+ from referencing.jsonschema import DRAFT202012
+ schema = Resource(contents={"type": "integer"}, specification=DRAFT202012)
+ registry = Registry().with_resource(uri="http://example.com/my/schema", resource=schema)
+ print(registry)
+
+.. testoutput::
+
+ <Registry (1 uncrawled resource)>
+
+.. note::
+
+ `referencing.Registry` is an entirely immutable object.
+ All of its methods which add schemas (resources) to itself return *new* registry objects containing the added schemas.
+
+You could also confirm your schema is in the registry if you'd like, via `referencing.Registry.contents`, which will show you the contents of a resource at a given URI:
+
+.. testcode::
+
+ print(registry.contents("http://example.com/my/schema"))
+
+.. testoutput::
+
+ {'type': 'integer'}
+
+For further details, see the `referencing documentation <referencing:intro>`.
+
+Common Scenarios
+----------------
+
+.. _in-memory-schemas:
+
+Making Additional In-Memory Schemas Available
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The most common scenario one is likely to encounter is the desire to include a small number of additional in-memory schemas, making them available for use during validation.
+
+For instance, imagine the below schema for non-negative integers:
+
+.. code:: json
+
+ {
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "type": "integer",
+ "minimum": 0
+ }
+
+We may wish to have other schemas we write be able to make use of this schema, and refer to it as ``http://example.com/nonneg-int-schema`` and/or as ``urn:nonneg-integer-schema``.
+
+To do so we make use of APIs from the referencing library to create a `referencing.Registry` which maps the URIs above to this schema:
+
+.. code:: python
+
+ from referencing import Registry, Resource
+ schema = Resource.from_contents(
+ {
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "type": "integer",
+ "minimum": 0,
+ },
+ )
+ registry = Registry().with_resources(
+ [
+ ("http://example.com/nonneg-int-schema", schema),
+ ("urn:nonneg-integer-schema", schema),
+ ],
+ )
+
+What's above is likely mostly self-explanatory, other than the presence of the `referencing.Resource.from_contents` function.
+Its purpose is to convert a piece of "opaque" JSON (or really a Python `dict` containing deserialized JSON) into an object which indicates what *version* of JSON Schema the schema is meant to be interpreted under.
+Calling it will inspect a :kw:`$schema` keyword present in the given schema and use that to associate the JSON with an appropriate `specification <referencing.Specification>`.
+If your schemas do not contain ``$schema`` dialect identifiers, and you intend for them to be interpreted always under a specific dialect -- say Draft 2020-12 of JSON Schema -- you may instead use e.g.:
+
+.. code:: python
+
+ from referencing import Registry, Resource
+    from referencing.jsonschema import DRAFT202012
+ schema = DRAFT202012.create_resource({"type": "integer", "minimum": 0})
+ registry = Registry().with_resources(
+ [
+ ("http://example.com/nonneg-int-schema", schema),
+ ("urn:nonneg-integer-schema", schema),
+ ],
+ )
+
+which has the same functional effect.
+
+You can now pass this registry to your `Validator`, which allows a schema passed to it to make use of the aforementioned URIs to refer to our non-negative integer schema.
+Here for instance is an example which validates that instances are JSON objects with non-negative integral values:
+
+.. code:: python
+
+ from jsonschema import Draft202012Validator
+ validator = Draft202012Validator(
+ {
+ "type": "object",
+ "additionalProperties": {"$ref": "urn:nonneg-integer-schema"},
+ },
+ registry=registry, # the critical argument, our registry from above
+ )
+ validator.validate({"foo": 37})
+ validator.validate({"foo": -37}) # Uh oh!
+
+.. _ref-filesystem:
+
+Resolving References from the File System
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Another common request from schema authors is to be able to map URIs to the file system, perhaps while developing a set of schemas in different local files.
+The referencing library supports doing so dynamically by configuring a callable which can be used to retrieve any schema which is *not* already pre-loaded in the manner described `above <in-memory-schemas>`.
+
+Here we resolve any schema beginning with ``http://localhost`` to a directory ``/tmp/schemas`` on the local filesystem (note of course that this will not work if run directly unless you have populated that directory with some schemas):
+
+.. code:: python
+
+ from pathlib import Path
+ import json
+
+ from referencing import Registry, Resource
+ from referencing.exceptions import NoSuchResource
+
+ SCHEMAS = Path("/tmp/schemas")
+
+ def retrieve_from_filesystem(uri: str):
+ if not uri.startswith("http://localhost/"):
+ raise NoSuchResource(ref=uri)
+ path = SCHEMAS / Path(uri.removeprefix("http://localhost/"))
+ contents = json.loads(path.read_text())
+ return Resource.from_contents(contents)
+
+ registry = Registry(retrieve=retrieve_from_filesystem)
+
+Such a registry can then be used with `Validator` objects in the same way shown above, and any such references to URIs which are not already in-memory will be retrieved from the configured directory.
+
+We can mix the two examples above if we wish for some in-memory schemas to be available in addition to the filesystem schemas, e.g.:
+
+.. code:: python
+
+ from referencing.jsonschema import DRAFT7
+ registry = Registry(retrieve=retrieve_from_filesystem).with_resource(
+ "urn:non-empty-array", DRAFT7.create_resource({"type": "array", "minItems": 1}),
+ )
+
+where we've made use of the similar `referencing.Registry.with_resource` function to add a single additional resource.
+
+Resolving References to Schemas Written in YAML
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Generalizing slightly, the retrieval function provided need not even assume that it is retrieving JSON.
+As long as you deserialize what you have retrieved into Python objects, you may equally be retrieving references to YAML documents or any other format.
+
+Here for instance we retrieve YAML documents in a way similar to the `above <ref-filesystem>` using PyYAML:
+
+.. code:: python
+
+ from pathlib import Path
+ import yaml
+
+ from referencing import Registry, Resource
+ from referencing.exceptions import NoSuchResource
+
+ SCHEMAS = Path("/tmp/yaml-schemas")
+
+ def retrieve_yaml(uri: str):
+ if not uri.startswith("http://localhost/"):
+ raise NoSuchResource(ref=uri)
+ path = SCHEMAS / Path(uri.removeprefix("http://localhost/"))
+ contents = yaml.safe_load(path.read_text())
+ return Resource.from_contents(contents)
+
+ registry = Registry(retrieve=retrieve_yaml)
+
+.. note::
+
+ Not all YAML fits within the JSON data model.
+
+ JSON Schema is defined specifically for JSON, and has well-defined behavior strictly for Python objects which could have possibly existed as JSON.
+
+ If you stick to the subset of YAML for which this is the case then you shouldn't have issue, but if you pass schemas (or instances) around whose structure could never have possibly existed as JSON (e.g. a mapping whose keys are not strings), all bets are off.
+
+One could similarly imagine a retrieval function which switches on whether to call ``yaml.safe_load`` or ``json.loads`` by file extension (or some more reliable mechanism) and thereby support retrieving references of various different file formats.
+
+.. _http:
+
+Automatically Retrieving Resources Over HTTP
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the general case, the JSON Schema specifications tend to `discourage <https://json-schema.org/draft/2020-12/json-schema-core.html#name-loading-a-referenced-schema>`_ implementations (like this one) from automatically retrieving references over the network, or even assuming such a thing is feasible (as schemas may be identified by URIs which are strictly identifiers, and not necessarily downloadable from the URI even when such a thing is sensical).
+
+However, if you as a schema author are in a situation where you indeed do wish to do so for convenience (and understand the implications of doing so), you may do so by making use of the ``retrieve`` argument to `referencing.Registry`.
+
+Here is how one would configure a registry to automatically retrieve schemas from the `JSON Schema Store <https://www.schemastore.org>`_ on the fly using the `httpx <https://www.python-httpx.org/>`_ library:
+
+.. code:: python
+
+ from referencing import Registry, Resource
+ import httpx
+
+ def retrieve_via_httpx(uri: str):
+ response = httpx.get(uri)
+ return Resource.from_contents(response.json())
+
+ registry = Registry(retrieve=retrieve_via_httpx)
+
+Given such a registry, we can now, for instance, validate instances against schemas from the schema store by passing the ``registry`` we configured to our `Validator` as in previous examples:
+
+.. code:: python
+
+ from jsonschema import Draft202012Validator
+ Draft202012Validator(
+ {"$ref": "https://json.schemastore.org/pyproject.json"},
+ registry=registry,
+ ).validate({"project": {"name": 12}})
+
+which should in this case indicate the example data is invalid:
+
+.. code:: python
+
+ Traceback (most recent call last):
+ File "example.py", line 14, in <module>
+ ).validate({"project": {"name": 12}})
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ File "jsonschema/validators.py", line 345, in validate
+ raise error
+ jsonschema.exceptions.ValidationError: 12 is not of type 'string'
+
+ Failed validating 'type' in schema['properties']['project']['properties']['name']:
+ {'pattern': '^([a-zA-Z\\d]|[a-zA-Z\\d][\\w.-]*[a-zA-Z\\d])$',
+ 'title': 'Project name',
+ 'type': 'string'}
+
+ On instance['project']['name']:
+ 12
+
+Retrieving resources from a SQLite database or some other network-accessible resource should be more or less similar, replacing the HTTP client with one for your database of course.
+
+.. warning::
+
+ Be sure you understand the security implications of the reference resolution you configure.
+ And if you accept untrusted schemas, doubly sure!
+
+ You wouldn't want a user causing your machine to go off and retrieve giant files off the network by passing it a ``$ref`` to some huge blob, or exploiting similar vulnerabilities in your setup.
+
+
+Migrating From ``RefResolver``
+------------------------------
+
+Older versions of `jsonschema` used a different object -- `_RefResolver` -- for reference resolution, which you a schema author may already be configuring for your own use.
+
+`_RefResolver` is now fully deprecated and replaced by the use of `referencing.Registry` as shown in examples above.
+
+If you are not already constructing your own `_RefResolver`, this change should be transparent to you (or even recognizably improved, as the point of the migration was to improve the quality of the referencing implementation and enable some new functionality).
+
+.. table:: Rough equivalence between `_RefResolver` and `referencing.Registry` APIs
+ :widths: auto
+
+   ============================================================ =====================================================================================================================
+   Old API                                                      New API
+   ============================================================ =====================================================================================================================
+   ``RefResolver.from_schema({"$id": "urn:example:foo", ...})`` ``Registry().with_resource(uri="urn:example:foo", resource=Resource.from_contents({"$id": "urn:example:foo", ...}))``
+   Overriding ``RefResolver.resolve_from_url``                  Passing a callable to `referencing.Registry`\ 's ``retrieve`` argument
+   ``DraftNValidator(..., resolver=_RefResolver(...))``         ``DraftNValidator(..., registry=Registry().with_resources(...))``
+   ============================================================ =====================================================================================================================
+
+
+Here are some more specifics on how to migrate to the newer APIs:
+
+The ``store`` argument
+~~~~~~~~~~~~~~~~~~~~~~
+
+`_RefResolver`\ 's ``store`` argument was essentially the equivalent of `referencing.Registry`\ 's in-memory schema storage.
+
+If you currently pass a set of schemas via e.g.:
+
+.. code:: python
+
+ from jsonschema import Draft202012Validator, RefResolver
+ resolver = RefResolver.from_schema(
+ schema={"title": "my schema"},
+ store={"http://example.com": {"type": "integer"}},
+ )
+ validator = Draft202012Validator(
+ {"$ref": "http://example.com"},
+ resolver=resolver,
+ )
+ validator.validate("foo")
+
+you should be able to simply move to something like:
+
+.. code:: python
+
+ from referencing import Registry
+ from referencing.jsonschema import DRAFT202012
+
+ from jsonschema import Draft202012Validator
+
+ registry = Registry().with_resource(
+ "http://example.com",
+ DRAFT202012.create_resource({"type": "integer"}),
+ )
+ validator = Draft202012Validator(
+ {"$ref": "http://example.com"},
+ registry=registry,
+ )
+ validator.validate("foo")
+
+Handlers
+~~~~~~~~
+
+The ``handlers`` functionality from `_RefResolver` was a way to support additional HTTP schemes for schema retrieval.
+
+Here you should move to a custom ``retrieve`` function which does whatever you'd like.
+E.g. in pseudocode:
+
+.. code:: python
+
+ from urllib.parse import urlsplit
+
+ def retrieve(uri: str):
+ parsed = urlsplit(uri)
+ if parsed.scheme == "file":
+ ...
+ elif parsed.scheme == "custom":
+ ...
+
+ registry = Registry(retrieve=retrieve)
+
+
+Other Key Functional Differences
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Whilst `_RefResolver` *did* automatically retrieve remote references (against the recommendation of the spec, and in a way which therefore could lead to questionable security concerns when combined with untrusted schemas), `referencing.Registry` does *not* do so.
+If you rely on this behavior, you should follow the `above example of retrieving resources over HTTP <http>`.
diff --git a/docs/requirements.in b/docs/requirements.in
new file mode 100644
index 0000000..0a686cf
--- /dev/null
+++ b/docs/requirements.in
@@ -0,0 +1,10 @@
+file:.#egg=jsonschema
+furo
+lxml
+sphinx
+sphinx-autoapi
+sphinx-autodoc-typehints
+sphinx-copybutton
+sphinx-json-schema-spec
+sphinxcontrib-spelling
+sphinxext-opengraph
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 0000000..2458219
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,137 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+# pip-compile --resolver=backtracking docs/requirements.in
+#
+alabaster==0.7.13
+ # via sphinx
+astroid==2.15.2
+ # via sphinx-autoapi
+attrs==22.2.0
+ # via
+ # jsonschema
+ # referencing
+babel==2.12.1
+ # via sphinx
+beautifulsoup4==4.12.2
+ # via furo
+certifi==2022.12.7
+ # via requests
+charset-normalizer==3.1.0
+ # via requests
+contourpy==1.0.7
+ # via matplotlib
+cycler==0.11.0
+ # via matplotlib
+docutils==0.19
+ # via sphinx
+fonttools==4.39.3
+ # via matplotlib
+furo==2023.3.27
+ # via -r docs/requirements.in
+idna==3.4
+ # via requests
+imagesize==1.4.1
+ # via sphinx
+jinja2==3.1.2
+ # via
+ # sphinx
+ # sphinx-autoapi
+file:.#egg=jsonschema
+ # via -r docs/requirements.in
+jsonschema-specifications==2023.3.6
+ # via jsonschema
+kiwisolver==1.4.4
+ # via matplotlib
+lazy-object-proxy==1.9.0
+ # via astroid
+lxml==4.9.2
+ # via
+ # -r docs/requirements.in
+ # sphinx-json-schema-spec
+markupsafe==2.1.2
+ # via jinja2
+matplotlib==3.7.1
+ # via sphinxext-opengraph
+numpy==1.24.2
+ # via
+ # contourpy
+ # matplotlib
+packaging==23.1
+ # via
+ # matplotlib
+ # sphinx
+pillow==9.5.0
+ # via matplotlib
+pyenchant==3.2.2
+ # via sphinxcontrib-spelling
+pygments==2.15.0
+ # via
+ # furo
+ # sphinx
+pyparsing==3.0.9
+ # via matplotlib
+python-dateutil==2.8.2
+ # via matplotlib
+pyyaml==6.0
+ # via sphinx-autoapi
+referencing==0.27.2
+ # via
+ # jsonschema
+ # jsonschema-specifications
+requests==2.28.2
+ # via sphinx
+rpds-py==0.7.1
+ # via
+ # jsonschema
+ # referencing
+six==1.16.0
+ # via python-dateutil
+snowballstemmer==2.2.0
+ # via sphinx
+soupsieve==2.4
+ # via beautifulsoup4
+sphinx==6.1.3
+ # via
+ # -r docs/requirements.in
+ # furo
+ # sphinx-autoapi
+ # sphinx-autodoc-typehints
+ # sphinx-basic-ng
+ # sphinx-copybutton
+ # sphinx-json-schema-spec
+ # sphinxcontrib-spelling
+ # sphinxext-opengraph
+sphinx-autoapi==2.1.0
+ # via -r docs/requirements.in
+sphinx-autodoc-typehints==1.22
+ # via -r docs/requirements.in
+sphinx-basic-ng==1.0.0b1
+ # via furo
+sphinx-copybutton==0.5.1
+ # via -r docs/requirements.in
+sphinx-json-schema-spec==2023.2.4
+ # via -r docs/requirements.in
+sphinxcontrib-applehelp==1.0.4
+ # via sphinx
+sphinxcontrib-devhelp==1.0.2
+ # via sphinx
+sphinxcontrib-htmlhelp==2.0.1
+ # via sphinx
+sphinxcontrib-jsmath==1.0.1
+ # via sphinx
+sphinxcontrib-qthelp==1.0.3
+ # via sphinx
+sphinxcontrib-serializinghtml==1.1.5
+ # via sphinx
+sphinxcontrib-spelling==8.0.0
+ # via -r docs/requirements.in
+sphinxext-opengraph==0.8.2
+ # via -r docs/requirements.in
+unidecode==1.3.6
+ # via sphinx-autoapi
+urllib3==1.26.15
+ # via requests
+wrapt==1.15.0
+ # via astroid
diff --git a/docs/spelling-wordlist.txt b/docs/spelling-wordlist.txt
new file mode 100644
index 0000000..640d56f
--- /dev/null
+++ b/docs/spelling-wordlist.txt
@@ -0,0 +1,59 @@
+# this appears to be misinterpreting Napoleon types as prose, sigh...
+Validator
+TypeChecker
+UnknownType
+ValidationError
+
+# 0th, sigh...
+th
+amongst
+callables
+# non-codeblocked cls from autoapi
+cls
+deque
+deduplication
+dereferences
+deserialize
+deserialized
+deserializing
+filesystem
+hostname
+implementers
+indices
+# ipv4/6, sigh...
+ipv
+iterable
+iteratively
+Javascript
+jsonschema
+majorly
+metaschema
+online
+outputter
+pre
+programmatically
+pseudocode
+recurses
+regex
+repr
+runtime
+sensical
+subclassing
+submodule
+submodules
+subschema
+subschemas
+subscopes
+untrusted
+uri
+validator
+validators
+versioned
+schemas
+
+Zac
+HD
+
+Berman
+Libera
+GPL
diff --git a/docs/validate.rst b/docs/validate.rst
new file mode 100644
index 0000000..71ec19d
--- /dev/null
+++ b/docs/validate.rst
@@ -0,0 +1,306 @@
+=================
+Schema Validation
+=================
+
+
+.. currentmodule:: jsonschema
+
+.. tip::
+
+ Most of the documentation for this package assumes you're familiar with the fundamentals of writing JSON schemas themselves, and focuses on how this library helps you validate with them in Python.
+
+ If you aren't already comfortable with writing schemas and need an introduction which teaches about JSON Schema the specification, you may find `Understanding JSON Schema <ujs:basics>` to be a good read!
+
+
+The Basics
+----------
+
+The simplest way to validate an instance under a given schema is to use the
+`validate <jsonschema.validators.validate>` function.
+
+.. autofunction:: validate
+ :noindex:
+
+.. _validator-protocol:
+
+The Validator Protocol
+----------------------
+
+`jsonschema` defines a `protocol <typing.Protocol>` that all validator classes adhere to.
+
+.. hint::
+
+ If you are unfamiliar with protocols, either as a general notion or as specifically implemented by `typing.Protocol`, you can think of them as a set of attributes and methods that all objects satisfying the protocol have.
+
+ Here, in the context of `jsonschema`, the `Validator.iter_errors` method can be called on `jsonschema.validators.Draft202012Validator`, or `jsonschema.validators.Draft7Validator`, or indeed any validator class, as all of them have it, along with all of the other methods described below.
+
+.. autoclass:: jsonschema.protocols.Validator
+ :noindex:
+ :members:
+
+All of the `versioned validators <versioned-validators>` that are included with `jsonschema` adhere to the protocol, and any `extensions of these validators <jsonschema.validators.extend>` will as well.
+For more information on `creating <jsonschema.validators.create>` or `extending <jsonschema.validators.extend>` validators see `creating-validators`.
+
+Type Checking
+-------------
+
+To handle JSON Schema's :kw:`type` keyword, a `Validator` uses
+an associated `TypeChecker`. The type checker provides an immutable
+mapping between names of types and functions that can test if an instance is
+of that type. The defaults are suitable for most users - each of the
+`versioned validators <versioned-validators>` that are included with
+`jsonschema` have a `TypeChecker` that can correctly handle their respective
+versions.
+
+.. seealso:: `validating-types`
+
+ For an example of providing a custom type check.
+
+.. autoclass:: TypeChecker
+ :members:
+ :noindex:
+
+.. autoexception:: jsonschema.exceptions.UndefinedTypeCheck
+ :noindex:
+
+ Raised when trying to remove a type check that is not known to this
+ TypeChecker, or when calling `jsonschema.TypeChecker.is_type`
+ directly.
+
+.. _validating-types:
+
+Validating With Additional Types
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Occasionally it can be useful to provide additional or alternate types when
+validating JSON Schema's :kw:`type` keyword.
+
+`jsonschema` tries to strike a balance between performance in the common
+case and generality. For instance, JSON Schema defines a ``number`` type, which
+can be validated with a schema such as ``{"type" : "number"}``. By default,
+this will accept instances of Python `numbers.Number`. This includes in
+particular `int`\s and `float`\s, along with
+`decimal.Decimal` objects, `complex` numbers etc. For
+``integer`` and ``object``, however, rather than checking for
+`numbers.Integral` and `collections.abc.Mapping`,
+`jsonschema` simply checks for `int` and `dict`, since the
+more general instance checks can introduce significant slowdown, especially
+given how common validating these types is.
+
+If you *do* want the generality, or just want to add a few specific additional
+types as being acceptable for a validator object, then you should update an
+existing `jsonschema.TypeChecker` or create a new one. You may then create a new
+`Validator` via `jsonschema.validators.extend`.
+
+.. testcode::
+
+ from jsonschema import validators
+
+ class MyInteger:
+ pass
+
+ def is_my_int(checker, instance):
+ return (
+ Draft202012Validator.TYPE_CHECKER.is_type(instance, "number") or
+ isinstance(instance, MyInteger)
+ )
+
+ type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
+ "number", is_my_int,
+ )
+
+ CustomValidator = validators.extend(
+ Draft202012Validator,
+ type_checker=type_checker,
+ )
+ validator = CustomValidator(schema={"type" : "number"})
+
+
+.. autoexception:: jsonschema.exceptions.UnknownType
+ :noindex:
+
+.. _versioned-validators:
+
+Versioned Validators
+--------------------
+
+`jsonschema` ships with validator classes for various versions of the JSON Schema specification.
+For details on the methods and attributes that each validator class provides see the `Validator` protocol, which each included validator class implements.
+
+Each of the below covers a specific release of the JSON Schema specification.
+
+.. autoclass:: Draft202012Validator
+ :noindex:
+
+.. autoclass:: Draft201909Validator
+ :noindex:
+
+.. autoclass:: Draft7Validator
+ :noindex:
+
+.. autoclass:: Draft6Validator
+ :noindex:
+
+.. autoclass:: Draft4Validator
+ :noindex:
+
+.. autoclass:: Draft3Validator
+ :noindex:
+
+
+For example, if you wanted to validate a schema you created against the
+Draft 2020-12 meta-schema, you could use:
+
+.. testcode::
+
+ from jsonschema import Draft202012Validator
+
+ schema = {
+ "$schema": Draft202012Validator.META_SCHEMA["$id"],
+
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "email": {"type": "string"},
+ },
+ "required": ["email"]
+ }
+ Draft202012Validator.check_schema(schema)
+
+
+.. _validating formats:
+
+Validating Formats
+------------------
+
+JSON Schema defines the :kw:`format` keyword which can be used to check if primitive types (``string``\s, ``number``\s, ``boolean``\s) conform to well-defined formats.
+By default, as per the specification, no validation is enforced.
+Optionally however, validation can be enabled by hooking a `format-checking object <jsonschema.FormatChecker>` into a `Validator`.
+
+.. doctest::
+
+ >>> validate("127.0.0.1", {"format" : "ipv4"})
+ >>> validate(
+ ... instance="-12",
+ ... schema={"format" : "ipv4"},
+ ... format_checker=Draft202012Validator.FORMAT_CHECKER,
+ ... )
+ Traceback (most recent call last):
+ ...
+ ValidationError: "-12" is not a "ipv4"
+
+
+Some formats require additional dependencies to be installed.
+
+The easiest way to ensure you have what is needed is to install ``jsonschema`` using the ``format`` or ``format-nongpl`` extras.
+
+For example:
+
+.. code:: sh
+
+ $ pip install jsonschema[format]
+
+Or if you want to avoid GPL dependencies, a second extra is available:
+
+.. code:: sh
+
+ $ pip install jsonschema[format-nongpl]
+
+At the moment, it supports all the available checkers except for ``iri`` and ``iri-reference``.
+
+.. warning::
+
+ It is your own responsibility ultimately to ensure you are license-compliant, so you should be double checking your own dependencies if you rely on this extra.
+
+The more specific list of formats along with any additional dependencies they have is shown below.
+
+.. warning::
+
+ If a dependency is not installed when using a checker that requires it, validation will succeed without throwing an error, as also specified by the specification.
+
+========================= ====================
+Checker Notes
+========================= ====================
+``color`` requires webcolors_
+``date``
+``date-time`` requires rfc3339-validator_
+``duration`` requires isoduration_
+``email``
+``hostname`` requires fqdn_
+``idn-hostname`` requires idna_
+``ipv4``
+``ipv6`` OS must have `socket.inet_pton` function
+``iri`` requires rfc3987_
+``iri-reference`` requires rfc3987_
+``json-pointer`` requires jsonpointer_
+``regex``
+``relative-json-pointer`` requires jsonpointer_
+``time`` requires rfc3339-validator_
+``uri`` requires rfc3987_ or rfc3986-validator_
+``uri-reference`` requires rfc3987_ or rfc3986-validator_
+``uri-template`` requires uri-template_
+========================= ====================
+
+
+.. _fqdn: https://pypi.org/pypi/fqdn/
+.. _idna: https://pypi.org/pypi/idna/
+.. _isoduration: https://pypi.org/pypi/isoduration/
+.. _jsonpointer: https://pypi.org/pypi/jsonpointer/
+.. _rfc3339-validator: https://pypi.org/project/rfc3339-validator/
+.. _rfc3986-validator: https://pypi.org/project/rfc3986-validator/
+.. _rfc3987: https://pypi.org/pypi/rfc3987/
+.. _uri-template: https://pypi.org/pypi/uri-template/
+.. _webcolors: https://pypi.org/pypi/webcolors/
+
+The supported mechanism for ensuring these dependencies are present is again as shown above, not by directly installing the packages.
+
+.. autoclass:: FormatChecker
+ :members:
+ :noindex:
+ :exclude-members: cls_checks
+
+ .. attribute:: checkers
+
+      A mapping of currently known formats to tuples of functions that validate them and errors that should be caught.
+ New checkers can be added and removed either per-instance or globally for all checkers using the `FormatChecker.checks` decorator.
+
+ .. classmethod:: cls_checks(format, raises=())
+
+ Register a decorated function as *globally* validating a new format.
+
+ Any instance created after this function is called will pick up the supplied checker.
+
+ :argument str format: the format that the decorated function will check
+ :argument Exception raises: the exception(s) raised
+ by the decorated function when an invalid instance is
+ found. The exception object will be accessible as the
+ `jsonschema.exceptions.ValidationError.cause` attribute
+ of the resulting validation error.
+
+ .. deprecated:: v4.14.0
+
+ Use `FormatChecker.checks` on an instance instead.
+
+.. autoexception:: FormatError
+ :noindex:
+ :members:
+
+
+Format-Specific Notes
+~~~~~~~~~~~~~~~~~~~~~
+
+regex
+^^^^^
+
+The JSON Schema specification `recommends (but does not require) <https://json-schema.org/draft/2020-12/json-schema-core.html#name-regular-expressions>`_ that implementations use ECMA 262 regular expressions.
+
+Given that there is no current library in Python capable of supporting the ECMA 262 dialect, the ``regex`` format will instead validate *Python* regular expressions, which are the ones used by this implementation for other keywords like :kw:`pattern` or :kw:`patternProperties`.
+
+email
+^^^^^
+
+In most cases, "validating" an email address is really an attempt to confirm that mail sent to it will be delivered to a recipient, and that that recipient is the correct one the email is intended for.
+Moreover, many valid email addresses are in many places incorrectly rejected, and many invalid email addresses are in many places incorrectly accepted.
+For these reasons, the ``email`` format keyword only provides a sanity check, not full :RFC:`5322` validation.
+
+The same applies to the ``idn-email`` format.
+
+If you indeed want a particular well-specified set of emails to be considered valid, you can use `FormatChecker.checks` to provide your specific definition.