first commit

Author: Ayxan
Date: 2022-05-23 00:16:32 +04:00
commit d660f2a4ca
24786 changed files with 4428337 additions and 0 deletions

View File

@@ -0,0 +1,58 @@
"""
An implementation of JSON Schema for Python
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.
Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
"""
import warnings
from jsonschema._format import (
FormatChecker,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
draft201909_format_checker,
draft202012_format_checker,
)
from jsonschema._types import TypeChecker
from jsonschema.exceptions import (
ErrorTree,
FormatError,
RefResolutionError,
SchemaError,
ValidationError,
)
from jsonschema.protocols import Validator
from jsonschema.validators import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
Draft201909Validator,
Draft202012Validator,
RefResolver,
validate,
)
def __getattr__(name):
if name == "__version__":
warnings.warn(
"Accessing jsonschema.__version__ is deprecated and will be "
"removed in a future release. Use importlib.metadata directly "
"to query for jsonschema's version.",
DeprecationWarning,
stacklevel=2,
)
try:
from importlib import metadata
except ImportError:
import importlib_metadata as metadata
return metadata.version("jsonschema")
raise AttributeError(f"module {__name__} has no attribute {name}")
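As the module docstring notes, validate is the quickest entry point. A minimal usage sketch (the schema and instance below are illustrative values, not part of this commit):

    from jsonschema import ValidationError, validate

    schema = {"type": "object", "properties": {"name": {"type": "string"}}}

    validate(instance={"name": "widget"}, schema=schema)  # passes silently

    try:
        validate(instance={"name": 42}, schema=schema)
    except ValidationError as error:
        print(error.message)  # 42 is not of type 'string'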

View File

@@ -0,0 +1,3 @@
from jsonschema.cli import main
main()
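Since this module simply calls cli.main(), the package can be run as a script; a typical invocation (the file names are illustrative) is:

    python -m jsonschema -i sample.json schema.json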

View File

@@ -0,0 +1,481 @@
from __future__ import annotations
from contextlib import suppress
from uuid import UUID
import datetime
import ipaddress
import re
import typing
from jsonschema.exceptions import FormatError
class FormatChecker(object):
"""
A ``format`` property checker.
JSON Schema does not mandate that the ``format`` property actually do any
validation. If validation is desired, however, instances of this class can
be hooked into validators to enable format validation.
`FormatChecker` objects always return ``True`` when asked about
formats that they do not know how to validate.
To check a custom format using a function that takes an instance and
returns a ``bool``, use the `FormatChecker.checks` or
`FormatChecker.cls_checks` decorators.
Arguments:
formats (~collections.abc.Iterable):
The known formats to validate. This argument can be used to
limit which formats will be used during validation.
"""
checkers: dict[
str,
tuple[
typing.Callable[[typing.Any], bool],
Exception | tuple[Exception, ...],
],
] = {}
def __init__(self, formats=None):
if formats is None:
self.checkers = self.checkers.copy()
else:
self.checkers = dict((k, self.checkers[k]) for k in formats)
def __repr__(self):
return "<FormatChecker checkers={}>".format(sorted(self.checkers))
def checks(self, format, raises=()):
"""
Register a decorated function as validating a new format.
Arguments:
format (str):
The format that the decorated function will check.
raises (Exception):
The exception(s) raised by the decorated function when an
invalid instance is found.
The exception object will be accessible as the
`jsonschema.exceptions.ValidationError.cause` attribute of the
resulting validation error.
"""
def _checks(func):
self.checkers[format] = (func, raises)
return func
return _checks
cls_checks = classmethod(checks)
def check(self, instance, format):
"""
Check whether the instance conforms to the given format.
Arguments:
instance (*any primitive type*, i.e. str, number, bool):
The instance to check
format (str):
The format that instance should conform to
Raises:
FormatError: if the instance does not conform to ``format``
"""
if format not in self.checkers:
return
func, raises = self.checkers[format]
result, cause = None, None
try:
result = func(instance)
except raises as e:
cause = e
if not result:
raise FormatError(f"{instance!r} is not a {format!r}", cause=cause)
def conforms(self, instance, format):
"""
Check whether the instance conforms to the given format.
Arguments:
instance (*any primitive type*, i.e. str, number, bool):
The instance to check
format (str):
The format that instance should conform to
Returns:
bool: whether it conformed
"""
try:
self.check(instance, format)
except FormatError:
return False
else:
return True
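# Illustrative sketch (not part of this module): registering a custom format
# on a FormatChecker instance and querying it; the "even" format name and the
# is_even function are hypothetical.
#
#     checker = FormatChecker()
#
#     @checker.checks("even", raises=ValueError)
#     def is_even(value):
#         return int(value) % 2 == 0
#
#     checker.conforms(4, "even")   # True
#     checker.conforms(3, "even")   # False (check() raises FormatError)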
draft3_format_checker = FormatChecker()
draft4_format_checker = FormatChecker()
draft6_format_checker = FormatChecker()
draft7_format_checker = FormatChecker()
draft201909_format_checker = FormatChecker()
draft202012_format_checker = FormatChecker()
_draft_checkers = dict(
draft3=draft3_format_checker,
draft4=draft4_format_checker,
draft6=draft6_format_checker,
draft7=draft7_format_checker,
draft201909=draft201909_format_checker,
draft202012=draft202012_format_checker,
)
def _checks_drafts(
name=None,
draft3=None,
draft4=None,
draft6=None,
draft7=None,
draft201909=None,
draft202012=None,
raises=(),
):
draft3 = draft3 or name
draft4 = draft4 or name
draft6 = draft6 or name
draft7 = draft7 or name
draft201909 = draft201909 or name
draft202012 = draft202012 or name
def wrap(func):
if draft3:
func = _draft_checkers["draft3"].checks(draft3, raises)(func)
if draft4:
func = _draft_checkers["draft4"].checks(draft4, raises)(func)
if draft6:
func = _draft_checkers["draft6"].checks(draft6, raises)(func)
if draft7:
func = _draft_checkers["draft7"].checks(draft7, raises)(func)
if draft201909:
func = _draft_checkers["draft201909"].checks(draft201909, raises)(
func,
)
if draft202012:
func = _draft_checkers["draft202012"].checks(draft202012, raises)(
func,
)
# Oy. This is bad global state, but relied upon for now, until
# deprecation. See #519 and test_format_checkers_come_with_defaults
FormatChecker.cls_checks(
draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3,
raises,
)(func)
return func
return wrap
@_checks_drafts(name="idn-email")
@_checks_drafts(name="email")
def is_email(instance):
if not isinstance(instance, str):
return True
return "@" in instance
@_checks_drafts(
draft3="ip-address",
draft4="ipv4",
draft6="ipv4",
draft7="ipv4",
draft201909="ipv4",
draft202012="ipv4",
raises=ipaddress.AddressValueError,
)
def is_ipv4(instance):
if not isinstance(instance, str):
return True
return ipaddress.IPv4Address(instance)
@_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError)
def is_ipv6(instance):
if not isinstance(instance, str):
return True
address = ipaddress.IPv6Address(instance)
return not getattr(address, "scope_id", "")
with suppress(ImportError):
from fqdn import FQDN
@_checks_drafts(
draft3="host-name",
draft4="hostname",
draft6="hostname",
draft7="hostname",
draft201909="hostname",
draft202012="hostname",
)
def is_host_name(instance):
if not isinstance(instance, str):
return True
return FQDN(instance).is_valid
with suppress(ImportError):
# The built-in `idna` codec only implements RFC 3490, so we go elsewhere.
import idna
@_checks_drafts(
draft7="idn-hostname",
draft201909="idn-hostname",
draft202012="idn-hostname",
raises=(idna.IDNAError, UnicodeError),
)
def is_idn_host_name(instance):
if not isinstance(instance, str):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
with suppress(ImportError):
from rfc3986_validator import validate_rfc3986
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
draft201909="uri-reference",
draft202012="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(
draft7="iri",
draft201909="iri",
draft202012="iri",
raises=ValueError,
)
def is_iri(instance):
if not isinstance(instance, str):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(
draft7="iri-reference",
draft201909="iri-reference",
draft202012="iri-reference",
raises=ValueError,
)
def is_iri_reference(instance):
if not isinstance(instance, str):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
draft201909="uri-reference",
draft202012="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str):
return True
return rfc3987.parse(instance, rule="URI_reference")
with suppress(ImportError):
from rfc3339_validator import validate_rfc3339
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str):
return True
return validate_rfc3339(instance.upper())
@_checks_drafts(
draft7="time",
draft201909="time",
draft202012="time",
)
def is_time(instance):
if not isinstance(instance, str):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str):
return True
return re.compile(instance)
@_checks_drafts(
draft3="date",
draft7="date",
draft201909="date",
draft202012="date",
raises=ValueError,
)
def is_date(instance):
if not isinstance(instance, str):
return True
return instance.isascii() and datetime.date.fromisoformat(instance)
@_checks_drafts(draft3="time", raises=ValueError)
def is_draft3_time(instance):
if not isinstance(instance, str):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
with suppress(ImportError):
from webcolors import CSS21_NAMES_TO_HEX
import webcolors
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str)
or instance.lower() in CSS21_NAMES_TO_HEX
):
return True
return is_css_color_code(instance)
with suppress(ImportError):
import jsonpointer
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
draft201909="json-pointer",
draft202012="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
draft201909="relative-json-pointer",
draft202012="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
# digits with a leading "0" are not allowed
if i > 0 and int(instance[i - 1]) == 0:
return False
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
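# Illustrative inputs (not part of this module) for the checker above:
#   "0"       -> valid   (refers to the current value)
#   "1/foo"   -> valid   (up one level, then down to member "foo")
#   "0#"      -> valid   (the index or member name of the current value)
#   "01/foo"  -> invalid (leading zero in the integer prefix)
#   "/foo"    -> invalid (missing the non-negative integer prefix)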
with suppress(ImportError):
import uri_template
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
draft201909="uri-template",
draft202012="uri-template",
)
def is_uri_template(instance):
if not isinstance(instance, str):
return True
return uri_template.validate(instance)
with suppress(ImportError):
import isoduration
@_checks_drafts(
draft201909="duration",
draft202012="duration",
raises=isoduration.DurationParsingException,
)
def is_duration(instance):
if not isinstance(instance, str):
return True
return isoduration.parse_duration(instance)
@_checks_drafts(
draft201909="uuid",
draft202012="uuid",
raises=ValueError,
)
def is_uuid(instance):
if not isinstance(instance, str):
return True
UUID(instance)
return all(instance[position] == "-" for position in (8, 13, 18, 23))

View File

@@ -0,0 +1,224 @@
from jsonschema import _utils
from jsonschema.exceptions import ValidationError
def ignore_ref_siblings(schema):
"""
Ignore siblings of ``$ref`` if it is present.
Otherwise, return all validators.
Suitable for use with `create`'s ``applicable_validators`` argument.
"""
ref = schema.get("$ref")
if ref is not None:
return [("$ref", ref)]
else:
return schema.items()
def dependencies_draft3(validator, dependencies, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, dependency in dependencies.items():
if property not in instance:
continue
if validator.is_type(dependency, "object"):
yield from validator.descend(
instance, dependency, schema_path=property,
)
elif validator.is_type(dependency, "string"):
if dependency not in instance:
message = f"{dependency!r} is a dependency of {property!r}"
yield ValidationError(message)
else:
for each in dependency:
if each not in instance:
message = f"{each!r} is a dependency of {property!r}"
yield ValidationError(message)
def dependencies_draft4_draft6_draft7(
validator,
dependencies,
instance,
schema,
):
"""
Support for the ``dependencies`` validator from pre-draft 2019-09.
In later drafts, the validator was split into separate
``dependentRequired`` and ``dependentSchemas`` validators.
"""
if not validator.is_type(instance, "object"):
return
for property, dependency in dependencies.items():
if property not in instance:
continue
if validator.is_type(dependency, "array"):
for each in dependency:
if each not in instance:
message = f"{each!r} is a dependency of {property!r}"
yield ValidationError(message)
else:
yield from validator.descend(
instance, dependency, schema_path=property,
)
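# Illustrative draft 7 schemas (not part of this module) for the two forms
# handled above:
#   {"dependencies": {"credit_card": ["billing_address"]}}
#       -> "billing_address" becomes required whenever "credit_card" is present
#   {"dependencies": {"credit_card": {"required": ["billing_address"]}}}
#       -> the whole subschema is applied whenever "credit_card" is present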
def disallow_draft3(validator, disallow, instance, schema):
for disallowed in _utils.ensure_list(disallow):
if validator.evolve(schema={"type": [disallowed]}).is_valid(instance):
message = f"{disallowed!r} is disallowed for {instance!r}"
yield ValidationError(message)
def extends_draft3(validator, extends, instance, schema):
if validator.is_type(extends, "object"):
yield from validator.descend(instance, extends)
return
for index, subschema in enumerate(extends):
yield from validator.descend(instance, subschema, schema_path=index)
def items_draft3_draft4(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
if validator.is_type(items, "object"):
for index, item in enumerate(instance):
yield from validator.descend(item, items, path=index)
else:
for (index, item), subschema in zip(enumerate(instance), items):
yield from validator.descend(
item, subschema, path=index, schema_path=index,
)
def items_draft6_draft7_draft201909(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
if validator.is_type(items, "array"):
for (index, item), subschema in zip(enumerate(instance), items):
yield from validator.descend(
item, subschema, path=index, schema_path=index,
)
else:
for index, item in enumerate(instance):
yield from validator.descend(item, items, path=index)
def minimum_draft3_draft4(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if schema.get("exclusiveMinimum", False):
failed = instance <= minimum
cmp = "less than or equal to"
else:
failed = instance < minimum
cmp = "less than"
if failed:
message = f"{instance!r} is {cmp} the minimum of {minimum!r}"
yield ValidationError(message)
def maximum_draft3_draft4(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if schema.get("exclusiveMaximum", False):
failed = instance >= maximum
cmp = "greater than or equal to"
else:
failed = instance > maximum
cmp = "greater than"
if failed:
message = f"{instance!r} is {cmp} the maximum of {maximum!r}"
yield ValidationError(message)
def properties_draft3(validator, properties, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, subschema in properties.items():
if property in instance:
yield from validator.descend(
instance[property],
subschema,
path=property,
schema_path=property,
)
elif subschema.get("required", False):
error = ValidationError(f"{property!r} is a required property")
error._set(
validator="required",
validator_value=subschema["required"],
instance=instance,
schema=schema,
)
error.path.appendleft(property)
error.schema_path.extend([property, "required"])
yield error
def type_draft3(validator, types, instance, schema):
types = _utils.ensure_list(types)
all_errors = []
for index, type in enumerate(types):
if validator.is_type(type, "object"):
errors = list(validator.descend(instance, type, schema_path=index))
if not errors:
return
all_errors.extend(errors)
else:
if validator.is_type(instance, type):
return
else:
reprs = []
for type in types:
try:
reprs.append(repr(type["name"]))
except Exception:
reprs.append(repr(type))
yield ValidationError(
f"{instance!r} is not of type {', '.join(reprs)}",
context=all_errors,
)
def contains_draft6_draft7(validator, contains, instance, schema):
if not validator.is_type(instance, "array"):
return
if not any(
validator.evolve(schema=contains).is_valid(element)
for element in instance
):
yield ValidationError(
f"None of {instance!r} are valid under the given schema",
)
def recursiveRef(validator, recursiveRef, instance, schema):
lookup_url, target = validator.resolver.resolution_scope, validator.schema
for each in reversed(validator.resolver._scopes_stack[1:]):
lookup_url, next_target = validator.resolver.resolve(each)
if next_target.get("$recursiveAnchor"):
target = next_target
else:
break
fragment = recursiveRef.lstrip("#")
subschema = validator.resolver.resolve_fragment(target, fragment)
yield from validator.descend(instance, subschema)

View File

@@ -0,0 +1,149 @@
# -*- test-case-name: twisted.test.test_reflect -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Standardized versions of various cool and/or strange things that you can do
with Python's reflection capabilities.
"""
import sys
class _NoModuleFound(Exception):
"""
No module was found because none exists.
"""
class InvalidName(ValueError):
"""
The given name is not a dot-separated list of Python objects.
"""
class ModuleNotFound(InvalidName):
"""
The module associated with the given name doesn't exist and it can't be
imported.
"""
class ObjectNotFound(InvalidName):
"""
The object associated with the given name doesn't exist and it can't be
imported.
"""
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
reraise.__doc__ = """
Re-raise an exception, with an optional traceback, in a way that is compatible
with both Python 2 and Python 3.
Note that on Python 3, re-raised exceptions will be mutated, with their
C{__traceback__} attribute being set.
@param exception: The exception instance.
@param traceback: The traceback to use, or C{None} indicating a new traceback.
"""
def _importAndCheckStack(importName):
"""
Import the given name as a module, then walk the stack to determine whether
the failure was the module not existing, or some code in the module (for
example a dependent import) failing. This can be helpful to determine
whether any actual application code was run. For example, to distinguish
an administrative error (entering the wrong module name) from a programmer
error (writing buggy code in a module that fails to import).
@param importName: The name of the module to import.
@type importName: C{str}
@raise Exception: if something bad happens. This can be any type of
exception, since nobody knows what loading some arbitrary code might
do.
@raise _NoModuleFound: if no module was found.
"""
try:
return __import__(importName)
except ImportError:
excType, excValue, excTraceback = sys.exc_info()
while excTraceback:
execName = excTraceback.tb_frame.f_globals["__name__"]
# In Python 2 execName is None when an ImportError is encountered,
# whereas in Python 3 execName is equal to the importName.
if execName is None or execName == importName:
reraise(excValue, excTraceback)
excTraceback = excTraceback.tb_next
raise _NoModuleFound()
def namedAny(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it. For example, the fully-qualified name of this
object is 'twisted.python.reflect.namedAny'.
@type name: L{str}
@param name: The name of the object to return.
@raise InvalidName: If the name is an empty string, starts or ends with
a '.', or is otherwise syntactically incorrect.
@raise ModuleNotFound: If the name is syntactically correct but the
module it specifies cannot be imported because it does not appear to
exist.
@raise ObjectNotFound: If the name is syntactically correct, includes at
least one '.', but the module it specifies cannot be imported because
it does not appear to exist.
@raise AttributeError: If an attribute of an object along the way cannot be
accessed, or a module along the way is not found.
@return: the Python object identified by 'name'.
"""
if not name:
raise InvalidName('Empty module name')
names = name.split('.')
# if the name starts or ends with a '.' or contains '..', the __import__
# will raise an 'Empty module name' error. This will provide a better error
# message.
if '' in names:
raise InvalidName(
"name must be a string giving a '.'-separated list of Python "
"identifiers, not %r" % (name,))
topLevelPackage = None
moduleNames = names[:]
while not topLevelPackage:
if moduleNames:
trialname = '.'.join(moduleNames)
try:
topLevelPackage = _importAndCheckStack(trialname)
except _NoModuleFound:
moduleNames.pop()
else:
if len(names) == 1:
raise ModuleNotFound("No module named %r" % (name,))
else:
raise ObjectNotFound('%r does not name an object' % (name,))
obj = topLevelPackage
for n in names[1:]:
obj = getattr(obj, n)
return obj
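A short sketch of namedAny in use (the dotted names are standard-library and package examples, not part of this commit):

    from jsonschema._reflect import namedAny

    namedAny("json.dumps")                  # imports json, returns the dumps function
    namedAny("jsonschema.Draft7Validator")  # how the CLI resolves --validator names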

View File

@@ -0,0 +1,217 @@
from __future__ import annotations
import numbers
import typing
from pyrsistent import pmap
import attr
from jsonschema.exceptions import UndefinedTypeCheck
# unfortunately, the type of pmap is generic, and if used as the attr.ib
# converter, the generic type is presented to mypy, which then fails to match
# the concrete type of a type checker mapping
# this "do nothing" wrapper presents the correct information to mypy
def _typed_pmap_converter(
init_val: typing.Mapping[
str,
typing.Callable[["TypeChecker", typing.Any], bool],
],
) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]:
return typing.cast(
typing.Mapping[
str,
typing.Callable[["TypeChecker", typing.Any], bool],
],
pmap(init_val),
)
def is_array(checker, instance):
return isinstance(instance, list)
def is_bool(checker, instance):
return isinstance(instance, bool)
def is_integer(checker, instance):
# bool inherits from int, so ensure bools aren't reported as ints
if isinstance(instance, bool):
return False
return isinstance(instance, int)
def is_null(checker, instance):
return instance is None
def is_number(checker, instance):
# bool inherits from int, so ensure bools aren't reported as ints
if isinstance(instance, bool):
return False
return isinstance(instance, numbers.Number)
def is_object(checker, instance):
return isinstance(instance, dict)
def is_string(checker, instance):
return isinstance(instance, str)
def is_any(checker, instance):
return True
@attr.s(frozen=True)
class TypeChecker(object):
"""
A ``type`` property checker.
A `TypeChecker` performs type checking for a `Validator`. Type
checks to perform are updated using `TypeChecker.redefine` or
`TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
Each of these returns a new `TypeChecker` object.
Arguments:
type_checkers (dict):
The initial mapping of types to their checking functions.
"""
_type_checkers: typing.Mapping[
str, typing.Callable[["TypeChecker", typing.Any], bool],
] = attr.ib(
default=pmap(),
converter=_typed_pmap_converter,
)
def is_type(self, instance, type):
"""
Check if the instance is of the appropriate type.
Arguments:
instance (object):
The instance to check
type (str):
The name of the type that is expected.
Returns:
bool: Whether it conformed.
Raises:
`jsonschema.exceptions.UndefinedTypeCheck`:
if type is unknown to this object.
"""
try:
fn = self._type_checkers[type]
except KeyError:
raise UndefinedTypeCheck(type) from None
return fn(self, instance)
def redefine(self, type, fn):
"""
Produce a new checker with the given type redefined.
Arguments:
type (str):
The name of the type to check.
fn (collections.abc.Callable):
A function taking exactly two parameters - the type
checker calling the function and the instance to check.
The function should return true if instance is of this
type and false otherwise.
Returns:
A new `TypeChecker` instance.
"""
return self.redefine_many({type: fn})
def redefine_many(self, definitions=()):
"""
Produce a new checker with the given types redefined.
Arguments:
definitions (dict):
A dictionary mapping types to their checking functions.
Returns:
A new `TypeChecker` instance.
"""
return attr.evolve(
self, type_checkers=self._type_checkers.update(definitions),
)
def remove(self, *types):
"""
Produce a new checker with the given types forgotten.
Arguments:
types (~collections.abc.Iterable):
the names of the types to remove.
Returns:
A new `TypeChecker` instance.
Raises:
`jsonschema.exceptions.UndefinedTypeCheck`:
if any given type is unknown to this object.
"""
checkers = self._type_checkers
for each in types:
try:
checkers = checkers.remove(each)
except KeyError:
raise UndefinedTypeCheck(each)
return attr.evolve(self, type_checkers=checkers)
draft3_type_checker = TypeChecker(
{
"any": is_any,
"array": is_array,
"boolean": is_bool,
"integer": is_integer,
"object": is_object,
"null": is_null,
"number": is_number,
"string": is_string,
},
)
draft4_type_checker = draft3_type_checker.remove("any")
draft6_type_checker = draft4_type_checker.redefine(
"integer",
lambda checker, instance: (
is_integer(checker, instance)
or isinstance(instance, float) and instance.is_integer()
),
)
draft7_type_checker = draft6_type_checker
draft201909_type_checker = draft7_type_checker
draft202012_type_checker = draft201909_type_checker
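A brief sketch of customising one of the checkers above; the tuple-as-array redefinition is illustrative and not part of this commit (outside this module, import draft202012_type_checker from jsonschema._types):

    tuple_aware = draft202012_type_checker.redefine(
        "array",
        lambda checker, instance: isinstance(instance, (list, tuple)),
    )

    tuple_aware.is_type((1, 2, 3), "array")                 # True
    draft202012_type_checker.is_type((1, 2, 3), "array")    # False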

View File

@@ -0,0 +1,348 @@
from collections.abc import Mapping, MutableMapping, Sequence
from urllib.parse import urlsplit
import itertools
import json
import re
import sys
# The files() API was added in Python 3.9.
if sys.version_info >= (3, 9): # pragma: no cover
from importlib import resources
else: # pragma: no cover
import importlib_resources as resources # type: ignore
class URIDict(MutableMapping):
"""
Dictionary which uses normalized URIs as keys.
"""
def normalize(self, uri):
return urlsplit(uri).geturl()
def __init__(self, *args, **kwargs):
self.store = dict()
self.store.update(*args, **kwargs)
def __getitem__(self, uri):
return self.store[self.normalize(uri)]
def __setitem__(self, uri, value):
self.store[self.normalize(uri)] = value
def __delitem__(self, uri):
del self.store[self.normalize(uri)]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __repr__(self):
return repr(self.store)
class Unset(object):
"""
An as-of-yet unset attribute or unprovided default parameter.
"""
def __repr__(self):
return "<unset>"
def load_schema(name):
"""
Load a schema from ./schemas/``name``.json and return it.
"""
path = resources.files(__package__).joinpath(f"schemas/{name}.json")
data = path.read_text(encoding="utf-8")
return json.loads(data)
def format_as_index(container, indices):
"""
Construct a single string containing indexing operations for the indices.
For example, for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"]
Arguments:
container (str):
A word to use for the thing being indexed
indices (sequence):
The indices to format.
"""
if not indices:
return container
return f"{container}[{']['.join(repr(index) for index in indices)}]"
def find_additional_properties(instance, schema):
"""
Return the set of additional properties for the given ``instance``.
Weeds out properties that should have been validated by ``properties`` and
/ or ``patternProperties``.
Assumes ``instance`` is dict-like already.
"""
properties = schema.get("properties", {})
patterns = "|".join(schema.get("patternProperties", {}))
for property in instance:
if property not in properties:
if patterns and re.search(patterns, property):
continue
yield property
def extras_msg(extras):
"""
Create an error message for extra items or properties.
"""
if len(extras) == 1:
verb = "was"
else:
verb = "were"
return ", ".join(repr(extra) for extra in sorted(extras)), verb
def ensure_list(thing):
"""
Wrap ``thing`` in a list if it's a single str.
Otherwise, return it unchanged.
"""
if isinstance(thing, str):
return [thing]
return thing
def _mapping_equal(one, two):
"""
Check if two mappings are equal using the semantics of `equal`.
"""
if len(one) != len(two):
return False
return all(
key in two and equal(value, two[key])
for key, value in one.items()
)
def _sequence_equal(one, two):
"""
Check if two sequences are equal using the semantics of `equal`.
"""
if len(one) != len(two):
return False
return all(equal(i, j) for i, j in zip(one, two))
def equal(one, two):
"""
Check if two things are equal evading some Python type hierarchy semantics.
Specifically in JSON Schema, evade `bool` inheriting from `int`,
recursing into sequences to do the same.
"""
if isinstance(one, str) or isinstance(two, str):
return one == two
if isinstance(one, Sequence) and isinstance(two, Sequence):
return _sequence_equal(one, two)
if isinstance(one, Mapping) and isinstance(two, Mapping):
return _mapping_equal(one, two)
return unbool(one) == unbool(two)
def unbool(element, true=object(), false=object()):
"""
A hack to make True and 1 and False and 0 unique for ``uniq``.
"""
if element is True:
return true
elif element is False:
return false
return element
def uniq(container):
"""
Check if all of a container's elements are unique.
Tries to rely on the container being recursively sortable, or otherwise
falls back on (slow) brute force.
"""
try:
sort = sorted(unbool(i) for i in container)
sliced = itertools.islice(sort, 1, None)
for i, j in zip(sort, sliced):
if equal(i, j):
return False
except (NotImplementedError, TypeError):
seen = []
for e in container:
e = unbool(e)
for i in seen:
if equal(i, e):
return False
seen.append(e)
return True
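# Illustrative values (not part of this module) showing why booleans are
# special-cased through unbool():
#   equal(True, 1)    -> False, even though True == 1 in Python
#   uniq([0, False])  -> True,  0 and False count as distinct JSON values
#   uniq([1, 1.0])    -> False, 1 and 1.0 are the same JSON number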
def find_evaluated_item_indexes_by_schema(validator, instance, schema):
"""
Get all indexes of items that get evaluated under the current schema
Covers all keywords related to unevaluatedItems: items, prefixItems, if,
then, else, contains, unevaluatedItems, allOf, oneOf, anyOf
"""
if validator.is_type(schema, "boolean"):
return []
evaluated_indexes = []
if "items" in schema:
return list(range(0, len(instance)))
if "$ref" in schema:
scope, resolved = validator.resolver.resolve(schema["$ref"])
validator.resolver.push_scope(scope)
try:
evaluated_indexes += find_evaluated_item_indexes_by_schema(
validator, instance, resolved)
finally:
validator.resolver.pop_scope()
if "prefixItems" in schema:
evaluated_indexes += list(range(0, len(schema["prefixItems"])))
if "if" in schema:
if validator.evolve(schema=schema["if"]).is_valid(instance):
evaluated_indexes += find_evaluated_item_indexes_by_schema(
validator, instance, schema["if"],
)
if "then" in schema:
evaluated_indexes += find_evaluated_item_indexes_by_schema(
validator, instance, schema["then"],
)
else:
if "else" in schema:
evaluated_indexes += find_evaluated_item_indexes_by_schema(
validator, instance, schema["else"],
)
for keyword in ["contains", "unevaluatedItems"]:
if keyword in schema:
for k, v in enumerate(instance):
if validator.evolve(schema=schema[keyword]).is_valid(v):
evaluated_indexes.append(k)
for keyword in ["allOf", "oneOf", "anyOf"]:
if keyword in schema:
for subschema in schema[keyword]:
errs = list(validator.descend(instance, subschema))
if not errs:
evaluated_indexes += find_evaluated_item_indexes_by_schema(
validator, instance, subschema,
)
return evaluated_indexes
def find_evaluated_property_keys_by_schema(validator, instance, schema):
"""
Get all property names that get evaluated under the current schema
Covers all keywords related to unevaluatedProperties: properties,
additionalProperties, unevaluatedProperties, patternProperties,
dependentSchemas, allOf, oneOf, anyOf, if, then, else
"""
if validator.is_type(schema, "boolean"):
return []
evaluated_keys = []
if "$ref" in schema:
scope, resolved = validator.resolver.resolve(schema["$ref"])
validator.resolver.push_scope(scope)
try:
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, resolved,
)
finally:
validator.resolver.pop_scope()
for keyword in [
"properties", "additionalProperties", "unevaluatedProperties",
]:
if keyword in schema:
if validator.is_type(schema[keyword], "boolean"):
for property, value in instance.items():
if validator.evolve(schema=schema[keyword]).is_valid(
{property: value},
):
evaluated_keys.append(property)
if validator.is_type(schema[keyword], "object"):
for property, subschema in schema[keyword].items():
if property in instance and validator.evolve(
schema=subschema,
).is_valid(instance[property]):
evaluated_keys.append(property)
if "patternProperties" in schema:
for property, value in instance.items():
for pattern, _ in schema["patternProperties"].items():
if re.search(pattern, property) and validator.evolve(
schema=schema["patternProperties"],
).is_valid({property: value}):
evaluated_keys.append(property)
if "dependentSchemas" in schema:
for property, subschema in schema["dependentSchemas"].items():
if property not in instance:
continue
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, subschema,
)
for keyword in ["allOf", "oneOf", "anyOf"]:
if keyword in schema:
for subschema in schema[keyword]:
errs = list(validator.descend(instance, subschema))
if not errs:
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, subschema,
)
if "if" in schema:
if validator.evolve(schema=schema["if"]).is_valid(instance):
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, schema["if"],
)
if "then" in schema:
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, schema["then"],
)
else:
if "else" in schema:
evaluated_keys += find_evaluated_property_keys_by_schema(
validator, instance, schema["else"],
)
return evaluated_keys
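The two helpers above drive the unevaluatedItems and unevaluatedProperties keywords; a sketch of the observable behaviour through the public API (the schema and instances are illustrative, not from this commit):

    from jsonschema import Draft202012Validator

    schema = {
        "properties": {"a": {"type": "integer"}},
        "unevaluatedProperties": False,
    }
    validator = Draft202012Validator(schema)

    validator.is_valid({"a": 1})          # True  -- "a" is evaluated by "properties"
    validator.is_valid({"a": 1, "b": 2})  # False -- "b" is unevaluated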

View File

@@ -0,0 +1,463 @@
from fractions import Fraction
from urllib.parse import urldefrag, urljoin
import re
from jsonschema._utils import (
ensure_list,
equal,
extras_msg,
find_additional_properties,
find_evaluated_item_indexes_by_schema,
find_evaluated_property_keys_by_schema,
unbool,
uniq,
)
from jsonschema.exceptions import FormatError, ValidationError
def patternProperties(validator, patternProperties, instance, schema):
if not validator.is_type(instance, "object"):
return
for pattern, subschema in patternProperties.items():
for k, v in instance.items():
if re.search(pattern, k):
yield from validator.descend(
v, subschema, path=k, schema_path=pattern,
)
def propertyNames(validator, propertyNames, instance, schema):
if not validator.is_type(instance, "object"):
return
for property in instance:
yield from validator.descend(instance=property, schema=propertyNames)
def additionalProperties(validator, aP, instance, schema):
if not validator.is_type(instance, "object"):
return
extras = set(find_additional_properties(instance, schema))
if validator.is_type(aP, "object"):
for extra in extras:
yield from validator.descend(instance[extra], aP, path=extra)
elif not aP and extras:
if "patternProperties" in schema:
if len(extras) == 1:
verb = "does"
else:
verb = "do"
joined = ", ".join(repr(each) for each in sorted(extras))
patterns = ", ".join(
repr(each) for each in sorted(schema["patternProperties"])
)
error = f"{joined} {verb} not match any of the regexes: {patterns}"
yield ValidationError(error)
else:
error = "Additional properties are not allowed (%s %s unexpected)"
yield ValidationError(error % extras_msg(extras))
def items(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
prefix = len(schema.get("prefixItems", []))
total = len(instance)
if items is False and total > prefix:
message = f"Expected at most {prefix} items, but found {total}"
yield ValidationError(message)
else:
for index in range(prefix, total):
yield from validator.descend(
instance=instance[index],
schema=items,
path=index,
)
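# Illustrative 2020-12 behaviour (schema and instances not from this module):
# with {"prefixItems": [{"type": "string"}], "items": False}, the instance
# ["a"] is accepted, while ["a", "b"] is rejected with
# "Expected at most 1 items, but found 2".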
def additionalItems(validator, aI, instance, schema):
if (
not validator.is_type(instance, "array")
or validator.is_type(schema.get("items", {}), "object")
):
return
len_items = len(schema.get("items", []))
if validator.is_type(aI, "object"):
for index, item in enumerate(instance[len_items:], start=len_items):
yield from validator.descend(item, aI, path=index)
elif not aI and len(instance) > len(schema.get("items", [])):
error = "Additional items are not allowed (%s %s unexpected)"
yield ValidationError(
error % extras_msg(instance[len(schema.get("items", [])):]),
)
def const(validator, const, instance, schema):
if not equal(instance, const):
yield ValidationError(f"{const!r} was expected")
def contains(validator, contains, instance, schema):
if not validator.is_type(instance, "array"):
return
matches = 0
min_contains = schema.get("minContains", 1)
max_contains = schema.get("maxContains", len(instance))
for each in instance:
if validator.evolve(schema=contains).is_valid(each):
matches += 1
if matches > max_contains:
yield ValidationError(
"Too many items match the given schema "
f"(expected at most {max_contains})",
validator="maxContains",
validator_value=max_contains,
)
return
if matches < min_contains:
if not matches:
yield ValidationError(
f"{instance!r} does not contain items "
"matching the given schema",
)
else:
yield ValidationError(
"Too few items match the given schema (expected at least "
f"{min_contains} but only {matches} matched)",
validator="minContains",
validator_value=min_contains,
)
def exclusiveMinimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance <= minimum:
yield ValidationError(
f"{instance!r} is less than or equal to "
f"the minimum of {minimum!r}",
)
def exclusiveMaximum(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance >= maximum:
yield ValidationError(
f"{instance!r} is greater than or equal "
f"to the maximum of {maximum!r}",
)
def minimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance < minimum:
message = f"{instance!r} is less than the minimum of {minimum!r}"
yield ValidationError(message)
def maximum(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance > maximum:
message = f"{instance!r} is greater than the maximum of {maximum!r}"
yield ValidationError(message)
def multipleOf(validator, dB, instance, schema):
if not validator.is_type(instance, "number"):
return
if isinstance(dB, float):
quotient = instance / dB
try:
failed = int(quotient) != quotient
except OverflowError:
# When `instance` is large and `dB` is less than one,
# quotient can overflow to infinity; and then casting to int
# raises an error.
#
# In this case we fall back to Fraction logic, which is
# exact and cannot overflow. The performance is also
# acceptable: we try the fast all-float option first, and
# we know that Fraction(dB) can have at most a few hundred
# digits in each part. The worst-case slowdown is therefore
# for already-slow enormous integers or Decimals.
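# A concrete case (illustrative): instance = 1e308 with dB = 0.1
# gives quotient = inf, and int(inf) raises OverflowError, so we
# end up here and let the exact Fraction arithmetic decide.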
failed = (Fraction(instance) / Fraction(dB)).denominator != 1
else:
failed = instance % dB
if failed:
yield ValidationError(f"{instance!r} is not a multiple of {dB}")
def minItems(validator, mI, instance, schema):
if validator.is_type(instance, "array") and len(instance) < mI:
yield ValidationError(f"{instance!r} is too short")
def maxItems(validator, mI, instance, schema):
if validator.is_type(instance, "array") and len(instance) > mI:
yield ValidationError(f"{instance!r} is too long")
def uniqueItems(validator, uI, instance, schema):
if (
uI
and validator.is_type(instance, "array")
and not uniq(instance)
):
yield ValidationError(f"{instance!r} has non-unique elements")
def pattern(validator, patrn, instance, schema):
if (
validator.is_type(instance, "string")
and not re.search(patrn, instance)
):
yield ValidationError(f"{instance!r} does not match {patrn!r}")
def format(validator, format, instance, schema):
if validator.format_checker is not None:
try:
validator.format_checker.check(instance, format)
except FormatError as error:
yield ValidationError(error.message, cause=error.cause)
def minLength(validator, mL, instance, schema):
if validator.is_type(instance, "string") and len(instance) < mL:
yield ValidationError(f"{instance!r} is too short")
def maxLength(validator, mL, instance, schema):
if validator.is_type(instance, "string") and len(instance) > mL:
yield ValidationError(f"{instance!r} is too long")
def dependentRequired(validator, dependentRequired, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, dependency in dependentRequired.items():
if property not in instance:
continue
for each in dependency:
if each not in instance:
message = f"{each!r} is a dependency of {property!r}"
yield ValidationError(message)
def dependentSchemas(validator, dependentSchemas, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, dependency in dependentSchemas.items():
if property not in instance:
continue
yield from validator.descend(
instance, dependency, schema_path=property,
)
def enum(validator, enums, instance, schema):
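# bool is a subclass of int, so True == 1 and False == 0; route 0/1
# candidates through unbool() so booleans and numbers stay distinct.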
if instance == 0 or instance == 1:
unbooled = unbool(instance)
if all(unbooled != unbool(each) for each in enums):
yield ValidationError(f"{instance!r} is not one of {enums!r}")
elif instance not in enums:
yield ValidationError(f"{instance!r} is not one of {enums!r}")
def ref(validator, ref, instance, schema):
resolve = getattr(validator.resolver, "resolve", None)
if resolve is None:
with validator.resolver.resolving(ref) as resolved:
yield from validator.descend(instance, resolved)
else:
scope, resolved = validator.resolver.resolve(ref)
validator.resolver.push_scope(scope)
try:
yield from validator.descend(instance, resolved)
finally:
validator.resolver.pop_scope()
def dynamicRef(validator, dynamicRef, instance, schema):
_, fragment = urldefrag(dynamicRef)
for url in validator.resolver._scopes_stack:
lookup_url = urljoin(url, dynamicRef)
with validator.resolver.resolving(lookup_url) as subschema:
if ("$dynamicAnchor" in subschema
and fragment == subschema["$dynamicAnchor"]):
yield from validator.descend(instance, subschema)
break
else:
with validator.resolver.resolving(dynamicRef) as subschema:
yield from validator.descend(instance, subschema)
def type(validator, types, instance, schema):
types = ensure_list(types)
if not any(validator.is_type(instance, type) for type in types):
reprs = ", ".join(repr(type) for type in types)
yield ValidationError(f"{instance!r} is not of type {reprs}")
def properties(validator, properties, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, subschema in properties.items():
if property in instance:
yield from validator.descend(
instance[property],
subschema,
path=property,
schema_path=property,
)
def required(validator, required, instance, schema):
if not validator.is_type(instance, "object"):
return
for property in required:
if property not in instance:
yield ValidationError(f"{property!r} is a required property")
def minProperties(validator, mP, instance, schema):
if validator.is_type(instance, "object") and len(instance) < mP:
yield ValidationError(f"{instance!r} does not have enough properties")
def maxProperties(validator, mP, instance, schema):
if not validator.is_type(instance, "object"):
return
if validator.is_type(instance, "object") and len(instance) > mP:
yield ValidationError(f"{instance!r} has too many properties")
def allOf(validator, allOf, instance, schema):
for index, subschema in enumerate(allOf):
yield from validator.descend(instance, subschema, schema_path=index)
def anyOf(validator, anyOf, instance, schema):
all_errors = []
for index, subschema in enumerate(anyOf):
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
break
all_errors.extend(errs)
else:
yield ValidationError(
f"{instance!r} is not valid under any of the given schemas",
context=all_errors,
)
def oneOf(validator, oneOf, instance, schema):
subschemas = enumerate(oneOf)
all_errors = []
for index, subschema in subschemas:
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
first_valid = subschema
break
all_errors.extend(errs)
else:
yield ValidationError(
f"{instance!r} is not valid under any of the given schemas",
context=all_errors,
)
more_valid = [
each for _, each in subschemas
if validator.evolve(schema=each).is_valid(instance)
]
if more_valid:
more_valid.append(first_valid)
reprs = ", ".join(repr(schema) for schema in more_valid)
yield ValidationError(f"{instance!r} is valid under each of {reprs}")
def not_(validator, not_schema, instance, schema):
if validator.evolve(schema=not_schema).is_valid(instance):
message = f"{instance!r} should not be valid under {not_schema!r}"
yield ValidationError(message)
def if_(validator, if_schema, instance, schema):
if validator.evolve(schema=if_schema).is_valid(instance):
if "then" in schema:
then = schema["then"]
yield from validator.descend(instance, then, schema_path="then")
elif "else" in schema:
else_ = schema["else"]
yield from validator.descend(instance, else_, schema_path="else")
def unevaluatedItems(validator, unevaluatedItems, instance, schema):
evaluated_item_indexes = find_evaluated_item_indexes_by_schema(
validator, instance, schema,
)
unevaluated_items = [
item for index, item in enumerate(instance)
if index not in evaluated_item_indexes
]
if unevaluated_items:
error = "Unevaluated items are not allowed (%s %s unexpected)"
yield ValidationError(error % extras_msg(unevaluated_items))
def unevaluatedProperties(validator, unevaluatedProperties, instance, schema):
evaluated_property_keys = find_evaluated_property_keys_by_schema(
validator, instance, schema,
)
unevaluated_property_keys = []
for property in instance:
if property not in evaluated_property_keys:
for _ in validator.descend(
instance[property],
unevaluatedProperties,
path=property,
schema_path=property,
):
unevaluated_property_keys.append(property)
if unevaluated_property_keys:
error = "Unevaluated properties are not allowed (%s %s unexpected)"
yield ValidationError(error % extras_msg(unevaluated_property_keys))
def prefixItems(validator, prefixItems, instance, schema):
if not validator.is_type(instance, "array"):
return
for (index, item), subschema in zip(enumerate(instance), prefixItems):
yield from validator.descend(
instance=item,
schema=subschema,
schema_path=index,
path=index,
)

View File

@@ -0,0 +1,5 @@
"""
Benchmarks for validation.
This package is *not* public API.
"""

View File

@@ -0,0 +1,25 @@
"""
A performance benchmark using the example from issue #232.
See https://github.com/python-jsonschema/jsonschema/pull/232.
"""
from pathlib import Path
from pyperf import Runner
from pyrsistent import m
from jsonschema.tests._suite import Version
import jsonschema
issue232 = Version(
path=Path(__file__).parent / "issue232",
remotes=m(),
name="issue232",
)
if __name__ == "__main__":
issue232.benchmark(
runner=Runner(),
Validator=jsonschema.Draft4Validator,
)

View File

@@ -0,0 +1,12 @@
"""
A performance benchmark using the official test suite.
This benchmarks jsonschema using every valid example in the
JSON-Schema-Test-Suite. It will take some time to complete.
"""
from pyperf import Runner
from jsonschema.tests._suite import Suite
if __name__ == "__main__":
Suite().benchmark(runner=Runner())

View File

@@ -0,0 +1,284 @@
"""
The ``jsonschema`` command line.
"""
from json import JSONDecodeError
from textwrap import dedent
import argparse
import json
import sys
import traceback
try:
from importlib import metadata
except ImportError:
import importlib_metadata as metadata # type: ignore
import attr
from jsonschema._reflect import namedAny
from jsonschema.exceptions import SchemaError
from jsonschema.validators import RefResolver, validator_for
class _CannotLoadFile(Exception):
pass
@attr.s
class _Outputter(object):
_formatter = attr.ib()
_stdout = attr.ib()
_stderr = attr.ib()
@classmethod
def from_arguments(cls, arguments, stdout, stderr):
if arguments["output"] == "plain":
formatter = _PlainFormatter(arguments["error_format"])
elif arguments["output"] == "pretty":
formatter = _PrettyFormatter()
return cls(formatter=formatter, stdout=stdout, stderr=stderr)
def load(self, path):
try:
file = open(path)
except FileNotFoundError:
self.filenotfound_error(path=path, exc_info=sys.exc_info())
raise _CannotLoadFile()
with file:
try:
return json.load(file)
except JSONDecodeError:
self.parsing_error(path=path, exc_info=sys.exc_info())
raise _CannotLoadFile()
def filenotfound_error(self, **kwargs):
self._stderr.write(self._formatter.filenotfound_error(**kwargs))
def parsing_error(self, **kwargs):
self._stderr.write(self._formatter.parsing_error(**kwargs))
def validation_error(self, **kwargs):
self._stderr.write(self._formatter.validation_error(**kwargs))
def validation_success(self, **kwargs):
self._stdout.write(self._formatter.validation_success(**kwargs))
@attr.s
class _PrettyFormatter(object):
_ERROR_MSG = dedent(
"""\
===[{type}]===({path})===
{body}
-----------------------------
""",
)
_SUCCESS_MSG = "===[SUCCESS]===({path})===\n"
def filenotfound_error(self, path, exc_info):
return self._ERROR_MSG.format(
path=path,
type="FileNotFoundError",
body="{!r} does not exist.".format(path),
)
def parsing_error(self, path, exc_info):
exc_type, exc_value, exc_traceback = exc_info
exc_lines = "".join(
traceback.format_exception(exc_type, exc_value, exc_traceback),
)
return self._ERROR_MSG.format(
path=path,
type=exc_type.__name__,
body=exc_lines,
)
def validation_error(self, instance_path, error):
return self._ERROR_MSG.format(
path=instance_path,
type=error.__class__.__name__,
body=error,
)
def validation_success(self, instance_path):
return self._SUCCESS_MSG.format(path=instance_path)
@attr.s
class _PlainFormatter(object):
_error_format = attr.ib()
def filenotfound_error(self, path, exc_info):
return "{!r} does not exist.\n".format(path)
def parsing_error(self, path, exc_info):
return "Failed to parse {}: {}\n".format(
"<stdin>" if path == "<stdin>" else repr(path),
exc_info[1],
)
def validation_error(self, instance_path, error):
return self._error_format.format(file_name=instance_path, error=error)
def validation_success(self, instance_path):
return ""
def _namedAnyWithDefault(name):
if "." not in name:
name = "jsonschema." + name
return namedAny(name)
parser = argparse.ArgumentParser(
description="JSON Schema Validation CLI",
)
parser.add_argument(
"-i", "--instance",
action="append",
dest="instances",
help="""
a path to a JSON instance (e.g. filename.json) to validate (may
be specified multiple times). If no instances are provided via this
option, one will be expected on standard input.
""",
)
parser.add_argument(
"-F", "--error-format",
help="""
the format to use for each validation error message, specified
in a form suitable for str.format. This string will be passed
one formatted object named 'error' for each ValidationError.
Only provide this option when using --output=plain, which is the
default. If this argument is unprovided and --output=plain is
used, a simple default representation will be used.
""",
)
parser.add_argument(
"-o", "--output",
choices=["plain", "pretty"],
default="plain",
help="""
an output format to use. 'plain' (default) will produce minimal
text with one line for each error, while 'pretty' will produce
more detailed human-readable output on multiple lines.
""",
)
parser.add_argument(
"-V", "--validator",
type=_namedAnyWithDefault,
help="""
the fully qualified object name of a validator to use, or, for
validators that are registered with jsonschema, simply the name
of the class.
""",
)
parser.add_argument(
"--base-uri",
help="""
a base URI to assign to the provided schema, even if it does not
declare one (via e.g. $id). This option can be used if you wish to
resolve relative references to a particular URI (or local path)
""",
)
parser.add_argument(
"--version",
action="version",
version=metadata.version("jsonschema"),
)
parser.add_argument(
"schema",
help="the path to a JSON Schema to validate with (i.e. schema.json)",
)
def parse_args(args):
arguments = vars(parser.parse_args(args=args or ["--help"]))
if arguments["output"] != "plain" and arguments["error_format"]:
raise parser.error(
"--error-format can only be used with --output plain",
)
if arguments["output"] == "plain" and arguments["error_format"] is None:
arguments["error_format"] = "{error.instance}: {error.message}\n"
return arguments
def _validate_instance(instance_path, instance, validator, outputter):
invalid = False
for error in validator.iter_errors(instance):
invalid = True
outputter.validation_error(instance_path=instance_path, error=error)
if not invalid:
outputter.validation_success(instance_path=instance_path)
return invalid
def main(args=sys.argv[1:]):
sys.exit(run(arguments=parse_args(args=args)))
def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin):
outputter = _Outputter.from_arguments(
arguments=arguments,
stdout=stdout,
stderr=stderr,
)
try:
schema = outputter.load(arguments["schema"])
except _CannotLoadFile:
return 1
if arguments["validator"] is None:
arguments["validator"] = validator_for(schema)
try:
arguments["validator"].check_schema(schema)
except SchemaError as error:
outputter.validation_error(
instance_path=arguments["schema"],
error=error,
)
return 1
if arguments["instances"]:
load, instances = outputter.load, arguments["instances"]
else:
def load(_):
try:
return json.load(stdin)
except JSONDecodeError:
outputter.parsing_error(
path="<stdin>", exc_info=sys.exc_info(),
)
raise _CannotLoadFile()
instances = ["<stdin>"]
resolver = RefResolver(
base_uri=arguments["base_uri"],
referrer=schema,
) if arguments["base_uri"] is not None else None
validator = arguments["validator"](schema, resolver=resolver)
exit_code = 0
for each in instances:
try:
instance = load(each)
except _CannotLoadFile:
exit_code = 1
else:
exit_code |= _validate_instance(
instance_path=each,
instance=instance,
validator=validator,
outputter=outputter,
)
return exit_code
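In addition to the basic invocation shown earlier, a few illustrative ways to drive this CLI (file names and piped content are hypothetical):

    python -m jsonschema --output pretty -i sample.json schema.json
    echo '{}' | python -m jsonschema schema.json    # instance read from stdin

run() returns a non-zero exit code when the schema fails check_schema or when any instance fails validation, and main() passes that straight to sys.exit().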

View File

@@ -0,0 +1,363 @@
"""
Validation errors, and some surrounding helpers.
"""
from __future__ import annotations
from collections import defaultdict, deque
from pprint import pformat
from textwrap import dedent, indent
import itertools
import attr
from jsonschema import _utils
WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"])
STRONG_MATCHES: frozenset[str] = frozenset()
_unset = _utils.Unset()
class _Error(Exception):
def __init__(
self,
message,
validator=_unset,
path=(),
cause=None,
context=(),
validator_value=_unset,
instance=_unset,
schema=_unset,
schema_path=(),
parent=None,
):
super(_Error, self).__init__(
message,
validator,
path,
cause,
context,
validator_value,
instance,
schema,
schema_path,
parent,
)
self.message = message
self.path = self.relative_path = deque(path)
self.schema_path = self.relative_schema_path = deque(schema_path)
self.context = list(context)
self.cause = self.__cause__ = cause
self.validator = validator
self.validator_value = validator_value
self.instance = instance
self.schema = schema
self.parent = parent
for error in context:
error.parent = self
def __repr__(self):
return f"<{self.__class__.__name__}: {self.message!r}>"
def __str__(self):
essential_for_verbose = (
self.validator, self.validator_value, self.instance, self.schema,
)
if any(m is _unset for m in essential_for_verbose):
return self.message
schema_path = _utils.format_as_index(
container=self._word_for_schema_in_error_message,
indices=list(self.relative_schema_path)[:-1],
)
instance_path = _utils.format_as_index(
container=self._word_for_instance_in_error_message,
indices=self.relative_path,
)
prefix = 16 * " "
return dedent(
f"""\
{self.message}
Failed validating {self.validator!r} in {schema_path}:
{indent(pformat(self.schema, width=72), prefix).lstrip()}
On {instance_path}:
{indent(pformat(self.instance, width=72), prefix).lstrip()}
""".rstrip(),
)
@classmethod
def create_from(cls, other):
return cls(**other._contents())
@property
def absolute_path(self):
parent = self.parent
if parent is None:
return self.relative_path
path = deque(self.relative_path)
path.extendleft(reversed(parent.absolute_path))
return path
@property
def absolute_schema_path(self):
parent = self.parent
if parent is None:
return self.relative_schema_path
path = deque(self.relative_schema_path)
path.extendleft(reversed(parent.absolute_schema_path))
return path
@property
def json_path(self):
path = "$"
for elem in self.absolute_path:
if isinstance(elem, int):
path += "[" + str(elem) + "]"
else:
path += "." + elem
return path
def _set(self, **kwargs):
for k, v in kwargs.items():
if getattr(self, k) is _unset:
setattr(self, k, v)
def _contents(self):
attrs = (
"message", "cause", "context", "validator", "validator_value",
"path", "schema_path", "instance", "schema", "parent",
)
return dict((attr, getattr(self, attr)) for attr in attrs)
class ValidationError(_Error):
"""
An instance was invalid under a provided schema.
"""
_word_for_schema_in_error_message = "schema"
_word_for_instance_in_error_message = "instance"
class SchemaError(_Error):
"""
A schema was invalid under its corresponding metaschema.
"""
_word_for_schema_in_error_message = "metaschema"
_word_for_instance_in_error_message = "schema"
@attr.s(hash=True)
class RefResolutionError(Exception):
"""
A ref could not be resolved.
"""
_cause = attr.ib()
def __str__(self):
return str(self._cause)
class UndefinedTypeCheck(Exception):
"""
A type checker was asked to check a type it did not have registered.
"""
def __init__(self, type):
self.type = type
def __str__(self):
return f"Type {self.type!r} is unknown to this type checker"
class UnknownType(Exception):
"""
A validator was asked to validate an instance against an unknown type.
"""
def __init__(self, type, instance, schema):
self.type = type
self.instance = instance
self.schema = schema
def __str__(self):
prefix = 16 * " "
return dedent(
f"""\
Unknown type {self.type!r} for validator with schema:
{indent(pformat(self.schema, width=72), prefix).lstrip()}
While checking instance:
{indent(pformat(self.instance, width=72), prefix).lstrip()}
""".rstrip(),
)
class FormatError(Exception):
"""
Validating a format failed.
"""
def __init__(self, message, cause=None):
super(FormatError, self).__init__(message, cause)
self.message = message
self.cause = self.__cause__ = cause
def __str__(self):
return self.message
class ErrorTree(object):
"""
ErrorTrees make it easier to check which validations failed.
"""
_instance = _unset
def __init__(self, errors=()):
self.errors = {}
self._contents = defaultdict(self.__class__)
for error in errors:
container = self
for element in error.path:
container = container[element]
container.errors[error.validator] = error
container._instance = error.instance
def __contains__(self, index):
"""
Check whether ``instance[index]`` has any errors.
"""
return index in self._contents
def __getitem__(self, index):
"""
Retrieve the child tree one level down at the given ``index``.
If the index is not in the instance that this tree corresponds
to and is not known by this tree, whatever error would be raised
by ``instance.__getitem__`` will be propagated (usually this is
some subclass of `LookupError`).
"""
if self._instance is not _unset and index not in self:
self._instance[index]
return self._contents[index]
def __setitem__(self, index, value):
"""
Add an error to the tree at the given ``index``.
"""
self._contents[index] = value
def __iter__(self):
"""
Iterate (non-recursively) over the indices in the instance with errors.
"""
return iter(self._contents)
def __len__(self):
"""
Return the `total_errors`.
"""
return self.total_errors
def __repr__(self):
return f"<{self.__class__.__name__} ({len(self)} total errors)>"
@property
def total_errors(self):
"""
The total number of errors in the entire tree, including children.
"""
child_errors = sum(len(tree) for _, tree in self._contents.items())
return len(self.errors) + child_errors
def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
"""
Create a key function that can be used to sort errors by relevance.
Arguments:
weak (set):
a collection of validator names to consider to be "weak".
If there are two errors at the same level of the instance
and one is in the set of weak validator names, the other
error will take priority. By default, :validator:`anyOf` and
:validator:`oneOf` are considered weak validators and will
be superseded by other same-level validation errors.
strong (set):
a collection of validator names to consider to be "strong".
"""
def relevance(error):
validator = error.validator
return -len(error.path), validator not in weak, validator in strong
return relevance
relevance = by_relevance()
def best_match(errors, key=relevance):
"""
Try to find an error that appears to be the best match among given errors.
In general, errors that are higher up in the instance (i.e. for which
`ValidationError.path` is shorter) are considered better matches,
since they indicate "more" is wrong with the instance.
If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
the *opposite* assumption is made -- i.e. the deepest error is picked,
since these validators only need to match once, and any other errors may
not be relevant.
Arguments:
errors (collections.abc.Iterable):
the errors to select from. Do not provide a mixture of
errors from different validation attempts (i.e. from
different instances or schemas), since it won't produce
sensible output.
key (collections.abc.Callable):
the key to use when sorting errors. See `relevance` and
transitively `by_relevance` for more details (the default is
to sort with the defaults of that function). Changing the
default is only useful if you want to change the function
that rates errors but still want the error context descent
done by this function.
Returns:
the best matching error, or ``None`` if the iterable was empty
.. note::
This function is a heuristic. Its return value may change for a given
set of inputs from version to version if better heuristics are added.
"""
errors = iter(errors)
best = next(errors, None)
if best is None:
return
best = max(itertools.chain([best], errors), key=key)
while best.context:
best = min(best.context, key=key)
return best
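
A short sketch of how best_match and ErrorTree from this module are typically used together; the schema and instance below are made up for illustration:

from jsonschema import Draft7Validator
from jsonschema.exceptions import ErrorTree, best_match

schema = {"properties": {"size": {"type": "integer", "minimum": 1}}}
validator = Draft7Validator(schema)
errors = list(validator.iter_errors({"size": 0}))

# best_match picks the single most relevant error to surface to a user.
print(best_match(errors).message)    # 0 is less than the minimum of 1

# ErrorTree indexes errors by where in the instance they occurred.
tree = ErrorTree(errors)
print("size" in tree)                # True
print(sorted(tree["size"].errors))   # ['minimum']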

View File

@@ -0,0 +1,171 @@
"""
typing.Protocol classes for jsonschema interfaces.
"""
# for reference material on Protocols, see
# https://www.python.org/dev/peps/pep-0544/
from __future__ import annotations
from typing import TYPE_CHECKING, Any, ClassVar, Iterator
import sys
# doing these imports with `try ... except ImportError` doesn't pass mypy
# checking because mypy sees `typing._SpecialForm` and
# `typing_extensions._SpecialForm` as incompatible
#
# see:
# https://mypy.readthedocs.io/en/stable/runtime_troubles.html#using-new-additions-to-the-typing-module
# https://github.com/python/mypy/issues/4427
if sys.version_info >= (3, 8):
from typing import Protocol, runtime_checkable
else:
from typing_extensions import Protocol, runtime_checkable
# in order for Sphinx to resolve references accurately from type annotations,
# it needs to see names like `jsonschema.TypeChecker`
# therefore, only import at type-checking time (to avoid circular references),
# but use `jsonschema` for any types which will otherwise not be resolvable
if TYPE_CHECKING:
import jsonschema
from jsonschema.exceptions import ValidationError
from jsonschema.validators import RefResolver
# For code authors working on the validator protocol, these are the three
# use-cases which should be kept in mind:
#
# 1. As a protocol class, it can be used in type annotations to describe the
# available methods and attributes of a validator
# 2. It is the source of autodoc for the validator documentation
# 3. It is runtime_checkable, meaning that it can be used in isinstance()
# checks.
#
# Since protocols are not base classes, isinstance() checking is limited in
# its capabilities. See docs on runtime_checkable for detail
@runtime_checkable
class Validator(Protocol):
"""
The protocol to which all validator classes should adhere.
:argument schema: the schema that the validator object
will validate with. It is assumed to be valid, and providing
an invalid schema can lead to undefined behavior. See
`Validator.check_schema` to validate a schema first.
:argument resolver: an instance of `jsonschema.RefResolver` that will be
used to resolve :validator:`$ref` properties (JSON references). If
unprovided, one will be created.
:argument format_checker: an instance of `jsonschema.FormatChecker`
whose `jsonschema.FormatChecker.conforms` method will be called to
check and see if instances conform to each :validator:`format`
property present in the schema. If unprovided, no validation
will be done for :validator:`format`. Certain formats require
additional packages to be installed (e.g. uri, color, date-time).
The required packages can be found at the bottom of this page.
"""
#: An object representing the validator's meta schema (the schema that
#: describes valid schemas in the given version).
META_SCHEMA: ClassVar[dict]
#: A mapping of validator names (`str`\s) to functions
#: that validate the validator property with that name. For more
#: information see `creating-validators`.
VALIDATORS: ClassVar[dict]
#: A `jsonschema.TypeChecker` that will be used when validating
#: :validator:`type` properties in JSON schemas.
TYPE_CHECKER: ClassVar[jsonschema.TypeChecker]
#: A `jsonschema.FormatChecker` that will be used when validating
#: :validator:`format` properties in JSON schemas.
FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker]
#: The schema that was passed in when initializing the object.
schema: dict | bool
def __init__(
self,
schema: dict | bool,
resolver: RefResolver | None = None,
format_checker: jsonschema.FormatChecker | None = None,
) -> None:
...
@classmethod
def check_schema(cls, schema: dict) -> None:
"""
Validate the given schema against the validator's `META_SCHEMA`.
:raises: `jsonschema.exceptions.SchemaError` if the schema
is invalid
"""
def is_type(self, instance: Any, type: str) -> bool:
"""
Check if the instance is of the given (JSON Schema) type.
:type type: str
:rtype: bool
:raises: `jsonschema.exceptions.UnknownType` if ``type``
is not a known type.
"""
def is_valid(self, instance: dict) -> bool:
"""
Check if the instance is valid under the current `schema`.
:rtype: bool
>>> schema = {"maxItems" : 2}
>>> Draft3Validator(schema).is_valid([2, 3, 4])
False
"""
def iter_errors(self, instance: dict) -> Iterator[ValidationError]:
r"""
Lazily yield each of the validation errors in the given instance.
:rtype: a `collections.abc.Iterable` of
`jsonschema.exceptions.ValidationError`\s
>>> schema = {
... "type" : "array",
... "items" : {"enum" : [1, 2, 3]},
... "maxItems" : 2,
... }
>>> v = Draft3Validator(schema)
>>> for error in sorted(v.iter_errors([2, 3, 4]), key=str):
... print(error.message)
4 is not one of [1, 2, 3]
[2, 3, 4] is too long
"""
def validate(self, instance: dict) -> None:
"""
Check if the instance is valid under the current `schema`.
:raises: `jsonschema.exceptions.ValidationError` if the
instance is invalid
>>> schema = {"maxItems" : 2}
>>> Draft3Validator(schema).validate([2, 3, 4])
Traceback (most recent call last):
...
ValidationError: [2, 3, 4] is too long
"""
def evolve(self, **kwargs) -> "Validator":
"""
Create a new validator like this one, but with given changes.
Preserves all other attributes, so can be used to e.g. create a
validator with a different schema but with the same :validator:`$ref`
resolution behavior.
>>> validator = Draft202012Validator({})
>>> validator.evolve(schema={"type": "number"})
Draft202012Validator(schema={'type': 'number'}, format_checker=None)
"""

View File

@@ -0,0 +1,42 @@
{
"$schema": "https://json-schema.org/draft/2019-09/schema",
"$id": "https://json-schema.org/draft/2019-09/schema",
"$vocabulary": {
"https://json-schema.org/draft/2019-09/vocab/core": true,
"https://json-schema.org/draft/2019-09/vocab/applicator": true,
"https://json-schema.org/draft/2019-09/vocab/validation": true,
"https://json-schema.org/draft/2019-09/vocab/meta-data": true,
"https://json-schema.org/draft/2019-09/vocab/format": false,
"https://json-schema.org/draft/2019-09/vocab/content": true
},
"$recursiveAnchor": true,
"title": "Core and Validation specifications meta-schema",
"allOf": [
{"$ref": "meta/core"},
{"$ref": "meta/applicator"},
{"$ref": "meta/validation"},
{"$ref": "meta/meta-data"},
{"$ref": "meta/format"},
{"$ref": "meta/content"}
],
"type": ["object", "boolean"],
"properties": {
"definitions": {
"$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
"type": "object",
"additionalProperties": { "$recursiveRef": "#" },
"default": {}
},
"dependencies": {
"$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
"type": "object",
"additionalProperties": {
"anyOf": [
{ "$recursiveRef": "#" },
{ "$ref": "meta/validation#/$defs/stringArray" }
]
}
}
}
}

View File

@@ -0,0 +1,58 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://json-schema.org/draft/2020-12/schema",
"$vocabulary": {
"https://json-schema.org/draft/2020-12/vocab/core": true,
"https://json-schema.org/draft/2020-12/vocab/applicator": true,
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
"https://json-schema.org/draft/2020-12/vocab/validation": true,
"https://json-schema.org/draft/2020-12/vocab/meta-data": true,
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
"https://json-schema.org/draft/2020-12/vocab/content": true
},
"$dynamicAnchor": "meta",
"title": "Core and Validation specifications meta-schema",
"allOf": [
{"$ref": "meta/core"},
{"$ref": "meta/applicator"},
{"$ref": "meta/unevaluated"},
{"$ref": "meta/validation"},
{"$ref": "meta/meta-data"},
{"$ref": "meta/format-annotation"},
{"$ref": "meta/content"}
],
"type": ["object", "boolean"],
"$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
"properties": {
"definitions": {
"$comment": "\"definitions\" has been replaced by \"$defs\".",
"type": "object",
"additionalProperties": { "$dynamicRef": "#meta" },
"deprecated": true,
"default": {}
},
"dependencies": {
"$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
"type": "object",
"additionalProperties": {
"anyOf": [
{ "$dynamicRef": "#meta" },
{ "$ref": "meta/validation#/$defs/stringArray" }
]
},
"deprecated": true,
"default": {}
},
"$recursiveAnchor": {
"$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
"$ref": "meta/core#/$defs/anchorString",
"deprecated": true
},
"$recursiveRef": {
"$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
"$ref": "meta/core#/$defs/uriReferenceString",
"deprecated": true
}
}
}
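
This meta-schema is what Draft202012Validator.check_schema validates schemas against. A brief illustration (the invalid schema is made up; its error message matches the CLI tests further below):

from jsonschema import Draft202012Validator
from jsonschema.exceptions import SchemaError

Draft202012Validator.check_schema({"type": "string"})   # passes silently

try:
    Draft202012Validator.check_schema({"type": 12})
except SchemaError as error:
    print(error.message)   # 12 is not valid under any of the given schemas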

View File

@@ -0,0 +1,177 @@
{
"$schema" : "http://json-schema.org/draft-03/schema#",
"id" : "http://json-schema.org/draft-03/schema#",
"type" : "object",
"properties" : {
"type" : {
"type" : ["string", "array"],
"items" : {
"type" : ["string", {"$ref" : "#"}]
},
"uniqueItems" : true,
"default" : "any"
},
"properties" : {
"type" : "object",
"additionalProperties" : {"$ref" : "#", "type" : "object"},
"default" : {}
},
"patternProperties" : {
"type" : "object",
"additionalProperties" : {"$ref" : "#"},
"default" : {}
},
"additionalProperties" : {
"type" : [{"$ref" : "#"}, "boolean"],
"default" : {}
},
"items" : {
"type" : [{"$ref" : "#"}, "array"],
"items" : {"$ref" : "#"},
"default" : {}
},
"additionalItems" : {
"type" : [{"$ref" : "#"}, "boolean"],
"default" : {}
},
"required" : {
"type" : "boolean",
"default" : false
},
"dependencies" : {
"type" : ["string", "array", "object"],
"additionalProperties" : {
"type" : ["string", "array", {"$ref" : "#"}],
"items" : {
"type" : "string"
}
},
"default" : {}
},
"minimum" : {
"type" : "number"
},
"maximum" : {
"type" : "number"
},
"exclusiveMinimum" : {
"type" : "boolean",
"default" : false
},
"exclusiveMaximum" : {
"type" : "boolean",
"default" : false
},
"maxDecimal": {
"minimum": 0,
"type": "number"
},
"minItems" : {
"type" : "integer",
"minimum" : 0,
"default" : 0
},
"maxItems" : {
"type" : "integer",
"minimum" : 0
},
"uniqueItems" : {
"type" : "boolean",
"default" : false
},
"pattern" : {
"type" : "string",
"format" : "regex"
},
"minLength" : {
"type" : "integer",
"minimum" : 0,
"default" : 0
},
"maxLength" : {
"type" : "integer"
},
"enum" : {
"type" : "array"
},
"default" : {
"type" : "any"
},
"title" : {
"type" : "string"
},
"description" : {
"type" : "string"
},
"format" : {
"type" : "string"
},
"divisibleBy" : {
"type" : "number",
"minimum" : 0,
"exclusiveMinimum" : true,
"default" : 1
},
"disallow" : {
"type" : ["string", "array"],
"items" : {
"type" : ["string", {"$ref" : "#"}]
},
"uniqueItems" : true
},
"extends" : {
"type" : [{"$ref" : "#"}, "array"],
"items" : {"$ref" : "#"},
"default" : {}
},
"id" : {
"type" : "string",
"format" : "uri"
},
"$ref" : {
"type" : "string",
"format" : "uri"
},
"$schema" : {
"type" : "string",
"format" : "uri"
}
},
"dependencies" : {
"exclusiveMinimum" : "minimum",
"exclusiveMaximum" : "maximum"
},
"default" : {}
}

View File

@@ -0,0 +1,149 @@
{
"id": "http://json-schema.org/draft-04/schema#",
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Core schema meta-schema",
"definitions": {
"schemaArray": {
"type": "array",
"minItems": 1,
"items": { "$ref": "#" }
},
"positiveInteger": {
"type": "integer",
"minimum": 0
},
"positiveIntegerDefault0": {
"allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
},
"simpleTypes": {
"enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
},
"stringArray": {
"type": "array",
"items": { "type": "string" },
"minItems": 1,
"uniqueItems": true
}
},
"type": "object",
"properties": {
"id": {
"format": "uri",
"type": "string"
},
"$schema": {
"type": "string",
"format": "uri"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"default": {},
"multipleOf": {
"type": "number",
"minimum": 0,
"exclusiveMinimum": true
},
"maximum": {
"type": "number"
},
"exclusiveMaximum": {
"type": "boolean",
"default": false
},
"minimum": {
"type": "number"
},
"exclusiveMinimum": {
"type": "boolean",
"default": false
},
"maxLength": { "$ref": "#/definitions/positiveInteger" },
"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
"pattern": {
"type": "string",
"format": "regex"
},
"additionalItems": {
"anyOf": [
{ "type": "boolean" },
{ "$ref": "#" }
],
"default": {}
},
"items": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/schemaArray" }
],
"default": {}
},
"maxItems": { "$ref": "#/definitions/positiveInteger" },
"minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
"uniqueItems": {
"type": "boolean",
"default": false
},
"maxProperties": { "$ref": "#/definitions/positiveInteger" },
"minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
"required": { "$ref": "#/definitions/stringArray" },
"additionalProperties": {
"anyOf": [
{ "type": "boolean" },
{ "$ref": "#" }
],
"default": {}
},
"definitions": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"properties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"patternProperties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"dependencies": {
"type": "object",
"additionalProperties": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/stringArray" }
]
}
},
"enum": {
"type": "array"
},
"type": {
"anyOf": [
{ "$ref": "#/definitions/simpleTypes" },
{
"type": "array",
"items": { "$ref": "#/definitions/simpleTypes" },
"minItems": 1,
"uniqueItems": true
}
]
},
"format": { "type": "string" },
"allOf": { "$ref": "#/definitions/schemaArray" },
"anyOf": { "$ref": "#/definitions/schemaArray" },
"oneOf": { "$ref": "#/definitions/schemaArray" },
"not": { "$ref": "#" }
},
"dependencies": {
"exclusiveMaximum": [ "maximum" ],
"exclusiveMinimum": [ "minimum" ]
},
"default": {}
}

View File

@@ -0,0 +1,153 @@
{
"$schema": "http://json-schema.org/draft-06/schema#",
"$id": "http://json-schema.org/draft-06/schema#",
"title": "Core schema meta-schema",
"definitions": {
"schemaArray": {
"type": "array",
"minItems": 1,
"items": { "$ref": "#" }
},
"nonNegativeInteger": {
"type": "integer",
"minimum": 0
},
"nonNegativeIntegerDefault0": {
"allOf": [
{ "$ref": "#/definitions/nonNegativeInteger" },
{ "default": 0 }
]
},
"simpleTypes": {
"enum": [
"array",
"boolean",
"integer",
"null",
"number",
"object",
"string"
]
},
"stringArray": {
"type": "array",
"items": { "type": "string" },
"uniqueItems": true,
"default": []
}
},
"type": ["object", "boolean"],
"properties": {
"$id": {
"type": "string",
"format": "uri-reference"
},
"$schema": {
"type": "string",
"format": "uri"
},
"$ref": {
"type": "string",
"format": "uri-reference"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"default": {},
"examples": {
"type": "array",
"items": {}
},
"multipleOf": {
"type": "number",
"exclusiveMinimum": 0
},
"maximum": {
"type": "number"
},
"exclusiveMaximum": {
"type": "number"
},
"minimum": {
"type": "number"
},
"exclusiveMinimum": {
"type": "number"
},
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"pattern": {
"type": "string",
"format": "regex"
},
"additionalItems": { "$ref": "#" },
"items": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/schemaArray" }
],
"default": {}
},
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"uniqueItems": {
"type": "boolean",
"default": false
},
"contains": { "$ref": "#" },
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"required": { "$ref": "#/definitions/stringArray" },
"additionalProperties": { "$ref": "#" },
"definitions": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"properties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"patternProperties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"propertyNames": { "format": "regex" },
"default": {}
},
"dependencies": {
"type": "object",
"additionalProperties": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/stringArray" }
]
}
},
"propertyNames": { "$ref": "#" },
"const": {},
"enum": {
"type": "array"
},
"type": {
"anyOf": [
{ "$ref": "#/definitions/simpleTypes" },
{
"type": "array",
"items": { "$ref": "#/definitions/simpleTypes" },
"minItems": 1,
"uniqueItems": true
}
]
},
"format": { "type": "string" },
"allOf": { "$ref": "#/definitions/schemaArray" },
"anyOf": { "$ref": "#/definitions/schemaArray" },
"oneOf": { "$ref": "#/definitions/schemaArray" },
"not": { "$ref": "#" }
},
"default": {}
}

View File

@@ -0,0 +1,166 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "http://json-schema.org/draft-07/schema#",
"title": "Core schema meta-schema",
"definitions": {
"schemaArray": {
"type": "array",
"minItems": 1,
"items": { "$ref": "#" }
},
"nonNegativeInteger": {
"type": "integer",
"minimum": 0
},
"nonNegativeIntegerDefault0": {
"allOf": [
{ "$ref": "#/definitions/nonNegativeInteger" },
{ "default": 0 }
]
},
"simpleTypes": {
"enum": [
"array",
"boolean",
"integer",
"null",
"number",
"object",
"string"
]
},
"stringArray": {
"type": "array",
"items": { "type": "string" },
"uniqueItems": true,
"default": []
}
},
"type": ["object", "boolean"],
"properties": {
"$id": {
"type": "string",
"format": "uri-reference"
},
"$schema": {
"type": "string",
"format": "uri"
},
"$ref": {
"type": "string",
"format": "uri-reference"
},
"$comment": {
"type": "string"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"default": true,
"readOnly": {
"type": "boolean",
"default": false
},
"examples": {
"type": "array",
"items": true
},
"multipleOf": {
"type": "number",
"exclusiveMinimum": 0
},
"maximum": {
"type": "number"
},
"exclusiveMaximum": {
"type": "number"
},
"minimum": {
"type": "number"
},
"exclusiveMinimum": {
"type": "number"
},
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"pattern": {
"type": "string",
"format": "regex"
},
"additionalItems": { "$ref": "#" },
"items": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/schemaArray" }
],
"default": true
},
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"uniqueItems": {
"type": "boolean",
"default": false
},
"contains": { "$ref": "#" },
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
"required": { "$ref": "#/definitions/stringArray" },
"additionalProperties": { "$ref": "#" },
"definitions": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"properties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"default": {}
},
"patternProperties": {
"type": "object",
"additionalProperties": { "$ref": "#" },
"propertyNames": { "format": "regex" },
"default": {}
},
"dependencies": {
"type": "object",
"additionalProperties": {
"anyOf": [
{ "$ref": "#" },
{ "$ref": "#/definitions/stringArray" }
]
}
},
"propertyNames": { "$ref": "#" },
"const": true,
"enum": {
"type": "array",
"items": true
},
"type": {
"anyOf": [
{ "$ref": "#/definitions/simpleTypes" },
{
"type": "array",
"items": { "$ref": "#/definitions/simpleTypes" },
"minItems": 1,
"uniqueItems": true
}
]
},
"format": { "type": "string" },
"contentMediaType": { "type": "string" },
"contentEncoding": { "type": "string" },
"if": {"$ref": "#"},
"then": {"$ref": "#"},
"else": {"$ref": "#"},
"allOf": { "$ref": "#/definitions/schemaArray" },
"anyOf": { "$ref": "#/definitions/schemaArray" },
"oneOf": { "$ref": "#/definitions/schemaArray" },
"not": { "$ref": "#" }
},
"default": true
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,5 @@
def bug(issue=None):
message = "A known bug."
if issue is not None:
message += " See issue #{issue}.".format(issue=issue)
return message

View File

@@ -0,0 +1,228 @@
"""
Python representations of the JSON Schema Test Suite tests.
"""
from functools import partial
from pathlib import Path
import json
import os
import re
import subprocess
import sys
import unittest
import attr
from jsonschema.validators import _VALIDATORS
import jsonschema
def _find_suite():
root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
if root is not None:
return Path(root)
root = Path(jsonschema.__file__).parent.parent / "json"
if not root.is_dir(): # pragma: no cover
raise ValueError(
(
"Can't find the JSON-Schema-Test-Suite directory. "
"Set the 'JSON_SCHEMA_TEST_SUITE' environment "
"variable or run the tests from alongside a checkout "
"of the suite."
),
)
return root
@attr.s(hash=True)
class Suite(object):
_root = attr.ib(default=attr.Factory(_find_suite))
def _remotes(self):
jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite")
remotes = subprocess.check_output(
[sys.executable, str(jsonschema_suite), "remotes"],
)
return {
"http://localhost:1234/" + name.replace("\\", "/"): schema
for name, schema in json.loads(remotes.decode("utf-8")).items()
}
def benchmark(self, runner): # pragma: no cover
for name, Validator in _VALIDATORS.items():
self.version(name=name).benchmark(
runner=runner,
Validator=Validator,
)
def version(self, name):
return Version(
name=name,
path=self._root.joinpath("tests", name),
remotes=self._remotes(),
)
@attr.s(hash=True)
class Version(object):
_path = attr.ib()
_remotes = attr.ib()
name = attr.ib()
def benchmark(self, runner, **kwargs): # pragma: no cover
for suite in self.tests():
for test in suite:
runner.bench_func(
test.fully_qualified_name,
partial(test.validate_ignoring_errors, **kwargs),
)
def tests(self):
return (
test
for child in self._path.glob("*.json")
for test in self._tests_in(
subject=child.name[:-5],
path=child,
)
)
def format_tests(self):
path = self._path.joinpath("optional", "format")
return (
test
for child in path.glob("*.json")
for test in self._tests_in(
subject=child.name[:-5],
path=child,
)
)
def optional_tests_of(self, name):
return self._tests_in(
subject=name,
path=self._path.joinpath("optional", name + ".json"),
)
def to_unittest_testcase(self, *suites, **kwargs):
name = kwargs.pop("name", "Test" + self.name.title().replace("-", ""))
methods = {
test.method_name: test.to_unittest_method(**kwargs)
for suite in suites
for tests in suite
for test in tests
}
cls = type(name, (unittest.TestCase,), methods)
try:
cls.__module__ = _someone_save_us_the_module_of_the_caller()
except Exception: # pragma: no cover
# We're doing crazy things, so if they go wrong, like a function
# behaving differently on some other interpreter, just make them
# not happen.
pass
return cls
def _tests_in(self, subject, path):
for each in json.loads(path.read_text(encoding="utf-8")):
yield (
_Test(
version=self,
subject=subject,
case_description=each["description"],
schema=each["schema"],
remotes=self._remotes,
**test,
) for test in each["tests"]
)
@attr.s(hash=True, repr=False)
class _Test(object):
version = attr.ib()
subject = attr.ib()
case_description = attr.ib()
description = attr.ib()
data = attr.ib()
schema = attr.ib(repr=False)
valid = attr.ib()
_remotes = attr.ib()
comment = attr.ib(default=None)
def __repr__(self): # pragma: no cover
return "<Test {}>".format(self.fully_qualified_name)
@property
def fully_qualified_name(self): # pragma: no cover
return " > ".join(
[
self.version.name,
self.subject,
self.case_description,
self.description,
],
)
@property
def method_name(self):
delimiters = r"[\W\- ]+"
return "test_{}_{}_{}".format(
re.sub(delimiters, "_", self.subject),
re.sub(delimiters, "_", self.case_description),
re.sub(delimiters, "_", self.description),
)
def to_unittest_method(self, skip=lambda test: None, **kwargs):
if self.valid:
def fn(this):
self.validate(**kwargs)
else:
def fn(this):
with this.assertRaises(jsonschema.ValidationError):
self.validate(**kwargs)
fn.__name__ = self.method_name
reason = skip(self)
return unittest.skipIf(reason is not None, reason)(fn)
def validate(self, Validator, **kwargs):
resolver = jsonschema.RefResolver.from_schema(
schema=self.schema,
store=self._remotes,
id_of=Validator.ID_OF,
)
validator = Validator(schema=self.schema, resolver=resolver, **kwargs)
validator.validate(instance=self.data)
def validate_ignoring_errors(self, Validator): # pragma: no cover
try:
self.validate(Validator=Validator)
except jsonschema.ValidationError:
pass
def _someone_save_us_the_module_of_the_caller():
"""
The FQON of the module two stack frames up from here.
This is intended to allow us to dynamically return test case classes that
are indistinguishable from being defined in the module that wants them.
Otherwise, trial will mis-print the FQON, and copy-pasting it won't re-run
the class that really is running.
Save us all, this is all so so so so so terrible.
"""
return sys._getframe(2).f_globals["__name__"]
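
A sketch of driving the harness above by hand; it assumes a checkout of the JSON-Schema-Test-Suite is available (either alongside the package or via the JSON_SCHEMA_TEST_SUITE environment variable) and that this module is importable as jsonschema.tests._suite:

from jsonschema import Draft7Validator
from jsonschema.tests._suite import Suite

draft7 = Suite().version(name="draft7")
# tests() yields one generator of _Test objects per test case in the suite.
first_case = next(draft7.tests())
for test in first_case:
    print(test.fully_qualified_name)
    test.validate_ignoring_errors(Validator=Draft7Validator)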

View File

@@ -0,0 +1,49 @@
"""
Fuzzing setup for OSS-Fuzz.
See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the
other half of the setup here.
"""
import sys
from hypothesis import given, strategies
import jsonschema
PRIM = strategies.one_of(
strategies.booleans(),
strategies.integers(),
strategies.floats(allow_nan=False, allow_infinity=False),
strategies.text(),
)
DICT = strategies.recursive(
base=strategies.one_of(
strategies.booleans(),
strategies.dictionaries(strategies.text(), PRIM),
),
extend=lambda inner: strategies.dictionaries(strategies.text(), inner),
)
@given(obj1=DICT, obj2=DICT)
def test_schemas(obj1, obj2):
try:
jsonschema.validate(instance=obj1, schema=obj2)
except jsonschema.exceptions.ValidationError:
pass
except jsonschema.exceptions.SchemaError:
pass
def main():
atheris.Setup(
sys.argv,
test_schemas.hypothesis.fuzz_one_input,
enable_python_coverage=True,
)
atheris.Fuzz()
if __name__ == "__main__":
import atheris
main()
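
When atheris is not installed, the same property can still be exercised with Hypothesis alone by calling the decorated test directly; a minimal sketch, assuming the module above is importable as fuzz_validate:

# Hypothesis generates the (obj1, obj2) pairs itself when the decorated
# function is called with no arguments.
from fuzz_validate import test_schemas

test_schemas()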

View File

@@ -0,0 +1,911 @@
from contextlib import redirect_stderr, redirect_stdout
from io import StringIO
from json import JSONDecodeError
from pathlib import Path
from textwrap import dedent
from unittest import TestCase
import json
import os
import subprocess
import sys
import tempfile
try: # pragma: no cover
from importlib import metadata
except ImportError: # pragma: no cover
import importlib_metadata as metadata # type: ignore
from pyrsistent import m
from jsonschema import Draft4Validator, Draft202012Validator, cli
from jsonschema.exceptions import (
RefResolutionError,
SchemaError,
ValidationError,
)
from jsonschema.validators import _LATEST_VERSION, validate
def fake_validator(*errors):
errors = list(reversed(errors))
class FakeValidator(object):
def __init__(self, *args, **kwargs):
pass
def iter_errors(self, instance):
if errors:
return errors.pop()
return [] # pragma: no cover
@classmethod
def check_schema(self, schema):
pass
return FakeValidator
def fake_open(all_contents):
def open(path):
contents = all_contents.get(path)
if contents is None:
raise FileNotFoundError(path)
return StringIO(contents)
return open
def _message_for(non_json):
try:
json.loads(non_json)
except JSONDecodeError as error:
return str(error)
else: # pragma: no cover
raise RuntimeError("Tried and failed to capture a JSON dump error.")
class TestCLI(TestCase):
def run_cli(
self, argv, files=m(), stdin=StringIO(), exit_code=0, **override,
):
arguments = cli.parse_args(argv)
arguments.update(override)
self.assertFalse(hasattr(cli, "open"))
cli.open = fake_open(files)
try:
stdout, stderr = StringIO(), StringIO()
actual_exit_code = cli.run(
arguments,
stdin=stdin,
stdout=stdout,
stderr=stderr,
)
finally:
del cli.open
self.assertEqual(
actual_exit_code, exit_code, msg=dedent(
"""
Expected an exit code of {} != {}.
stdout: {}
stderr: {}
""".format(
exit_code,
actual_exit_code,
stdout.getvalue(),
stderr.getvalue(),
),
),
)
return stdout.getvalue(), stderr.getvalue()
def assertOutputs(self, stdout="", stderr="", **kwargs):
self.assertEqual(
self.run_cli(**kwargs),
(dedent(stdout), dedent(stderr)),
)
def test_invalid_instance(self):
error = ValidationError("I am an error!", instance=12)
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_instance=json.dumps(error.instance),
),
validator=fake_validator([error]),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stderr="12: I am an error!\n",
)
def test_invalid_instance_pretty_output(self):
error = ValidationError("I am an error!", instance=12)
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_instance=json.dumps(error.instance),
),
validator=fake_validator([error]),
argv=["-i", "some_instance", "--output", "pretty", "some_schema"],
exit_code=1,
stderr="""\
===[ValidationError]===(some_instance)===
I am an error!
-----------------------------
""",
)
def test_invalid_instance_explicit_plain_output(self):
error = ValidationError("I am an error!", instance=12)
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_instance=json.dumps(error.instance),
),
validator=fake_validator([error]),
argv=["--output", "plain", "-i", "some_instance", "some_schema"],
exit_code=1,
stderr="12: I am an error!\n",
)
def test_invalid_instance_multiple_errors(self):
instance = 12
first = ValidationError("First error", instance=instance)
second = ValidationError("Second error", instance=instance)
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_instance=json.dumps(instance),
),
validator=fake_validator([first, second]),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stderr="""\
12: First error
12: Second error
""",
)
def test_invalid_instance_multiple_errors_pretty_output(self):
instance = 12
first = ValidationError("First error", instance=instance)
second = ValidationError("Second error", instance=instance)
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_instance=json.dumps(instance),
),
validator=fake_validator([first, second]),
argv=["-i", "some_instance", "--output", "pretty", "some_schema"],
exit_code=1,
stderr="""\
===[ValidationError]===(some_instance)===
First error
-----------------------------
===[ValidationError]===(some_instance)===
Second error
-----------------------------
""",
)
def test_multiple_invalid_instances(self):
first_instance = 12
first_errors = [
ValidationError("An error", instance=first_instance),
ValidationError("Another error", instance=first_instance),
]
second_instance = "foo"
second_errors = [ValidationError("BOOM", instance=second_instance)]
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_first_instance=json.dumps(first_instance),
some_second_instance=json.dumps(second_instance),
),
validator=fake_validator(first_errors, second_errors),
argv=[
"-i", "some_first_instance",
"-i", "some_second_instance",
"some_schema",
],
exit_code=1,
stderr="""\
12: An error
12: Another error
foo: BOOM
""",
)
def test_multiple_invalid_instances_pretty_output(self):
first_instance = 12
first_errors = [
ValidationError("An error", instance=first_instance),
ValidationError("Another error", instance=first_instance),
]
second_instance = "foo"
second_errors = [ValidationError("BOOM", instance=second_instance)]
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_first_instance=json.dumps(first_instance),
some_second_instance=json.dumps(second_instance),
),
validator=fake_validator(first_errors, second_errors),
argv=[
"--output", "pretty",
"-i", "some_first_instance",
"-i", "some_second_instance",
"some_schema",
],
exit_code=1,
stderr="""\
===[ValidationError]===(some_first_instance)===
An error
-----------------------------
===[ValidationError]===(some_first_instance)===
Another error
-----------------------------
===[ValidationError]===(some_second_instance)===
BOOM
-----------------------------
""",
)
def test_custom_error_format(self):
first_instance = 12
first_errors = [
ValidationError("An error", instance=first_instance),
ValidationError("Another error", instance=first_instance),
]
second_instance = "foo"
second_errors = [ValidationError("BOOM", instance=second_instance)]
self.assertOutputs(
files=dict(
some_schema='{"does not": "matter since it is stubbed"}',
some_first_instance=json.dumps(first_instance),
some_second_instance=json.dumps(second_instance),
),
validator=fake_validator(first_errors, second_errors),
argv=[
"--error-format", ":{error.message}._-_.{error.instance}:",
"-i", "some_first_instance",
"-i", "some_second_instance",
"some_schema",
],
exit_code=1,
stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:",
)
def test_invalid_schema(self):
self.assertOutputs(
files=dict(some_schema='{"type": 12}'),
argv=["some_schema"],
exit_code=1,
stderr="""\
12: 12 is not valid under any of the given schemas
""",
)
def test_invalid_schema_pretty_output(self):
schema = {"type": 12}
with self.assertRaises(SchemaError) as e:
validate(schema=schema, instance="")
error = str(e.exception)
self.assertOutputs(
files=dict(some_schema=json.dumps(schema)),
argv=["--output", "pretty", "some_schema"],
exit_code=1,
stderr=(
"===[SchemaError]===(some_schema)===\n\n"
+ str(error)
+ "\n-----------------------------\n"
),
)
def test_invalid_schema_multiple_errors(self):
self.assertOutputs(
files=dict(some_schema='{"type": 12, "items": 57}'),
argv=["some_schema"],
exit_code=1,
stderr="""\
57: 57 is not of type 'object', 'boolean'
""",
)
def test_invalid_schema_multiple_errors_pretty_output(self):
schema = {"type": 12, "items": 57}
with self.assertRaises(SchemaError) as e:
validate(schema=schema, instance="")
error = str(e.exception)
self.assertOutputs(
files=dict(some_schema=json.dumps(schema)),
argv=["--output", "pretty", "some_schema"],
exit_code=1,
stderr=(
"===[SchemaError]===(some_schema)===\n\n"
+ str(error)
+ "\n-----------------------------\n"
),
)
def test_invalid_schema_with_invalid_instance(self):
"""
"Validating" an instance that's invalid under an invalid schema
just shows the schema error.
"""
self.assertOutputs(
files=dict(
some_schema='{"type": 12, "minimum": 30}',
some_instance="13",
),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stderr="""\
12: 12 is not valid under any of the given schemas
""",
)
def test_invalid_schema_with_invalid_instance_pretty_output(self):
instance, schema = 13, {"type": 12, "minimum": 30}
with self.assertRaises(SchemaError) as e:
validate(schema=schema, instance=instance)
error = str(e.exception)
self.assertOutputs(
files=dict(
some_schema=json.dumps(schema),
some_instance=json.dumps(instance),
),
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
exit_code=1,
stderr=(
"===[SchemaError]===(some_schema)===\n\n"
+ str(error)
+ "\n-----------------------------\n"
),
)
def test_invalid_instance_continues_with_the_rest(self):
self.assertOutputs(
files=dict(
some_schema='{"minimum": 30}',
first_instance="not valid JSON!",
second_instance="12",
),
argv=[
"-i", "first_instance",
"-i", "second_instance",
"some_schema",
],
exit_code=1,
stderr="""\
Failed to parse 'first_instance': {}
12: 12 is less than the minimum of 30
""".format(_message_for("not valid JSON!")),
)
def test_custom_error_format_applies_to_schema_errors(self):
instance, schema = 13, {"type": 12, "minimum": 30}
with self.assertRaises(SchemaError):
validate(schema=schema, instance=instance)
self.assertOutputs(
files=dict(some_schema=json.dumps(schema)),
argv=[
"--error-format", ":{error.message}._-_.{error.instance}:",
"some_schema",
],
exit_code=1,
stderr=":12 is not valid under any of the given schemas._-_.12:",
)
def test_instance_is_invalid_JSON(self):
instance = "not valid JSON!"
self.assertOutputs(
files=dict(some_schema="{}", some_instance=instance),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stderr="""\
Failed to parse 'some_instance': {}
""".format(_message_for(instance)),
)
def test_instance_is_invalid_JSON_pretty_output(self):
stdout, stderr = self.run_cli(
files=dict(
some_schema="{}",
some_instance="not valid JSON!",
),
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
exit_code=1,
)
self.assertFalse(stdout)
self.assertIn(
"(some_instance)===\n\nTraceback (most recent call last):\n",
stderr,
)
self.assertNotIn("some_schema", stderr)
def test_instance_is_invalid_JSON_on_stdin(self):
instance = "not valid JSON!"
self.assertOutputs(
files=dict(some_schema="{}"),
stdin=StringIO(instance),
argv=["some_schema"],
exit_code=1,
stderr="""\
Failed to parse <stdin>: {}
""".format(_message_for(instance)),
)
def test_instance_is_invalid_JSON_on_stdin_pretty_output(self):
stdout, stderr = self.run_cli(
files=dict(some_schema="{}"),
stdin=StringIO("not valid JSON!"),
argv=["--output", "pretty", "some_schema"],
exit_code=1,
)
self.assertFalse(stdout)
self.assertIn(
"(<stdin>)===\n\nTraceback (most recent call last):\n",
stderr,
)
self.assertNotIn("some_schema", stderr)
def test_schema_is_invalid_JSON(self):
schema = "not valid JSON!"
self.assertOutputs(
files=dict(some_schema=schema),
argv=["some_schema"],
exit_code=1,
stderr="""\
Failed to parse 'some_schema': {}
""".format(_message_for(schema)),
)
def test_schema_is_invalid_JSON_pretty_output(self):
stdout, stderr = self.run_cli(
files=dict(some_schema="not valid JSON!"),
argv=["--output", "pretty", "some_schema"],
exit_code=1,
)
self.assertFalse(stdout)
self.assertIn(
"(some_schema)===\n\nTraceback (most recent call last):\n",
stderr,
)
def test_schema_and_instance_are_both_invalid_JSON(self):
"""
Only the schema error is reported, as we abort immediately.
"""
schema, instance = "not valid JSON!", "also not valid JSON!"
self.assertOutputs(
files=dict(some_schema=schema, some_instance=instance),
argv=["some_schema"],
exit_code=1,
stderr="""\
Failed to parse 'some_schema': {}
""".format(_message_for(schema)),
)
def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self):
"""
Only the schema error is reported, as we abort immediately.
"""
stdout, stderr = self.run_cli(
files=dict(
some_schema="not valid JSON!",
some_instance="also not valid JSON!",
),
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
exit_code=1,
)
self.assertFalse(stdout)
self.assertIn(
"(some_schema)===\n\nTraceback (most recent call last):\n",
stderr,
)
self.assertNotIn("some_instance", stderr)
def test_instance_does_not_exist(self):
self.assertOutputs(
files=dict(some_schema="{}"),
argv=["-i", "nonexisting_instance", "some_schema"],
exit_code=1,
stderr="""\
'nonexisting_instance' does not exist.
""",
)
def test_instance_does_not_exist_pretty_output(self):
self.assertOutputs(
files=dict(some_schema="{}"),
argv=[
"--output", "pretty",
"-i", "nonexisting_instance",
"some_schema",
],
exit_code=1,
stderr="""\
===[FileNotFoundError]===(nonexisting_instance)===
'nonexisting_instance' does not exist.
-----------------------------
""",
)
def test_schema_does_not_exist(self):
self.assertOutputs(
argv=["nonexisting_schema"],
exit_code=1,
stderr="'nonexisting_schema' does not exist.\n",
)
def test_schema_does_not_exist_pretty_output(self):
self.assertOutputs(
argv=["--output", "pretty", "nonexisting_schema"],
exit_code=1,
stderr="""\
===[FileNotFoundError]===(nonexisting_schema)===
'nonexisting_schema' does not exist.
-----------------------------
""",
)
def test_neither_instance_nor_schema_exist(self):
self.assertOutputs(
argv=["-i", "nonexisting_instance", "nonexisting_schema"],
exit_code=1,
stderr="'nonexisting_schema' does not exist.\n",
)
def test_neither_instance_nor_schema_exist_pretty_output(self):
self.assertOutputs(
argv=[
"--output", "pretty",
"-i", "nonexisting_instance",
"nonexisting_schema",
],
exit_code=1,
stderr="""\
===[FileNotFoundError]===(nonexisting_schema)===
'nonexisting_schema' does not exist.
-----------------------------
""",
)
def test_successful_validation(self):
self.assertOutputs(
files=dict(some_schema="{}", some_instance="{}"),
argv=["-i", "some_instance", "some_schema"],
stdout="",
stderr="",
)
def test_successful_validation_pretty_output(self):
self.assertOutputs(
files=dict(some_schema="{}", some_instance="{}"),
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
stdout="===[SUCCESS]===(some_instance)===\n",
stderr="",
)
def test_successful_validation_of_stdin(self):
self.assertOutputs(
files=dict(some_schema="{}"),
stdin=StringIO("{}"),
argv=["some_schema"],
stdout="",
stderr="",
)
def test_successful_validation_of_stdin_pretty_output(self):
self.assertOutputs(
files=dict(some_schema="{}"),
stdin=StringIO("{}"),
argv=["--output", "pretty", "some_schema"],
stdout="===[SUCCESS]===(<stdin>)===\n",
stderr="",
)
def test_successful_validation_of_just_the_schema(self):
self.assertOutputs(
files=dict(some_schema="{}", some_instance="{}"),
argv=["-i", "some_instance", "some_schema"],
stdout="",
stderr="",
)
def test_successful_validation_of_just_the_schema_pretty_output(self):
self.assertOutputs(
files=dict(some_schema="{}", some_instance="{}"),
argv=["--output", "pretty", "-i", "some_instance", "some_schema"],
stdout="===[SUCCESS]===(some_instance)===\n",
stderr="",
)
def test_successful_validation_via_explicit_base_uri(self):
ref_schema_file = tempfile.NamedTemporaryFile(delete=False)
self.addCleanup(os.remove, ref_schema_file.name)
ref_path = Path(ref_schema_file.name)
ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}')
schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}'
self.assertOutputs(
files=dict(some_schema=schema, some_instance="1"),
argv=[
"-i", "some_instance",
"--base-uri", ref_path.parent.as_uri() + "/",
"some_schema",
],
stdout="",
stderr="",
)
def test_unsuccessful_validation_via_explicit_base_uri(self):
ref_schema_file = tempfile.NamedTemporaryFile(delete=False)
self.addCleanup(os.remove, ref_schema_file.name)
ref_path = Path(ref_schema_file.name)
ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}')
schema = f'{{"$ref": "{ref_path.name}#definitions/num"}}'
self.assertOutputs(
files=dict(some_schema=schema, some_instance='"1"'),
argv=[
"-i", "some_instance",
"--base-uri", ref_path.parent.as_uri() + "/",
"some_schema",
],
exit_code=1,
stdout="",
stderr="1: '1' is not of type 'integer'\n",
)
def test_nonexistent_file_with_explicit_base_uri(self):
schema = '{"$ref": "someNonexistentFile.json#definitions/num"}'
instance = "1"
with self.assertRaises(RefResolutionError) as e:
self.assertOutputs(
files=dict(
some_schema=schema,
some_instance=instance,
),
argv=[
"-i", "some_instance",
"--base-uri", Path.cwd().as_uri(),
"some_schema",
],
)
error = str(e.exception)
self.assertIn(f"{os.sep}someNonexistentFile.json'", error)
def test_invalid_explicit_base_uri(self):
schema = '{"$ref": "foo.json#definitions/num"}'
instance = "1"
with self.assertRaises(RefResolutionError) as e:
self.assertOutputs(
files=dict(
some_schema=schema,
some_instance=instance,
),
argv=[
"-i", "some_instance",
"--base-uri", "not@UR1",
"some_schema",
],
)
error = str(e.exception)
self.assertEqual(
error, "unknown url type: 'foo.json'",
)
def test_it_validates_using_the_latest_validator_when_unspecified(self):
# There isn't a better way I can think of right now to ensure that the
# latest version was used, given that the call to validator_for
# is hidden inside the CLI. So guard that that's the case, and this
# test will have to be updated when versions change, until we can
# think of a better way to ensure this behavior.
self.assertIs(Draft202012Validator, _LATEST_VERSION)
self.assertOutputs(
files=dict(some_schema='{"const": "check"}', some_instance='"a"'),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stdout="",
stderr="a: 'check' was expected\n",
)
def test_it_validates_using_draft7_when_specified(self):
"""
Specifically, `const` validation applies for Draft 7.
"""
schema = """
{
"$schema": "http://json-schema.org/draft-07/schema#",
"const": "check"
}
"""
instance = '"foo"'
self.assertOutputs(
files=dict(some_schema=schema, some_instance=instance),
argv=["-i", "some_instance", "some_schema"],
exit_code=1,
stdout="",
stderr="foo: 'check' was expected\n",
)
def test_it_validates_using_draft4_when_specified(self):
"""
Specifically, `const` validation *does not* apply for Draft 4.
"""
schema = """
{
"$schema": "http://json-schema.org/draft-04/schema#",
"const": "check"
}
"""
instance = '"foo"'
self.assertOutputs(
files=dict(some_schema=schema, some_instance=instance),
argv=["-i", "some_instance", "some_schema"],
stdout="",
stderr="",
)
class TestParser(TestCase):
FakeValidator = fake_validator()
def test_find_validator_by_fully_qualified_object_name(self):
arguments = cli.parse_args(
[
"--validator",
"jsonschema.tests.test_cli.TestParser.FakeValidator",
"--instance", "mem://some/instance",
"mem://some/schema",
],
)
self.assertIs(arguments["validator"], self.FakeValidator)
def test_find_validator_in_jsonschema(self):
arguments = cli.parse_args(
[
"--validator", "Draft4Validator",
"--instance", "mem://some/instance",
"mem://some/schema",
],
)
self.assertIs(arguments["validator"], Draft4Validator)
def cli_output_for(self, *argv):
stdout, stderr = StringIO(), StringIO()
with redirect_stdout(stdout), redirect_stderr(stderr):
with self.assertRaises(SystemExit):
cli.parse_args(argv)
return stdout.getvalue(), stderr.getvalue()
def test_unknown_output(self):
stdout, stderr = self.cli_output_for(
"--output", "foo",
"mem://some/schema",
)
self.assertIn("invalid choice: 'foo'", stderr)
self.assertFalse(stdout)
def test_useless_error_format(self):
stdout, stderr = self.cli_output_for(
"--output", "pretty",
"--error-format", "foo",
"mem://some/schema",
)
self.assertIn(
"--error-format can only be used with --output plain",
stderr,
)
self.assertFalse(stdout)
class TestCLIIntegration(TestCase):
def test_license(self):
output = subprocess.check_output(
[sys.executable, "-m", "pip", "show", "jsonschema"],
stderr=subprocess.STDOUT,
)
self.assertIn(b"License: MIT", output)
def test_version(self):
version = subprocess.check_output(
[sys.executable, "-m", "jsonschema", "--version"],
stderr=subprocess.STDOUT,
)
version = version.decode("utf-8").strip()
self.assertEqual(version, metadata.version("jsonschema"))
def test_no_arguments_shows_usage_notes(self):
output = subprocess.check_output(
[sys.executable, "-m", "jsonschema"],
stderr=subprocess.STDOUT,
)
output_for_help = subprocess.check_output(
[sys.executable, "-m", "jsonschema", "--help"],
stderr=subprocess.STDOUT,
)
self.assertEqual(output, output_for_help)

View File

@@ -0,0 +1,123 @@
from unittest import TestCase
from jsonschema import validators
class TestDeprecations(TestCase):
def test_version(self):
"""
As of v4.0.0, __version__ is deprecated in favor of importlib.metadata.
"""
with self.assertWarns(DeprecationWarning) as w:
from jsonschema import __version__ # noqa
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Accessing jsonschema.__version__ is deprecated",
),
)
def test_validators_ErrorTree(self):
"""
As of v4.0.0, importing ErrorTree from jsonschema.validators is
deprecated in favor of doing so from jsonschema.exceptions.
"""
with self.assertWarns(DeprecationWarning) as w:
from jsonschema.validators import ErrorTree # noqa
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Importing ErrorTree from jsonschema.validators is deprecated",
),
)
def test_validators_validators(self):
"""
As of v4.0.0, accessing jsonschema.validators.validators is
deprecated.
"""
with self.assertWarns(DeprecationWarning) as w:
value = validators.validators
self.assertEqual(value, validators._VALIDATORS)
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Accessing jsonschema.validators.validators is deprecated",
),
)
def test_validators_meta_schemas(self):
"""
As of v4.0.0, accessing jsonschema.validators.meta_schemas is
deprecated.
"""
with self.assertWarns(DeprecationWarning) as w:
value = validators.meta_schemas
self.assertEqual(value, validators._META_SCHEMAS)
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Accessing jsonschema.validators.meta_schemas is deprecated",
),
)
def test_RefResolver_in_scope(self):
"""
As of v4.0.0, RefResolver.in_scope is deprecated.
"""
resolver = validators.RefResolver.from_schema({})
with self.assertWarns(DeprecationWarning) as w:
with resolver.in_scope("foo"):
pass
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"jsonschema.RefResolver.in_scope is deprecated ",
),
)
def test_Validator_is_valid_two_arguments(self):
"""
As of v4.0.0, calling is_valid with two arguments (to provide a
different schema) is deprecated.
"""
validator = validators.Draft7Validator({})
with self.assertWarns(DeprecationWarning) as w:
result = validator.is_valid("foo", {"type": "number"})
self.assertFalse(result)
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Passing a schema to Validator.is_valid is deprecated ",
),
)
def test_Validator_iter_errors_two_arguments(self):
"""
As of v4.0.0, calling iter_errors with two arguments (to provide a
different schema) is deprecated.
"""
validator = validators.Draft7Validator({})
with self.assertWarns(DeprecationWarning) as w:
error, = validator.iter_errors("foo", {"type": "number"})
self.assertEqual(error.validator, "type")
self.assertEqual(w.filename, __file__)
self.assertTrue(
str(w.warning).startswith(
"Passing a schema to Validator.iter_errors is deprecated ",
),
)

View File

@@ -0,0 +1,475 @@
from unittest import TestCase
import textwrap
from jsonschema import Draft4Validator, exceptions
class TestBestMatch(TestCase):
def best_match(self, errors):
errors = list(errors)
best = exceptions.best_match(errors)
reversed_best = exceptions.best_match(reversed(errors))
msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
self.assertEqual(
best._contents(), reversed_best._contents(),
msg=msg.format(best, reversed_best),
)
return best
def test_shallower_errors_are_better_matches(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"minProperties": 2,
"properties": {"bar": {"type": "object"}},
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
self.assertEqual(best.validator, "minProperties")
def test_oneOf_and_anyOf_are_weak_matches(self):
"""
A property you *must* match is probably better than one you have to
match a part of.
"""
validator = Draft4Validator(
{
"minProperties": 2,
"anyOf": [{"type": "string"}, {"type": "number"}],
"oneOf": [{"type": "string"}, {"type": "number"}],
},
)
best = self.best_match(validator.iter_errors({}))
self.assertEqual(best.validator, "minProperties")
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
"""
If the most relevant error is an anyOf, then we traverse its context
and select the otherwise *least* relevant error, since in this case
        that means the most specific, deepest error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"anyOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
"""
        If the most relevant error is a oneOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deepest error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
"""
Now, if the error is allOf, we traverse but select the *most* relevant
        error from the context, because all schemas here must match anyway.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"allOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "string")
def test_nested_context_for_oneOf(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{
"oneOf": [
{"type": "string"},
{
"properties": {
"bar": {"type": "array"},
},
},
],
},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_one_error(self):
validator = Draft4Validator({"minProperties": 2})
error, = validator.iter_errors({})
self.assertEqual(
exceptions.best_match(validator.iter_errors({})).validator,
"minProperties",
)
def test_no_errors(self):
validator = Draft4Validator({})
self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
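# A minimal sketch, not part of the original test module, of the traversal the
# docstrings above describe: best_match prefers shallow, mandatory errors, but
# descends into anyOf/oneOf contexts to surface the deepest specific error.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema import Draft4Validator, exceptions

    validator = Draft4Validator(
        {
            "properties": {
                "foo": {
                    "anyOf": [
                        {"type": "string"},
                        {"properties": {"bar": {"type": "array"}}},
                    ],
                },
            },
        },
    )
    best = exceptions.best_match(validator.iter_errors({"foo": {"bar": 12}}))
    assert best.validator == "type" and best.validator_value == "array"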
class TestByRelevance(TestCase):
def test_short_paths_are_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=["baz"])
deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
match = max([shallow, deep], key=exceptions.relevance)
self.assertIs(match, shallow)
match = max([deep, shallow], key=exceptions.relevance)
self.assertIs(match, shallow)
def test_global_errors_are_even_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=[])
deep = exceptions.ValidationError("Oh yes!", path=["foo"])
errors = sorted([shallow, deep], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
errors = sorted([deep, shallow], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
def test_weak_validators_are_lower_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
best_match = exceptions.by_relevance(weak="a")
match = max([weak, normal], key=best_match)
self.assertIs(match, normal)
match = max([normal, weak], key=best_match)
self.assertIs(match, normal)
def test_strong_validators_are_higher_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
best_match = exceptions.by_relevance(weak="a", strong="c")
match = max([weak, normal, strong], key=best_match)
self.assertIs(match, strong)
match = max([strong, normal, weak], key=best_match)
self.assertIs(match, strong)
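# A minimal sketch, not part of the original test module, of using relevance and
# by_relevance directly as sort keys.  anyOf/oneOf are already treated as weak
# by default (see test_oneOf_and_anyOf_are_weak_matches); marking "required" as
# strong here is purely for illustration.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema import exceptions

    deep = exceptions.ValidationError("deep", path=["foo", "bar"])
    shallow = exceptions.ValidationError("shallow", path=["foo"])
    assert max([deep, shallow], key=exceptions.relevance) is shallow

    key = exceptions.by_relevance(weak="anyOf", strong="required")
    weak = exceptions.ValidationError("weak", validator="anyOf", path=[])
    strong = exceptions.ValidationError("strong", validator="required", path=[])
    assert max([weak, strong], key=key) is strong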
class TestErrorTree(TestCase):
def test_it_knows_how_many_total_errors_it_contains(self):
# FIXME: #442
errors = [
exceptions.ValidationError("Something", validator=i)
for i in range(8)
]
tree = exceptions.ErrorTree(errors)
self.assertEqual(tree.total_errors, 8)
def test_it_contains_an_item_if_the_item_had_an_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertIn("bar", tree)
def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertNotIn("foo", tree)
def test_validators_that_failed_appear_in_errors_dict(self):
error = exceptions.ValidationError("a message", validator="foo")
tree = exceptions.ErrorTree([error])
self.assertEqual(tree.errors, {"foo": error})
def test_it_creates_a_child_tree_for_each_nested_path(self):
errors = [
exceptions.ValidationError("a bar message", path=["bar"]),
exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
]
tree = exceptions.ErrorTree(errors)
self.assertIn(0, tree["bar"])
self.assertNotIn(1, tree["bar"])
def test_children_have_their_errors_dicts_built(self):
e1, e2 = (
exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
)
tree = exceptions.ErrorTree([e1, e2])
self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
def test_multiple_errors_with_instance(self):
e1, e2 = (
exceptions.ValidationError(
"1",
validator="foo",
path=["bar", "bar2"],
instance="i1"),
exceptions.ValidationError(
"2",
validator="quux",
path=["foobar", 2],
instance="i2"),
)
exceptions.ErrorTree([e1, e2])
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
error = exceptions.ValidationError("123", validator="foo", instance=[])
tree = exceptions.ErrorTree([error])
with self.assertRaises(IndexError):
tree[0]
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
"""
If a validator is dumb (like :validator:`required` in draft 3) and
refers to a path that isn't in the instance, the tree still properly
returns a subtree for that path.
"""
error = exceptions.ValidationError(
"a message", validator="foo", instance={}, path=["foo"],
)
tree = exceptions.ErrorTree([error])
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
def test_repr(self):
e1, e2 = (
exceptions.ValidationError(
"1",
validator="foo",
path=["bar", "bar2"],
instance="i1"),
exceptions.ValidationError(
"2",
validator="quux",
path=["foobar", 2],
instance="i2"),
)
tree = exceptions.ErrorTree([e1, e2])
self.assertEqual(repr(tree), "<ErrorTree (2 total errors)>")
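# A minimal sketch, not part of the original test module, of how an ErrorTree
# indexes errors by their path within the instance, as asserted above.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema import Draft4Validator, exceptions

    validator = Draft4Validator(
        {"properties": {"foo": {"type": "string"}, "bar": {"minimum": 3}}},
    )
    tree = exceptions.ErrorTree(validator.iter_errors({"foo": 12, "bar": 1}))
    assert tree.total_errors == 2
    assert "foo" in tree and "bar" in tree
    assert tree["foo"].errors["type"].validator_value == "string"
    assert tree["bar"].errors["minimum"].validator_value == 3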
class TestErrorInitReprStr(TestCase):
def make_error(self, **kwargs):
defaults = dict(
message="hello",
validator="type",
validator_value="string",
instance=5,
schema={"type": "string"},
)
defaults.update(kwargs)
return exceptions.ValidationError(**defaults)
def assertShows(self, expected, **kwargs):
expected = textwrap.dedent(expected).rstrip("\n")
error = self.make_error(**kwargs)
message_line, _, rest = str(error).partition("\n")
self.assertEqual(message_line, error.message)
self.assertEqual(rest, expected)
def test_it_calls_super_and_sets_args(self):
error = self.make_error()
self.assertGreater(len(error.args), 1)
def test_repr(self):
self.assertEqual(
repr(exceptions.ValidationError(message="Hello!")),
"<ValidationError: 'Hello!'>",
)
def test_unset_error(self):
error = exceptions.ValidationError("message")
self.assertEqual(str(error), "message")
kwargs = {
"validator": "type",
"validator_value": "string",
"instance": 5,
"schema": {"type": "string"},
}
# Just the message should show if any of the attributes are unset
for attr in kwargs:
k = dict(kwargs)
del k[attr]
error = exceptions.ValidationError("message", **k)
self.assertEqual(str(error), "message")
def test_empty_paths(self):
self.assertShows(
"""
Failed validating 'type' in schema:
{'type': 'string'}
On instance:
5
""",
path=[],
schema_path=[],
)
def test_one_item_paths(self):
self.assertShows(
"""
Failed validating 'type' in schema:
{'type': 'string'}
On instance[0]:
5
""",
path=[0],
schema_path=["items"],
)
def test_multiple_item_paths(self):
self.assertShows(
"""
Failed validating 'type' in schema['items'][0]:
{'type': 'string'}
On instance[0]['a']:
5
""",
path=[0, "a"],
schema_path=["items", 0, 1],
)
def test_uses_pprint(self):
self.assertShows(
"""
Failed validating 'maxLength' in schema:
{0: 0,
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
7: 7,
8: 8,
9: 9,
10: 10,
11: 11,
12: 12,
13: 13,
14: 14,
15: 15,
16: 16,
17: 17,
18: 18,
19: 19}
On instance:
[0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24]
""",
instance=list(range(25)),
schema=dict(zip(range(20), range(20))),
validator="maxLength",
)
def test_str_works_with_instances_having_overriden_eq_operator(self):
"""
Check for #164 which rendered exceptions unusable when a
`ValidationError` involved instances with an `__eq__` method
that returned truthy values.
"""
class DontEQMeBro(object):
def __eq__(this, other): # pragma: no cover
self.fail("Don't!")
def __ne__(this, other): # pragma: no cover
self.fail("Don't!")
instance = DontEQMeBro()
error = exceptions.ValidationError(
"a message",
validator="foo",
instance=instance,
validator_value="some",
schema="schema",
)
self.assertIn(repr(instance), str(error))
class TestHashable(TestCase):
def test_hashable(self):
set([exceptions.ValidationError("")])
set([exceptions.SchemaError("")])

View File

@@ -0,0 +1,107 @@
"""
Tests for the parts of jsonschema related to the :validator:`format` property.
"""
from unittest import TestCase
from jsonschema import FormatChecker, FormatError, ValidationError
from jsonschema.validators import Draft4Validator
BOOM = ValueError("Boom!")
BANG = ZeroDivisionError("Bang!")
def boom(thing):
if thing == "bang":
raise BANG
raise BOOM
class TestFormatChecker(TestCase):
def test_it_can_validate_no_formats(self):
checker = FormatChecker(formats=())
self.assertFalse(checker.checkers)
def test_it_raises_a_key_error_for_unknown_formats(self):
with self.assertRaises(KeyError):
FormatChecker(formats=["o noes"])
def test_it_can_register_cls_checkers(self):
original = dict(FormatChecker.checkers)
self.addCleanup(FormatChecker.checkers.pop, "boom")
FormatChecker.cls_checks("boom")(boom)
self.assertEqual(
FormatChecker.checkers,
dict(original, boom=(boom, ())),
)
def test_it_can_register_checkers(self):
checker = FormatChecker()
checker.checks("boom")(boom)
self.assertEqual(
checker.checkers,
dict(FormatChecker.checkers, boom=(boom, ())),
)
def test_it_catches_registered_errors(self):
checker = FormatChecker()
checker.checks("boom", raises=type(BOOM))(boom)
with self.assertRaises(FormatError) as cm:
checker.check(instance=12, format="boom")
self.assertIs(cm.exception.cause, BOOM)
self.assertIs(cm.exception.__cause__, BOOM)
# Unregistered errors should not be caught
with self.assertRaises(type(BANG)):
checker.check(instance="bang", format="boom")
def test_format_error_causes_become_validation_error_causes(self):
checker = FormatChecker()
checker.checks("boom", raises=ValueError)(boom)
validator = Draft4Validator({"format": "boom"}, format_checker=checker)
with self.assertRaises(ValidationError) as cm:
validator.validate("BOOM")
self.assertIs(cm.exception.cause, BOOM)
self.assertIs(cm.exception.__cause__, BOOM)
def test_format_checkers_come_with_defaults(self):
# This is bad :/ but relied upon.
        # The docs for quite a while recommended people do things like
# validate(..., format_checker=FormatChecker())
# We should change that, but we can't without deprecation...
checker = FormatChecker()
with self.assertRaises(FormatError):
checker.check(instance="not-an-ipv4", format="ipv4")
def test_repr(self):
checker = FormatChecker(formats=())
checker.checks("foo")(lambda thing: True) # pragma: no cover
checker.checks("bar")(lambda thing: True) # pragma: no cover
checker.checks("baz")(lambda thing: True) # pragma: no cover
self.assertEqual(
repr(checker),
"<FormatChecker checkers=['bar', 'baz', 'foo']>",
)
def test_duration_format(self):
try:
from jsonschema._format import is_duration # noqa: F401
except ImportError: # pragma: no cover
pass
else:
checker = FormatChecker()
self.assertTrue(checker.conforms(1, "duration"))
self.assertTrue(checker.conforms("P4Y", "duration"))
self.assertFalse(checker.conforms("test", "duration"))
def test_uuid_format(self):
checker = FormatChecker()
self.assertTrue(checker.conforms(1, "uuid"))
self.assertTrue(
checker.conforms("6e6659ec-4503-4428-9f03-2e2ea4d6c278", "uuid"),
)
self.assertFalse(checker.conforms("test", "uuid"))
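# A minimal sketch, not part of the original test module, of registering a
# custom format checker and wiring it into a validator, mirroring the tests
# above.  The "even" format name and is_even function are invented purely for
# illustration.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema import FormatChecker
    from jsonschema.validators import Draft4Validator

    checker = FormatChecker()

    @checker.checks("even", raises=ValueError)
    def is_even(value):
        return int(value) % 2 == 0

    validator = Draft4Validator({"format": "even"}, format_checker=checker)
    validator.validate(4)                  # conforms
    assert not validator.is_valid(3)       # check returns False -> format error
    assert not validator.is_valid("oops")  # ValueError is caught and wrapped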

View File

@@ -0,0 +1,437 @@
"""
Test runner for the JSON Schema official test suite
Tests comprehensive correctness of each draft's validator.
See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
"""
import sys
from jsonschema import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
Draft201909Validator,
Draft202012Validator,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
draft201909_format_checker,
draft202012_format_checker,
)
from jsonschema.tests._helpers import bug
from jsonschema.tests._suite import Suite
SUITE = Suite()
DRAFT3 = SUITE.version(name="draft3")
DRAFT4 = SUITE.version(name="draft4")
DRAFT6 = SUITE.version(name="draft6")
DRAFT7 = SUITE.version(name="draft7")
DRAFT201909 = SUITE.version(name="draft2019-09")
DRAFT202012 = SUITE.version(name="draft2020-12")
def skip(message, **kwargs):
def skipper(test):
if all(value == getattr(test, attr) for attr, value in kwargs.items()):
return message
return skipper
def missing_format(checker):
def missing_format(test): # pragma: no cover
schema = test.schema
if (
schema is True
or schema is False
or "format" not in schema
or schema["format"] in checker.checkers
or test.valid
):
return
return "Format checker {0!r} not found.".format(schema["format"])
return missing_format
def complex_email_validation(test):
if test.subject != "email":
return
message = "Complex email validation is (intentionally) unsupported."
return skip(
message=message,
description="an invalid domain",
)(test) or skip(
message=message,
description="an invalid IPv4-address-literal",
)(test) or skip(
message=message,
description="dot after local part is not valid",
)(test) or skip(
message=message,
description="dot before local part is not valid",
)(test) or skip(
message=message,
description="two subsequent dots inside local part are not valid",
)(test)
is_narrow_build = sys.maxunicode == 2 ** 16 - 1
if is_narrow_build: # pragma: no cover
message = "Not running surrogate Unicode case, this Python is narrow."
def narrow_unicode_build(test): # pragma: no cover
return skip(
message=message,
description=(
"one supplementary Unicode code point is not long enough"
),
)(test) or skip(
message=message,
description="two supplementary Unicode code points is long enough",
)(test)
else:
def narrow_unicode_build(test): # pragma: no cover
return
if sys.version_info < (3, 9): # pragma: no cover
message = "Rejecting leading zeros is 3.9+"
allowed_leading_zeros = skip(
message=message,
subject="ipv4",
description=(
"leading zeroes should be rejected, as they are treated as octals"
),
)
else:
def allowed_leading_zeros(test): # pragma: no cover
return
def leap_second(test):
message = "Leap seconds are unsupported."
return skip(
message=message,
subject="time",
description="a valid time string with leap second",
)(test) or skip(
message=message,
subject="time",
description="a valid time string with leap second, Zulu",
)(test) or skip(
message=message,
subject="time",
description="a valid time string with leap second with offset",
)(test) or skip(
message=message,
subject="time",
description="valid leap second, positive time-offset",
)(test) or skip(
message=message,
subject="time",
description="valid leap second, negative time-offset",
)(test) or skip(
message=message,
subject="time",
description="valid leap second, large positive time-offset",
)(test) or skip(
message=message,
subject="time",
description="valid leap second, large negative time-offset",
)(test) or skip(
message=message,
subject="time",
description="valid leap second, zero time-offset",
)(test) or skip(
message=message,
subject="date-time",
description="a valid date-time with a leap second, UTC",
)(test) or skip(
message=message,
subject="date-time",
description="a valid date-time with a leap second, with minus offset",
)(test)
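# A minimal sketch, not part of the runner, of how the skip combinators above
# compose: each skipper returns its message for a matching test and None
# otherwise, so chaining with `or` yields the first applicable skip reason.
# FakeTest is an invented stand-in assumed to expose the same attributes
# (subject, description, ...) as the real suite test objects.
if __name__ == "__main__":  # pragma: no cover
    from collections import namedtuple

    FakeTest = namedtuple("FakeTest", ["subject", "description"])
    leap = FakeTest(
        subject="time",
        description="a valid time string with leap second",
    )
    assert leap_second(leap) == "Leap seconds are unsupported."
    assert leap_second(leap._replace(subject="duration")) is None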
TestDraft3 = DRAFT3.to_unittest_testcase(
DRAFT3.tests(),
DRAFT3.format_tests(),
DRAFT3.optional_tests_of(name="bignum"),
DRAFT3.optional_tests_of(name="non-bmp-regex"),
DRAFT3.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft3Validator,
format_checker=draft3_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or missing_format(draft3_format_checker)(test)
or complex_email_validation(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"$ref prevents a sibling id from changing the base uri"
),
)(test)
),
)
TestDraft4 = DRAFT4.to_unittest_testcase(
DRAFT4.tests(),
DRAFT4.format_tests(),
DRAFT4.optional_tests_of(name="bignum"),
DRAFT4.optional_tests_of(name="float-overflow"),
DRAFT4.optional_tests_of(name="non-bmp-regex"),
DRAFT4.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft4Validator,
format_checker=draft4_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or allowed_leading_zeros(test)
or leap_second(test)
or missing_format(draft4_format_checker)(test)
or complex_email_validation(test)
or skip(
message=bug(),
subject="ref",
case_description="Recursive references between schemas",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with "
"base URI change in subschema"
),
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"$ref prevents a sibling id from changing the base uri"
),
)(test)
or skip(
message=bug(371),
subject="id",
description="match $ref to id",
)(test)
or skip(
message=bug(371),
subject="id",
description="no match on enum or $ref to id",
)(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message=bug(),
subject="ref",
case_description=(
"id must be resolved against nearest parent, "
"not just immediate parent"
),
)(test)
),
)
TestDraft6 = DRAFT6.to_unittest_testcase(
DRAFT6.tests(),
DRAFT6.format_tests(),
DRAFT6.optional_tests_of(name="bignum"),
DRAFT6.optional_tests_of(name="float-overflow"),
DRAFT6.optional_tests_of(name="non-bmp-regex"),
Validator=Draft6Validator,
format_checker=draft6_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or allowed_leading_zeros(test)
or leap_second(test)
or missing_format(draft6_format_checker)(test)
or complex_email_validation(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"$ref prevents a sibling $id from changing the base uri"
),
)(test)
),
)
TestDraft7 = DRAFT7.to_unittest_testcase(
DRAFT7.tests(),
DRAFT7.format_tests(),
DRAFT7.optional_tests_of(name="bignum"),
DRAFT7.optional_tests_of(name="content"),
DRAFT7.optional_tests_of(name="float-overflow"),
DRAFT7.optional_tests_of(name="non-bmp-regex"),
Validator=Draft7Validator,
format_checker=draft7_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or allowed_leading_zeros(test)
or leap_second(test)
or missing_format(draft7_format_checker)(test)
or complex_email_validation(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"$ref prevents a sibling $id from changing the base uri"
),
)(test)
or skip(
message=bug(),
subject="ref",
case_description=(
"$id must be resolved against nearest parent, "
"not just immediate parent"
),
)(test)
or skip(
message=bug(593),
subject="content",
valid=False,
case_description=(
"validation of string-encoded content based on media type"
),
)(test)
or skip(
message=bug(593),
subject="content",
valid=False,
case_description="validation of binary string-encoding",
)(test)
or skip(
message=bug(593),
subject="content",
valid=False,
case_description=(
"validation of binary-encoded media type documents"
),
)(test)
),
)
TestDraft201909 = DRAFT201909.to_unittest_testcase(
DRAFT201909.tests(),
DRAFT201909.optional_tests_of(name="bignum"),
DRAFT201909.optional_tests_of(name="float-overflow"),
DRAFT201909.optional_tests_of(name="non-bmp-regex"),
DRAFT201909.optional_tests_of(name="refOfUnknownKeyword"),
Validator=Draft201909Validator,
skip=lambda test: (
skip(
message="unevaluatedItems is different in 2019-09 (needs work).",
subject="unevaluatedItems",
)(test)
or skip(
message="dynamicRef support isn't working yet.",
subject="recursiveRef",
)(test)
or skip(
            message="These tests depend on dynamicRef working.",
subject="anchor",
case_description="same $anchor with different base uri",
)(test)
or skip(
message="Vocabulary support is not yet present.",
subject="vocabulary",
)(test)
or skip(
message=bug(),
subject="ref",
case_description=(
"$id must be resolved against nearest parent, "
"not just immediate parent"
),
)(test)
),
)
TestDraft201909Format = DRAFT201909.to_unittest_testcase(
DRAFT201909.format_tests(),
Validator=Draft201909Validator,
format_checker=draft201909_format_checker,
skip=lambda test: (
complex_email_validation(test)
or allowed_leading_zeros(test)
or leap_second(test)
or missing_format(draft201909_format_checker)(test)
),
)
TestDraft202012 = DRAFT202012.to_unittest_testcase(
DRAFT202012.tests(),
DRAFT202012.optional_tests_of(name="bignum"),
DRAFT202012.optional_tests_of(name="float-overflow"),
DRAFT202012.optional_tests_of(name="non-bmp-regex"),
DRAFT202012.optional_tests_of(name="refOfUnknownKeyword"),
Validator=Draft202012Validator,
skip=lambda test: (
narrow_unicode_build(test)
or skip(
message="dynamicRef support isn't working yet.",
subject="dynamicRef",
)(test)
or skip(
            message="These tests depend on dynamicRef working.",
subject="defs",
)(test)
or skip(
            message="These tests depend on dynamicRef working.",
subject="anchor",
case_description="same $anchor with different base uri",
)(test)
or skip(
message="Vocabulary support is not yet present.",
subject="vocabulary",
)(test)
or skip(
message=bug(),
subject="ref",
case_description=(
"$id must be resolved against nearest parent, "
"not just immediate parent"
),
)(test)
),
)
TestDraft202012Format = DRAFT202012.to_unittest_testcase(
DRAFT202012.format_tests(),
Validator=Draft202012Validator,
format_checker=draft202012_format_checker,
skip=lambda test: (
complex_email_validation(test)
or allowed_leading_zeros(test)
or leap_second(test)
or missing_format(draft202012_format_checker)(test)
),
)

View File

@@ -0,0 +1,217 @@
"""
Tests for the `TypeChecker`-based type interface.
The actual correctness of the type checking is handled in
`test_jsonschema_test_suite`; these tests check that TypeChecker
functions correctly at a more granular level.
"""
from collections import namedtuple
from unittest import TestCase
from jsonschema import ValidationError, _validators
from jsonschema._types import TypeChecker
from jsonschema.exceptions import UndefinedTypeCheck, UnknownType
from jsonschema.validators import Draft202012Validator, extend
def equals_2(checker, instance):
return instance == 2
def is_namedtuple(instance):
return isinstance(instance, tuple) and getattr(instance, "_fields", None)
def is_object_or_named_tuple(checker, instance):
if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"):
return True
return is_namedtuple(instance)
class TestTypeChecker(TestCase):
def test_is_type(self):
checker = TypeChecker({"two": equals_2})
self.assertEqual(
(
checker.is_type(instance=2, type="two"),
checker.is_type(instance="bar", type="two"),
),
(True, False),
)
def test_is_unknown_type(self):
with self.assertRaises(UndefinedTypeCheck) as e:
TypeChecker().is_type(4, "foobar")
self.assertIn(
"'foobar' is unknown to this type checker",
str(e.exception),
)
self.assertTrue(
e.exception.__suppress_context__,
msg="Expected the internal KeyError to be hidden.",
)
def test_checks_can_be_added_at_init(self):
checker = TypeChecker({"two": equals_2})
self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
def test_redefine_existing_type(self):
self.assertEqual(
TypeChecker().redefine("two", object()).redefine("two", equals_2),
TypeChecker().redefine("two", equals_2),
)
def test_remove(self):
self.assertEqual(
TypeChecker({"two": equals_2}).remove("two"),
TypeChecker(),
)
def test_remove_unknown_type(self):
with self.assertRaises(UndefinedTypeCheck) as context:
TypeChecker().remove("foobar")
self.assertIn("foobar", str(context.exception))
def test_redefine_many(self):
self.assertEqual(
TypeChecker().redefine_many({"foo": int, "bar": str}),
TypeChecker().redefine("foo", int).redefine("bar", str),
)
def test_remove_multiple(self):
self.assertEqual(
TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
TypeChecker(),
)
def test_type_check_can_raise_key_error(self):
"""
Make sure no one writes:
try:
self._type_checkers[type](...)
except KeyError:
ignoring the fact that the function itself can raise that.
"""
error = KeyError("Stuff")
def raises_keyerror(checker, instance):
raise error
with self.assertRaises(KeyError) as context:
TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")
self.assertIs(context.exception, error)
class TestCustomTypes(TestCase):
def test_simple_type_can_be_extended(self):
def int_or_str_int(checker, instance):
if not isinstance(instance, (int, str)):
return False
try:
int(instance)
except ValueError:
return False
return True
CustomValidator = extend(
Draft202012Validator,
type_checker=Draft202012Validator.TYPE_CHECKER.redefine(
"integer", int_or_str_int,
),
)
validator = CustomValidator({"type": "integer"})
validator.validate(4)
validator.validate("4")
with self.assertRaises(ValidationError):
validator.validate(4.4)
with self.assertRaises(ValidationError):
validator.validate("foo")
def test_object_can_be_extended(self):
schema = {"type": "object"}
Point = namedtuple("Point", ["x", "y"])
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
"object", is_object_or_named_tuple,
)
CustomValidator = extend(
Draft202012Validator,
type_checker=type_checker,
)
validator = CustomValidator(schema)
validator.validate(Point(x=4, y=5))
def test_object_extensions_require_custom_validators(self):
schema = {"type": "object", "required": ["x"]}
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
"object", is_object_or_named_tuple,
)
CustomValidator = extend(
Draft202012Validator,
type_checker=type_checker,
)
validator = CustomValidator(schema)
Point = namedtuple("Point", ["x", "y"])
# Cannot handle required
with self.assertRaises(ValidationError):
validator.validate(Point(x=4, y=5))
def test_object_extensions_can_handle_custom_validators(self):
schema = {
"type": "object",
"required": ["x"],
"properties": {"x": {"type": "integer"}},
}
type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
"object", is_object_or_named_tuple,
)
def coerce_named_tuple(fn):
def coerced(validator, value, instance, schema):
if is_namedtuple(instance):
instance = instance._asdict()
return fn(validator, value, instance, schema)
return coerced
required = coerce_named_tuple(_validators.required)
properties = coerce_named_tuple(_validators.properties)
CustomValidator = extend(
Draft202012Validator,
type_checker=type_checker,
validators={"required": required, "properties": properties},
)
validator = CustomValidator(schema)
Point = namedtuple("Point", ["x", "y"])
# Can now process required and properties
validator.validate(Point(x=4, y=5))
with self.assertRaises(ValidationError):
validator.validate(Point(x="not an integer", y=5))
# As well as still handle objects.
validator.validate({"x": 4, "y": 5})
with self.assertRaises(ValidationError):
validator.validate({"x": "not an integer", "y": 5})
def test_unknown_type(self):
with self.assertRaises(UnknownType) as e:
Draft202012Validator({}).is_type(12, "some unknown type")
self.assertIn("'some unknown type'", str(e.exception))
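# A minimal sketch, not part of the original test module, of assembling a
# TypeChecker from scratch with redefine_many.  The "word" and "positive" type
# names and their check functions are invented purely for illustration.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema import TypeChecker

    custom = TypeChecker().redefine_many(
        {
            "word": lambda checker, instance: isinstance(instance, str),
            "positive": lambda checker, instance: (
                isinstance(instance, int)
                and not isinstance(instance, bool)
                and instance > 0
            ),
        },
    )
    assert custom.is_type("spam", "word")
    assert custom.is_type(3, "positive")
    assert not custom.is_type(-3, "positive")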

View File

@@ -0,0 +1,124 @@
from unittest import TestCase
from jsonschema._utils import equal
class TestEqual(TestCase):
def test_none(self):
self.assertTrue(equal(None, None))
class TestDictEqual(TestCase):
def test_equal_dictionaries(self):
dict_1 = {"a": "b", "c": "d"}
dict_2 = {"c": "d", "a": "b"}
self.assertTrue(equal(dict_1, dict_2))
def test_missing_key(self):
dict_1 = {"a": "b", "c": "d"}
dict_2 = {"c": "d", "x": "b"}
self.assertFalse(equal(dict_1, dict_2))
def test_additional_key(self):
dict_1 = {"a": "b", "c": "d"}
dict_2 = {"c": "d", "a": "b", "x": "x"}
self.assertFalse(equal(dict_1, dict_2))
def test_missing_value(self):
dict_1 = {"a": "b", "c": "d"}
dict_2 = {"c": "d", "a": "x"}
self.assertFalse(equal(dict_1, dict_2))
def test_empty_dictionaries(self):
dict_1 = {}
dict_2 = {}
self.assertTrue(equal(dict_1, dict_2))
def test_one_none(self):
dict_1 = None
dict_2 = {"a": "b", "c": "d"}
self.assertFalse(equal(dict_1, dict_2))
def test_same_item(self):
dict_1 = {"a": "b", "c": "d"}
self.assertTrue(equal(dict_1, dict_1))
def test_nested_equal(self):
dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"}
dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}}
self.assertTrue(equal(dict_1, dict_2))
def test_nested_dict_unequal(self):
dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"}
dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}}
self.assertFalse(equal(dict_1, dict_2))
def test_mixed_nested_equal(self):
dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"}
dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]}
self.assertTrue(equal(dict_1, dict_2))
def test_nested_list_unequal(self):
dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"}
dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]}
self.assertFalse(equal(dict_1, dict_2))
class TestListEqual(TestCase):
def test_equal_lists(self):
list_1 = ["a", "b", "c"]
list_2 = ["a", "b", "c"]
self.assertTrue(equal(list_1, list_2))
def test_unsorted_lists(self):
list_1 = ["a", "b", "c"]
list_2 = ["b", "b", "a"]
self.assertFalse(equal(list_1, list_2))
def test_first_list_larger(self):
list_1 = ["a", "b", "c"]
list_2 = ["a", "b"]
self.assertFalse(equal(list_1, list_2))
def test_second_list_larger(self):
list_1 = ["a", "b"]
list_2 = ["a", "b", "c"]
self.assertFalse(equal(list_1, list_2))
def test_list_with_none_unequal(self):
list_1 = ["a", "b", None]
list_2 = ["a", "b", "c"]
self.assertFalse(equal(list_1, list_2))
list_1 = ["a", "b", None]
list_2 = [None, "b", "c"]
self.assertFalse(equal(list_1, list_2))
def test_list_with_none_equal(self):
list_1 = ["a", None, "c"]
list_2 = ["a", None, "c"]
self.assertTrue(equal(list_1, list_2))
def test_empty_list(self):
list_1 = []
list_2 = []
self.assertTrue(equal(list_1, list_2))
def test_one_none(self):
list_1 = None
list_2 = []
self.assertFalse(equal(list_1, list_2))
def test_same_list(self):
list_1 = ["a", "b", "c"]
self.assertTrue(equal(list_1, list_1))
def test_equal_nested_lists(self):
list_1 = ["a", ["b", "c"], "d"]
list_2 = ["a", ["b", "c"], "d"]
self.assertTrue(equal(list_1, list_2))
def test_unequal_nested_lists(self):
list_1 = ["a", ["b", "c"], "d"]
list_2 = ["a", [], "c"]
self.assertFalse(equal(list_1, list_2))
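# A minimal sketch, not part of the original test module: equal() compares
# containers recursively, ignoring dict key order but not list order.
if __name__ == "__main__":  # pragma: no cover
    from jsonschema._utils import equal

    assert equal({"a": [1, 2], "b": {}}, {"b": {}, "a": [1, 2]})
    assert not equal(["a", "b"], ["b", "a"])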

File diff suppressed because it is too large

File diff suppressed because it is too large