# __init__.py
"""
An implementation of JSON Schema for Python.

The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.

Most commonly, `jsonschema.validators.validate` is the quickest way to simply
validate a given instance under a schema, and will create a validator
for you.
"""
import warnings

from jsonschema._format import FormatChecker
from jsonschema._types import TypeChecker
from jsonschema.exceptions import SchemaError, ValidationError
from jsonschema.validators import (
    Draft3Validator,
    Draft4Validator,
    Draft6Validator,
    Draft7Validator,
    Draft201909Validator,
    Draft202012Validator,
    validate,
)


def __getattr__(name):
    if name == "__version__":
        warnings.warn(
            "Accessing jsonschema.__version__ is deprecated and will be "
            "removed in a future release. Use importlib.metadata directly "
            "to query for jsonschema's version.",
            DeprecationWarning,
            stacklevel=2,
        )

        from importlib import metadata
        return metadata.version("jsonschema")
    elif name == "RefResolver":
        from jsonschema.validators import _RefResolver

        warnings.warn(
            _RefResolver._DEPRECATION_MESSAGE,
            DeprecationWarning,
            stacklevel=2,
        )
        return _RefResolver
    elif name == "ErrorTree":
        warnings.warn(
            "Importing ErrorTree directly from the jsonschema package "
            "is deprecated and will become an ImportError. Import it from "
            "jsonschema.exceptions instead.",
            DeprecationWarning,
            stacklevel=2,
        )

        from jsonschema.exceptions import ErrorTree
        return ErrorTree
    elif name == "FormatError":
        warnings.warn(
            "Importing FormatError directly from the jsonschema package "
            "is deprecated and will become an ImportError. Import it from "
            "jsonschema.exceptions instead.",
            DeprecationWarning,
            stacklevel=2,
        )

        from jsonschema.exceptions import FormatError
        return FormatError
    elif name == "Validator":
        warnings.warn(
            "Importing Validator directly from the jsonschema package "
            "is deprecated and will become an ImportError. Import it from "
            "jsonschema.protocols instead.",
            DeprecationWarning,
            stacklevel=2,
        )

        from jsonschema.protocols import Validator
        return Validator
    elif name == "RefResolutionError":
        from jsonschema.exceptions import _RefResolutionError

        warnings.warn(
            _RefResolutionError._DEPRECATION_MESSAGE,
            DeprecationWarning,
            stacklevel=2,
        )
        return _RefResolutionError

    format_checkers = {
        "draft3_format_checker": Draft3Validator,
        "draft4_format_checker": Draft4Validator,
        "draft6_format_checker": Draft6Validator,
        "draft7_format_checker": Draft7Validator,
        "draft201909_format_checker": Draft201909Validator,
        "draft202012_format_checker": Draft202012Validator,
    }
    ValidatorForFormat = format_checkers.get(name)
    if ValidatorForFormat is not None:
        warnings.warn(
            f"Accessing jsonschema.{name} is deprecated and will be "
            "removed in a future release. Instead, use the FORMAT_CHECKER "
            "attribute on the corresponding Validator.",
            DeprecationWarning,
            stacklevel=2,
        )
        return ValidatorForFormat.FORMAT_CHECKER

    raise AttributeError(f"module {__name__} has no attribute {name}")


__all__ = [
    "Draft201909Validator",
    "Draft202012Validator",
    "Draft3Validator",
    "Draft4Validator",
    "Draft6Validator",
    "Draft7Validator",
    "FormatChecker",
    "SchemaError",
    "TypeChecker",
    "ValidationError",
    "validate",
]

# __main__.py
"""
The jsonschema CLI is now deprecated in favor of check-jsonschema.
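# --- Illustrative sketch, not part of the packaged sources ----------------
# The package docstring above names `validate` as the quickest entry point.
# A minimal, hedged example of that public API (the schema and instances
# here are invented for illustration):
from jsonschema import ValidationError, validate

schema = {"type": "object", "properties": {"age": {"type": "integer"}}}

validate(instance={"age": 30}, schema=schema)  # returns None when valid

try:
    validate(instance={"age": "thirty"}, schema=schema)
except ValidationError as error:
    print(error.message)  # 'thirty' is not of type 'integer'
# ---------------------------------------------------------------------------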
""" from jsonschema.cli import main main() PK!88 _format.pynu[from __future__ import annotations from contextlib import suppress from datetime import date, datetime from uuid import UUID import ipaddress import re import typing import warnings from jsonschema.exceptions import FormatError _FormatCheckCallable = typing.Callable[[object], bool] #: A format checker callable. _F = typing.TypeVar("_F", bound=_FormatCheckCallable) _RaisesType = typing.Union[ typing.Type[Exception], typing.Tuple[typing.Type[Exception], ...], ] _RE_DATE = re.compile(r"^\d{4}-\d{2}-\d{2}$", re.ASCII) class FormatChecker: """ A ``format`` property checker. JSON Schema does not mandate that the ``format`` property actually do any validation. If validation is desired however, instances of this class can be hooked into validators to enable format validation. `FormatChecker` objects always return ``True`` when asked about formats that they do not know how to validate. To add a check for a custom format use the `FormatChecker.checks` decorator. Arguments: formats: The known formats to validate. This argument can be used to limit which formats will be used during validation. """ checkers: dict[ str, tuple[_FormatCheckCallable, _RaisesType], ] = {} # noqa: RUF012 def __init__(self, formats: typing.Iterable[str] | None = None): if formats is None: formats = self.checkers.keys() self.checkers = {k: self.checkers[k] for k in formats} def __repr__(self): return f"" def checks( self, format: str, raises: _RaisesType = (), ) -> typing.Callable[[_F], _F]: """ Register a decorated function as validating a new format. Arguments: format: The format that the decorated function will check. raises: The exception(s) raised by the decorated function when an invalid instance is found. The exception object will be accessible as the `jsonschema.exceptions.ValidationError.cause` attribute of the resulting validation error. """ def _checks(func: _F) -> _F: self.checkers[format] = (func, raises) return func return _checks @classmethod def cls_checks( cls, format: str, raises: _RaisesType = (), ) -> typing.Callable[[_F], _F]: warnings.warn( ( "FormatChecker.cls_checks is deprecated. Call " "FormatChecker.checks on a specific FormatChecker instance " "instead." ), DeprecationWarning, stacklevel=2, ) return cls._cls_checks(format=format, raises=raises) @classmethod def _cls_checks( cls, format: str, raises: _RaisesType = (), ) -> typing.Callable[[_F], _F]: def _checks(func: _F) -> _F: cls.checkers[format] = (func, raises) return func return _checks def check(self, instance: object, format: str) -> None: """ Check whether the instance conforms to the given format. Arguments: instance (*any primitive type*, i.e. str, number, bool): The instance to check format: The format that instance should conform to Raises: FormatError: if the instance does not conform to ``format`` """ if format not in self.checkers: return func, raises = self.checkers[format] result, cause = None, None try: result = func(instance) except raises as e: cause = e if not result: raise FormatError(f"{instance!r} is not a {format!r}", cause=cause) def conforms(self, instance: object, format: str) -> bool: """ Check whether the instance conforms to the given format. Arguments: instance (*any primitive type*, i.e. 
str, number, bool): The instance to check format: The format that instance should conform to Returns: bool: whether it conformed """ try: self.check(instance, format) except FormatError: return False else: return True draft3_format_checker = FormatChecker() draft4_format_checker = FormatChecker() draft6_format_checker = FormatChecker() draft7_format_checker = FormatChecker() draft201909_format_checker = FormatChecker() draft202012_format_checker = FormatChecker() _draft_checkers: dict[str, FormatChecker] = dict( draft3=draft3_format_checker, draft4=draft4_format_checker, draft6=draft6_format_checker, draft7=draft7_format_checker, draft201909=draft201909_format_checker, draft202012=draft202012_format_checker, ) def _checks_drafts( name=None, draft3=None, draft4=None, draft6=None, draft7=None, draft201909=None, draft202012=None, raises=(), ) -> typing.Callable[[_F], _F]: draft3 = draft3 or name draft4 = draft4 or name draft6 = draft6 or name draft7 = draft7 or name draft201909 = draft201909 or name draft202012 = draft202012 or name def wrap(func: _F) -> _F: if draft3: func = _draft_checkers["draft3"].checks(draft3, raises)(func) if draft4: func = _draft_checkers["draft4"].checks(draft4, raises)(func) if draft6: func = _draft_checkers["draft6"].checks(draft6, raises)(func) if draft7: func = _draft_checkers["draft7"].checks(draft7, raises)(func) if draft201909: func = _draft_checkers["draft201909"].checks(draft201909, raises)( func, ) if draft202012: func = _draft_checkers["draft202012"].checks(draft202012, raises)( func, ) # Oy. This is bad global state, but relied upon for now, until # deprecation. See #519 and test_format_checkers_come_with_defaults FormatChecker._cls_checks( draft202012 or draft201909 or draft7 or draft6 or draft4 or draft3, raises, )(func) return func return wrap @_checks_drafts(name="idn-email") @_checks_drafts(name="email") def is_email(instance: object) -> bool: if not isinstance(instance, str): return True return "@" in instance @_checks_drafts( draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4", draft201909="ipv4", draft202012="ipv4", raises=ipaddress.AddressValueError, ) def is_ipv4(instance: object) -> bool: if not isinstance(instance, str): return True return bool(ipaddress.IPv4Address(instance)) @_checks_drafts(name="ipv6", raises=ipaddress.AddressValueError) def is_ipv6(instance: object) -> bool: if not isinstance(instance, str): return True address = ipaddress.IPv6Address(instance) return not getattr(address, "scope_id", "") with suppress(ImportError): from fqdn import FQDN @_checks_drafts( draft3="host-name", draft4="hostname", draft6="hostname", draft7="hostname", draft201909="hostname", draft202012="hostname", ) def is_host_name(instance: object) -> bool: if not isinstance(instance, str): return True return FQDN(instance, min_labels=1).is_valid with suppress(ImportError): # The built-in `idna` codec only implements RFC 3890, so we go elsewhere. 
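# --- Illustrative sketch, not part of the packaged sources ----------------
# Registering a custom format with `FormatChecker.checks` and handing the
# checker to a validator, as described in the FormatChecker docstring above.
# The names `my_checker`, `is_even`, and the "even" format are invented here.
from jsonschema import Draft202012Validator, FormatChecker

my_checker = FormatChecker()

@my_checker.checks("even", raises=ValueError)
def is_even(value):
    # Mirror the bundled checkers: instances of other types pass the check.
    if not isinstance(value, int):
        return True
    return value % 2 == 0

validator = Draft202012Validator(
    {"type": "integer", "format": "even"},
    format_checker=my_checker,
)
print(validator.is_valid(4))  # True
print(validator.is_valid(5))  # False -- fails the custom "even" format
# ---------------------------------------------------------------------------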
import idna @_checks_drafts( draft7="idn-hostname", draft201909="idn-hostname", draft202012="idn-hostname", raises=(idna.IDNAError, UnicodeError), ) def is_idn_host_name(instance: object) -> bool: if not isinstance(instance, str): return True idna.encode(instance) return True try: import rfc3987 except ImportError: with suppress(ImportError): from rfc3986_validator import validate_rfc3986 @_checks_drafts(name="uri") def is_uri(instance: object) -> bool: if not isinstance(instance, str): return True return validate_rfc3986(instance, rule="URI") @_checks_drafts( draft6="uri-reference", draft7="uri-reference", draft201909="uri-reference", draft202012="uri-reference", raises=ValueError, ) def is_uri_reference(instance: object) -> bool: if not isinstance(instance, str): return True return validate_rfc3986(instance, rule="URI_reference") else: @_checks_drafts( draft7="iri", draft201909="iri", draft202012="iri", raises=ValueError, ) def is_iri(instance: object) -> bool: if not isinstance(instance, str): return True return rfc3987.parse(instance, rule="IRI") @_checks_drafts( draft7="iri-reference", draft201909="iri-reference", draft202012="iri-reference", raises=ValueError, ) def is_iri_reference(instance: object) -> bool: if not isinstance(instance, str): return True return rfc3987.parse(instance, rule="IRI_reference") @_checks_drafts(name="uri", raises=ValueError) def is_uri(instance: object) -> bool: if not isinstance(instance, str): return True return rfc3987.parse(instance, rule="URI") @_checks_drafts( draft6="uri-reference", draft7="uri-reference", draft201909="uri-reference", draft202012="uri-reference", raises=ValueError, ) def is_uri_reference(instance: object) -> bool: if not isinstance(instance, str): return True return rfc3987.parse(instance, rule="URI_reference") with suppress(ImportError): from rfc3339_validator import validate_rfc3339 @_checks_drafts(name="date-time") def is_datetime(instance: object) -> bool: if not isinstance(instance, str): return True return validate_rfc3339(instance.upper()) @_checks_drafts( draft7="time", draft201909="time", draft202012="time", ) def is_time(instance: object) -> bool: if not isinstance(instance, str): return True return is_datetime("1970-01-01T" + instance) @_checks_drafts(name="regex", raises=re.error) def is_regex(instance: object) -> bool: if not isinstance(instance, str): return True return bool(re.compile(instance)) @_checks_drafts( draft3="date", draft7="date", draft201909="date", draft202012="date", raises=ValueError, ) def is_date(instance: object) -> bool: if not isinstance(instance, str): return True return bool(_RE_DATE.fullmatch(instance) and date.fromisoformat(instance)) @_checks_drafts(draft3="time", raises=ValueError) def is_draft3_time(instance: object) -> bool: if not isinstance(instance, str): return True return bool(datetime.strptime(instance, "%H:%M:%S")) # noqa: DTZ007 with suppress(ImportError): import webcolors @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) def is_css21_color(instance: object) -> bool: if isinstance(instance, str): try: webcolors.name_to_hex(instance) except ValueError: webcolors.normalize_hex(instance.lower()) return True with suppress(ImportError): import jsonpointer @_checks_drafts( draft6="json-pointer", draft7="json-pointer", draft201909="json-pointer", draft202012="json-pointer", raises=jsonpointer.JsonPointerException, ) def is_json_pointer(instance: object) -> bool: if not isinstance(instance, str): return True return bool(jsonpointer.JsonPointer(instance)) # TODO: I don't want 
to maintain this, so it # needs to go either into jsonpointer (pending # https://github.com/stefankoegl/python-json-pointer/issues/34) or # into a new external library. @_checks_drafts( draft7="relative-json-pointer", draft201909="relative-json-pointer", draft202012="relative-json-pointer", raises=jsonpointer.JsonPointerException, ) def is_relative_json_pointer(instance: object) -> bool: # Definition taken from: # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3 if not isinstance(instance, str): return True if not instance: return False non_negative_integer, rest = [], "" for i, character in enumerate(instance): if character.isdigit(): # digits with a leading "0" are not allowed if i > 0 and int(instance[i - 1]) == 0: return False non_negative_integer.append(character) continue if not non_negative_integer: return False rest = instance[i:] break return (rest == "#") or bool(jsonpointer.JsonPointer(rest)) with suppress(ImportError): import uri_template @_checks_drafts( draft6="uri-template", draft7="uri-template", draft201909="uri-template", draft202012="uri-template", ) def is_uri_template(instance: object) -> bool: if not isinstance(instance, str): return True return uri_template.validate(instance) with suppress(ImportError): import isoduration @_checks_drafts( draft201909="duration", draft202012="duration", raises=isoduration.DurationParsingException, ) def is_duration(instance: object) -> bool: if not isinstance(instance, str): return True isoduration.parse_duration(instance) # FIXME: See bolsote/isoduration#25 and bolsote/isoduration#21 return instance.endswith(tuple("DMYWHMS")) @_checks_drafts( draft201909="uuid", draft202012="uuid", raises=ValueError, ) def is_uuid(instance: object) -> bool: if not isinstance(instance, str): return True UUID(instance) return all(instance[position] == "-" for position in (8, 13, 18, 23)) PK!W e:e: _keywords.pynu[from fractions import Fraction import re from jsonschema._utils import ( ensure_list, equal, extras_msg, find_additional_properties, find_evaluated_item_indexes_by_schema, find_evaluated_property_keys_by_schema, uniq, ) from jsonschema.exceptions import FormatError, ValidationError def patternProperties(validator, patternProperties, instance, schema): if not validator.is_type(instance, "object"): return for pattern, subschema in patternProperties.items(): for k, v in instance.items(): if re.search(pattern, k): yield from validator.descend( v, subschema, path=k, schema_path=pattern, ) def propertyNames(validator, propertyNames, instance, schema): if not validator.is_type(instance, "object"): return for property in instance: yield from validator.descend(instance=property, schema=propertyNames) def additionalProperties(validator, aP, instance, schema): if not validator.is_type(instance, "object"): return extras = set(find_additional_properties(instance, schema)) if validator.is_type(aP, "object"): for extra in extras: yield from validator.descend(instance[extra], aP, path=extra) elif not aP and extras: if "patternProperties" in schema: verb = "does" if len(extras) == 1 else "do" joined = ", ".join(repr(each) for each in sorted(extras)) patterns = ", ".join( repr(each) for each in sorted(schema["patternProperties"]) ) error = f"{joined} {verb} not match any of the regexes: {patterns}" yield ValidationError(error) else: error = "Additional properties are not allowed (%s %s unexpected)" yield ValidationError(error % extras_msg(sorted(extras, key=str))) def items(validator, items, instance, schema): if not 
validator.is_type(instance, "array"): return prefix = len(schema.get("prefixItems", [])) total = len(instance) extra = total - prefix if extra <= 0: return if items is False: rest = instance[prefix:] if extra != 1 else instance[prefix] item = "items" if prefix != 1 else "item" yield ValidationError( f"Expected at most {prefix} {item} but found {extra} " f"extra: {rest!r}", ) else: for index in range(prefix, total): yield from validator.descend( instance=instance[index], schema=items, path=index, ) def const(validator, const, instance, schema): if not equal(instance, const): yield ValidationError(f"{const!r} was expected") def contains(validator, contains, instance, schema): if not validator.is_type(instance, "array"): return matches = 0 min_contains = schema.get("minContains", 1) max_contains = schema.get("maxContains", len(instance)) contains_validator = validator.evolve(schema=contains) for each in instance: if contains_validator.is_valid(each): matches += 1 if matches > max_contains: yield ValidationError( "Too many items match the given schema " f"(expected at most {max_contains})", validator="maxContains", validator_value=max_contains, ) return if matches < min_contains: if not matches: yield ValidationError( f"{instance!r} does not contain items " "matching the given schema", ) else: yield ValidationError( "Too few items match the given schema (expected at least " f"{min_contains} but only {matches} matched)", validator="minContains", validator_value=min_contains, ) def exclusiveMinimum(validator, minimum, instance, schema): if not validator.is_type(instance, "number"): return if instance <= minimum: yield ValidationError( f"{instance!r} is less than or equal to " f"the minimum of {minimum!r}", ) def exclusiveMaximum(validator, maximum, instance, schema): if not validator.is_type(instance, "number"): return if instance >= maximum: yield ValidationError( f"{instance!r} is greater than or equal " f"to the maximum of {maximum!r}", ) def minimum(validator, minimum, instance, schema): if not validator.is_type(instance, "number"): return if instance < minimum: message = f"{instance!r} is less than the minimum of {minimum!r}" yield ValidationError(message) def maximum(validator, maximum, instance, schema): if not validator.is_type(instance, "number"): return if instance > maximum: message = f"{instance!r} is greater than the maximum of {maximum!r}" yield ValidationError(message) def multipleOf(validator, dB, instance, schema): if not validator.is_type(instance, "number"): return if isinstance(dB, float): quotient = instance / dB try: failed = int(quotient) != quotient except OverflowError: # When `instance` is large and `dB` is less than one, # quotient can overflow to infinity; and then casting to int # raises an error. # # In this case we fall back to Fraction logic, which is # exact and cannot overflow. The performance is also # acceptable: we try the fast all-float option first, and # we know that fraction(dB) can have at most a few hundred # digits in each part. The worst-case slowdown is therefore # for already-slow enormous integers or Decimals. 
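# --- Illustrative sketch, not part of the packaged sources ----------------
# A standalone toy version of the fallback described in the comment above;
# `is_multiple_of` is an invented name, not the library's internal function.
from fractions import Fraction

def is_multiple_of(instance, divisor):
    quotient = instance / divisor
    try:
        return int(quotient) == quotient      # fast all-float path
    except OverflowError:
        # A huge value divided by a tiny one can overflow to infinity, and
        # int(inf) raises; Fraction arithmetic stays exact instead.
        return (Fraction(instance) / Fraction(divisor)).denominator == 1

print(is_multiple_of(7.5, 2.5))       # True: 7.5 is an exact multiple of 2.5
print(is_multiple_of(1e308, 1e-300))  # exercises the exact Fraction fallback
# ---------------------------------------------------------------------------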
failed = (Fraction(instance) / Fraction(dB)).denominator != 1 else: failed = instance % dB if failed: yield ValidationError(f"{instance!r} is not a multiple of {dB}") def minItems(validator, mI, instance, schema): if validator.is_type(instance, "array") and len(instance) < mI: message = "should be non-empty" if mI == 1 else "is too short" yield ValidationError(f"{instance!r} {message}") def maxItems(validator, mI, instance, schema): if validator.is_type(instance, "array") and len(instance) > mI: message = "is expected to be empty" if mI == 0 else "is too long" yield ValidationError(f"{instance!r} {message}") def uniqueItems(validator, uI, instance, schema): if ( uI and validator.is_type(instance, "array") and not uniq(instance) ): yield ValidationError(f"{instance!r} has non-unique elements") def pattern(validator, patrn, instance, schema): if ( validator.is_type(instance, "string") and not re.search(patrn, instance) ): yield ValidationError(f"{instance!r} does not match {patrn!r}") def format(validator, format, instance, schema): if validator.format_checker is not None: try: validator.format_checker.check(instance, format) except FormatError as error: yield ValidationError(error.message, cause=error.cause) def minLength(validator, mL, instance, schema): if validator.is_type(instance, "string") and len(instance) < mL: message = "should be non-empty" if mL == 1 else "is too short" yield ValidationError(f"{instance!r} {message}") def maxLength(validator, mL, instance, schema): if validator.is_type(instance, "string") and len(instance) > mL: message = "is expected to be empty" if mL == 0 else "is too long" yield ValidationError(f"{instance!r} {message}") def dependentRequired(validator, dependentRequired, instance, schema): if not validator.is_type(instance, "object"): return for property, dependency in dependentRequired.items(): if property not in instance: continue for each in dependency: if each not in instance: message = f"{each!r} is a dependency of {property!r}" yield ValidationError(message) def dependentSchemas(validator, dependentSchemas, instance, schema): if not validator.is_type(instance, "object"): return for property, dependency in dependentSchemas.items(): if property not in instance: continue yield from validator.descend( instance, dependency, schema_path=property, ) def enum(validator, enums, instance, schema): if all(not equal(each, instance) for each in enums): yield ValidationError(f"{instance!r} is not one of {enums!r}") def ref(validator, ref, instance, schema): yield from validator._validate_reference(ref=ref, instance=instance) def dynamicRef(validator, dynamicRef, instance, schema): yield from validator._validate_reference(ref=dynamicRef, instance=instance) def type(validator, types, instance, schema): types = ensure_list(types) if not any(validator.is_type(instance, type) for type in types): reprs = ", ".join(repr(type) for type in types) yield ValidationError(f"{instance!r} is not of type {reprs}") def properties(validator, properties, instance, schema): if not validator.is_type(instance, "object"): return for property, subschema in properties.items(): if property in instance: yield from validator.descend( instance[property], subschema, path=property, schema_path=property, ) def required(validator, required, instance, schema): if not validator.is_type(instance, "object"): return for property in required: if property not in instance: yield ValidationError(f"{property!r} is a required property") def minProperties(validator, mP, instance, schema): if 
validator.is_type(instance, "object") and len(instance) < mP: message = ( "should be non-empty" if mP == 1 else "does not have enough properties" ) yield ValidationError(f"{instance!r} {message}") def maxProperties(validator, mP, instance, schema): if not validator.is_type(instance, "object"): return if validator.is_type(instance, "object") and len(instance) > mP: message = ( "is expected to be empty" if mP == 0 else "has too many properties" ) yield ValidationError(f"{instance!r} {message}") def allOf(validator, allOf, instance, schema): for index, subschema in enumerate(allOf): yield from validator.descend(instance, subschema, schema_path=index) def anyOf(validator, anyOf, instance, schema): all_errors = [] for index, subschema in enumerate(anyOf): errs = list(validator.descend(instance, subschema, schema_path=index)) if not errs: break all_errors.extend(errs) else: yield ValidationError( f"{instance!r} is not valid under any of the given schemas", context=all_errors, ) def oneOf(validator, oneOf, instance, schema): subschemas = enumerate(oneOf) all_errors = [] for index, subschema in subschemas: errs = list(validator.descend(instance, subschema, schema_path=index)) if not errs: first_valid = subschema break all_errors.extend(errs) else: yield ValidationError( f"{instance!r} is not valid under any of the given schemas", context=all_errors, ) more_valid = [ each for _, each in subschemas if validator.evolve(schema=each).is_valid(instance) ] if more_valid: more_valid.append(first_valid) reprs = ", ".join(repr(schema) for schema in more_valid) yield ValidationError(f"{instance!r} is valid under each of {reprs}") def not_(validator, not_schema, instance, schema): if validator.evolve(schema=not_schema).is_valid(instance): message = f"{instance!r} should not be valid under {not_schema!r}" yield ValidationError(message) def if_(validator, if_schema, instance, schema): if validator.evolve(schema=if_schema).is_valid(instance): if "then" in schema: then = schema["then"] yield from validator.descend(instance, then, schema_path="then") elif "else" in schema: else_ = schema["else"] yield from validator.descend(instance, else_, schema_path="else") def unevaluatedItems(validator, unevaluatedItems, instance, schema): if not validator.is_type(instance, "array"): return evaluated_item_indexes = find_evaluated_item_indexes_by_schema( validator, instance, schema, ) unevaluated_items = [ item for index, item in enumerate(instance) if index not in evaluated_item_indexes ] if unevaluated_items: error = "Unevaluated items are not allowed (%s %s unexpected)" yield ValidationError(error % extras_msg(unevaluated_items)) def unevaluatedProperties(validator, unevaluatedProperties, instance, schema): if not validator.is_type(instance, "object"): return evaluated_keys = find_evaluated_property_keys_by_schema( validator, instance, schema, ) unevaluated_keys = [] for property in instance: if property not in evaluated_keys: for _ in validator.descend( instance[property], unevaluatedProperties, path=property, schema_path=property, ): # FIXME: Include context for each unevaluated property # indicating why it's invalid under the subschema. 
unevaluated_keys.append(property) # noqa: PERF401 if unevaluated_keys: if unevaluatedProperties is False: error = "Unevaluated properties are not allowed (%s %s unexpected)" extras = sorted(unevaluated_keys, key=str) yield ValidationError(error % extras_msg(extras)) else: error = ( "Unevaluated properties are not valid under " "the given schema (%s %s unevaluated and invalid)" ) yield ValidationError(error % extras_msg(unevaluated_keys)) def prefixItems(validator, prefixItems, instance, schema): if not validator.is_type(instance, "array"): return for (index, item), subschema in zip(enumerate(instance), prefixItems): yield from validator.descend( instance=item, schema=subschema, schema_path=index, path=index, ) PK!5W;W;_legacy_keywords.pynu[import re from referencing.jsonschema import lookup_recursive_ref from jsonschema import _utils from jsonschema.exceptions import ValidationError def ignore_ref_siblings(schema): """ Ignore siblings of ``$ref`` if it is present. Otherwise, return all keywords. Suitable for use with `create`'s ``applicable_validators`` argument. """ ref = schema.get("$ref") if ref is not None: return [("$ref", ref)] else: return schema.items() def dependencies_draft3(validator, dependencies, instance, schema): if not validator.is_type(instance, "object"): return for property, dependency in dependencies.items(): if property not in instance: continue if validator.is_type(dependency, "object"): yield from validator.descend( instance, dependency, schema_path=property, ) elif validator.is_type(dependency, "string"): if dependency not in instance: message = f"{dependency!r} is a dependency of {property!r}" yield ValidationError(message) else: for each in dependency: if each not in instance: message = f"{each!r} is a dependency of {property!r}" yield ValidationError(message) def dependencies_draft4_draft6_draft7( validator, dependencies, instance, schema, ): """ Support for the ``dependencies`` keyword from pre-draft 2019-09. In later drafts, the keyword was split into separate ``dependentRequired`` and ``dependentSchemas`` validators. 
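# --- Illustrative sketch, not part of the packaged sources ----------------
# The same constraint written with the pre-2019-09 "dependencies" keyword
# handled here, and with the newer "dependentRequired" keyword that replaced
# it (schemas and instances below are invented for illustration):
from jsonschema import Draft7Validator, Draft202012Validator

old_schema = {"dependencies": {"credit_card": ["billing_address"]}}
new_schema = {"dependentRequired": {"credit_card": ["billing_address"]}}

instance = {"credit_card": "4111-1111-1111-1111"}
print(Draft7Validator(old_schema).is_valid(instance))       # False: billing_address missing
print(Draft202012Validator(new_schema).is_valid(instance))  # False for the same reason
print(Draft202012Validator(new_schema).is_valid(
    {"credit_card": "4111-1111-1111-1111", "billing_address": "1 Main St"},
))                                                          # True
# ---------------------------------------------------------------------------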
""" if not validator.is_type(instance, "object"): return for property, dependency in dependencies.items(): if property not in instance: continue if validator.is_type(dependency, "array"): for each in dependency: if each not in instance: message = f"{each!r} is a dependency of {property!r}" yield ValidationError(message) else: yield from validator.descend( instance, dependency, schema_path=property, ) def disallow_draft3(validator, disallow, instance, schema): for disallowed in _utils.ensure_list(disallow): if validator.evolve(schema={"type": [disallowed]}).is_valid(instance): message = f"{disallowed!r} is disallowed for {instance!r}" yield ValidationError(message) def extends_draft3(validator, extends, instance, schema): if validator.is_type(extends, "object"): yield from validator.descend(instance, extends) return for index, subschema in enumerate(extends): yield from validator.descend(instance, subschema, schema_path=index) def items_draft3_draft4(validator, items, instance, schema): if not validator.is_type(instance, "array"): return if validator.is_type(items, "object"): for index, item in enumerate(instance): yield from validator.descend(item, items, path=index) else: for (index, item), subschema in zip(enumerate(instance), items): yield from validator.descend( item, subschema, path=index, schema_path=index, ) def additionalItems(validator, aI, instance, schema): if ( not validator.is_type(instance, "array") or validator.is_type(schema.get("items", {}), "object") ): return len_items = len(schema.get("items", [])) if validator.is_type(aI, "object"): for index, item in enumerate(instance[len_items:], start=len_items): yield from validator.descend(item, aI, path=index) elif not aI and len(instance) > len(schema.get("items", [])): error = "Additional items are not allowed (%s %s unexpected)" yield ValidationError( error % _utils.extras_msg(instance[len(schema.get("items", [])):]), ) def items_draft6_draft7_draft201909(validator, items, instance, schema): if not validator.is_type(instance, "array"): return if validator.is_type(items, "array"): for (index, item), subschema in zip(enumerate(instance), items): yield from validator.descend( item, subschema, path=index, schema_path=index, ) else: for index, item in enumerate(instance): yield from validator.descend(item, items, path=index) def minimum_draft3_draft4(validator, minimum, instance, schema): if not validator.is_type(instance, "number"): return if schema.get("exclusiveMinimum", False): failed = instance <= minimum cmp = "less than or equal to" else: failed = instance < minimum cmp = "less than" if failed: message = f"{instance!r} is {cmp} the minimum of {minimum!r}" yield ValidationError(message) def maximum_draft3_draft4(validator, maximum, instance, schema): if not validator.is_type(instance, "number"): return if schema.get("exclusiveMaximum", False): failed = instance >= maximum cmp = "greater than or equal to" else: failed = instance > maximum cmp = "greater than" if failed: message = f"{instance!r} is {cmp} the maximum of {maximum!r}" yield ValidationError(message) def properties_draft3(validator, properties, instance, schema): if not validator.is_type(instance, "object"): return for property, subschema in properties.items(): if property in instance: yield from validator.descend( instance[property], subschema, path=property, schema_path=property, ) elif subschema.get("required", False): error = ValidationError(f"{property!r} is a required property") error._set( validator="required", validator_value=subschema["required"], 
instance=instance, schema=schema, ) error.path.appendleft(property) error.schema_path.extend([property, "required"]) yield error def type_draft3(validator, types, instance, schema): types = _utils.ensure_list(types) all_errors = [] for index, type in enumerate(types): if validator.is_type(type, "object"): errors = list(validator.descend(instance, type, schema_path=index)) if not errors: return all_errors.extend(errors) elif validator.is_type(instance, type): return reprs = [] for type in types: try: reprs.append(repr(type["name"])) except Exception: # noqa: BLE001 reprs.append(repr(type)) yield ValidationError( f"{instance!r} is not of type {', '.join(reprs)}", context=all_errors, ) def contains_draft6_draft7(validator, contains, instance, schema): if not validator.is_type(instance, "array"): return if not any( validator.evolve(schema=contains).is_valid(element) for element in instance ): yield ValidationError( f"None of {instance!r} are valid under the given schema", ) def recursiveRef(validator, recursiveRef, instance, schema): resolved = lookup_recursive_ref(validator._resolver) yield from validator.descend( instance, resolved.contents, resolver=resolved.resolver, ) def find_evaluated_item_indexes_by_schema(validator, instance, schema): """ Get all indexes of items that get evaluated under the current schema. Covers all keywords related to unevaluatedItems: items, prefixItems, if, then, else, contains, unevaluatedItems, allOf, oneOf, anyOf """ if validator.is_type(schema, "boolean"): return [] evaluated_indexes = [] ref = schema.get("$ref") if ref is not None: resolved = validator._resolver.lookup(ref) evaluated_indexes.extend( find_evaluated_item_indexes_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) if "$recursiveRef" in schema: resolved = lookup_recursive_ref(validator._resolver) evaluated_indexes.extend( find_evaluated_item_indexes_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) if "items" in schema: if "additionalItems" in schema: return list(range(len(instance))) if validator.is_type(schema["items"], "object"): return list(range(len(instance))) evaluated_indexes += list(range(len(schema["items"]))) if "if" in schema: if validator.evolve(schema=schema["if"]).is_valid(instance): evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["if"], ) if "then" in schema: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["then"], ) elif "else" in schema: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["else"], ) for keyword in ["contains", "unevaluatedItems"]: if keyword in schema: for k, v in enumerate(instance): if validator.evolve(schema=schema[keyword]).is_valid(v): evaluated_indexes.append(k) for keyword in ["allOf", "oneOf", "anyOf"]: if keyword in schema: for subschema in schema[keyword]: errs = next(validator.descend(instance, subschema), None) if errs is None: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, subschema, ) return evaluated_indexes def unevaluatedItems_draft2019(validator, unevaluatedItems, instance, schema): if not validator.is_type(instance, "array"): return evaluated_item_indexes = find_evaluated_item_indexes_by_schema( validator, instance, schema, ) unevaluated_items = [ item for index, item in enumerate(instance) if index not in evaluated_item_indexes ] if unevaluated_items: error 
= "Unevaluated items are not allowed (%s %s unexpected)" yield ValidationError(error % _utils.extras_msg(unevaluated_items)) def find_evaluated_property_keys_by_schema(validator, instance, schema): if validator.is_type(schema, "boolean"): return [] evaluated_keys = [] ref = schema.get("$ref") if ref is not None: resolved = validator._resolver.lookup(ref) evaluated_keys.extend( find_evaluated_property_keys_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) if "$recursiveRef" in schema: resolved = lookup_recursive_ref(validator._resolver) evaluated_keys.extend( find_evaluated_property_keys_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) for keyword in [ "properties", "additionalProperties", "unevaluatedProperties", ]: if keyword in schema: schema_value = schema[keyword] if validator.is_type(schema_value, "boolean") and schema_value: evaluated_keys += instance.keys() elif validator.is_type(schema_value, "object"): for property in schema_value: if property in instance: evaluated_keys.append(property) if "patternProperties" in schema: for property in instance: for pattern in schema["patternProperties"]: if re.search(pattern, property): evaluated_keys.append(property) if "dependentSchemas" in schema: for property, subschema in schema["dependentSchemas"].items(): if property not in instance: continue evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, subschema, ) for keyword in ["allOf", "oneOf", "anyOf"]: if keyword in schema: for subschema in schema[keyword]: errs = next(validator.descend(instance, subschema), None) if errs is None: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, subschema, ) if "if" in schema: if validator.evolve(schema=schema["if"]).is_valid(instance): evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["if"], ) if "then" in schema: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["then"], ) elif "else" in schema: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["else"], ) return evaluated_keys def unevaluatedProperties_draft2019(validator, uP, instance, schema): if not validator.is_type(instance, "object"): return evaluated_keys = find_evaluated_property_keys_by_schema( validator, instance, schema, ) unevaluated_keys = [] for property in instance: if property not in evaluated_keys: for _ in validator.descend( instance[property], uP, path=property, schema_path=property, ): # FIXME: Include context for each unevaluated property # indicating why it's invalid under the subschema. 
unevaluated_keys.append(property) # noqa: PERF401 if unevaluated_keys: if uP is False: error = "Unevaluated properties are not allowed (%s %s unexpected)" extras = sorted(unevaluated_keys, key=str) yield ValidationError(error % _utils.extras_msg(extras)) else: error = ( "Unevaluated properties are not valid under " "the given schema (%s %s unevaluated and invalid)" ) yield ValidationError(error % _utils.extras_msg(unevaluated_keys)) PK!nJ _types.pynu[from __future__ import annotations from typing import Any, Callable, Mapping import numbers from attrs import evolve, field, frozen from rpds import HashTrieMap from jsonschema.exceptions import UndefinedTypeCheck # unfortunately, the type of HashTrieMap is generic, and if used as an attrs # converter, the generic type is presented to mypy, which then fails to match # the concrete type of a type checker mapping # this "do nothing" wrapper presents the correct information to mypy def _typed_map_converter( init_val: Mapping[str, Callable[[TypeChecker, Any], bool]], ) -> HashTrieMap[str, Callable[[TypeChecker, Any], bool]]: return HashTrieMap.convert(init_val) def is_array(checker, instance): return isinstance(instance, list) def is_bool(checker, instance): return isinstance(instance, bool) def is_integer(checker, instance): # bool inherits from int, so ensure bools aren't reported as ints if isinstance(instance, bool): return False return isinstance(instance, int) def is_null(checker, instance): return instance is None def is_number(checker, instance): # bool inherits from int, so ensure bools aren't reported as ints if isinstance(instance, bool): return False return isinstance(instance, numbers.Number) def is_object(checker, instance): return isinstance(instance, dict) def is_string(checker, instance): return isinstance(instance, str) def is_any(checker, instance): return True @frozen(repr=False) class TypeChecker: """ A :kw:`type` property checker. A `TypeChecker` performs type checking for a `Validator`, converting between the defined JSON Schema types and some associated Python types or objects. Modifying the behavior just mentioned by redefining which Python objects are considered to be of which JSON Schema types can be done using `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`. Arguments: type_checkers: The initial mapping of types to their checking functions. """ _type_checkers: HashTrieMap[ str, Callable[[TypeChecker, Any], bool], ] = field(default=HashTrieMap(), converter=_typed_map_converter) def __repr__(self): types = ", ".join(repr(k) for k in sorted(self._type_checkers)) return f"<{self.__class__.__name__} types={{{types}}}>" def is_type(self, instance, type: str) -> bool: """ Check if the instance is of the appropriate type. Arguments: instance: The instance to check type: The name of the type that is expected. Raises: `jsonschema.exceptions.UndefinedTypeCheck`: if ``type`` is unknown to this object. """ try: fn = self._type_checkers[type] except KeyError: raise UndefinedTypeCheck(type) from None return fn(self, instance) def redefine(self, type: str, fn) -> TypeChecker: """ Produce a new checker with the given type redefined. Arguments: type: The name of the type to check. fn (collections.abc.Callable): A callable taking exactly two parameters - the type checker calling the function and the instance to check. The function should return true if instance is of this type and false otherwise. 
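# --- Illustrative sketch, not part of the packaged sources ----------------
# Redefining "number" via `TypeChecker.redefine` so a custom wrapper type
# counts, then building a validator class with `jsonschema.validators.extend`.
# `Money` and `is_number_or_money` are invented names for this illustration.
from jsonschema import Draft202012Validator, validators

class Money:
    def __init__(self, amount):
        self.amount = amount

def is_number_or_money(checker, instance):
    return (
        Draft202012Validator.TYPE_CHECKER.is_type(instance, "number")
        or isinstance(instance, Money)
    )

type_checker = Draft202012Validator.TYPE_CHECKER.redefine(
    "number", is_number_or_money,
)
CustomValidator = validators.extend(
    Draft202012Validator, type_checker=type_checker,
)
print(CustomValidator({"type": "number"}).is_valid(Money(10)))       # True
print(Draft202012Validator({"type": "number"}).is_valid(Money(10)))  # False
# ---------------------------------------------------------------------------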
""" return self.redefine_many({type: fn}) def redefine_many(self, definitions=()) -> TypeChecker: """ Produce a new checker with the given types redefined. Arguments: definitions (dict): A dictionary mapping types to their checking functions. """ type_checkers = self._type_checkers.update(definitions) return evolve(self, type_checkers=type_checkers) def remove(self, *types) -> TypeChecker: """ Produce a new checker with the given types forgotten. Arguments: types: the names of the types to remove. Raises: `jsonschema.exceptions.UndefinedTypeCheck`: if any given type is unknown to this object """ type_checkers = self._type_checkers for each in types: try: type_checkers = type_checkers.remove(each) except KeyError: raise UndefinedTypeCheck(each) from None return evolve(self, type_checkers=type_checkers) draft3_type_checker = TypeChecker( { "any": is_any, "array": is_array, "boolean": is_bool, "integer": is_integer, "object": is_object, "null": is_null, "number": is_number, "string": is_string, }, ) draft4_type_checker = draft3_type_checker.remove("any") draft6_type_checker = draft4_type_checker.redefine( "integer", lambda checker, instance: ( is_integer(checker, instance) or isinstance(instance, float) and instance.is_integer() ), ) draft7_type_checker = draft6_type_checker draft201909_type_checker = draft7_type_checker draft202012_type_checker = draft201909_type_checker PK!;8bb _typing.pynu[""" Some (initially private) typing helpers for jsonschema's types. """ from typing import Any, Callable, Iterable, Protocol, Tuple, Union import referencing.jsonschema from jsonschema.protocols import Validator class SchemaKeywordValidator(Protocol): def __call__( self, validator: Validator, value: Any, instance: Any, schema: referencing.jsonschema.Schema, ) -> None: ... id_of = Callable[[referencing.jsonschema.Schema], Union[str, None]] ApplicableValidators = Callable[ [referencing.jsonschema.Schema], Iterable[Tuple[str, Any]], ] PK!?0)) _utils.pynu[from collections.abc import Mapping, MutableMapping, Sequence from urllib.parse import urlsplit import itertools import re class URIDict(MutableMapping): """ Dictionary which uses normalized URIs as keys. """ def normalize(self, uri): return urlsplit(uri).geturl() def __init__(self, *args, **kwargs): self.store = dict() self.store.update(*args, **kwargs) def __getitem__(self, uri): return self.store[self.normalize(uri)] def __setitem__(self, uri, value): self.store[self.normalize(uri)] = value def __delitem__(self, uri): del self.store[self.normalize(uri)] def __iter__(self): return iter(self.store) def __len__(self): # pragma: no cover -- untested, but to be removed return len(self.store) def __repr__(self): # pragma: no cover -- untested, but to be removed return repr(self.store) class Unset: """ An as-of-yet unset attribute or unprovided default parameter. """ def __repr__(self): # pragma: no cover return "" def format_as_index(container, indices): """ Construct a single string containing indexing operations for the indices. For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] Arguments: container (str): A word to use for the thing being indexed indices (sequence): The indices to format. """ if not indices: return container return f"{container}[{']['.join(repr(index) for index in indices)}]" def find_additional_properties(instance, schema): """ Return the set of additional properties for the given ``instance``. Weeds out properties that should have been validated by ``properties`` and / or ``patternProperties``. 
Assumes ``instance`` is dict-like already. """ properties = schema.get("properties", {}) patterns = "|".join(schema.get("patternProperties", {})) for property in instance: if property not in properties: if patterns and re.search(patterns, property): continue yield property def extras_msg(extras): """ Create an error message for extra items or properties. """ verb = "was" if len(extras) == 1 else "were" return ", ".join(repr(extra) for extra in extras), verb def ensure_list(thing): """ Wrap ``thing`` in a list if it's a single str. Otherwise, return it unchanged. """ if isinstance(thing, str): return [thing] return thing def _mapping_equal(one, two): """ Check if two mappings are equal using the semantics of `equal`. """ if len(one) != len(two): return False return all( key in two and equal(value, two[key]) for key, value in one.items() ) def _sequence_equal(one, two): """ Check if two sequences are equal using the semantics of `equal`. """ if len(one) != len(two): return False return all(equal(i, j) for i, j in zip(one, two)) def equal(one, two): """ Check if two things are equal evading some Python type hierarchy semantics. Specifically in JSON Schema, evade `bool` inheriting from `int`, recursing into sequences to do the same. """ if one is two: return True if isinstance(one, str) or isinstance(two, str): return one == two if isinstance(one, Sequence) and isinstance(two, Sequence): return _sequence_equal(one, two) if isinstance(one, Mapping) and isinstance(two, Mapping): return _mapping_equal(one, two) return unbool(one) == unbool(two) def unbool(element, true=object(), false=object()): """ A hack to make True and 1 and False and 0 unique for ``uniq``. """ if element is True: return true elif element is False: return false return element def uniq(container): """ Check if all of a container's elements are unique. Tries to rely on the container being recursively sortable, or otherwise falls back on (slow) brute force. """ try: sort = sorted(unbool(i) for i in container) sliced = itertools.islice(sort, 1, None) for i, j in zip(sort, sliced): if equal(i, j): return False except (NotImplementedError, TypeError): seen = [] for e in container: e = unbool(e) for i in seen: if equal(i, e): return False seen.append(e) return True def find_evaluated_item_indexes_by_schema(validator, instance, schema): """ Get all indexes of items that get evaluated under the current schema. 
Covers all keywords related to unevaluatedItems: items, prefixItems, if, then, else, contains, unevaluatedItems, allOf, oneOf, anyOf """ if validator.is_type(schema, "boolean"): return [] evaluated_indexes = [] if "items" in schema: return list(range(len(instance))) ref = schema.get("$ref") if ref is not None: resolved = validator._resolver.lookup(ref) evaluated_indexes.extend( find_evaluated_item_indexes_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) dynamicRef = schema.get("$dynamicRef") if dynamicRef is not None: resolved = validator._resolver.lookup(dynamicRef) evaluated_indexes.extend( find_evaluated_item_indexes_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) if "prefixItems" in schema: evaluated_indexes += list(range(len(schema["prefixItems"]))) if "if" in schema: if validator.evolve(schema=schema["if"]).is_valid(instance): evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["if"], ) if "then" in schema: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["then"], ) elif "else" in schema: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, schema["else"], ) for keyword in ["contains", "unevaluatedItems"]: if keyword in schema: for k, v in enumerate(instance): if validator.evolve(schema=schema[keyword]).is_valid(v): evaluated_indexes.append(k) for keyword in ["allOf", "oneOf", "anyOf"]: if keyword in schema: for subschema in schema[keyword]: errs = next(validator.descend(instance, subschema), None) if errs is None: evaluated_indexes += find_evaluated_item_indexes_by_schema( validator, instance, subschema, ) return evaluated_indexes def find_evaluated_property_keys_by_schema(validator, instance, schema): """ Get all keys of items that get evaluated under the current schema. 
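# --- Illustrative sketch, not part of the packaged sources ----------------
# The evaluated-key tracking above is what lets "unevaluatedProperties" see
# through applicators such as "allOf":
from jsonschema import Draft202012Validator

schema = {
    "allOf": [{"properties": {"name": {"type": "string"}}}],
    "unevaluatedProperties": False,
}
v = Draft202012Validator(schema)
print(v.is_valid({"name": "Ada"}))              # True: "name" was evaluated inside allOf
print(v.is_valid({"name": "Ada", "extra": 1}))  # False: "extra" was never evaluated
# ---------------------------------------------------------------------------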
Covers all keywords related to unevaluatedProperties: properties, additionalProperties, unevaluatedProperties, patternProperties, dependentSchemas, allOf, oneOf, anyOf, if, then, else """ if validator.is_type(schema, "boolean"): return [] evaluated_keys = [] ref = schema.get("$ref") if ref is not None: resolved = validator._resolver.lookup(ref) evaluated_keys.extend( find_evaluated_property_keys_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) dynamicRef = schema.get("$dynamicRef") if dynamicRef is not None: resolved = validator._resolver.lookup(dynamicRef) evaluated_keys.extend( find_evaluated_property_keys_by_schema( validator.evolve( schema=resolved.contents, _resolver=resolved.resolver, ), instance, resolved.contents, ), ) for keyword in [ "properties", "additionalProperties", "unevaluatedProperties", ]: if keyword in schema: schema_value = schema[keyword] if validator.is_type(schema_value, "boolean") and schema_value: evaluated_keys += instance.keys() elif validator.is_type(schema_value, "object"): for property in schema_value: if property in instance: evaluated_keys.append(property) if "patternProperties" in schema: for property in instance: for pattern in schema["patternProperties"]: if re.search(pattern, property): evaluated_keys.append(property) if "dependentSchemas" in schema: for property, subschema in schema["dependentSchemas"].items(): if property not in instance: continue evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, subschema, ) for keyword in ["allOf", "oneOf", "anyOf"]: if keyword in schema: for subschema in schema[keyword]: errs = next(validator.descend(instance, subschema), None) if errs is None: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, subschema, ) if "if" in schema: if validator.evolve(schema=schema["if"]).is_valid(instance): evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["if"], ) if "then" in schema: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["then"], ) elif "else" in schema: evaluated_keys += find_evaluated_property_keys_by_schema( validator, instance, schema["else"], ) return evaluated_keys PK! xg!g!cli.pynu[""" The ``jsonschema`` command line. """ from importlib import metadata from json import JSONDecodeError from textwrap import dedent import argparse import json import sys import traceback import warnings try: from pkgutil import resolve_name except ImportError: from pkgutil_resolve_name import resolve_name # type: ignore[no-redef] from attrs import define, field from jsonschema.exceptions import SchemaError from jsonschema.validators import _RefResolver, validator_for warnings.warn( ( "The jsonschema CLI is deprecated and will be removed in a future " "version. 
Please use check-jsonschema instead, which can be installed " "from https://pypi.org/project/check-jsonschema/" ), DeprecationWarning, stacklevel=2, ) class _CannotLoadFile(Exception): pass @define class _Outputter: _formatter = field() _stdout = field() _stderr = field() @classmethod def from_arguments(cls, arguments, stdout, stderr): if arguments["output"] == "plain": formatter = _PlainFormatter(arguments["error_format"]) elif arguments["output"] == "pretty": formatter = _PrettyFormatter() return cls(formatter=formatter, stdout=stdout, stderr=stderr) def load(self, path): try: file = open(path) # noqa: SIM115, PTH123 except FileNotFoundError as error: self.filenotfound_error(path=path, exc_info=sys.exc_info()) raise _CannotLoadFile() from error with file: try: return json.load(file) except JSONDecodeError as error: self.parsing_error(path=path, exc_info=sys.exc_info()) raise _CannotLoadFile() from error def filenotfound_error(self, **kwargs): self._stderr.write(self._formatter.filenotfound_error(**kwargs)) def parsing_error(self, **kwargs): self._stderr.write(self._formatter.parsing_error(**kwargs)) def validation_error(self, **kwargs): self._stderr.write(self._formatter.validation_error(**kwargs)) def validation_success(self, **kwargs): self._stdout.write(self._formatter.validation_success(**kwargs)) @define class _PrettyFormatter: _ERROR_MSG = dedent( """\ ===[{type}]===({path})=== {body} ----------------------------- """, ) _SUCCESS_MSG = "===[SUCCESS]===({path})===\n" def filenotfound_error(self, path, exc_info): return self._ERROR_MSG.format( path=path, type="FileNotFoundError", body=f"{path!r} does not exist.", ) def parsing_error(self, path, exc_info): exc_type, exc_value, exc_traceback = exc_info exc_lines = "".join( traceback.format_exception(exc_type, exc_value, exc_traceback), ) return self._ERROR_MSG.format( path=path, type=exc_type.__name__, body=exc_lines, ) def validation_error(self, instance_path, error): return self._ERROR_MSG.format( path=instance_path, type=error.__class__.__name__, body=error, ) def validation_success(self, instance_path): return self._SUCCESS_MSG.format(path=instance_path) @define class _PlainFormatter: _error_format = field() def filenotfound_error(self, path, exc_info): return f"{path!r} does not exist.\n" def parsing_error(self, path, exc_info): return "Failed to parse {}: {}\n".format( "" if path == "" else repr(path), exc_info[1], ) def validation_error(self, instance_path, error): return self._error_format.format(file_name=instance_path, error=error) def validation_success(self, instance_path): return "" def _resolve_name_with_default(name): if "." not in name: name = "jsonschema." + name return resolve_name(name) parser = argparse.ArgumentParser( description="JSON Schema Validation CLI", ) parser.add_argument( "-i", "--instance", action="append", dest="instances", help=""" a path to a JSON instance (i.e. filename.json) to validate (may be specified multiple times). If no instances are provided via this option, one will be expected on standard input. """, ) parser.add_argument( "-F", "--error-format", help=""" the format to use for each validation error message, specified in a form suitable for str.format. This string will be passed one formatted object named 'error' for each ValidationError. Only provide this option when using --output=plain, which is the default. If this argument is unprovided and --output=plain is used, a simple default representation will be used. 
""", ) parser.add_argument( "-o", "--output", choices=["plain", "pretty"], default="plain", help=""" an output format to use. 'plain' (default) will produce minimal text with one line for each error, while 'pretty' will produce more detailed human-readable output on multiple lines. """, ) parser.add_argument( "-V", "--validator", type=_resolve_name_with_default, help=""" the fully qualified object name of a validator to use, or, for validators that are registered with jsonschema, simply the name of the class. """, ) parser.add_argument( "--base-uri", help=""" a base URI to assign to the provided schema, even if it does not declare one (via e.g. $id). This option can be used if you wish to resolve relative references to a particular URI (or local path) """, ) parser.add_argument( "--version", action="version", version=metadata.version("jsonschema"), ) parser.add_argument( "schema", help="the path to a JSON Schema to validate with (i.e. schema.json)", ) def parse_args(args): # noqa: D103 arguments = vars(parser.parse_args(args=args or ["--help"])) if arguments["output"] != "plain" and arguments["error_format"]: raise parser.error( "--error-format can only be used with --output plain", ) if arguments["output"] == "plain" and arguments["error_format"] is None: arguments["error_format"] = "{error.instance}: {error.message}\n" return arguments def _validate_instance(instance_path, instance, validator, outputter): invalid = False for error in validator.iter_errors(instance): invalid = True outputter.validation_error(instance_path=instance_path, error=error) if not invalid: outputter.validation_success(instance_path=instance_path) return invalid def main(args=sys.argv[1:]): # noqa: D103 sys.exit(run(arguments=parse_args(args=args))) def run(arguments, stdout=sys.stdout, stderr=sys.stderr, stdin=sys.stdin): # noqa: D103 outputter = _Outputter.from_arguments( arguments=arguments, stdout=stdout, stderr=stderr, ) try: schema = outputter.load(arguments["schema"]) except _CannotLoadFile: return 1 Validator = arguments["validator"] if Validator is None: Validator = validator_for(schema) try: Validator.check_schema(schema) except SchemaError as error: outputter.validation_error( instance_path=arguments["schema"], error=error, ) return 1 if arguments["instances"]: load, instances = outputter.load, arguments["instances"] else: def load(_): try: return json.load(stdin) except JSONDecodeError as error: outputter.parsing_error( path="", exc_info=sys.exc_info(), ) raise _CannotLoadFile() from error instances = [""] resolver = _RefResolver( base_uri=arguments["base_uri"], referrer=schema, ) if arguments["base_uri"] is not None else None validator = Validator(schema, resolver=resolver) exit_code = 0 for each in instances: try: instance = load(each) except _CannotLoadFile: exit_code = 1 else: exit_code |= _validate_instance( instance_path=each, instance=instance, validator=validator, outputter=outputter, ) return exit_code PK!硻:: exceptions.pynu[""" Validation errors, and some surrounding helpers. 
""" from __future__ import annotations from collections import defaultdict, deque from pprint import pformat from textwrap import dedent, indent from typing import TYPE_CHECKING, Any, ClassVar import heapq import itertools import warnings from attrs import define from referencing.exceptions import Unresolvable as _Unresolvable from jsonschema import _utils if TYPE_CHECKING: from collections.abc import Iterable, Mapping, MutableMapping, Sequence from jsonschema import _types WEAK_MATCHES: frozenset[str] = frozenset(["anyOf", "oneOf"]) STRONG_MATCHES: frozenset[str] = frozenset() _unset = _utils.Unset() def _pretty(thing: Any, prefix: str): """ Format something for an error message as prettily as we currently can. """ return indent(pformat(thing, width=72, sort_dicts=False), prefix).lstrip() def __getattr__(name): if name == "RefResolutionError": warnings.warn( _RefResolutionError._DEPRECATION_MESSAGE, DeprecationWarning, stacklevel=2, ) return _RefResolutionError raise AttributeError(f"module {__name__} has no attribute {name}") class _Error(Exception): _word_for_schema_in_error_message: ClassVar[str] _word_for_instance_in_error_message: ClassVar[str] def __init__( self, message: str, validator: str = _unset, # type: ignore[assignment] path: Iterable[str | int] = (), cause: Exception | None = None, context=(), validator_value: Any = _unset, instance: Any = _unset, schema: Mapping[str, Any] | bool = _unset, # type: ignore[assignment] schema_path: Iterable[str | int] = (), parent: _Error | None = None, type_checker: _types.TypeChecker = _unset, # type: ignore[assignment] ) -> None: super().__init__( message, validator, path, cause, context, validator_value, instance, schema, schema_path, parent, ) self.message = message self.path = self.relative_path = deque(path) self.schema_path = self.relative_schema_path = deque(schema_path) self.context = list(context) self.cause = self.__cause__ = cause self.validator = validator self.validator_value = validator_value self.instance = instance self.schema = schema self.parent = parent self._type_checker = type_checker for error in context: error.parent = self def __repr__(self) -> str: return f"<{self.__class__.__name__}: {self.message!r}>" def __str__(self) -> str: essential_for_verbose = ( self.validator, self.validator_value, self.instance, self.schema, ) if any(m is _unset for m in essential_for_verbose): return self.message schema_path = _utils.format_as_index( container=self._word_for_schema_in_error_message, indices=list(self.relative_schema_path)[:-1], ) instance_path = _utils.format_as_index( container=self._word_for_instance_in_error_message, indices=self.relative_path, ) prefix = 16 * " " return dedent( f"""\ {self.message} Failed validating {self.validator!r} in {schema_path}: {_pretty(self.schema, prefix=prefix)} On {instance_path}: {_pretty(self.instance, prefix=prefix)} """.rstrip(), ) @classmethod def create_from(cls, other: _Error): return cls(**other._contents()) @property def absolute_path(self) -> Sequence[str | int]: parent = self.parent if parent is None: return self.relative_path path = deque(self.relative_path) path.extendleft(reversed(parent.absolute_path)) return path @property def absolute_schema_path(self) -> Sequence[str | int]: parent = self.parent if parent is None: return self.relative_schema_path path = deque(self.relative_schema_path) path.extendleft(reversed(parent.absolute_schema_path)) return path @property def json_path(self) -> str: path = "$" for elem in self.absolute_path: if isinstance(elem, int): path += "[" + 
str(elem) + "]" else: path += "." + elem return path def _set( self, type_checker: _types.TypeChecker | None = None, **kwargs: Any, ) -> None: if type_checker is not None and self._type_checker is _unset: self._type_checker = type_checker for k, v in kwargs.items(): if getattr(self, k) is _unset: setattr(self, k, v) def _contents(self): attrs = ( "message", "cause", "context", "validator", "validator_value", "path", "schema_path", "instance", "schema", "parent", ) return {attr: getattr(self, attr) for attr in attrs} def _matches_type(self) -> bool: try: # We ignore this as we want to simply crash if this happens expected = self.schema["type"] # type: ignore[index] except (KeyError, TypeError): return False if isinstance(expected, str): return self._type_checker.is_type(self.instance, expected) return any( self._type_checker.is_type(self.instance, expected_type) for expected_type in expected ) class ValidationError(_Error): """ An instance was invalid under a provided schema. """ _word_for_schema_in_error_message = "schema" _word_for_instance_in_error_message = "instance" class SchemaError(_Error): """ A schema was invalid under its corresponding metaschema. """ _word_for_schema_in_error_message = "metaschema" _word_for_instance_in_error_message = "schema" @define(slots=False) class _RefResolutionError(Exception): """ A ref could not be resolved. """ _DEPRECATION_MESSAGE = ( "jsonschema.exceptions.RefResolutionError is deprecated as of version " "4.18.0. If you wish to catch potential reference resolution errors, " "directly catch referencing.exceptions.Unresolvable." ) _cause: Exception def __eq__(self, other): if self.__class__ is not other.__class__: return NotImplemented # pragma: no cover -- uncovered but deprecated # noqa: E501 return self._cause == other._cause def __str__(self) -> str: return str(self._cause) class _WrappedReferencingError(_RefResolutionError, _Unresolvable): # pragma: no cover -- partially uncovered but to be removed # noqa: E501 def __init__(self, cause: _Unresolvable): object.__setattr__(self, "_wrapped", cause) def __eq__(self, other): if other.__class__ is self.__class__: return self._wrapped == other._wrapped elif other.__class__ is self._wrapped.__class__: return self._wrapped == other return NotImplemented def __getattr__(self, attr): return getattr(self._wrapped, attr) def __hash__(self): return hash(self._wrapped) def __repr__(self): return f"" def __str__(self): return f"{self._wrapped.__class__.__name__}: {self._wrapped}" class UndefinedTypeCheck(Exception): """ A type checker was asked to check a type it did not have registered. """ def __init__(self, type: str) -> None: self.type = type def __str__(self) -> str: return f"Type {self.type!r} is unknown to this type checker" class UnknownType(Exception): """ A validator was asked to validate an instance against an unknown type. """ def __init__(self, type, instance, schema): self.type = type self.instance = instance self.schema = schema def __str__(self): prefix = 16 * " " return dedent( f"""\ Unknown type {self.type!r} for validator with schema: {_pretty(self.schema, prefix=prefix)} While checking instance: {_pretty(self.instance, prefix=prefix)} """.rstrip(), ) class FormatError(Exception): """ Validating a format failed. """ def __init__(self, message, cause=None): super().__init__(message, cause) self.message = message self.cause = self.__cause__ = cause def __str__(self): return self.message class ErrorTree: """ ErrorTrees make it easier to check which validations failed. 
""" _instance = _unset def __init__(self, errors: Iterable[ValidationError] = ()): self.errors: MutableMapping[str, ValidationError] = {} self._contents: Mapping[str, ErrorTree] = defaultdict(self.__class__) for error in errors: container = self for element in error.path: container = container[element] container.errors[error.validator] = error container._instance = error.instance def __contains__(self, index: str | int): """ Check whether ``instance[index]`` has any errors. """ return index in self._contents def __getitem__(self, index): """ Retrieve the child tree one level down at the given ``index``. If the index is not in the instance that this tree corresponds to and is not known by this tree, whatever error would be raised by ``instance.__getitem__`` will be propagated (usually this is some subclass of `LookupError`. """ if self._instance is not _unset and index not in self: self._instance[index] return self._contents[index] def __setitem__(self, index: str | int, value: ErrorTree): """ Add an error to the tree at the given ``index``. .. deprecated:: v4.20.0 Setting items on an `ErrorTree` is deprecated without replacement. To populate a tree, provide all of its sub-errors when you construct the tree. """ warnings.warn( "ErrorTree.__setitem__ is deprecated without replacement.", DeprecationWarning, stacklevel=2, ) self._contents[index] = value # type: ignore[index] def __iter__(self): """ Iterate (non-recursively) over the indices in the instance with errors. """ return iter(self._contents) def __len__(self): """ Return the `total_errors`. """ return self.total_errors def __repr__(self): total = len(self) errors = "error" if total == 1 else "errors" return f"<{self.__class__.__name__} ({total} total {errors})>" @property def total_errors(self): """ The total number of errors in the entire tree, including children. """ child_errors = sum(len(tree) for _, tree in self._contents.items()) return len(self.errors) + child_errors def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): """ Create a key function that can be used to sort errors by relevance. Arguments: weak (set): a collection of validation keywords to consider to be "weak". If there are two errors at the same level of the instance and one is in the set of weak validation keywords, the other error will take priority. By default, :kw:`anyOf` and :kw:`oneOf` are considered weak keywords and will be superseded by other same-level validation errors. strong (set): a collection of validation keywords to consider to be "strong" """ def relevance(error): validator = error.validator return ( # prefer errors which are ... -len(error.path), # 'deeper' and thereby more specific error.path, # earlier (for sibling errors) validator not in weak, # for a non-low-priority keyword validator in strong, # for a high priority keyword not error._matches_type(), # at least match the instance's type ) # otherwise we'll treat them the same return relevance relevance = by_relevance() """ A key function (e.g. to use with `sorted`) which sorts errors by relevance. Example: .. code:: python sorted(validator.iter_errors(12), key=jsonschema.exceptions.relevance) """ def best_match(errors, key=relevance): """ Try to find an error that appears to be the best match among given errors. In general, errors that are higher up in the instance (i.e. for which `ValidationError.path` is shorter) are considered better matches, since they indicate "more" is wrong with the instance. 
If the resulting match is either :kw:`oneOf` or :kw:`anyOf`, the *opposite* assumption is made -- i.e. the deepest error is picked, since these keywords only need to match once, and any other errors may not be relevant. Arguments: errors (collections.abc.Iterable): the errors to select from. Do not provide a mixture of errors from different validation attempts (i.e. from different instances or schemas), since it won't produce sensical output. key (collections.abc.Callable): the key to use when sorting errors. See `relevance` and transitively `by_relevance` for more details (the default is to sort with the defaults of that function). Changing the default is only useful if you want to change the function that rates errors but still want the error context descent done by this function. Returns: the best matching error, or ``None`` if the iterable was empty .. note:: This function is a heuristic. Its return value may change for a given set of inputs from version to version if better heuristics are added. """ errors = iter(errors) best = next(errors, None) if best is None: return best = max(itertools.chain([best], errors), key=key) while best.context: # Calculate the minimum via nsmallest, because we don't recurse if # all nested errors have the same relevance (i.e. if min == max == all) smallest = heapq.nsmallest(2, best.context, key=key) if len(smallest) == 2 and key(smallest[0]) == key(smallest[1]): # noqa: PLR2004 return best best = smallest[0] return best PK!7aU protocols.pynu[""" typing.Protocol classes for jsonschema interfaces. """ # for reference material on Protocols, see # https://www.python.org/dev/peps/pep-0544/ from __future__ import annotations from typing import ( TYPE_CHECKING, Any, ClassVar, Iterable, Protocol, runtime_checkable, ) # in order for Sphinx to resolve references accurately from type annotations, # it needs to see names like `jsonschema.TypeChecker` # therefore, only import at type-checking time (to avoid circular references), # but use `jsonschema` for any types which will otherwise not be resolvable if TYPE_CHECKING: from collections.abc import Mapping import referencing.jsonschema from jsonschema import _typing from jsonschema.exceptions import ValidationError import jsonschema import jsonschema.validators # For code authors working on the validator protocol, these are the three # use-cases which should be kept in mind: # # 1. As a protocol class, it can be used in type annotations to describe the # available methods and attributes of a validator # 2. It is the source of autodoc for the validator documentation # 3. It is runtime_checkable, meaning that it can be used in isinstance() # checks. # # Since protocols are not base classes, isinstance() checking is limited in # its capabilities. See docs on runtime_checkable for detail @runtime_checkable class Validator(Protocol): """ The protocol to which all validator classes adhere. Arguments: schema: The schema that the validator object will validate with. It is assumed to be valid, and providing an invalid schema can lead to undefined behavior. See `Validator.check_schema` to validate a schema first. registry: a schema registry that will be used for looking up JSON references resolver: a resolver that will be used to resolve :kw:`$ref` properties (JSON references). If unprovided, one will be created. .. deprecated:: v4.18.0 `RefResolver <_RefResolver>` has been deprecated in favor of `referencing`, and with it, this argument. 
format_checker: if provided, a checker which will be used to assert about :kw:`format` properties present in the schema. If unprovided, *no* format validation is done, and the presence of format within schemas is strictly informational. Certain formats require additional packages to be installed in order to assert against instances. Ensure you've installed `jsonschema` with its `extra (optional) dependencies ` when invoking ``pip``. .. deprecated:: v4.12.0 Subclassing validator classes now explicitly warns this is not part of their public API. """ #: An object representing the validator's meta schema (the schema that #: describes valid schemas in the given version). META_SCHEMA: ClassVar[Mapping] #: A mapping of validation keywords (`str`\s) to functions that #: validate the keyword with that name. For more information see #: `creating-validators`. VALIDATORS: ClassVar[Mapping] #: A `jsonschema.TypeChecker` that will be used when validating #: :kw:`type` keywords in JSON schemas. TYPE_CHECKER: ClassVar[jsonschema.TypeChecker] #: A `jsonschema.FormatChecker` that will be used when validating #: :kw:`format` keywords in JSON schemas. FORMAT_CHECKER: ClassVar[jsonschema.FormatChecker] #: A function which given a schema returns its ID. ID_OF: _typing.id_of #: The schema that will be used to validate instances schema: Mapping | bool def __init__( self, schema: Mapping | bool, registry: referencing.jsonschema.SchemaRegistry, format_checker: jsonschema.FormatChecker | None = None, ) -> None: ... @classmethod def check_schema(cls, schema: Mapping | bool) -> None: """ Validate the given schema against the validator's `META_SCHEMA`. Raises: `jsonschema.exceptions.SchemaError`: if the schema is invalid """ def is_type(self, instance: Any, type: str) -> bool: """ Check if the instance is of the given (JSON Schema) type. Arguments: instance: the value to check type: the name of a known (JSON Schema) type Returns: whether the instance is of the given type Raises: `jsonschema.exceptions.UnknownType`: if ``type`` is not a known type """ def is_valid(self, instance: Any) -> bool: """ Check if the instance is valid under the current `schema`. Returns: whether the instance is valid or not >>> schema = {"maxItems" : 2} >>> Draft202012Validator(schema).is_valid([2, 3, 4]) False """ def iter_errors(self, instance: Any) -> Iterable[ValidationError]: r""" Lazily yield each of the validation errors in the given instance. >>> schema = { ... "type" : "array", ... "items" : {"enum" : [1, 2, 3]}, ... "maxItems" : 2, ... } >>> v = Draft202012Validator(schema) >>> for error in sorted(v.iter_errors([2, 3, 4]), key=str): ... print(error.message) 4 is not one of [1, 2, 3] [2, 3, 4] is too long .. deprecated:: v4.0.0 Calling this function with a second schema argument is deprecated. Use `Validator.evolve` instead. """ def validate(self, instance: Any) -> None: """ Check if the instance is valid under the current `schema`. Raises: `jsonschema.exceptions.ValidationError`: if the instance is invalid >>> schema = {"maxItems" : 2} >>> Draft202012Validator(schema).validate([2, 3, 4]) Traceback (most recent call last): ... ValidationError: [2, 3, 4] is too long """ def evolve(self, **kwargs) -> Validator: """ Create a new validator like this one, but with given changes. Preserves all other attributes, so can be used to e.g. create a validator with a different schema but with the same :kw:`$ref` resolution behavior. 
>>> validator = Draft202012Validator({}) >>> validator.evolve(schema={"type": "number"}) Draft202012Validator(schema={'type': 'number'}, format_checker=None) The returned object satisfies the validator protocol, but may not be of the same concrete class! In particular this occurs when a :kw:`$ref` occurs to a schema with a different :kw:`$schema` than this one (i.e. for a different draft). >>> validator.evolve( ... schema={"$schema": Draft7Validator.META_SCHEMA["$id"]} ... ) Draft7Validator(schema=..., format_checker=None) """ PK!yŷŷ validators.pynu[""" Creation and extension of validators, with implementations for existing drafts. """ from __future__ import annotations from collections import deque from collections.abc import Iterable, Mapping, Sequence from functools import lru_cache from operator import methodcaller from typing import TYPE_CHECKING from urllib.parse import unquote, urldefrag, urljoin, urlsplit from urllib.request import urlopen from warnings import warn import contextlib import json import reprlib import warnings from attrs import define, field, fields from jsonschema_specifications import REGISTRY as SPECIFICATIONS from rpds import HashTrieMap import referencing.exceptions import referencing.jsonschema from jsonschema import ( _format, _keywords, _legacy_keywords, _types, _typing, _utils, exceptions, ) if TYPE_CHECKING: from jsonschema.protocols import Validator _UNSET = _utils.Unset() _VALIDATORS: dict[str, Validator] = {} _META_SCHEMAS = _utils.URIDict() def __getattr__(name): if name == "ErrorTree": warnings.warn( "Importing ErrorTree from jsonschema.validators is deprecated. " "Instead import it from jsonschema.exceptions.", DeprecationWarning, stacklevel=2, ) from jsonschema.exceptions import ErrorTree return ErrorTree elif name == "validators": warnings.warn( "Accessing jsonschema.validators.validators is deprecated. " "Use jsonschema.validators.validator_for with a given schema.", DeprecationWarning, stacklevel=2, ) return _VALIDATORS elif name == "meta_schemas": warnings.warn( "Accessing jsonschema.validators.meta_schemas is deprecated. " "Use jsonschema.validators.validator_for with a given schema.", DeprecationWarning, stacklevel=2, ) return _META_SCHEMAS elif name == "RefResolver": warnings.warn( _RefResolver._DEPRECATION_MESSAGE, DeprecationWarning, stacklevel=2, ) return _RefResolver raise AttributeError(f"module {__name__} has no attribute {name}") def validates(version): """ Register the decorated validator for a ``version`` of the specification. Registered validators and their meta schemas will be considered when parsing :kw:`$schema` keywords' URIs. Arguments: version (str): An identifier to use as the version's name Returns: collections.abc.Callable: a class decorator to decorate the validator with the version """ def _validates(cls): _VALIDATORS[version] = cls meta_schema_id = cls.ID_OF(cls.META_SCHEMA) _META_SCHEMAS[meta_schema_id] = cls return cls return _validates def _warn_for_remote_retrieve(uri: str): from urllib.request import Request, urlopen headers = {"User-Agent": "python-jsonschema (deprecated $ref resolution)"} request = Request(uri, headers=headers) # noqa: S310 with urlopen(request) as response: # noqa: S310 warnings.warn( "Automatically retrieving remote references can be a security " "vulnerability and is discouraged by the JSON Schema " "specifications. Relying on this behavior is deprecated " "and will shortly become an error. 
If you are sure you want to " "remotely retrieve your reference and that it is safe to do so, " "you can find instructions for doing so via referencing.Registry " "in the referencing documentation " "(https://referencing.readthedocs.org).", DeprecationWarning, stacklevel=9, # Ha ha ha ha magic numbers :/ ) return referencing.Resource.from_contents( json.load(response), default_specification=referencing.jsonschema.DRAFT202012, ) _REMOTE_WARNING_REGISTRY = SPECIFICATIONS.combine( referencing.Registry(retrieve=_warn_for_remote_retrieve), # type: ignore[call-arg] ) def create( meta_schema: referencing.jsonschema.ObjectSchema, validators: ( Mapping[str, _typing.SchemaKeywordValidator] | Iterable[tuple[str, _typing.SchemaKeywordValidator]] ) = (), version: str | None = None, type_checker: _types.TypeChecker = _types.draft202012_type_checker, format_checker: _format.FormatChecker = _format.draft202012_format_checker, id_of: _typing.id_of = referencing.jsonschema.DRAFT202012.id_of, applicable_validators: _typing.ApplicableValidators = methodcaller( "items", ), ): """ Create a new validator class. Arguments: meta_schema: the meta schema for the new validator class validators: a mapping from names to callables, where each callable will validate the schema property with the given name. Each callable should take 4 arguments: 1. a validator instance, 2. the value of the property being validated within the instance 3. the instance 4. the schema version: an identifier for the version that this validator class will validate. If provided, the returned validator class will have its ``__name__`` set to include the version, and also will have `jsonschema.validators.validates` automatically called for the given version. type_checker: a type checker, used when applying the :kw:`type` keyword. If unprovided, a `jsonschema.TypeChecker` will be created with a set of default types typical of JSON Schema drafts. format_checker: a format checker, used when applying the :kw:`format` keyword. If unprovided, a `jsonschema.FormatChecker` will be created with a set of default formats typical of JSON Schema drafts. id_of: A function that given a schema, returns its ID. applicable_validators: A function that, given a schema, returns the list of applicable schema keywords and associated values which will be used to validate the instance. This is mostly used to support pre-draft 7 versions of JSON Schema which specified behavior around ignoring keywords if they were siblings of a ``$ref`` keyword. If you're not attempting to implement similar behavior, you can typically ignore this argument and leave it at its default. 
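For example, a minimal sketch which reuses the pieces of an existing validator class (a from-scratch validator would supply its own keyword callables; `extend` is usually more convenient for small changes):

.. code:: python

    from jsonschema.validators import Draft202012Validator, create

    def lenient_type(validator, value, instance, schema):
        return ()  # an illustrative keyword callable which yields no errors

    LenientValidator = create(
        meta_schema=Draft202012Validator.META_SCHEMA,
        validators={**Draft202012Validator.VALIDATORS, "type": lenient_type},
        type_checker=Draft202012Validator.TYPE_CHECKER,
    )

    LenientValidator({"type": "integer"}).is_valid("spam")  # True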
Returns: a new `jsonschema.protocols.Validator` class """ # preemptively don't shadow the `Validator.format_checker` local format_checker_arg = format_checker specification = referencing.jsonschema.specification_with( dialect_id=id_of(meta_schema) or "urn:unknown-dialect", default=referencing.Specification.OPAQUE, ) @define class Validator: VALIDATORS = dict(validators) # noqa: RUF012 META_SCHEMA = dict(meta_schema) # noqa: RUF012 TYPE_CHECKER = type_checker FORMAT_CHECKER = format_checker_arg ID_OF = staticmethod(id_of) _APPLICABLE_VALIDATORS = applicable_validators _validators = field(init=False, repr=False, eq=False) schema: referencing.jsonschema.Schema = field(repr=reprlib.repr) _ref_resolver = field(default=None, repr=False, alias="resolver") format_checker: _format.FormatChecker | None = field(default=None) # TODO: include new meta-schemas added at runtime _registry: referencing.jsonschema.SchemaRegistry = field( default=_REMOTE_WARNING_REGISTRY, kw_only=True, repr=False, ) _resolver = field( alias="_resolver", default=None, kw_only=True, repr=False, ) def __init_subclass__(cls): warnings.warn( ( "Subclassing validator classes is not intended to " "be part of their public API. A future version " "will make doing so an error, as the behavior of " "subclasses isn't guaranteed to stay the same " "between releases of jsonschema. Instead, prefer " "composition of validators, wrapping them in an object " "owned entirely by the downstream library." ), DeprecationWarning, stacklevel=2, ) def evolve(self, **changes): cls = self.__class__ schema = changes.setdefault("schema", self.schema) NewValidator = validator_for(schema, default=cls) for field in fields(cls): # noqa: F402 if not field.init: continue attr_name = field.name init_name = field.alias if init_name not in changes: changes[init_name] = getattr(self, attr_name) return NewValidator(**changes) cls.evolve = evolve def __attrs_post_init__(self): if self._resolver is None: registry = self._registry if registry is not _REMOTE_WARNING_REGISTRY: registry = SPECIFICATIONS.combine(registry) resource = specification.create_resource(self.schema) self._resolver = registry.resolver_with_root(resource) if self.schema is True or self.schema is False: self._validators = [] else: self._validators = [ (self.VALIDATORS[k], k, v) for k, v in applicable_validators(self.schema) if k in self.VALIDATORS ] # REMOVEME: Legacy ref resolution state management. push_scope = getattr(self._ref_resolver, "push_scope", None) if push_scope is not None: id = id_of(self.schema) if id is not None: push_scope(id) @classmethod def check_schema(cls, schema, format_checker=_UNSET): Validator = validator_for(cls.META_SCHEMA, default=cls) if format_checker is _UNSET: format_checker = Validator.FORMAT_CHECKER validator = Validator( schema=cls.META_SCHEMA, format_checker=format_checker, ) for error in validator.iter_errors(schema): raise exceptions.SchemaError.create_from(error) @property def resolver(self): warnings.warn( ( f"Accessing {self.__class__.__name__}.resolver is " "deprecated as of v4.18.0, in favor of the " "https://github.com/python-jsonschema/referencing " "library, which provides more compliant referencing " "behavior as well as more flexible APIs for " "customization." 
), DeprecationWarning, stacklevel=2, ) if self._ref_resolver is None: self._ref_resolver = _RefResolver.from_schema( self.schema, id_of=id_of, ) return self._ref_resolver def evolve(self, **changes): schema = changes.setdefault("schema", self.schema) NewValidator = validator_for(schema, default=self.__class__) for (attr_name, init_name) in evolve_fields: if init_name not in changes: changes[init_name] = getattr(self, attr_name) return NewValidator(**changes) def iter_errors(self, instance, _schema=None): if _schema is not None: warnings.warn( ( "Passing a schema to Validator.iter_errors " "is deprecated and will be removed in a future " "release. Call validator.evolve(schema=new_schema)." "iter_errors(...) instead." ), DeprecationWarning, stacklevel=2, ) validators = [ (self.VALIDATORS[k], k, v) for k, v in applicable_validators(_schema) if k in self.VALIDATORS ] else: _schema, validators = self.schema, self._validators if _schema is True: return elif _schema is False: yield exceptions.ValidationError( f"False schema does not allow {instance!r}", validator=None, validator_value=None, instance=instance, schema=_schema, ) return for validator, k, v in validators: errors = validator(self, v, instance, _schema) or () for error in errors: # set details if not already set by the called fn error._set( validator=k, validator_value=v, instance=instance, schema=_schema, type_checker=self.TYPE_CHECKER, ) if k not in {"if", "$ref"}: error.schema_path.appendleft(k) yield error def descend( self, instance, schema, path=None, schema_path=None, resolver=None, ): if schema is True: return elif schema is False: yield exceptions.ValidationError( f"False schema does not allow {instance!r}", validator=None, validator_value=None, instance=instance, schema=schema, ) return if self._ref_resolver is not None: evolved = self.evolve(schema=schema) else: if resolver is None: resolver = self._resolver.in_subresource( specification.create_resource(schema), ) evolved = self.evolve(schema=schema, _resolver=resolver) for k, v in applicable_validators(schema): validator = evolved.VALIDATORS.get(k) if validator is None: continue errors = validator(evolved, v, instance, schema) or () for error in errors: # set details if not already set by the called fn error._set( validator=k, validator_value=v, instance=instance, schema=schema, type_checker=evolved.TYPE_CHECKER, ) if k not in {"if", "$ref"}: error.schema_path.appendleft(k) if path is not None: error.path.appendleft(path) if schema_path is not None: error.schema_path.appendleft(schema_path) yield error def validate(self, *args, **kwargs): for error in self.iter_errors(*args, **kwargs): raise error def is_type(self, instance, type): try: return self.TYPE_CHECKER.is_type(instance, type) except exceptions.UndefinedTypeCheck: exc = exceptions.UnknownType(type, instance, self.schema) raise exc from None def _validate_reference(self, ref, instance): if self._ref_resolver is None: try: resolved = self._resolver.lookup(ref) except referencing.exceptions.Unresolvable as err: raise exceptions._WrappedReferencingError(err) from err return self.descend( instance, resolved.contents, resolver=resolved.resolver, ) else: resolve = getattr(self._ref_resolver, "resolve", None) if resolve is None: with self._ref_resolver.resolving(ref) as resolved: return self.descend(instance, resolved) else: scope, resolved = resolve(ref) self._ref_resolver.push_scope(scope) try: return list(self.descend(instance, resolved)) finally: self._ref_resolver.pop_scope() def is_valid(self, instance, _schema=None): 
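# NOTE: passing ``_schema`` here is deprecated; prefer
# ``validator.evolve(schema=new_schema).is_valid(instance)`` instead,
# as the warning emitted below explains.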
if _schema is not None: warnings.warn( ( "Passing a schema to Validator.is_valid is deprecated " "and will be removed in a future release. Call " "validator.evolve(schema=new_schema).is_valid(...) " "instead." ), DeprecationWarning, stacklevel=2, ) self = self.evolve(schema=_schema) error = next(self.iter_errors(instance), None) return error is None evolve_fields = [ (field.name, field.alias) for field in fields(Validator) if field.init ] if version is not None: safe = version.title().replace(" ", "").replace("-", "") Validator.__name__ = Validator.__qualname__ = f"{safe}Validator" Validator = validates(version)(Validator) # type: ignore[misc] return Validator def extend( validator, validators=(), version=None, type_checker=None, format_checker=None, ): """ Create a new validator class by extending an existing one. Arguments: validator (jsonschema.protocols.Validator): an existing validator class validators (collections.abc.Mapping): a mapping of new validator callables to extend with, whose structure is as in `create`. .. note:: Any validator callables with the same name as an existing one will (silently) replace the old validator callable entirely, effectively overriding any validation done in the "parent" validator class. If you wish to instead extend the behavior of a parent's validator callable, delegate and call it directly in the new validator function by retrieving it using ``OldValidator.VALIDATORS["validation_keyword_name"]``. version (str): a version for the new validator class type_checker (jsonschema.TypeChecker): a type checker, used when applying the :kw:`type` keyword. If unprovided, the type checker of the extended `jsonschema.protocols.Validator` will be carried along. format_checker (jsonschema.FormatChecker): a format checker, used when applying the :kw:`format` keyword. If unprovided, the format checker of the extended `jsonschema.protocols.Validator` will be carried along. Returns: a new `jsonschema.protocols.Validator` class extending the one provided .. note:: Meta Schemas The new validator class will have its parent's meta schema. If you wish to change or extend the meta schema in the new validator class, modify ``META_SCHEMA`` directly on the returned class. Note that no implicit copying is done, so a copy should likely be made before modifying it, in order to not affect the old validator. 
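For example, a minimal sketch which swaps out a single keyword (the names used here are illustrative):

.. code:: python

    from jsonschema.validators import Draft202012Validator, extend

    def permissive_const(validator, value, instance, schema):
        return ()  # yield no errors, effectively disabling "const"

    PermissiveValidator = extend(
        Draft202012Validator,
        validators={"const": permissive_const},
    )

    Draft202012Validator({"const": 37}).is_valid(12)  # False
    PermissiveValidator({"const": 37}).is_valid(12)   # True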
""" all_validators = dict(validator.VALIDATORS) all_validators.update(validators) if type_checker is None: type_checker = validator.TYPE_CHECKER if format_checker is None: format_checker = validator.FORMAT_CHECKER return create( meta_schema=validator.META_SCHEMA, validators=all_validators, version=version, type_checker=type_checker, format_checker=format_checker, id_of=validator.ID_OF, applicable_validators=validator._APPLICABLE_VALIDATORS, ) Draft3Validator = create( meta_schema=SPECIFICATIONS.contents( "http://json-schema.org/draft-03/schema#", ), validators={ "$ref": _keywords.ref, "additionalItems": _legacy_keywords.additionalItems, "additionalProperties": _keywords.additionalProperties, "dependencies": _legacy_keywords.dependencies_draft3, "disallow": _legacy_keywords.disallow_draft3, "divisibleBy": _keywords.multipleOf, "enum": _keywords.enum, "extends": _legacy_keywords.extends_draft3, "format": _keywords.format, "items": _legacy_keywords.items_draft3_draft4, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maximum": _legacy_keywords.maximum_draft3_draft4, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minimum": _legacy_keywords.minimum_draft3_draft4, "pattern": _keywords.pattern, "patternProperties": _keywords.patternProperties, "properties": _legacy_keywords.properties_draft3, "type": _legacy_keywords.type_draft3, "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft3_type_checker, format_checker=_format.draft3_format_checker, version="draft3", id_of=referencing.jsonschema.DRAFT3.id_of, applicable_validators=_legacy_keywords.ignore_ref_siblings, ) Draft4Validator = create( meta_schema=SPECIFICATIONS.contents( "http://json-schema.org/draft-04/schema#", ), validators={ "$ref": _keywords.ref, "additionalItems": _legacy_keywords.additionalItems, "additionalProperties": _keywords.additionalProperties, "allOf": _keywords.allOf, "anyOf": _keywords.anyOf, "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, "enum": _keywords.enum, "format": _keywords.format, "items": _legacy_keywords.items_draft3_draft4, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maxProperties": _keywords.maxProperties, "maximum": _legacy_keywords.maximum_draft3_draft4, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minProperties": _keywords.minProperties, "minimum": _legacy_keywords.minimum_draft3_draft4, "multipleOf": _keywords.multipleOf, "not": _keywords.not_, "oneOf": _keywords.oneOf, "pattern": _keywords.pattern, "patternProperties": _keywords.patternProperties, "properties": _keywords.properties, "required": _keywords.required, "type": _keywords.type, "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft4_type_checker, format_checker=_format.draft4_format_checker, version="draft4", id_of=referencing.jsonschema.DRAFT4.id_of, applicable_validators=_legacy_keywords.ignore_ref_siblings, ) Draft6Validator = create( meta_schema=SPECIFICATIONS.contents( "http://json-schema.org/draft-06/schema#", ), validators={ "$ref": _keywords.ref, "additionalItems": _legacy_keywords.additionalItems, "additionalProperties": _keywords.additionalProperties, "allOf": _keywords.allOf, "anyOf": _keywords.anyOf, "const": _keywords.const, "contains": _legacy_keywords.contains_draft6_draft7, "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, "enum": _keywords.enum, "exclusiveMaximum": _keywords.exclusiveMaximum, "exclusiveMinimum": _keywords.exclusiveMinimum, "format": _keywords.format, "items": 
_legacy_keywords.items_draft6_draft7_draft201909, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maxProperties": _keywords.maxProperties, "maximum": _keywords.maximum, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minProperties": _keywords.minProperties, "minimum": _keywords.minimum, "multipleOf": _keywords.multipleOf, "not": _keywords.not_, "oneOf": _keywords.oneOf, "pattern": _keywords.pattern, "patternProperties": _keywords.patternProperties, "properties": _keywords.properties, "propertyNames": _keywords.propertyNames, "required": _keywords.required, "type": _keywords.type, "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft6_type_checker, format_checker=_format.draft6_format_checker, version="draft6", id_of=referencing.jsonschema.DRAFT6.id_of, applicable_validators=_legacy_keywords.ignore_ref_siblings, ) Draft7Validator = create( meta_schema=SPECIFICATIONS.contents( "http://json-schema.org/draft-07/schema#", ), validators={ "$ref": _keywords.ref, "additionalItems": _legacy_keywords.additionalItems, "additionalProperties": _keywords.additionalProperties, "allOf": _keywords.allOf, "anyOf": _keywords.anyOf, "const": _keywords.const, "contains": _legacy_keywords.contains_draft6_draft7, "dependencies": _legacy_keywords.dependencies_draft4_draft6_draft7, "enum": _keywords.enum, "exclusiveMaximum": _keywords.exclusiveMaximum, "exclusiveMinimum": _keywords.exclusiveMinimum, "format": _keywords.format, "if": _keywords.if_, "items": _legacy_keywords.items_draft6_draft7_draft201909, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maxProperties": _keywords.maxProperties, "maximum": _keywords.maximum, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minProperties": _keywords.minProperties, "minimum": _keywords.minimum, "multipleOf": _keywords.multipleOf, "not": _keywords.not_, "oneOf": _keywords.oneOf, "pattern": _keywords.pattern, "patternProperties": _keywords.patternProperties, "properties": _keywords.properties, "propertyNames": _keywords.propertyNames, "required": _keywords.required, "type": _keywords.type, "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft7_type_checker, format_checker=_format.draft7_format_checker, version="draft7", id_of=referencing.jsonschema.DRAFT7.id_of, applicable_validators=_legacy_keywords.ignore_ref_siblings, ) Draft201909Validator = create( meta_schema=SPECIFICATIONS.contents( "https://json-schema.org/draft/2019-09/schema", ), validators={ "$recursiveRef": _legacy_keywords.recursiveRef, "$ref": _keywords.ref, "additionalItems": _legacy_keywords.additionalItems, "additionalProperties": _keywords.additionalProperties, "allOf": _keywords.allOf, "anyOf": _keywords.anyOf, "const": _keywords.const, "contains": _keywords.contains, "dependentRequired": _keywords.dependentRequired, "dependentSchemas": _keywords.dependentSchemas, "enum": _keywords.enum, "exclusiveMaximum": _keywords.exclusiveMaximum, "exclusiveMinimum": _keywords.exclusiveMinimum, "format": _keywords.format, "if": _keywords.if_, "items": _legacy_keywords.items_draft6_draft7_draft201909, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maxProperties": _keywords.maxProperties, "maximum": _keywords.maximum, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minProperties": _keywords.minProperties, "minimum": _keywords.minimum, "multipleOf": _keywords.multipleOf, "not": _keywords.not_, "oneOf": _keywords.oneOf, "pattern": _keywords.pattern, "patternProperties": 
_keywords.patternProperties, "properties": _keywords.properties, "propertyNames": _keywords.propertyNames, "required": _keywords.required, "type": _keywords.type, "unevaluatedItems": _legacy_keywords.unevaluatedItems_draft2019, "unevaluatedProperties": ( _legacy_keywords.unevaluatedProperties_draft2019 ), "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft201909_type_checker, format_checker=_format.draft201909_format_checker, version="draft2019-09", ) Draft202012Validator = create( meta_schema=SPECIFICATIONS.contents( "https://json-schema.org/draft/2020-12/schema", ), validators={ "$dynamicRef": _keywords.dynamicRef, "$ref": _keywords.ref, "additionalProperties": _keywords.additionalProperties, "allOf": _keywords.allOf, "anyOf": _keywords.anyOf, "const": _keywords.const, "contains": _keywords.contains, "dependentRequired": _keywords.dependentRequired, "dependentSchemas": _keywords.dependentSchemas, "enum": _keywords.enum, "exclusiveMaximum": _keywords.exclusiveMaximum, "exclusiveMinimum": _keywords.exclusiveMinimum, "format": _keywords.format, "if": _keywords.if_, "items": _keywords.items, "maxItems": _keywords.maxItems, "maxLength": _keywords.maxLength, "maxProperties": _keywords.maxProperties, "maximum": _keywords.maximum, "minItems": _keywords.minItems, "minLength": _keywords.minLength, "minProperties": _keywords.minProperties, "minimum": _keywords.minimum, "multipleOf": _keywords.multipleOf, "not": _keywords.not_, "oneOf": _keywords.oneOf, "pattern": _keywords.pattern, "patternProperties": _keywords.patternProperties, "prefixItems": _keywords.prefixItems, "properties": _keywords.properties, "propertyNames": _keywords.propertyNames, "required": _keywords.required, "type": _keywords.type, "unevaluatedItems": _keywords.unevaluatedItems, "unevaluatedProperties": _keywords.unevaluatedProperties, "uniqueItems": _keywords.uniqueItems, }, type_checker=_types.draft202012_type_checker, format_checker=_format.draft202012_format_checker, version="draft2020-12", ) _LATEST_VERSION = Draft202012Validator class _RefResolver: """ Resolve JSON References. Arguments: base_uri (str): The URI of the referring document referrer: The actual referring document store (dict): A mapping from URIs to documents to cache cache_remote (bool): Whether remote refs should be cached after first resolution handlers (dict): A mapping from URI schemes to functions that should be used to retrieve them urljoin_cache (:func:`functools.lru_cache`): A cache that will be used for caching the results of joining the resolution scope to subscopes. remote_cache (:func:`functools.lru_cache`): A cache that will be used for caching the results of resolved remote URLs. Attributes: cache_remote (bool): Whether remote refs should be cached after first resolution .. deprecated:: v4.18.0 ``RefResolver`` has been deprecated in favor of `referencing`. """ _DEPRECATION_MESSAGE = ( "jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the " "https://github.com/python-jsonschema/referencing library, which " "provides more compliant referencing behavior as well as more " "flexible APIs for customization. A future release will remove " "RefResolver. Please file a feature request (on referencing) if you " "are missing an API for the kind of customization you need." 
) def __init__( self, base_uri, referrer, store=HashTrieMap(), cache_remote=True, handlers=(), urljoin_cache=None, remote_cache=None, ): if urljoin_cache is None: urljoin_cache = lru_cache(1024)(urljoin) if remote_cache is None: remote_cache = lru_cache(1024)(self.resolve_from_url) self.referrer = referrer self.cache_remote = cache_remote self.handlers = dict(handlers) self._scopes_stack = [base_uri] self.store = _utils.URIDict( (uri, each.contents) for uri, each in SPECIFICATIONS.items() ) self.store.update( (id, each.META_SCHEMA) for id, each in _META_SCHEMAS.items() ) self.store.update(store) self.store.update( (schema["$id"], schema) for schema in store.values() if isinstance(schema, Mapping) and "$id" in schema ) self.store[base_uri] = referrer self._urljoin_cache = urljoin_cache self._remote_cache = remote_cache @classmethod def from_schema( # noqa: D417 cls, schema, id_of=referencing.jsonschema.DRAFT202012.id_of, *args, **kwargs, ): """ Construct a resolver from a JSON schema object. Arguments: schema: the referring schema Returns: `_RefResolver` """ return cls(base_uri=id_of(schema) or "", referrer=schema, *args, **kwargs) # noqa: B026, E501 def push_scope(self, scope): """ Enter a given sub-scope. Treats further dereferences as being performed underneath the given scope. """ self._scopes_stack.append( self._urljoin_cache(self.resolution_scope, scope), ) def pop_scope(self): """ Exit the most recent entered scope. Treats further dereferences as being performed underneath the original scope. Don't call this method more times than `push_scope` has been called. """ try: self._scopes_stack.pop() except IndexError: raise exceptions._RefResolutionError( "Failed to pop the scope from an empty stack. " "`pop_scope()` should only be called once for every " "`push_scope()`", ) from None @property def resolution_scope(self): """ Retrieve the current resolution scope. """ return self._scopes_stack[-1] @property def base_uri(self): """ Retrieve the current base URI, not including any fragment. """ uri, _ = urldefrag(self.resolution_scope) return uri @contextlib.contextmanager def in_scope(self, scope): """ Temporarily enter the given scope for the duration of the context. .. deprecated:: v4.0.0 """ warnings.warn( "jsonschema.RefResolver.in_scope is deprecated and will be " "removed in a future release.", DeprecationWarning, stacklevel=3, ) self.push_scope(scope) try: yield finally: self.pop_scope() @contextlib.contextmanager def resolving(self, ref): """ Resolve the given ``ref`` and enter its resolution scope. Exits the scope on exit of this context manager. 
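For example, a minimal sketch (`_RefResolver` as a whole is deprecated in favor of the `referencing` library, so prefer that for new code):

.. code:: python

    schema = {
        "$id": "http://example.com/",
        "definitions": {"a": {"type": "integer"}},
    }
    resolver = _RefResolver.from_schema(schema)
    with resolver.resolving("#/definitions/a") as resolved:
        assert resolved == {"type": "integer"}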
Arguments: ref (str): The reference to resolve """ url, resolved = self.resolve(ref) self.push_scope(url) try: yield resolved finally: self.pop_scope() def _find_in_referrer(self, key): return self._get_subschemas_cache()[key] @lru_cache # noqa: B019 def _get_subschemas_cache(self): cache = {key: [] for key in _SUBSCHEMAS_KEYWORDS} for keyword, subschema in _search_schema( self.referrer, _match_subschema_keywords, ): cache[keyword].append(subschema) return cache @lru_cache # noqa: B019 def _find_in_subschemas(self, url): subschemas = self._get_subschemas_cache()["$id"] if not subschemas: return None uri, fragment = urldefrag(url) for subschema in subschemas: id = subschema["$id"] if not isinstance(id, str): continue target_uri = self._urljoin_cache(self.resolution_scope, id) if target_uri.rstrip("/") == uri.rstrip("/"): if fragment: subschema = self.resolve_fragment(subschema, fragment) self.store[url] = subschema return url, subschema return None def resolve(self, ref): """ Resolve the given reference. """ url = self._urljoin_cache(self.resolution_scope, ref).rstrip("/") match = self._find_in_subschemas(url) if match is not None: return match return url, self._remote_cache(url) def resolve_from_url(self, url): """ Resolve the given URL. """ url, fragment = urldefrag(url) if not url: url = self.base_uri try: document = self.store[url] except KeyError: try: document = self.resolve_remote(url) except Exception as exc: raise exceptions._RefResolutionError(exc) from exc return self.resolve_fragment(document, fragment) def resolve_fragment(self, document, fragment): """ Resolve a ``fragment`` within the referenced ``document``. Arguments: document: The referent document fragment (str): a URI fragment to resolve within it """ fragment = fragment.lstrip("/") if not fragment: return document if document is self.referrer: find = self._find_in_referrer else: def find(key): yield from _search_schema(document, _match_keyword(key)) for keyword in ["$anchor", "$dynamicAnchor"]: for subschema in find(keyword): if fragment == subschema[keyword]: return subschema for keyword in ["id", "$id"]: for subschema in find(keyword): if "#" + fragment == subschema[keyword]: return subschema # Resolve via path parts = unquote(fragment).split("/") if fragment else [] for part in parts: part = part.replace("~1", "/").replace("~0", "~") if isinstance(document, Sequence): try: # noqa: SIM105 part = int(part) except ValueError: pass try: document = document[part] except (TypeError, LookupError) as err: raise exceptions._RefResolutionError( f"Unresolvable JSON pointer: {fragment!r}", ) from err return document def resolve_remote(self, uri): """ Resolve a remote ``uri``. If called directly, does not check the store first, but after retrieving the document at the specified URI it will be saved in the store if :attr:`cache_remote` is True. .. note:: If the requests_ library is present, ``jsonschema`` will use it to request the remote ``uri``, so that the correct encoding is detected and used. If it isn't, or if the scheme of the ``uri`` is not ``http`` or ``https``, UTF-8 is assumed. Arguments: uri (str): The URI to resolve Returns: The retrieved document .. 
_requests: https://pypi.org/project/requests/ """ try: import requests except ImportError: requests = None scheme = urlsplit(uri).scheme if scheme in self.handlers: result = self.handlers[scheme](uri) elif scheme in ["http", "https"] and requests: # Requests has support for detecting the correct encoding of # json over http result = requests.get(uri).json() else: # Otherwise, pass off to urllib and assume utf-8 with urlopen(uri) as url: # noqa: S310 result = json.loads(url.read().decode("utf-8")) if self.cache_remote: self.store[uri] = result return result _SUBSCHEMAS_KEYWORDS = ("$id", "id", "$anchor", "$dynamicAnchor") def _match_keyword(keyword): def matcher(value): if keyword in value: yield value return matcher def _match_subschema_keywords(value): for keyword in _SUBSCHEMAS_KEYWORDS: if keyword in value: yield keyword, value def _search_schema(schema, matcher): """Breadth-first search routine.""" values = deque([schema]) while values: value = values.pop() if not isinstance(value, dict): continue yield from matcher(value) values.extendleft(value.values()) def validate(instance, schema, cls=None, *args, **kwargs): # noqa: D417 """ Validate an instance under the given schema. >>> validate([2, 3, 4], {"maxItems": 2}) Traceback (most recent call last): ... ValidationError: [2, 3, 4] is too long :func:`~jsonschema.validators.validate` will first verify that the provided schema is itself valid, since not doing so can lead to less obvious error messages and fail in less obvious or consistent ways. If you know you have a valid schema already, especially if you intend to validate multiple instances with the same schema, you likely would prefer using the `jsonschema.protocols.Validator.validate` method directly on a specific validator (e.g. ``Draft202012Validator.validate``). Arguments: instance: The instance to validate schema: The schema to validate with cls (jsonschema.protocols.Validator): The class that will be used to validate the instance. If the ``cls`` argument is not provided, two things will happen in accordance with the specification. First, if the schema has a :kw:`$schema` keyword containing a known meta-schema [#]_ then the proper validator will be used. The specification recommends that all schemas contain :kw:`$schema` properties for this reason. If no :kw:`$schema` property is found, the default validator class is the latest released draft. Any other provided positional and keyword arguments will be passed on when instantiating the ``cls``. Raises: `jsonschema.exceptions.ValidationError`: if the instance is invalid `jsonschema.exceptions.SchemaError`: if the schema itself is invalid .. rubric:: Footnotes .. [#] known by a validator registered with `jsonschema.validators.validates` """ if cls is None: cls = validator_for(schema) cls.check_schema(schema) validator = cls(schema, *args, **kwargs) error = exceptions.best_match(validator.iter_errors(instance)) if error is not None: raise error def validator_for( schema, default: Validator | _utils.Unset = _UNSET, ) -> type[Validator]: """ Retrieve the validator class appropriate for validating the given schema. Uses the :kw:`$schema` keyword that should be present in the given schema to look up the appropriate validator class. Arguments: schema (collections.abc.Mapping or bool): the schema to look at default: the default to return if the appropriate validator class cannot be determined. If unprovided, the default is to return the latest supported draft. 
Examples: The :kw:`$schema` JSON Schema keyword will control which validator class is returned: >>> schema = { ... "$schema": "https://json-schema.org/draft/2020-12/schema", ... "type": "integer", ... } >>> jsonschema.validators.validator_for(schema) Here, a draft 7 schema instead will return the draft 7 validator: >>> schema = { ... "$schema": "http://json-schema.org/draft-07/schema#", ... "type": "integer", ... } >>> jsonschema.validators.validator_for(schema) Schemas with no ``$schema`` keyword will fallback to the default argument: >>> schema = {"type": "integer"} >>> jsonschema.validators.validator_for( ... schema, default=Draft7Validator, ... ) or if none is provided, to the latest version supported. Always including the keyword when authoring schemas is highly recommended. """ DefaultValidator = _LATEST_VERSION if default is _UNSET else default if schema is True or schema is False or "$schema" not in schema: return DefaultValidator if schema["$schema"] not in _META_SCHEMAS and default is _UNSET: warn( ( "The metaschema specified by $schema was not found. " "Using the latest draft to validate, but this will raise " "an error in the future." ), DeprecationWarning, stacklevel=2, ) return _META_SCHEMAS.get(schema["$schema"], DefaultValidator) PK!FFbenchmarks/__init__.pynu[""" Benchmarks for validation. This package is *not* public API. """ PK!y>>benchmarks/const_vs_enum.pynu[""" A benchmark for comparing equivalent validation of `const` and `enum`. """ from pyperf import Runner from jsonschema import Draft202012Validator value = [37] * 100 const_schema = {"const": list(value)} enum_schema = {"enum": [list(value)]} valid = list(value) invalid = [*valid, 73] const = Draft202012Validator(const_schema) enum = Draft202012Validator(enum_schema) assert const.is_valid(valid) assert enum.is_valid(valid) assert not const.is_valid(invalid) assert not enum.is_valid(invalid) if __name__ == "__main__": runner = Runner() runner.bench_func("const valid", lambda: const.is_valid(valid)) runner.bench_func("const invalid", lambda: const.is_valid(invalid)) runner.bench_func("enum valid", lambda: enum.is_valid(valid)) runner.bench_func("enum invalid", lambda: enum.is_valid(invalid)) PK!`@benchmarks/contains.pynu[""" A benchmark for validation of the `contains` keyword. """ from pyperf import Runner from jsonschema import Draft202012Validator schema = { "type": "array", "contains": {"const": 37}, } validator = Draft202012Validator(schema) size = 1000 beginning = [37] + [0] * (size - 1) middle = [0] * (size // 2) + [37] + [0] * (size // 2) end = [0] * (size - 1) + [37] invalid = [0] * size if __name__ == "__main__": runner = Runner() runner.bench_func("baseline", lambda: validator.is_valid([])) runner.bench_func("beginning", lambda: validator.is_valid(beginning)) runner.bench_func("middle", lambda: validator.is_valid(middle)) runner.bench_func("end", lambda: validator.is_valid(end)) runner.bench_func("invalid", lambda: validator.is_valid(invalid)) PK!5>  benchmarks/issue232.pynu[""" A performance benchmark using the example from issue #232. See https://github.com/python-jsonschema/jsonschema/pull/232. 
""" from pathlib import Path from pyperf import Runner from referencing import Registry from jsonschema.tests._suite import Version import jsonschema issue232 = Version( path=Path(__file__).parent / "issue232", remotes=Registry(), name="issue232", ) if __name__ == "__main__": issue232.benchmark( runner=Runner(), Validator=jsonschema.Draft4Validator, ) PK!xp@@$benchmarks/json_schema_test_suite.pynu[""" A performance benchmark using the official test suite. This benchmarks jsonschema using every valid example in the JSON-Schema-Test-Suite. It will take some time to complete. """ from pyperf import Runner from jsonschema.tests._suite import Suite if __name__ == "__main__": Suite().benchmark(runner=Runner()) PK!FUddbenchmarks/nested_schemas.pynu[""" Validating highly nested schemas shouldn't cause exponential time blowups. See https://github.com/python-jsonschema/jsonschema/issues/1097. """ from itertools import cycle from jsonschema.validators import validator_for metaschemaish = { "$id": "https://example.com/draft/2020-12/schema/strict", "$schema": "https://json-schema.org/draft/2020-12/schema", "$vocabulary": { "https://json-schema.org/draft/2020-12/vocab/core": True, "https://json-schema.org/draft/2020-12/vocab/applicator": True, "https://json-schema.org/draft/2020-12/vocab/unevaluated": True, "https://json-schema.org/draft/2020-12/vocab/validation": True, "https://json-schema.org/draft/2020-12/vocab/meta-data": True, "https://json-schema.org/draft/2020-12/vocab/format-annotation": True, "https://json-schema.org/draft/2020-12/vocab/content": True, }, "$dynamicAnchor": "meta", "$ref": "https://json-schema.org/draft/2020-12/schema", "unevaluatedProperties": False, } def nested_schema(levels): """ Produce a schema which validates deeply nested objects and arrays. """ names = cycle(["foo", "bar", "baz", "quux", "spam", "eggs"]) schema = {"type": "object", "properties": {"ham": {"type": "string"}}} for _, name in zip(range(levels - 1), names): schema = {"type": "object", "properties": {name: schema}} return schema validator = validator_for(metaschemaish)(metaschemaish) if __name__ == "__main__": from pyperf import Runner runner = Runner() not_nested = nested_schema(levels=1) runner.bench_func("not nested", lambda: validator.is_valid(not_nested)) for levels in range(1, 11, 3): schema = nested_schema(levels=levels) runner.bench_func( f"nested * {levels}", lambda schema=schema: validator.is_valid(schema), ) PK!O[WYYbenchmarks/subcomponents.pynu[""" A benchmark which tries to compare the possible slow subparts of validation. 
""" from referencing import Registry from referencing.jsonschema import DRAFT202012 from rpds import HashTrieMap, HashTrieSet from jsonschema import Draft202012Validator schema = { "type": "array", "minLength": 1, "maxLength": 1, "items": {"type": "integer"}, } hmap = HashTrieMap() hset = HashTrieSet() registry = Registry() v = Draft202012Validator(schema) def registry_data_structures(): return hmap.insert("foo", "bar"), hset.insert("foo") def registry_add(): resource = DRAFT202012.create_resource(schema) return registry.with_resource(uri="urn:example", resource=resource) if __name__ == "__main__": from pyperf import Runner runner = Runner() runner.bench_func("HashMap/HashSet insertion", registry_data_structures) runner.bench_func("Registry insertion", registry_add) runner.bench_func("Success", lambda: v.is_valid([1])) runner.bench_func("Failure", lambda: v.is_valid(["foo"])) runner.bench_func("Metaschema validation", lambda: v.check_schema(schema)) PK!๬benchmarks/unused_registry.pynu[""" An unused schema registry should not cause slower validation. "Unused" here means one where no reference resolution is occurring anyhow. See https://github.com/python-jsonschema/jsonschema/issues/1088. """ from pyperf import Runner from referencing import Registry from referencing.jsonschema import DRAFT201909 from jsonschema import Draft201909Validator registry = Registry().with_resource( "urn:example:foo", DRAFT201909.create_resource({}), ) schema = {"$ref": "https://json-schema.org/draft/2019-09/schema"} instance = {"maxLength": 4} no_registry = Draft201909Validator(schema) with_useless_registry = Draft201909Validator(schema, registry=registry) if __name__ == "__main__": runner = Runner() runner.bench_func( "no registry", lambda: no_registry.is_valid(instance), ) runner.bench_func( "useless registry", lambda: with_useless_registry.is_valid(instance), ) PK!d4  (benchmarks/useless_applicator_schemas.pynu[ """ A benchmark for validation of applicators containing lots of useless schemas. Signals a small possible optimization to remove all such schemas ahead of time. 
""" from pyperf import Runner from jsonschema import Draft202012Validator as Validator NUM_USELESS = 100000 subschema = {"const": 37} valid = 37 invalid = 12 baseline = Validator(subschema) # These should be indistinguishable from just `subschema` by_name = { "single subschema": { "anyOf": Validator({"anyOf": [subschema]}), "allOf": Validator({"allOf": [subschema]}), "oneOf": Validator({"oneOf": [subschema]}), }, "redundant subschemas": { "anyOf": Validator({"anyOf": [subschema] * NUM_USELESS}), "allOf": Validator({"allOf": [subschema] * NUM_USELESS}), }, "useless successful subschemas (beginning)": { "anyOf": Validator({"anyOf": [subschema, *[True] * NUM_USELESS]}), "allOf": Validator({"allOf": [subschema, *[True] * NUM_USELESS]}), }, "useless successful subschemas (middle)": { "anyOf": Validator( { "anyOf": [ *[True] * (NUM_USELESS // 2), subschema, *[True] * (NUM_USELESS // 2), ], }, ), "allOf": Validator( { "allOf": [ *[True] * (NUM_USELESS // 2), subschema, *[True] * (NUM_USELESS // 2), ], }, ), }, "useless successful subschemas (end)": { "anyOf": Validator({"anyOf": [*[True] * NUM_USELESS, subschema]}), "allOf": Validator({"allOf": [*[True] * NUM_USELESS, subschema]}), }, "useless failing subschemas (beginning)": { "anyOf": Validator({"anyOf": [subschema, *[False] * NUM_USELESS]}), "oneOf": Validator({"oneOf": [subschema, *[False] * NUM_USELESS]}), }, "useless failing subschemas (middle)": { "anyOf": Validator( { "anyOf": [ *[False] * (NUM_USELESS // 2), subschema, *[False] * (NUM_USELESS // 2), ], }, ), "oneOf": Validator( { "oneOf": [ *[False] * (NUM_USELESS // 2), subschema, *[False] * (NUM_USELESS // 2), ], }, ), }, "useless failing subschemas (end)": { "anyOf": Validator({"anyOf": [*[False] * NUM_USELESS, subschema]}), "oneOf": Validator({"oneOf": [*[False] * NUM_USELESS, subschema]}), }, } if __name__ == "__main__": runner = Runner() runner.bench_func("baseline valid", lambda: baseline.is_valid(valid)) runner.bench_func("baseline invalid", lambda: baseline.is_valid(invalid)) for group, applicators in by_name.items(): for applicator, validator in applicators.items(): runner.bench_func( f"{group}: {applicator} valid", lambda validator=validator: validator.is_valid(valid), ) runner.bench_func( f"{group}: {applicator} invalid", lambda validator=validator: validator.is_valid(invalid), ) PK!+wccbenchmarks/useless_keywords.pynu[""" A benchmark for validation of schemas containing lots of useless keywords. Checks we filter them out once, ahead of time. """ from pyperf import Runner from jsonschema import Draft202012Validator NUM_USELESS = 100000 schema = dict( [ ("not", {"const": 42}), *((str(i), i) for i in range(NUM_USELESS)), ("type", "integer"), *((str(i), i) for i in range(NUM_USELESS, NUM_USELESS)), ("minimum", 37), ], ) validator = Draft202012Validator(schema) valid = 3737 invalid = 12 if __name__ == "__main__": runner = Runner() runner.bench_func("beginning of schema", lambda: validator.is_valid(42)) runner.bench_func("middle of schema", lambda: validator.is_valid("foo")) runner.bench_func("end of schema", lambda: validator.is_valid(12)) runner.bench_func("valid", lambda: validator.is_valid(3737)) PK! 
> benchmarks/validator_creation.pynu[from pyperf import Runner from jsonschema import Draft202012Validator schema = { "type": "array", "minLength": 1, "maxLength": 1, "items": {"type": "integer"}, } if __name__ == "__main__": Runner().bench_func("validator creation", Draft202012Validator, schema) PK!v qqbenchmarks/issue232/issue.jsonnu[[ { "description": "Petstore", "schema": { "title": "A JSON Schema for Swagger 2.0 API.", "id": "http://swagger.io/v2/schema.json#", "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "required": [ "swagger", "info", "paths" ], "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "swagger": { "type": "string", "enum": [ "2.0" ], "description": "The Swagger version of this document." }, "info": { "$ref": "#/definitions/info" }, "host": { "type": "string", "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", "description": "The host (name or ip) of the API. Example: 'swagger.io'" }, "basePath": { "type": "string", "pattern": "^/", "description": "The base path to the API. Example: '/api'." }, "schemes": { "$ref": "#/definitions/schemesList" }, "consumes": { "description": "A list of MIME types accepted by the API.", "allOf": [ { "$ref": "#/definitions/mediaTypeList" } ] }, "produces": { "description": "A list of MIME types the API can produce.", "allOf": [ { "$ref": "#/definitions/mediaTypeList" } ] }, "paths": { "$ref": "#/definitions/paths" }, "definitions": { "$ref": "#/definitions/definitions" }, "parameters": { "$ref": "#/definitions/parameterDefinitions" }, "responses": { "$ref": "#/definitions/responseDefinitions" }, "security": { "$ref": "#/definitions/security" }, "securityDefinitions": { "$ref": "#/definitions/securityDefinitions" }, "tags": { "type": "array", "items": { "$ref": "#/definitions/tag" }, "uniqueItems": true }, "externalDocs": { "$ref": "#/definitions/externalDocs" } }, "definitions": { "info": { "type": "object", "description": "General information about the API.", "required": [ "version", "title" ], "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "title": { "type": "string", "description": "A unique and precise title of the API." }, "version": { "type": "string", "description": "A semantic version number of the API." }, "description": { "type": "string", "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." }, "termsOfService": { "type": "string", "description": "The terms of service for the API." }, "contact": { "$ref": "#/definitions/contact" }, "license": { "$ref": "#/definitions/license" } } }, "contact": { "type": "object", "description": "Contact information for the owners of the API.", "additionalProperties": false, "properties": { "name": { "type": "string", "description": "The identifying name of the contact person/organization." }, "url": { "type": "string", "description": "The URL pointing to the contact information.", "format": "uri" }, "email": { "type": "string", "description": "The email address of the contact person/organization.", "format": "email" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "license": { "type": "object", "required": [ "name" ], "additionalProperties": false, "properties": { "name": { "type": "string", "description": "The name of the license type. It's encouraged to use an OSI compatible license." 
}, "url": { "type": "string", "description": "The URL pointing to the license.", "format": "uri" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "paths": { "type": "object", "description": "Relative paths to the individual endpoints. They must be relative to the 'basePath'.", "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" }, "^/": { "$ref": "#/definitions/pathItem" } }, "additionalProperties": false }, "definitions": { "type": "object", "additionalProperties": { "$ref": "#/definitions/schema" }, "description": "One or more JSON objects describing the schemas being consumed and produced by the API." }, "parameterDefinitions": { "type": "object", "additionalProperties": { "$ref": "#/definitions/parameter" }, "description": "One or more JSON representations for parameters" }, "responseDefinitions": { "type": "object", "additionalProperties": { "$ref": "#/definitions/response" }, "description": "One or more JSON representations for parameters" }, "externalDocs": { "type": "object", "additionalProperties": false, "description": "information about external documentation", "required": [ "url" ], "properties": { "description": { "type": "string" }, "url": { "type": "string", "format": "uri" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "examples": { "type": "object", "additionalProperties": true }, "mimeType": { "type": "string", "description": "The MIME type of the HTTP message." }, "operation": { "type": "object", "required": [ "responses" ], "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "tags": { "type": "array", "items": { "type": "string" }, "uniqueItems": true }, "summary": { "type": "string", "description": "A brief summary of the operation." }, "description": { "type": "string", "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." }, "externalDocs": { "$ref": "#/definitions/externalDocs" }, "operationId": { "type": "string", "description": "A unique identifier of the operation." 
}, "produces": { "description": "A list of MIME types the API can produce.", "allOf": [ { "$ref": "#/definitions/mediaTypeList" } ] }, "consumes": { "description": "A list of MIME types the API can consume.", "allOf": [ { "$ref": "#/definitions/mediaTypeList" } ] }, "parameters": { "$ref": "#/definitions/parametersList" }, "responses": { "$ref": "#/definitions/responses" }, "schemes": { "$ref": "#/definitions/schemesList" }, "deprecated": { "type": "boolean", "default": false }, "security": { "$ref": "#/definitions/security" } } }, "pathItem": { "type": "object", "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "$ref": { "type": "string" }, "get": { "$ref": "#/definitions/operation" }, "put": { "$ref": "#/definitions/operation" }, "post": { "$ref": "#/definitions/operation" }, "delete": { "$ref": "#/definitions/operation" }, "options": { "$ref": "#/definitions/operation" }, "head": { "$ref": "#/definitions/operation" }, "patch": { "$ref": "#/definitions/operation" }, "parameters": { "$ref": "#/definitions/parametersList" } } }, "responses": { "type": "object", "description": "Response objects names can either be any valid HTTP status code or 'default'.", "minProperties": 1, "additionalProperties": false, "patternProperties": { "^([0-9]{3})$|^(default)$": { "$ref": "#/definitions/responseValue" }, "^x-": { "$ref": "#/definitions/vendorExtension" } }, "not": { "type": "object", "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } } }, "responseValue": { "oneOf": [ { "$ref": "#/definitions/response" }, { "$ref": "#/definitions/jsonReference" } ] }, "response": { "type": "object", "required": [ "description" ], "properties": { "description": { "type": "string" }, "schema": { "oneOf": [ { "$ref": "#/definitions/schema" }, { "$ref": "#/definitions/fileSchema" } ] }, "headers": { "$ref": "#/definitions/headers" }, "examples": { "$ref": "#/definitions/examples" } }, "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "headers": { "type": "object", "additionalProperties": { "$ref": "#/definitions/header" } }, "header": { "type": "object", "additionalProperties": false, "required": [ "type" ], "properties": { "type": { "type": "string", "enum": [ "string", "number", "integer", "boolean", "array" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormat" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "vendorExtension": { "description": "Any property starting with x- is valid.", "additionalProperties": true, "additionalItems": true }, "bodyParameter": { "type": "object", "required": [ 
"name", "in", "schema" ], "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "description": { "type": "string", "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." }, "name": { "type": "string", "description": "The name of the parameter." }, "in": { "type": "string", "description": "Determines the location of the parameter.", "enum": [ "body" ] }, "required": { "type": "boolean", "description": "Determines whether or not this parameter is required or optional.", "default": false }, "schema": { "$ref": "#/definitions/schema" } }, "additionalProperties": false }, "headerParameterSubSchema": { "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "required": { "type": "boolean", "description": "Determines whether or not this parameter is required or optional.", "default": false }, "in": { "type": "string", "description": "Determines the location of the parameter.", "enum": [ "header" ] }, "description": { "type": "string", "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." }, "name": { "type": "string", "description": "The name of the parameter." }, "type": { "type": "string", "enum": [ "string", "number", "boolean", "integer", "array" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormat" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" } } }, "queryParameterSubSchema": { "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "required": { "type": "boolean", "description": "Determines whether or not this parameter is required or optional.", "default": false }, "in": { "type": "string", "description": "Determines the location of the parameter.", "enum": [ "query" ] }, "description": { "type": "string", "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." }, "name": { "type": "string", "description": "The name of the parameter." }, "allowEmptyValue": { "type": "boolean", "default": false, "description": "allows sending a parameter by name only or with an empty value." 
}, "type": { "type": "string", "enum": [ "string", "number", "boolean", "integer", "array" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormatWithMulti" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" } } }, "formDataParameterSubSchema": { "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "required": { "type": "boolean", "description": "Determines whether or not this parameter is required or optional.", "default": false }, "in": { "type": "string", "description": "Determines the location of the parameter.", "enum": [ "formData" ] }, "description": { "type": "string", "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." }, "name": { "type": "string", "description": "The name of the parameter." }, "allowEmptyValue": { "type": "boolean", "default": false, "description": "allows sending a parameter by name only or with an empty value." }, "type": { "type": "string", "enum": [ "string", "number", "boolean", "integer", "array", "file" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormatWithMulti" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" } } }, "pathParameterSubSchema": { "additionalProperties": false, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "required": [ "required" ], "properties": { "required": { "type": "boolean", "enum": [ true ], "description": "Determines whether or not this parameter is required or optional." }, "in": { "type": "string", "description": "Determines the location of the parameter.", "enum": [ "path" ] }, "description": { "type": "string", "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." }, "name": { "type": "string", "description": "The name of the parameter." 
}, "type": { "type": "string", "enum": [ "string", "number", "boolean", "integer", "array" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormat" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" } } }, "nonBodyParameter": { "type": "object", "required": [ "name", "in", "type" ], "oneOf": [ { "$ref": "#/definitions/headerParameterSubSchema" }, { "$ref": "#/definitions/formDataParameterSubSchema" }, { "$ref": "#/definitions/queryParameterSubSchema" }, { "$ref": "#/definitions/pathParameterSubSchema" } ] }, "parameter": { "oneOf": [ { "$ref": "#/definitions/bodyParameter" }, { "$ref": "#/definitions/nonBodyParameter" } ] }, "schema": { "type": "object", "description": "A deterministic version of a JSON Schema object.", "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "properties": { "$ref": { "type": "string" }, "format": { "type": "string" }, "title": { "$ref": "http://json-schema.org/draft-04/schema#/properties/title" }, "description": { "$ref": "http://json-schema.org/draft-04/schema#/properties/description" }, "default": { "$ref": "http://json-schema.org/draft-04/schema#/properties/default" }, "multipleOf": { "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" }, "maximum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" }, "exclusiveMaximum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" }, "minimum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" }, "exclusiveMinimum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" }, "maxLength": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" }, "minLength": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" }, "pattern": { "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" }, "maxItems": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" }, "minItems": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" }, "uniqueItems": { "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" }, "maxProperties": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" }, "minProperties": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" }, "required": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" }, "enum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" }, "additionalProperties": { "anyOf": [ { "$ref": "#/definitions/schema" }, { "type": "boolean" } ], "default": {} }, "type": { "$ref": "http://json-schema.org/draft-04/schema#/properties/type" }, "items": { "anyOf": [ { "$ref": "#/definitions/schema" }, { "type": 
"array", "minItems": 1, "items": { "$ref": "#/definitions/schema" } } ], "default": {} }, "allOf": { "type": "array", "minItems": 1, "items": { "$ref": "#/definitions/schema" } }, "properties": { "type": "object", "additionalProperties": { "$ref": "#/definitions/schema" }, "default": {} }, "discriminator": { "type": "string" }, "readOnly": { "type": "boolean", "default": false }, "xml": { "$ref": "#/definitions/xml" }, "externalDocs": { "$ref": "#/definitions/externalDocs" }, "example": {} }, "additionalProperties": false }, "fileSchema": { "type": "object", "description": "A deterministic version of a JSON Schema object.", "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } }, "required": [ "type" ], "properties": { "format": { "type": "string" }, "title": { "$ref": "http://json-schema.org/draft-04/schema#/properties/title" }, "description": { "$ref": "http://json-schema.org/draft-04/schema#/properties/description" }, "default": { "$ref": "http://json-schema.org/draft-04/schema#/properties/default" }, "required": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" }, "type": { "type": "string", "enum": [ "file" ] }, "readOnly": { "type": "boolean", "default": false }, "externalDocs": { "$ref": "#/definitions/externalDocs" }, "example": {} }, "additionalProperties": false }, "primitivesItems": { "type": "object", "additionalProperties": false, "properties": { "type": { "type": "string", "enum": [ "string", "number", "integer", "boolean", "array" ] }, "format": { "type": "string" }, "items": { "$ref": "#/definitions/primitivesItems" }, "collectionFormat": { "$ref": "#/definitions/collectionFormat" }, "default": { "$ref": "#/definitions/default" }, "maximum": { "$ref": "#/definitions/maximum" }, "exclusiveMaximum": { "$ref": "#/definitions/exclusiveMaximum" }, "minimum": { "$ref": "#/definitions/minimum" }, "exclusiveMinimum": { "$ref": "#/definitions/exclusiveMinimum" }, "maxLength": { "$ref": "#/definitions/maxLength" }, "minLength": { "$ref": "#/definitions/minLength" }, "pattern": { "$ref": "#/definitions/pattern" }, "maxItems": { "$ref": "#/definitions/maxItems" }, "minItems": { "$ref": "#/definitions/minItems" }, "uniqueItems": { "$ref": "#/definitions/uniqueItems" }, "enum": { "$ref": "#/definitions/enum" }, "multipleOf": { "$ref": "#/definitions/multipleOf" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "security": { "type": "array", "items": { "$ref": "#/definitions/securityRequirement" }, "uniqueItems": true }, "securityRequirement": { "type": "object", "additionalProperties": { "type": "array", "items": { "type": "string" }, "uniqueItems": true } }, "xml": { "type": "object", "additionalProperties": false, "properties": { "name": { "type": "string" }, "namespace": { "type": "string" }, "prefix": { "type": "string" }, "attribute": { "type": "boolean", "default": false }, "wrapped": { "type": "boolean", "default": false } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "tag": { "type": "object", "additionalProperties": false, "required": [ "name" ], "properties": { "name": { "type": "string" }, "description": { "type": "string" }, "externalDocs": { "$ref": "#/definitions/externalDocs" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "securityDefinitions": { "type": "object", "additionalProperties": { "oneOf": [ { "$ref": "#/definitions/basicAuthenticationSecurity" }, { "$ref": "#/definitions/apiKeySecurity" }, { "$ref": 
"#/definitions/oauth2ImplicitSecurity" }, { "$ref": "#/definitions/oauth2PasswordSecurity" }, { "$ref": "#/definitions/oauth2ApplicationSecurity" }, { "$ref": "#/definitions/oauth2AccessCodeSecurity" } ] } }, "basicAuthenticationSecurity": { "type": "object", "additionalProperties": false, "required": [ "type" ], "properties": { "type": { "type": "string", "enum": [ "basic" ] }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "apiKeySecurity": { "type": "object", "additionalProperties": false, "required": [ "type", "name", "in" ], "properties": { "type": { "type": "string", "enum": [ "apiKey" ] }, "name": { "type": "string" }, "in": { "type": "string", "enum": [ "header", "query" ] }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "oauth2ImplicitSecurity": { "type": "object", "additionalProperties": false, "required": [ "type", "flow", "authorizationUrl" ], "properties": { "type": { "type": "string", "enum": [ "oauth2" ] }, "flow": { "type": "string", "enum": [ "implicit" ] }, "scopes": { "$ref": "#/definitions/oauth2Scopes" }, "authorizationUrl": { "type": "string", "format": "uri" }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "oauth2PasswordSecurity": { "type": "object", "additionalProperties": false, "required": [ "type", "flow", "tokenUrl" ], "properties": { "type": { "type": "string", "enum": [ "oauth2" ] }, "flow": { "type": "string", "enum": [ "password" ] }, "scopes": { "$ref": "#/definitions/oauth2Scopes" }, "tokenUrl": { "type": "string", "format": "uri" }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "oauth2ApplicationSecurity": { "type": "object", "additionalProperties": false, "required": [ "type", "flow", "tokenUrl" ], "properties": { "type": { "type": "string", "enum": [ "oauth2" ] }, "flow": { "type": "string", "enum": [ "application" ] }, "scopes": { "$ref": "#/definitions/oauth2Scopes" }, "tokenUrl": { "type": "string", "format": "uri" }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "oauth2AccessCodeSecurity": { "type": "object", "additionalProperties": false, "required": [ "type", "flow", "authorizationUrl", "tokenUrl" ], "properties": { "type": { "type": "string", "enum": [ "oauth2" ] }, "flow": { "type": "string", "enum": [ "accessCode" ] }, "scopes": { "$ref": "#/definitions/oauth2Scopes" }, "authorizationUrl": { "type": "string", "format": "uri" }, "tokenUrl": { "type": "string", "format": "uri" }, "description": { "type": "string" } }, "patternProperties": { "^x-": { "$ref": "#/definitions/vendorExtension" } } }, "oauth2Scopes": { "type": "object", "additionalProperties": { "type": "string" } }, "mediaTypeList": { "type": "array", "items": { "$ref": "#/definitions/mimeType" }, "uniqueItems": true }, "parametersList": { "type": "array", "description": "The parameters needed to send a valid API call.", "additionalItems": false, "items": { "oneOf": [ { "$ref": "#/definitions/parameter" }, { "$ref": "#/definitions/jsonReference" } ] }, "uniqueItems": true }, "schemesList": { "type": "array", "description": "The transfer protocol of the API.", "items": { "type": "string", "enum": [ "http", "https", "ws", "wss" ] }, "uniqueItems": true }, "collectionFormat": { "type": "string", "enum": [ "csv", "ssv", "tsv", "pipes" 
], "default": "csv" }, "collectionFormatWithMulti": { "type": "string", "enum": [ "csv", "ssv", "tsv", "pipes", "multi" ], "default": "csv" }, "title": { "$ref": "http://json-schema.org/draft-04/schema#/properties/title" }, "description": { "$ref": "http://json-schema.org/draft-04/schema#/properties/description" }, "default": { "$ref": "http://json-schema.org/draft-04/schema#/properties/default" }, "multipleOf": { "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" }, "maximum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" }, "exclusiveMaximum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" }, "minimum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" }, "exclusiveMinimum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" }, "maxLength": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" }, "minLength": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" }, "pattern": { "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" }, "maxItems": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" }, "minItems": { "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" }, "uniqueItems": { "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" }, "enum": { "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" }, "jsonReference": { "type": "object", "required": [ "$ref" ], "additionalProperties": false, "properties": { "$ref": { "type": "string" } } } } }, "tests": [ { "description": "Example petsore", "data": { "swagger": "2.0", "info": { "description": "This is a sample server Petstore server. You can find out more about Swagger at [http://swagger.io](http://swagger.io) or on [irc.freenode.net, #swagger](http://swagger.io/irc/). 
For this sample, you can use the api key `special-key` to test the authorization filters.", "version": "1.0.0", "title": "Swagger Petstore", "termsOfService": "http://swagger.io/terms/", "contact": { "email": "apiteam@swagger.io" }, "license": { "name": "Apache 2.0", "url": "http://www.apache.org/licenses/LICENSE-2.0.html" } }, "host": "petstore.swagger.io", "basePath": "/v2", "tags": [ { "name": "pet", "description": "Everything about your Pets", "externalDocs": { "description": "Find out more", "url": "http://swagger.io" } }, { "name": "store", "description": "Access to Petstore orders" }, { "name": "user", "description": "Operations about user", "externalDocs": { "description": "Find out more about our store", "url": "http://swagger.io" } } ], "schemes": [ "http" ], "paths": { "/pet": { "post": { "tags": [ "pet" ], "summary": "Add a new pet to the store", "description": "", "operationId": "addPet", "consumes": [ "application/json", "application/xml" ], "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "Pet object that needs to be added to the store", "required": true, "schema": { "$ref": "#/definitions/Pet" } } ], "responses": { "405": { "description": "Invalid input" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] }, "put": { "tags": [ "pet" ], "summary": "Update an existing pet", "description": "", "operationId": "updatePet", "consumes": [ "application/json", "application/xml" ], "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "Pet object that needs to be added to the store", "required": true, "schema": { "$ref": "#/definitions/Pet" } } ], "responses": { "400": { "description": "Invalid ID supplied" }, "404": { "description": "Pet not found" }, "405": { "description": "Validation exception" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] } }, "/pet/findByStatus": { "get": { "tags": [ "pet" ], "summary": "Finds Pets by status", "description": "Multiple status values can be provided with comma separated strings", "operationId": "findPetsByStatus", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "status", "in": "query", "description": "Status values that need to be considered for filter", "required": true, "type": "array", "items": { "type": "string", "enum": [ "available", "pending", "sold" ], "default": "available" }, "collectionFormat": "multi" } ], "responses": { "200": { "description": "successful operation", "schema": { "type": "array", "items": { "$ref": "#/definitions/Pet" } } }, "400": { "description": "Invalid status value" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] } }, "/pet/findByTags": { "get": { "tags": [ "pet" ], "summary": "Finds Pets by tags", "description": "Muliple tags can be provided with comma separated strings. 
Use tag1, tag2, tag3 for testing.", "operationId": "findPetsByTags", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "tags", "in": "query", "description": "Tags to filter by", "required": true, "type": "array", "items": { "type": "string" }, "collectionFormat": "multi" } ], "responses": { "200": { "description": "successful operation", "schema": { "type": "array", "items": { "$ref": "#/definitions/Pet" } } }, "400": { "description": "Invalid tag value" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ], "deprecated": true } }, "/pet/{petId}": { "get": { "tags": [ "pet" ], "summary": "Find pet by ID", "description": "Returns a single pet", "operationId": "getPetById", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "petId", "in": "path", "description": "ID of pet to return", "required": true, "type": "integer", "format": "int64" } ], "responses": { "200": { "description": "successful operation", "schema": { "$ref": "#/definitions/Pet" } }, "400": { "description": "Invalid ID supplied" }, "404": { "description": "Pet not found" } }, "security": [ { "api_key": [] } ] }, "post": { "tags": [ "pet" ], "summary": "Updates a pet in the store with form data", "description": "", "operationId": "updatePetWithForm", "consumes": [ "application/x-www-form-urlencoded" ], "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "petId", "in": "path", "description": "ID of pet that needs to be updated", "required": true, "type": "integer", "format": "int64" }, { "name": "name", "in": "formData", "description": "Updated name of the pet", "required": false, "type": "string" }, { "name": "status", "in": "formData", "description": "Updated status of the pet", "required": false, "type": "string" } ], "responses": { "405": { "description": "Invalid input" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] }, "delete": { "tags": [ "pet" ], "summary": "Deletes a pet", "description": "", "operationId": "deletePet", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "api_key", "in": "header", "required": false, "type": "string" }, { "name": "petId", "in": "path", "description": "Pet id to delete", "required": true, "type": "integer", "format": "int64" } ], "responses": { "400": { "description": "Invalid ID supplied" }, "404": { "description": "Pet not found" } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] } }, "/pet/{petId}/uploadImage": { "post": { "tags": [ "pet" ], "summary": "uploads an image", "description": "", "operationId": "uploadFile", "consumes": [ "multipart/form-data" ], "produces": [ "application/json" ], "parameters": [ { "name": "petId", "in": "path", "description": "ID of pet to update", "required": true, "type": "integer", "format": "int64" }, { "name": "additionalMetadata", "in": "formData", "description": "Additional data to pass to server", "required": false, "type": "string" }, { "name": "file", "in": "formData", "description": "file to upload", "required": false, "type": "file" } ], "responses": { "200": { "description": "successful operation", "schema": { "$ref": "#/definitions/ApiResponse" } } }, "security": [ { "petstore_auth": [ "write:pets", "read:pets" ] } ] } }, "/store/inventory": { "get": { "tags": [ "store" ], "summary": "Returns pet inventories by status", "description": "Returns a map of status codes to quantities", "operationId": "getInventory", "produces": [ "application/json" ], "parameters": [], 
"responses": { "200": { "description": "successful operation", "schema": { "type": "object", "additionalProperties": { "type": "integer", "format": "int32" } } } }, "security": [ { "api_key": [] } ] } }, "/store/order": { "post": { "tags": [ "store" ], "summary": "Place an order for a pet", "description": "", "operationId": "placeOrder", "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "order placed for purchasing the pet", "required": true, "schema": { "$ref": "#/definitions/Order" } } ], "responses": { "200": { "description": "successful operation", "schema": { "$ref": "#/definitions/Order" } }, "400": { "description": "Invalid Order" } } } }, "/store/order/{orderId}": { "get": { "tags": [ "store" ], "summary": "Find purchase order by ID", "description": "For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions", "operationId": "getOrderById", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "orderId", "in": "path", "description": "ID of pet that needs to be fetched", "required": true, "type": "integer", "maximum": 10.0, "minimum": 1.0, "format": "int64" } ], "responses": { "200": { "description": "successful operation", "schema": { "$ref": "#/definitions/Order" } }, "400": { "description": "Invalid ID supplied" }, "404": { "description": "Order not found" } } }, "delete": { "tags": [ "store" ], "summary": "Delete purchase order by ID", "description": "For valid response try integer IDs with positive integer value. Negative or non-integer values will generate API errors", "operationId": "deleteOrder", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "orderId", "in": "path", "description": "ID of the order that needs to be deleted", "required": true, "type": "integer", "minimum": 1.0, "format": "int64" } ], "responses": { "400": { "description": "Invalid ID supplied" }, "404": { "description": "Order not found" } } } }, "/user": { "post": { "tags": [ "user" ], "summary": "Create user", "description": "This can only be done by the logged in user.", "operationId": "createUser", "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "Created user object", "required": true, "schema": { "$ref": "#/definitions/User" } } ], "responses": { "default": { "description": "successful operation" } } } }, "/user/createWithArray": { "post": { "tags": [ "user" ], "summary": "Creates list of users with given input array", "description": "", "operationId": "createUsersWithArrayInput", "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "List of user object", "required": true, "schema": { "type": "array", "items": { "$ref": "#/definitions/User" } } } ], "responses": { "default": { "description": "successful operation" } } } }, "/user/createWithList": { "post": { "tags": [ "user" ], "summary": "Creates list of users with given input array", "description": "", "operationId": "createUsersWithListInput", "produces": [ "application/xml", "application/json" ], "parameters": [ { "in": "body", "name": "body", "description": "List of user object", "required": true, "schema": { "type": "array", "items": { "$ref": "#/definitions/User" } } } ], "responses": { "default": { "description": "successful operation" } } } }, "/user/login": { "get": { "tags": [ "user" ], "summary": "Logs user into the system", "description": 
"", "operationId": "loginUser", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "username", "in": "query", "description": "The user name for login", "required": true, "type": "string" }, { "name": "password", "in": "query", "description": "The password for login in clear text", "required": true, "type": "string" } ], "responses": { "200": { "description": "successful operation", "schema": { "type": "string" }, "headers": { "X-Rate-Limit": { "type": "integer", "format": "int32", "description": "calls per hour allowed by the user" }, "X-Expires-After": { "type": "string", "format": "date-time", "description": "date in UTC when token expires" } } }, "400": { "description": "Invalid username/password supplied" } } } }, "/user/logout": { "get": { "tags": [ "user" ], "summary": "Logs out current logged in user session", "description": "", "operationId": "logoutUser", "produces": [ "application/xml", "application/json" ], "parameters": [], "responses": { "default": { "description": "successful operation" } } } }, "/user/{username}": { "get": { "tags": [ "user" ], "summary": "Get user by user name", "description": "", "operationId": "getUserByName", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "username", "in": "path", "description": "The name that needs to be fetched. Use user1 for testing. ", "required": true, "type": "string" } ], "responses": { "200": { "description": "successful operation", "schema": { "$ref": "#/definitions/User" } }, "400": { "description": "Invalid username supplied" }, "404": { "description": "User not found" } } }, "put": { "tags": [ "user" ], "summary": "Updated user", "description": "This can only be done by the logged in user.", "operationId": "updateUser", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "username", "in": "path", "description": "name that need to be updated", "required": true, "type": "string" }, { "in": "body", "name": "body", "description": "Updated user object", "required": true, "schema": { "$ref": "#/definitions/User" } } ], "responses": { "400": { "description": "Invalid user supplied" }, "404": { "description": "User not found" } } }, "delete": { "tags": [ "user" ], "summary": "Delete user", "description": "This can only be done by the logged in user.", "operationId": "deleteUser", "produces": [ "application/xml", "application/json" ], "parameters": [ { "name": "username", "in": "path", "description": "The name that needs to be deleted", "required": true, "type": "string" } ], "responses": { "400": { "description": "Invalid username supplied" }, "404": { "description": "User not found" } } } } }, "securityDefinitions": { "petstore_auth": { "type": "oauth2", "authorizationUrl": "http://petstore.swagger.io/oauth/dialog", "flow": "implicit", "scopes": { "write:pets": "modify pets in your account", "read:pets": "read your pets" } }, "api_key": { "type": "apiKey", "name": "api_key", "in": "header" } }, "definitions": { "Order": { "type": "object", "properties": { "id": { "type": "integer", "format": "int64" }, "petId": { "type": "integer", "format": "int64" }, "quantity": { "type": "integer", "format": "int32" }, "shipDate": { "type": "string", "format": "date-time" }, "status": { "type": "string", "description": "Order Status", "enum": [ "placed", "approved", "delivered" ] }, "complete": { "type": "boolean", "default": false } }, "xml": { "name": "Order" } }, "Category": { "type": "object", "properties": { "id": { "type": "integer", "format": 
"int64" }, "name": { "type": "string" } }, "xml": { "name": "Category" } }, "User": { "type": "object", "properties": { "id": { "type": "integer", "format": "int64" }, "username": { "type": "string" }, "firstName": { "type": "string" }, "lastName": { "type": "string" }, "email": { "type": "string" }, "password": { "type": "string" }, "phone": { "type": "string" }, "userStatus": { "type": "integer", "format": "int32", "description": "User Status" } }, "xml": { "name": "User" } }, "Tag": { "type": "object", "properties": { "id": { "type": "integer", "format": "int64" }, "name": { "type": "string" } }, "xml": { "name": "Tag" } }, "Pet": { "type": "object", "required": [ "name", "photoUrls" ], "properties": { "id": { "type": "integer", "format": "int64" }, "category": { "$ref": "#/definitions/Category" }, "name": { "type": "string", "example": "doggie" }, "photoUrls": { "type": "array", "xml": { "name": "photoUrl", "wrapped": true }, "items": { "type": "string" } }, "tags": { "type": "array", "xml": { "name": "tag", "wrapped": true }, "items": { "$ref": "#/definitions/Tag" } }, "status": { "type": "string", "description": "pet status in the store", "enum": [ "available", "pending", "sold" ] } }, "xml": { "name": "Pet" } }, "ApiResponse": { "type": "object", "properties": { "code": { "type": "integer", "format": "int32" }, "type": { "type": "string" }, "message": { "type": "string" } } } }, "externalDocs": { "description": "Find out more about Swagger", "url": "http://swagger.io" } }, "valid": true } ] } ] PK!ru<.benchmarks/__pycache__/__init__.cpython-38.pycnu[U afF@sdZdS)z? Benchmarks for validation. This package is *not* public API. N)__doc__rrO/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/benchmarks/__init__.pyPK!ֵ3benchmarks/__pycache__/const_vs_enum.cpython-38.pycnu[U af>@sdZddlmZddlmZdgdZdeeiZdeegiZeeZ e dZ eeZ eeZ e e snte e s|te e rte e rted kreZed d d ed dd eddd eddd dS)zH A benchmark for comparing equivalent validation of `const` and `enum`. )Runner)Draft202012Validator%dconstenumI__main__z const validcCs ttSN)ris_validvalidr r T/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/benchmarks/const_vs_enum.pyrz const invalidcCs ttSr )rr invalidr r r rrrz enum validcCs ttSr )rr r r r r rrrz enum invalidcCs ttSr )rr rr r r rrrN)r)__doc__ZpyperfrZ jsonschemarvaluelistZ const_schemaZ enum_schemar rrrr AssertionError__name__runnerZ bench_funcr r r rs&    PK!!].benchmarks/__pycache__/contains.cpython-38.pycnu[U af@sdZddlmZddlmZdddidZeeZdZdgdged Zdged dgdged Z dged dgZ dgeZ e d kreZ e d d de ddde ddde ddde ddddS)z7 A benchmark for validation of the `contains` keyword. )Runner)Draft202012Validatorarrayconst%)typecontainsi__main__ZbaselinecCs tgSN) validatoris_validrrO/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/benchmarks/contains.pyr beginningcCs ttSr )r rrrrrrrrmiddlecCs ttSr )r rrrrrrrrendcCs ttSr )r rrrrrrrrinvalidcCs ttSr )r rrrrrrrrN)__doc__ZpyperfrZ jsonschemarZschemar sizerrrr__name__runnerZ bench_funcrrrrs$  " PK!I.benchmarks/__pycache__/issue232.cpython-38.pycnu[U af @stdZddlmZddlmZddlmZddlmZddl Z eee j deddZ e d krpe jee jd dS) z{ A performance benchmark using the example from issue #232. See https://github.com/python-jsonschema/jsonschema/pull/232. 
PK!tests/__init__.pynu[PK!Aktests/_suite.pynu[""" Python representations of the JSON Schema Test Suite tests.
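A rough usage sketch (the draft name and validator below are illustrative; see ``Suite.version`` and ``Version.to_unittest_testcase`` defined in this module):

    suite = Suite()  # expects a JSON-Schema-Test-Suite checkout, see _find_suite
    draft7 = suite.version(name="draft7")
    TestDraft7 = draft7.to_unittest_testcase(
        draft7.cases(),
        Validator=jsonschema.Draft7Validator,
    )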
""" from __future__ import annotations from contextlib import suppress from functools import partial from pathlib import Path from typing import TYPE_CHECKING, Any import json import os import re import subprocess import sys import unittest from attrs import field, frozen from referencing import Registry import referencing.jsonschema if TYPE_CHECKING: from collections.abc import Iterable, Mapping, Sequence import pyperf from jsonschema.validators import _VALIDATORS import jsonschema _DELIMITERS = re.compile(r"[\W\- ]+") def _find_suite(): root = os.environ.get("JSON_SCHEMA_TEST_SUITE") if root is not None: return Path(root) root = Path(jsonschema.__file__).parent.parent / "json" if not root.is_dir(): # pragma: no cover raise ValueError( ( "Can't find the JSON-Schema-Test-Suite directory. " "Set the 'JSON_SCHEMA_TEST_SUITE' environment " "variable or run the tests from alongside a checkout " "of the suite." ), ) return root @frozen class Suite: _root: Path = field(factory=_find_suite) _remotes: referencing.jsonschema.SchemaRegistry = field(init=False) def __attrs_post_init__(self): jsonschema_suite = self._root.joinpath("bin", "jsonschema_suite") argv = [sys.executable, str(jsonschema_suite), "remotes"] remotes = subprocess.check_output(argv).decode("utf-8") resources = json.loads(remotes) li = "http://localhost:1234/locationIndependentIdentifierPre2019.json" li4 = "http://localhost:1234/locationIndependentIdentifierDraft4.json" registry = Registry().with_resources( [ ( li, referencing.jsonschema.DRAFT7.create_resource( contents=resources.pop(li), ), ), ( li4, referencing.jsonschema.DRAFT4.create_resource( contents=resources.pop(li4), ), ), ], ).with_contents( resources.items(), default_specification=referencing.jsonschema.DRAFT202012, ) object.__setattr__(self, "_remotes", registry) def benchmark(self, runner: pyperf.Runner): # pragma: no cover for name, Validator in _VALIDATORS.items(): self.version(name=name).benchmark( runner=runner, Validator=Validator, ) def version(self, name) -> Version: return Version( name=name, path=self._root / "tests" / name, remotes=self._remotes, ) @frozen class Version: _path: Path _remotes: referencing.jsonschema.SchemaRegistry name: str def benchmark(self, **kwargs): # pragma: no cover for case in self.cases(): case.benchmark(**kwargs) def cases(self) -> Iterable[_Case]: return self._cases_in(paths=self._path.glob("*.json")) def format_cases(self) -> Iterable[_Case]: return self._cases_in(paths=self._path.glob("optional/format/*.json")) def optional_cases_of(self, name: str) -> Iterable[_Case]: return self._cases_in(paths=[self._path / "optional" / f"{name}.json"]) def to_unittest_testcase(self, *groups, **kwargs): name = kwargs.pop("name", "Test" + self.name.title().replace("-", "")) methods = { method.__name__: method for method in ( test.to_unittest_method(**kwargs) for group in groups for case in group for test in case.tests ) } cls = type(name, (unittest.TestCase,), methods) # We're doing crazy things, so if they go wrong, like a function # behaving differently on some other interpreter, just make them # not happen. 
with suppress(Exception): cls.__module__ = _someone_save_us_the_module_of_the_caller() return cls def _cases_in(self, paths: Iterable[Path]) -> Iterable[_Case]: for path in paths: for case in json.loads(path.read_text(encoding="utf-8")): yield _Case.from_dict( case, version=self, subject=path.stem, remotes=self._remotes, ) @frozen class _Case: version: Version subject: str description: str schema: Mapping[str, Any] | bool tests: list[_Test] comment: str | None = None specification: Sequence[dict[str, str]] = () @classmethod def from_dict(cls, data, remotes, **kwargs): data.update(kwargs) tests = [ _Test( version=data["version"], subject=data["subject"], case_description=data["description"], schema=data["schema"], remotes=remotes, **test, ) for test in data.pop("tests") ] return cls(tests=tests, **data) def benchmark(self, runner: pyperf.Runner, **kwargs): # pragma: no cover for test in self.tests: runner.bench_func( test.fully_qualified_name, partial(test.validate_ignoring_errors, **kwargs), ) @frozen(repr=False) class _Test: version: Version subject: str case_description: str description: str data: Any schema: Mapping[str, Any] | bool valid: bool _remotes: referencing.jsonschema.SchemaRegistry comment: str | None = None def __repr__(self): # pragma: no cover return f"" @property def fully_qualified_name(self): # pragma: no cover return " > ".join( # noqa: FLY002 [ self.version.name, self.subject, self.case_description, self.description, ], ) def to_unittest_method(self, skip=lambda test: None, **kwargs): if self.valid: def fn(this): self.validate(**kwargs) else: def fn(this): with this.assertRaises(jsonschema.ValidationError): self.validate(**kwargs) fn.__name__ = "_".join( [ "test", _DELIMITERS.sub("_", self.subject), _DELIMITERS.sub("_", self.case_description), _DELIMITERS.sub("_", self.description), ], ) reason = skip(self) if reason is None or os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0": return fn elif os.environ.get("JSON_SCHEMA_EXPECTED_FAILURES", "0") != "0": # pragma: no cover # noqa: E501 return unittest.expectedFailure(fn) else: return unittest.skip(reason)(fn) def validate(self, Validator, **kwargs): Validator.check_schema(self.schema) validator = Validator( schema=self.schema, registry=self._remotes, **kwargs, ) if os.environ.get("JSON_SCHEMA_DEBUG", "0") != "0": # pragma: no cover breakpoint() # noqa: T100 validator.validate(instance=self.data) def validate_ignoring_errors(self, Validator): # pragma: no cover with suppress(jsonschema.ValidationError): self.validate(Validator=Validator) def _someone_save_us_the_module_of_the_caller(): """ The FQON of the module 2nd stack frames up from here. This is intended to allow us to dynamically return test case classes that are indistinguishable from being defined in the module that wants them. Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run the class that really is running. Save us all, this is all so so so so so terrible. """ return sys._getframe(2).f_globals["__name__"] PK!ĿZZtests/fuzz_validate.pynu[""" Fuzzing setup for OSS-Fuzz. See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the other half of the setup here. 
""" import sys from hypothesis import given, strategies import jsonschema PRIM = strategies.one_of( strategies.booleans(), strategies.integers(), strategies.floats(allow_nan=False, allow_infinity=False), strategies.text(), ) DICT = strategies.recursive( base=strategies.one_of( strategies.booleans(), strategies.dictionaries(strategies.text(), PRIM), ), extend=lambda inner: strategies.dictionaries(strategies.text(), inner), ) @given(obj1=DICT, obj2=DICT) def test_schemas(obj1, obj2): try: jsonschema.validate(instance=obj1, schema=obj2) except jsonschema.exceptions.ValidationError: pass except jsonschema.exceptions.SchemaError: pass def main(): atheris.instrument_all() atheris.Setup( sys.argv, test_schemas.hypothesis.fuzz_one_input, enable_python_coverage=True, ) atheris.Fuzz() if __name__ == "__main__": import atheris main() PK!nootests/test_cli.pynu[from contextlib import redirect_stderr, redirect_stdout from importlib import metadata from io import StringIO from json import JSONDecodeError from pathlib import Path from textwrap import dedent from unittest import TestCase import json import os import subprocess import sys import tempfile import warnings from jsonschema import Draft4Validator, Draft202012Validator from jsonschema.exceptions import ( SchemaError, ValidationError, _RefResolutionError, ) from jsonschema.validators import _LATEST_VERSION, validate with warnings.catch_warnings(): warnings.simplefilter("ignore") from jsonschema import cli def fake_validator(*errors): errors = list(reversed(errors)) class FakeValidator: def __init__(self, *args, **kwargs): pass def iter_errors(self, instance): if errors: return errors.pop() return [] # pragma: no cover @classmethod def check_schema(self, schema): pass return FakeValidator def fake_open(all_contents): def open(path): contents = all_contents.get(path) if contents is None: raise FileNotFoundError(path) return StringIO(contents) return open def _message_for(non_json): try: json.loads(non_json) except JSONDecodeError as error: return str(error) else: # pragma: no cover raise RuntimeError("Tried and failed to capture a JSON dump error.") class TestCLI(TestCase): def run_cli( self, argv, files=None, stdin=StringIO(), exit_code=0, **override, ): arguments = cli.parse_args(argv) arguments.update(override) self.assertFalse(hasattr(cli, "open")) cli.open = fake_open(files or {}) try: stdout, stderr = StringIO(), StringIO() actual_exit_code = cli.run( arguments, stdin=stdin, stdout=stdout, stderr=stderr, ) finally: del cli.open self.assertEqual( actual_exit_code, exit_code, msg=dedent( f""" Expected an exit code of {exit_code} != {actual_exit_code}. 
stdout: {stdout.getvalue()} stderr: {stderr.getvalue()} """, ), ) return stdout.getvalue(), stderr.getvalue() def assertOutputs(self, stdout="", stderr="", **kwargs): self.assertEqual( self.run_cli(**kwargs), (dedent(stdout), dedent(stderr)), ) def test_invalid_instance(self): error = ValidationError("I am an error!", instance=12) self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_instance=json.dumps(error.instance), ), validator=fake_validator([error]), argv=["-i", "some_instance", "some_schema"], exit_code=1, stderr="12: I am an error!\n", ) def test_invalid_instance_pretty_output(self): error = ValidationError("I am an error!", instance=12) self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_instance=json.dumps(error.instance), ), validator=fake_validator([error]), argv=["-i", "some_instance", "--output", "pretty", "some_schema"], exit_code=1, stderr="""\ ===[ValidationError]===(some_instance)=== I am an error! ----------------------------- """, ) def test_invalid_instance_explicit_plain_output(self): error = ValidationError("I am an error!", instance=12) self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_instance=json.dumps(error.instance), ), validator=fake_validator([error]), argv=["--output", "plain", "-i", "some_instance", "some_schema"], exit_code=1, stderr="12: I am an error!\n", ) def test_invalid_instance_multiple_errors(self): instance = 12 first = ValidationError("First error", instance=instance) second = ValidationError("Second error", instance=instance) self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_instance=json.dumps(instance), ), validator=fake_validator([first, second]), argv=["-i", "some_instance", "some_schema"], exit_code=1, stderr="""\ 12: First error 12: Second error """, ) def test_invalid_instance_multiple_errors_pretty_output(self): instance = 12 first = ValidationError("First error", instance=instance) second = ValidationError("Second error", instance=instance) self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_instance=json.dumps(instance), ), validator=fake_validator([first, second]), argv=["-i", "some_instance", "--output", "pretty", "some_schema"], exit_code=1, stderr="""\ ===[ValidationError]===(some_instance)=== First error ----------------------------- ===[ValidationError]===(some_instance)=== Second error ----------------------------- """, ) def test_multiple_invalid_instances(self): first_instance = 12 first_errors = [ ValidationError("An error", instance=first_instance), ValidationError("Another error", instance=first_instance), ] second_instance = "foo" second_errors = [ValidationError("BOOM", instance=second_instance)] self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_first_instance=json.dumps(first_instance), some_second_instance=json.dumps(second_instance), ), validator=fake_validator(first_errors, second_errors), argv=[ "-i", "some_first_instance", "-i", "some_second_instance", "some_schema", ], exit_code=1, stderr="""\ 12: An error 12: Another error foo: BOOM """, ) def test_multiple_invalid_instances_pretty_output(self): first_instance = 12 first_errors = [ ValidationError("An error", instance=first_instance), ValidationError("Another error", instance=first_instance), ] second_instance = "foo" second_errors = [ValidationError("BOOM", instance=second_instance)] self.assertOutputs( 
files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_first_instance=json.dumps(first_instance), some_second_instance=json.dumps(second_instance), ), validator=fake_validator(first_errors, second_errors), argv=[ "--output", "pretty", "-i", "some_first_instance", "-i", "some_second_instance", "some_schema", ], exit_code=1, stderr="""\ ===[ValidationError]===(some_first_instance)=== An error ----------------------------- ===[ValidationError]===(some_first_instance)=== Another error ----------------------------- ===[ValidationError]===(some_second_instance)=== BOOM ----------------------------- """, ) def test_custom_error_format(self): first_instance = 12 first_errors = [ ValidationError("An error", instance=first_instance), ValidationError("Another error", instance=first_instance), ] second_instance = "foo" second_errors = [ValidationError("BOOM", instance=second_instance)] self.assertOutputs( files=dict( some_schema='{"does not": "matter since it is stubbed"}', some_first_instance=json.dumps(first_instance), some_second_instance=json.dumps(second_instance), ), validator=fake_validator(first_errors, second_errors), argv=[ "--error-format", ":{error.message}._-_.{error.instance}:", "-i", "some_first_instance", "-i", "some_second_instance", "some_schema", ], exit_code=1, stderr=":An error._-_.12::Another error._-_.12::BOOM._-_.foo:", ) def test_invalid_schema(self): self.assertOutputs( files=dict(some_schema='{"type": 12}'), argv=["some_schema"], exit_code=1, stderr="""\ 12: 12 is not valid under any of the given schemas """, ) def test_invalid_schema_pretty_output(self): schema = {"type": 12} with self.assertRaises(SchemaError) as e: validate(schema=schema, instance="") error = str(e.exception) self.assertOutputs( files=dict(some_schema=json.dumps(schema)), argv=["--output", "pretty", "some_schema"], exit_code=1, stderr=( "===[SchemaError]===(some_schema)===\n\n" + str(error) + "\n-----------------------------\n" ), ) def test_invalid_schema_multiple_errors(self): self.assertOutputs( files=dict(some_schema='{"type": 12, "items": 57}'), argv=["some_schema"], exit_code=1, stderr="""\ 57: 57 is not of type 'object', 'boolean' """, ) def test_invalid_schema_multiple_errors_pretty_output(self): schema = {"type": 12, "items": 57} with self.assertRaises(SchemaError) as e: validate(schema=schema, instance="") error = str(e.exception) self.assertOutputs( files=dict(some_schema=json.dumps(schema)), argv=["--output", "pretty", "some_schema"], exit_code=1, stderr=( "===[SchemaError]===(some_schema)===\n\n" + str(error) + "\n-----------------------------\n" ), ) def test_invalid_schema_with_invalid_instance(self): """ "Validating" an instance that's invalid under an invalid schema just shows the schema error. 
""" self.assertOutputs( files=dict( some_schema='{"type": 12, "minimum": 30}', some_instance="13", ), argv=["-i", "some_instance", "some_schema"], exit_code=1, stderr="""\ 12: 12 is not valid under any of the given schemas """, ) def test_invalid_schema_with_invalid_instance_pretty_output(self): instance, schema = 13, {"type": 12, "minimum": 30} with self.assertRaises(SchemaError) as e: validate(schema=schema, instance=instance) error = str(e.exception) self.assertOutputs( files=dict( some_schema=json.dumps(schema), some_instance=json.dumps(instance), ), argv=["--output", "pretty", "-i", "some_instance", "some_schema"], exit_code=1, stderr=( "===[SchemaError]===(some_schema)===\n\n" + str(error) + "\n-----------------------------\n" ), ) def test_invalid_instance_continues_with_the_rest(self): self.assertOutputs( files=dict( some_schema='{"minimum": 30}', first_instance="not valid JSON!", second_instance="12", ), argv=[ "-i", "first_instance", "-i", "second_instance", "some_schema", ], exit_code=1, stderr="""\ Failed to parse 'first_instance': {} 12: 12 is less than the minimum of 30 """.format(_message_for("not valid JSON!")), ) def test_custom_error_format_applies_to_schema_errors(self): instance, schema = 13, {"type": 12, "minimum": 30} with self.assertRaises(SchemaError): validate(schema=schema, instance=instance) self.assertOutputs( files=dict(some_schema=json.dumps(schema)), argv=[ "--error-format", ":{error.message}._-_.{error.instance}:", "some_schema", ], exit_code=1, stderr=":12 is not valid under any of the given schemas._-_.12:", ) def test_instance_is_invalid_JSON(self): instance = "not valid JSON!" self.assertOutputs( files=dict(some_schema="{}", some_instance=instance), argv=["-i", "some_instance", "some_schema"], exit_code=1, stderr=f"""\ Failed to parse 'some_instance': {_message_for(instance)} """, ) def test_instance_is_invalid_JSON_pretty_output(self): stdout, stderr = self.run_cli( files=dict( some_schema="{}", some_instance="not valid JSON!", ), argv=["--output", "pretty", "-i", "some_instance", "some_schema"], exit_code=1, ) self.assertFalse(stdout) self.assertIn( "(some_instance)===\n\nTraceback (most recent call last):\n", stderr, ) self.assertNotIn("some_schema", stderr) def test_instance_is_invalid_JSON_on_stdin(self): instance = "not valid JSON!" self.assertOutputs( files=dict(some_schema="{}"), stdin=StringIO(instance), argv=["some_schema"], exit_code=1, stderr=f"""\ Failed to parse : {_message_for(instance)} """, ) def test_instance_is_invalid_JSON_on_stdin_pretty_output(self): stdout, stderr = self.run_cli( files=dict(some_schema="{}"), stdin=StringIO("not valid JSON!"), argv=["--output", "pretty", "some_schema"], exit_code=1, ) self.assertFalse(stdout) self.assertIn( "()===\n\nTraceback (most recent call last):\n", stderr, ) self.assertNotIn("some_schema", stderr) def test_schema_is_invalid_JSON(self): schema = "not valid JSON!" self.assertOutputs( files=dict(some_schema=schema), argv=["some_schema"], exit_code=1, stderr=f"""\ Failed to parse 'some_schema': {_message_for(schema)} """, ) def test_schema_is_invalid_JSON_pretty_output(self): stdout, stderr = self.run_cli( files=dict(some_schema="not valid JSON!"), argv=["--output", "pretty", "some_schema"], exit_code=1, ) self.assertFalse(stdout) self.assertIn( "(some_schema)===\n\nTraceback (most recent call last):\n", stderr, ) def test_schema_and_instance_are_both_invalid_JSON(self): """ Only the schema error is reported, as we abort immediately. 
""" schema, instance = "not valid JSON!", "also not valid JSON!" self.assertOutputs( files=dict(some_schema=schema, some_instance=instance), argv=["some_schema"], exit_code=1, stderr=f"""\ Failed to parse 'some_schema': {_message_for(schema)} """, ) def test_schema_and_instance_are_both_invalid_JSON_pretty_output(self): """ Only the schema error is reported, as we abort immediately. """ stdout, stderr = self.run_cli( files=dict( some_schema="not valid JSON!", some_instance="also not valid JSON!", ), argv=["--output", "pretty", "-i", "some_instance", "some_schema"], exit_code=1, ) self.assertFalse(stdout) self.assertIn( "(some_schema)===\n\nTraceback (most recent call last):\n", stderr, ) self.assertNotIn("some_instance", stderr) def test_instance_does_not_exist(self): self.assertOutputs( files=dict(some_schema="{}"), argv=["-i", "nonexisting_instance", "some_schema"], exit_code=1, stderr="""\ 'nonexisting_instance' does not exist. """, ) def test_instance_does_not_exist_pretty_output(self): self.assertOutputs( files=dict(some_schema="{}"), argv=[ "--output", "pretty", "-i", "nonexisting_instance", "some_schema", ], exit_code=1, stderr="""\ ===[FileNotFoundError]===(nonexisting_instance)=== 'nonexisting_instance' does not exist. ----------------------------- """, ) def test_schema_does_not_exist(self): self.assertOutputs( argv=["nonexisting_schema"], exit_code=1, stderr="'nonexisting_schema' does not exist.\n", ) def test_schema_does_not_exist_pretty_output(self): self.assertOutputs( argv=["--output", "pretty", "nonexisting_schema"], exit_code=1, stderr="""\ ===[FileNotFoundError]===(nonexisting_schema)=== 'nonexisting_schema' does not exist. ----------------------------- """, ) def test_neither_instance_nor_schema_exist(self): self.assertOutputs( argv=["-i", "nonexisting_instance", "nonexisting_schema"], exit_code=1, stderr="'nonexisting_schema' does not exist.\n", ) def test_neither_instance_nor_schema_exist_pretty_output(self): self.assertOutputs( argv=[ "--output", "pretty", "-i", "nonexisting_instance", "nonexisting_schema", ], exit_code=1, stderr="""\ ===[FileNotFoundError]===(nonexisting_schema)=== 'nonexisting_schema' does not exist. 
----------------------------- """, ) def test_successful_validation(self): self.assertOutputs( files=dict(some_schema="{}", some_instance="{}"), argv=["-i", "some_instance", "some_schema"], stdout="", stderr="", ) def test_successful_validation_pretty_output(self): self.assertOutputs( files=dict(some_schema="{}", some_instance="{}"), argv=["--output", "pretty", "-i", "some_instance", "some_schema"], stdout="===[SUCCESS]===(some_instance)===\n", stderr="", ) def test_successful_validation_of_stdin(self): self.assertOutputs( files=dict(some_schema="{}"), stdin=StringIO("{}"), argv=["some_schema"], stdout="", stderr="", ) def test_successful_validation_of_stdin_pretty_output(self): self.assertOutputs( files=dict(some_schema="{}"), stdin=StringIO("{}"), argv=["--output", "pretty", "some_schema"], stdout="===[SUCCESS]===()===\n", stderr="", ) def test_successful_validation_of_just_the_schema(self): self.assertOutputs( files=dict(some_schema="{}", some_instance="{}"), argv=["-i", "some_instance", "some_schema"], stdout="", stderr="", ) def test_successful_validation_of_just_the_schema_pretty_output(self): self.assertOutputs( files=dict(some_schema="{}", some_instance="{}"), argv=["--output", "pretty", "-i", "some_instance", "some_schema"], stdout="===[SUCCESS]===(some_instance)===\n", stderr="", ) def test_successful_validation_via_explicit_base_uri(self): ref_schema_file = tempfile.NamedTemporaryFile(delete=False) ref_schema_file.close() self.addCleanup(os.remove, ref_schema_file.name) ref_path = Path(ref_schema_file.name) ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' self.assertOutputs( files=dict(some_schema=schema, some_instance="1"), argv=[ "-i", "some_instance", "--base-uri", ref_path.parent.as_uri() + "/", "some_schema", ], stdout="", stderr="", ) def test_unsuccessful_validation_via_explicit_base_uri(self): ref_schema_file = tempfile.NamedTemporaryFile(delete=False) ref_schema_file.close() self.addCleanup(os.remove, ref_schema_file.name) ref_path = Path(ref_schema_file.name) ref_path.write_text('{"definitions": {"num": {"type": "integer"}}}') schema = f'{{"$ref": "{ref_path.name}#/definitions/num"}}' self.assertOutputs( files=dict(some_schema=schema, some_instance='"1"'), argv=[ "-i", "some_instance", "--base-uri", ref_path.parent.as_uri() + "/", "some_schema", ], exit_code=1, stdout="", stderr="1: '1' is not of type 'integer'\n", ) def test_nonexistent_file_with_explicit_base_uri(self): schema = '{"$ref": "someNonexistentFile.json#definitions/num"}' instance = "1" with self.assertRaises(_RefResolutionError) as e: self.assertOutputs( files=dict( some_schema=schema, some_instance=instance, ), argv=[ "-i", "some_instance", "--base-uri", Path.cwd().as_uri(), "some_schema", ], ) error = str(e.exception) self.assertIn(f"{os.sep}someNonexistentFile.json'", error) def test_invalid_explicit_base_uri(self): schema = '{"$ref": "foo.json#definitions/num"}' instance = "1" with self.assertRaises(_RefResolutionError) as e: self.assertOutputs( files=dict( some_schema=schema, some_instance=instance, ), argv=[ "-i", "some_instance", "--base-uri", "not@UR1", "some_schema", ], ) error = str(e.exception) self.assertEqual( error, "unknown url type: 'foo.json'", ) def test_it_validates_using_the_latest_validator_when_unspecified(self): # There isn't a better way now I can think of to ensure that the # latest version was used, given that the call to validator_for # is hidden inside the CLI, so guard that that's the case, and # 
this test will have to be updated when versions change until # we can think of a better way to ensure this behavior. self.assertIs(Draft202012Validator, _LATEST_VERSION) self.assertOutputs( files=dict(some_schema='{"const": "check"}', some_instance='"a"'), argv=["-i", "some_instance", "some_schema"], exit_code=1, stdout="", stderr="a: 'check' was expected\n", ) def test_it_validates_using_draft7_when_specified(self): """ Specifically, `const` validation applies for Draft 7. """ schema = """ { "$schema": "http://json-schema.org/draft-07/schema#", "const": "check" } """ instance = '"foo"' self.assertOutputs( files=dict(some_schema=schema, some_instance=instance), argv=["-i", "some_instance", "some_schema"], exit_code=1, stdout="", stderr="foo: 'check' was expected\n", ) def test_it_validates_using_draft4_when_specified(self): """ Specifically, `const` validation *does not* apply for Draft 4. """ schema = """ { "$schema": "http://json-schema.org/draft-04/schema#", "const": "check" } """ instance = '"foo"' self.assertOutputs( files=dict(some_schema=schema, some_instance=instance), argv=["-i", "some_instance", "some_schema"], stdout="", stderr="", ) class TestParser(TestCase): FakeValidator = fake_validator() def test_find_validator_by_fully_qualified_object_name(self): arguments = cli.parse_args( [ "--validator", "jsonschema.tests.test_cli.TestParser.FakeValidator", "--instance", "mem://some/instance", "mem://some/schema", ], ) self.assertIs(arguments["validator"], self.FakeValidator) def test_find_validator_in_jsonschema(self): arguments = cli.parse_args( [ "--validator", "Draft4Validator", "--instance", "mem://some/instance", "mem://some/schema", ], ) self.assertIs(arguments["validator"], Draft4Validator) def cli_output_for(self, *argv): stdout, stderr = StringIO(), StringIO() with redirect_stdout(stdout), redirect_stderr(stderr): # noqa: SIM117 with self.assertRaises(SystemExit): cli.parse_args(argv) return stdout.getvalue(), stderr.getvalue() def test_unknown_output(self): stdout, stderr = self.cli_output_for( "--output", "foo", "mem://some/schema", ) self.assertIn("invalid choice: 'foo'", stderr) self.assertFalse(stdout) def test_useless_error_format(self): stdout, stderr = self.cli_output_for( "--output", "pretty", "--error-format", "foo", "mem://some/schema", ) self.assertIn( "--error-format can only be used with --output plain", stderr, ) self.assertFalse(stdout) class TestCLIIntegration(TestCase): def test_license(self): output = subprocess.check_output( [sys.executable, "-m", "pip", "show", "jsonschema"], stderr=subprocess.STDOUT, ) self.assertIn(b"License: MIT", output) def test_version(self): version = subprocess.check_output( [sys.executable, "-W", "ignore", "-m", "jsonschema", "--version"], stderr=subprocess.STDOUT, ) version = version.decode("utf-8").strip() self.assertEqual(version, metadata.version("jsonschema")) def test_no_arguments_shows_usage_notes(self): output = subprocess.check_output( [sys.executable, "-m", "jsonschema"], stderr=subprocess.STDOUT, ) output_for_help = subprocess.check_output( [sys.executable, "-m", "jsonschema", "--help"], stderr=subprocess.STDOUT, ) self.assertEqual(output, output_for_help) PK!&,==tests/test_deprecations.pynu[from contextlib import contextmanager from io import BytesIO from unittest import TestCase, mock import importlib.metadata import json import subprocess import sys import urllib.request import referencing.exceptions from jsonschema import FormatChecker, exceptions, protocols, validators class TestDeprecations(TestCase): def 
test_version(self): """ As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. """ message = "Accessing jsonschema.__version__ is deprecated" with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import __version__ self.assertEqual(__version__, importlib.metadata.version("jsonschema")) self.assertEqual(w.filename, __file__) def test_validators_ErrorTree(self): """ As of v4.0.0, importing ErrorTree from jsonschema.validators is deprecated in favor of doing so from jsonschema.exceptions. """ message = "Importing ErrorTree from jsonschema.validators is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema.validators import ErrorTree self.assertEqual(ErrorTree, exceptions.ErrorTree) self.assertEqual(w.filename, __file__) def test_import_ErrorTree(self): """ As of v4.18.0, importing ErrorTree from the package root is deprecated in favor of doing so from jsonschema.exceptions. """ message = "Importing ErrorTree directly from the jsonschema package " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import ErrorTree self.assertEqual(ErrorTree, exceptions.ErrorTree) self.assertEqual(w.filename, __file__) def test_ErrorTree_setitem(self): """ As of v4.20.0, setting items on an ErrorTree is deprecated. """ e = exceptions.ValidationError("some error", path=["foo"]) tree = exceptions.ErrorTree() subtree = exceptions.ErrorTree(errors=[e]) message = "ErrorTree.__setitem__ is " with self.assertWarnsRegex(DeprecationWarning, message) as w: tree["foo"] = subtree self.assertEqual(tree["foo"], subtree) self.assertEqual(w.filename, __file__) def test_import_FormatError(self): """ As of v4.18.0, importing FormatError from the package root is deprecated in favor of doing so from jsonschema.exceptions. """ message = "Importing FormatError directly from the jsonschema package " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import FormatError self.assertEqual(FormatError, exceptions.FormatError) self.assertEqual(w.filename, __file__) def test_import_Validator(self): """ As of v4.19.0, importing Validator from the package root is deprecated in favor of doing so from jsonschema.protocols. """ message = "Importing Validator directly from the jsonschema package " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import Validator self.assertEqual(Validator, protocols.Validator) self.assertEqual(w.filename, __file__) def test_validators_validators(self): """ As of v4.0.0, accessing jsonschema.validators.validators is deprecated. """ message = "Accessing jsonschema.validators.validators is deprecated" with self.assertWarnsRegex(DeprecationWarning, message) as w: value = validators.validators self.assertEqual(value, validators._VALIDATORS) self.assertEqual(w.filename, __file__) def test_validators_meta_schemas(self): """ As of v4.0.0, accessing jsonschema.validators.meta_schemas is deprecated. """ message = "Accessing jsonschema.validators.meta_schemas is deprecated" with self.assertWarnsRegex(DeprecationWarning, message) as w: value = validators.meta_schemas self.assertEqual(value, validators._META_SCHEMAS) self.assertEqual(w.filename, __file__) def test_RefResolver_in_scope(self): """ As of v4.0.0, RefResolver.in_scope is deprecated. 
""" resolver = validators._RefResolver.from_schema({}) message = "jsonschema.RefResolver.in_scope is deprecated " with self.assertWarnsRegex(DeprecationWarning, message) as w: # noqa: SIM117 with resolver.in_scope("foo"): pass self.assertEqual(w.filename, __file__) def test_Validator_is_valid_two_arguments(self): """ As of v4.0.0, calling is_valid with two arguments (to provide a different schema) is deprecated. """ validator = validators.Draft7Validator({}) message = "Passing a schema to Validator.is_valid is deprecated " with self.assertWarnsRegex(DeprecationWarning, message) as w: result = validator.is_valid("foo", {"type": "number"}) self.assertFalse(result) self.assertEqual(w.filename, __file__) def test_Validator_iter_errors_two_arguments(self): """ As of v4.0.0, calling iter_errors with two arguments (to provide a different schema) is deprecated. """ validator = validators.Draft7Validator({}) message = "Passing a schema to Validator.iter_errors is deprecated " with self.assertWarnsRegex(DeprecationWarning, message) as w: error, = validator.iter_errors("foo", {"type": "number"}) self.assertEqual(error.validator, "type") self.assertEqual(w.filename, __file__) def test_Validator_resolver(self): """ As of v4.18.0, accessing Validator.resolver is deprecated. """ validator = validators.Draft7Validator({}) message = "Accessing Draft7Validator.resolver is " with self.assertWarnsRegex(DeprecationWarning, message) as w: self.assertIsInstance(validator.resolver, validators._RefResolver) self.assertEqual(w.filename, __file__) def test_RefResolver(self): """ As of v4.18.0, RefResolver is fully deprecated. """ message = "jsonschema.RefResolver is deprecated" with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import RefResolver self.assertEqual(w.filename, __file__) with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema.validators import RefResolver # noqa: F401, F811 self.assertEqual(w.filename, __file__) def test_RefResolutionError(self): """ As of v4.18.0, RefResolutionError is deprecated in favor of directly catching errors from the referencing library. """ message = "jsonschema.exceptions.RefResolutionError is deprecated" with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import RefResolutionError self.assertEqual(RefResolutionError, exceptions._RefResolutionError) self.assertEqual(w.filename, __file__) with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema.exceptions import RefResolutionError self.assertEqual(RefResolutionError, exceptions._RefResolutionError) self.assertEqual(w.filename, __file__) def test_catching_Unresolvable_directly(self): """ This behavior is the intended behavior (i.e. it's not deprecated), but given we do "tricksy" things in the iterim to wrap exceptions in a multiple inheritance subclass, we need to be extra sure it works and stays working. """ validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) with self.assertRaises(referencing.exceptions.Unresolvable) as e: validator.validate(12) expected = referencing.exceptions.Unresolvable(ref="urn:nothing") self.assertEqual( (e.exception, str(e.exception)), (expected, "Unresolvable: urn:nothing"), ) def test_catching_Unresolvable_via_RefResolutionError(self): """ Until RefResolutionError is removed, it is still possible to catch exceptions from reference resolution using it, even though they may have been raised by referencing. 
""" with self.assertWarns(DeprecationWarning): from jsonschema import RefResolutionError validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) with self.assertRaises(referencing.exceptions.Unresolvable) as u: validator.validate(12) with self.assertRaises(RefResolutionError) as e: validator.validate(12) self.assertEqual( (e.exception, str(e.exception)), (u.exception, "Unresolvable: urn:nothing"), ) def test_WrappedReferencingError_hashability(self): """ Ensure the wrapped referencing errors are hashable when possible. """ with self.assertWarns(DeprecationWarning): from jsonschema import RefResolutionError validator = validators.Draft202012Validator({"$ref": "urn:nothing"}) with self.assertRaises(referencing.exceptions.Unresolvable) as u: validator.validate(12) with self.assertRaises(RefResolutionError) as e: validator.validate(12) self.assertIn(e.exception, {u.exception}) self.assertIn(u.exception, {e.exception}) def test_Validator_subclassing(self): """ As of v4.12.0, subclassing a validator class produces an explicit deprecation warning. This was never intended to be public API (and some comments over the years in issues said so, but obviously that's not a great way to make sure it's followed). A future version will explicitly raise an error. """ message = "Subclassing validator classes is " with self.assertWarnsRegex(DeprecationWarning, message) as w: class Subclass(validators.Draft202012Validator): pass self.assertEqual(w.filename, __file__) with self.assertWarnsRegex(DeprecationWarning, message) as w: class AnotherSubclass(validators.create(meta_schema={})): pass def test_FormatChecker_cls_checks(self): """ As of v4.14.0, FormatChecker.cls_checks is deprecated without replacement. """ self.addCleanup(FormatChecker.checkers.pop, "boom", None) message = "FormatChecker.cls_checks " with self.assertWarnsRegex(DeprecationWarning, message) as w: FormatChecker.cls_checks("boom") self.assertEqual(w.filename, __file__) def test_draftN_format_checker(self): """ As of v4.16.0, accessing jsonschema.draftn_format_checker is deprecated in favor of Validator.FORMAT_CHECKER. 
""" message = "Accessing jsonschema.draft202012_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft202012_format_checker self.assertIs( draft202012_format_checker, validators.Draft202012Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) message = "Accessing jsonschema.draft201909_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft201909_format_checker self.assertIs( draft201909_format_checker, validators.Draft201909Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) message = "Accessing jsonschema.draft7_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft7_format_checker self.assertIs( draft7_format_checker, validators.Draft7Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) message = "Accessing jsonschema.draft6_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft6_format_checker self.assertIs( draft6_format_checker, validators.Draft6Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) message = "Accessing jsonschema.draft4_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft4_format_checker self.assertIs( draft4_format_checker, validators.Draft4Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) message = "Accessing jsonschema.draft3_format_checker is " with self.assertWarnsRegex(DeprecationWarning, message) as w: from jsonschema import draft3_format_checker self.assertIs( draft3_format_checker, validators.Draft3Validator.FORMAT_CHECKER, ) self.assertEqual(w.filename, __file__) with self.assertRaises(ImportError): from jsonschema import draft1234_format_checker # noqa: F401 def test_import_cli(self): """ As of v4.17.0, importing jsonschema.cli is deprecated. """ message = "The jsonschema CLI is deprecated and will be removed " with self.assertWarnsRegex(DeprecationWarning, message) as w: import jsonschema.cli importlib.reload(jsonschema.cli) self.assertEqual(w.filename, importlib.__file__) def test_cli(self): """ As of v4.17.0, the jsonschema CLI is deprecated. """ process = subprocess.run( [sys.executable, "-m", "jsonschema"], capture_output=True, check=True, ) self.assertIn(b"The jsonschema CLI is deprecated ", process.stderr) def test_automatic_remote_retrieval(self): """ Automatic retrieval of remote references is deprecated as of v4.18.0. """ ref = "http://bar#/$defs/baz" schema = {"$defs": {"baz": {"type": "integer"}}} if "requests" in sys.modules: # pragma: no cover self.addCleanup( sys.modules.__setitem__, "requests", sys.modules["requests"], ) sys.modules["requests"] = None @contextmanager def fake_urlopen(request): self.assertIsInstance(request, urllib.request.Request) self.assertEqual(request.full_url, "http://bar") # Ha ha urllib.request.Request "normalizes" header names and # Request.get_header does not also normalize them... 
(header, value), = request.header_items() self.assertEqual(header.lower(), "user-agent") self.assertEqual( value, "python-jsonschema (deprecated $ref resolution)", ) yield BytesIO(json.dumps(schema).encode("utf8")) validator = validators.Draft202012Validator({"$ref": ref}) message = "Automatically retrieving remote references " patch = mock.patch.object(urllib.request, "urlopen", new=fake_urlopen) with patch, self.assertWarnsRegex(DeprecationWarning, message): self.assertEqual( (validator.is_valid({}), validator.is_valid(37)), (False, True), ) PK!]?X?Xtests/test_exceptions.pynu[from unittest import TestCase import textwrap from jsonschema import exceptions from jsonschema.validators import _LATEST_VERSION class TestBestMatch(TestCase): def best_match_of(self, instance, schema): errors = list(_LATEST_VERSION(schema).iter_errors(instance)) msg = f"No errors found for {instance} under {schema!r}!" self.assertTrue(errors, msg=msg) best = exceptions.best_match(iter(errors)) reversed_best = exceptions.best_match(reversed(errors)) self.assertEqual( best._contents(), reversed_best._contents(), f"No consistent best match!\nGot: {best}\n\nThen: {reversed_best}", ) return best def test_shallower_errors_are_better_matches(self): schema = { "properties": { "foo": { "minProperties": 2, "properties": {"bar": {"type": "object"}}, }, }, } best = self.best_match_of(instance={"foo": {"bar": []}}, schema=schema) self.assertEqual(best.validator, "minProperties") def test_oneOf_and_anyOf_are_weak_matches(self): """ A property you *must* match is probably better than one you have to match a part of. """ schema = { "minProperties": 2, "anyOf": [{"type": "string"}, {"type": "number"}], "oneOf": [{"type": "string"}, {"type": "number"}], } best = self.best_match_of(instance={}, schema=schema) self.assertEqual(best.validator, "minProperties") def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): """ If the most relevant error is an anyOf, then we traverse its context and select the otherwise *least* relevant error, since in this case that means the most specific, deep, error inside the instance. I.e. since only one of the schemas must match, we look for the most relevant one. """ schema = { "properties": { "foo": { "anyOf": [ {"type": "string"}, {"properties": {"bar": {"type": "array"}}}, ], }, }, } best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) self.assertEqual(best.validator_value, "array") def test_no_anyOf_traversal_for_equally_relevant_errors(self): """ We don't traverse into an anyOf (as above) if all of its context errors seem to be equally "wrong" against the instance. """ schema = { "anyOf": [ {"type": "string"}, {"type": "integer"}, {"type": "object"}, ], } best = self.best_match_of(instance=[], schema=schema) self.assertEqual(best.validator, "anyOf") def test_anyOf_traversal_for_single_equally_relevant_error(self): """ We *do* traverse anyOf with a single nested error, even though it is vacuously equally relevant to itself. """ schema = { "anyOf": [ {"type": "string"}, ], } best = self.best_match_of(instance=[], schema=schema) self.assertEqual(best.validator, "type") def test_anyOf_traversal_for_single_sibling_errors(self): """ We *do* traverse anyOf with a single subschema that fails multiple times (e.g. on multiple items). 
""" schema = { "anyOf": [ {"items": {"const": 37}}, ], } best = self.best_match_of(instance=[12, 12], schema=schema) self.assertEqual(best.validator, "const") def test_anyOf_traversal_for_non_type_matching_sibling_errors(self): """ We *do* traverse anyOf with multiple subschemas when one does not type match. """ schema = { "anyOf": [ {"type": "object"}, {"items": {"const": 37}}, ], } best = self.best_match_of(instance=[12, 12], schema=schema) self.assertEqual(best.validator, "const") def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): """ If the most relevant error is an oneOf, then we traverse its context and select the otherwise *least* relevant error, since in this case that means the most specific, deep, error inside the instance. I.e. since only one of the schemas must match, we look for the most relevant one. """ schema = { "properties": { "foo": { "oneOf": [ {"type": "string"}, {"properties": {"bar": {"type": "array"}}}, ], }, }, } best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) self.assertEqual(best.validator_value, "array") def test_no_oneOf_traversal_for_equally_relevant_errors(self): """ We don't traverse into an oneOf (as above) if all of its context errors seem to be equally "wrong" against the instance. """ schema = { "oneOf": [ {"type": "string"}, {"type": "integer"}, {"type": "object"}, ], } best = self.best_match_of(instance=[], schema=schema) self.assertEqual(best.validator, "oneOf") def test_oneOf_traversal_for_single_equally_relevant_error(self): """ We *do* traverse oneOf with a single nested error, even though it is vacuously equally relevant to itself. """ schema = { "oneOf": [ {"type": "string"}, ], } best = self.best_match_of(instance=[], schema=schema) self.assertEqual(best.validator, "type") def test_oneOf_traversal_for_single_sibling_errors(self): """ We *do* traverse oneOf with a single subschema that fails multiple times (e.g. on multiple items). """ schema = { "oneOf": [ {"items": {"const": 37}}, ], } best = self.best_match_of(instance=[12, 12], schema=schema) self.assertEqual(best.validator, "const") def test_oneOf_traversal_for_non_type_matching_sibling_errors(self): """ We *do* traverse oneOf with multiple subschemas when one does not type match. """ schema = { "oneOf": [ {"type": "object"}, {"items": {"const": 37}}, ], } best = self.best_match_of(instance=[12, 12], schema=schema) self.assertEqual(best.validator, "const") def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): """ Now, if the error is allOf, we traverse but select the *most* relevant error from the context, because all schemas here must match anyways. """ schema = { "properties": { "foo": { "allOf": [ {"type": "string"}, {"properties": {"bar": {"type": "array"}}}, ], }, }, } best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) self.assertEqual(best.validator_value, "string") def test_nested_context_for_oneOf(self): """ We traverse into nested contexts (a oneOf containing an error in a nested oneOf here). 
""" schema = { "properties": { "foo": { "oneOf": [ {"type": "string"}, { "oneOf": [ {"type": "string"}, { "properties": { "bar": {"type": "array"}, }, }, ], }, ], }, }, } best = self.best_match_of(instance={"foo": {"bar": 12}}, schema=schema) self.assertEqual(best.validator_value, "array") def test_it_prioritizes_matching_types(self): schema = { "properties": { "foo": { "anyOf": [ {"type": "array", "minItems": 2}, {"type": "string", "minLength": 10}, ], }, }, } best = self.best_match_of(instance={"foo": "bar"}, schema=schema) self.assertEqual(best.validator, "minLength") reordered = { "properties": { "foo": { "anyOf": [ {"type": "string", "minLength": 10}, {"type": "array", "minItems": 2}, ], }, }, } best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) self.assertEqual(best.validator, "minLength") def test_it_prioritizes_matching_union_types(self): schema = { "properties": { "foo": { "anyOf": [ {"type": ["array", "object"], "minItems": 2}, {"type": ["integer", "string"], "minLength": 10}, ], }, }, } best = self.best_match_of(instance={"foo": "bar"}, schema=schema) self.assertEqual(best.validator, "minLength") reordered = { "properties": { "foo": { "anyOf": [ {"type": "string", "minLength": 10}, {"type": "array", "minItems": 2}, ], }, }, } best = self.best_match_of(instance={"foo": "bar"}, schema=reordered) self.assertEqual(best.validator, "minLength") def test_boolean_schemas(self): schema = {"properties": {"foo": False}} best = self.best_match_of(instance={"foo": "bar"}, schema=schema) self.assertIsNone(best.validator) def test_one_error(self): validator = _LATEST_VERSION({"minProperties": 2}) error, = validator.iter_errors({}) self.assertEqual( exceptions.best_match(validator.iter_errors({})).validator, "minProperties", ) def test_no_errors(self): validator = _LATEST_VERSION({}) self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) class TestByRelevance(TestCase): def test_short_paths_are_better_matches(self): shallow = exceptions.ValidationError("Oh no!", path=["baz"]) deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) match = max([shallow, deep], key=exceptions.relevance) self.assertIs(match, shallow) match = max([deep, shallow], key=exceptions.relevance) self.assertIs(match, shallow) def test_global_errors_are_even_better_matches(self): shallow = exceptions.ValidationError("Oh no!", path=[]) deep = exceptions.ValidationError("Oh yes!", path=["foo"]) errors = sorted([shallow, deep], key=exceptions.relevance) self.assertEqual( [list(error.path) for error in errors], [["foo"], []], ) errors = sorted([deep, shallow], key=exceptions.relevance) self.assertEqual( [list(error.path) for error in errors], [["foo"], []], ) def test_weak_keywords_are_lower_priority(self): weak = exceptions.ValidationError("Oh no!", path=[], validator="a") normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") best_match = exceptions.by_relevance(weak="a") match = max([weak, normal], key=best_match) self.assertIs(match, normal) match = max([normal, weak], key=best_match) self.assertIs(match, normal) def test_strong_keywords_are_higher_priority(self): weak = exceptions.ValidationError("Oh no!", path=[], validator="a") normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") strong = exceptions.ValidationError("Oh fine!", path=[], validator="c") best_match = exceptions.by_relevance(weak="a", strong="c") match = max([weak, normal, strong], key=best_match) self.assertIs(match, strong) match = max([strong, normal, weak], key=best_match) 
self.assertIs(match, strong) class TestErrorTree(TestCase): def test_it_knows_how_many_total_errors_it_contains(self): # FIXME: #442 errors = [ exceptions.ValidationError("Something", validator=i) for i in range(8) ] tree = exceptions.ErrorTree(errors) self.assertEqual(tree.total_errors, 8) def test_it_contains_an_item_if_the_item_had_an_error(self): errors = [exceptions.ValidationError("a message", path=["bar"])] tree = exceptions.ErrorTree(errors) self.assertIn("bar", tree) def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): errors = [exceptions.ValidationError("a message", path=["bar"])] tree = exceptions.ErrorTree(errors) self.assertNotIn("foo", tree) def test_keywords_that_failed_appear_in_errors_dict(self): error = exceptions.ValidationError("a message", validator="foo") tree = exceptions.ErrorTree([error]) self.assertEqual(tree.errors, {"foo": error}) def test_it_creates_a_child_tree_for_each_nested_path(self): errors = [ exceptions.ValidationError("a bar message", path=["bar"]), exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), ] tree = exceptions.ErrorTree(errors) self.assertIn(0, tree["bar"]) self.assertNotIn(1, tree["bar"]) def test_children_have_their_errors_dicts_built(self): e1, e2 = ( exceptions.ValidationError("1", validator="foo", path=["bar", 0]), exceptions.ValidationError("2", validator="quux", path=["bar", 0]), ) tree = exceptions.ErrorTree([e1, e2]) self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2}) def test_multiple_errors_with_instance(self): e1, e2 = ( exceptions.ValidationError( "1", validator="foo", path=["bar", "bar2"], instance="i1"), exceptions.ValidationError( "2", validator="quux", path=["foobar", 2], instance="i2"), ) exceptions.ErrorTree([e1, e2]) def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): error = exceptions.ValidationError("123", validator="foo", instance=[]) tree = exceptions.ErrorTree([error]) with self.assertRaises(IndexError): tree[0] def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): """ If a keyword refers to a path that isn't in the instance, the tree still properly returns a subtree for that path. 
""" error = exceptions.ValidationError( "a message", validator="foo", instance={}, path=["foo"], ) tree = exceptions.ErrorTree([error]) self.assertIsInstance(tree["foo"], exceptions.ErrorTree) def test_iter(self): e1, e2 = ( exceptions.ValidationError( "1", validator="foo", path=["bar", "bar2"], instance="i1"), exceptions.ValidationError( "2", validator="quux", path=["foobar", 2], instance="i2"), ) tree = exceptions.ErrorTree([e1, e2]) self.assertEqual(set(tree), {"bar", "foobar"}) def test_repr_single(self): error = exceptions.ValidationError( "1", validator="foo", path=["bar", "bar2"], instance="i1", ) tree = exceptions.ErrorTree([error]) self.assertEqual(repr(tree), "") def test_repr_multiple(self): e1, e2 = ( exceptions.ValidationError( "1", validator="foo", path=["bar", "bar2"], instance="i1"), exceptions.ValidationError( "2", validator="quux", path=["foobar", 2], instance="i2"), ) tree = exceptions.ErrorTree([e1, e2]) self.assertEqual(repr(tree), "") def test_repr_empty(self): tree = exceptions.ErrorTree([]) self.assertEqual(repr(tree), "") class TestErrorInitReprStr(TestCase): def make_error(self, **kwargs): defaults = dict( message="hello", validator="type", validator_value="string", instance=5, schema={"type": "string"}, ) defaults.update(kwargs) return exceptions.ValidationError(**defaults) def assertShows(self, expected, **kwargs): expected = textwrap.dedent(expected).rstrip("\n") error = self.make_error(**kwargs) message_line, _, rest = str(error).partition("\n") self.assertEqual(message_line, error.message) self.assertEqual(rest, expected) def test_it_calls_super_and_sets_args(self): error = self.make_error() self.assertGreater(len(error.args), 1) def test_repr(self): self.assertEqual( repr(exceptions.ValidationError(message="Hello!")), "", ) def test_unset_error(self): error = exceptions.ValidationError("message") self.assertEqual(str(error), "message") kwargs = { "validator": "type", "validator_value": "string", "instance": 5, "schema": {"type": "string"}, } # Just the message should show if any of the attributes are unset for attr in kwargs: k = dict(kwargs) del k[attr] error = exceptions.ValidationError("message", **k) self.assertEqual(str(error), "message") def test_empty_paths(self): self.assertShows( """ Failed validating 'type' in schema: {'type': 'string'} On instance: 5 """, path=[], schema_path=[], ) def test_one_item_paths(self): self.assertShows( """ Failed validating 'type' in schema: {'type': 'string'} On instance[0]: 5 """, path=[0], schema_path=["items"], ) def test_multiple_item_paths(self): self.assertShows( """ Failed validating 'type' in schema['items'][0]: {'type': 'string'} On instance[0]['a']: 5 """, path=[0, "a"], schema_path=["items", 0, 1], ) def test_uses_pprint(self): self.assertShows( """ Failed validating 'maxLength' in schema: {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11, 12: 12, 13: 13, 14: 14, 15: 15, 16: 16, 17: 17, 18: 18, 19: 19} On instance: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] """, instance=list(range(25)), schema=dict(zip(range(20), range(20))), validator="maxLength", ) def test_does_not_reorder_dicts(self): self.assertShows( """ Failed validating 'type' in schema: {'do': 3, 'not': 7, 'sort': 37, 'me': 73} On instance: {'here': 73, 'too': 37, 'no': 7, 'sorting': 3} """, schema={ "do": 3, "not": 7, "sort": 37, "me": 73, }, instance={ "here": 73, "too": 37, "no": 7, "sorting": 3, }, ) def test_str_works_with_instances_having_overriden_eq_operator(self): 
""" Check for #164 which rendered exceptions unusable when a `ValidationError` involved instances with an `__eq__` method that returned truthy values. """ class DontEQMeBro: def __eq__(this, other): # pragma: no cover self.fail("Don't!") def __ne__(this, other): # pragma: no cover self.fail("Don't!") instance = DontEQMeBro() error = exceptions.ValidationError( "a message", validator="foo", instance=instance, validator_value="some", schema="schema", ) self.assertIn(repr(instance), str(error)) class TestHashable(TestCase): def test_hashable(self): {exceptions.ValidationError("")} {exceptions.SchemaError("")} PK!=t t tests/test_format.pynu[""" Tests for the parts of jsonschema related to the :kw:`format` keyword. """ from unittest import TestCase from jsonschema import FormatChecker, ValidationError from jsonschema.exceptions import FormatError from jsonschema.validators import Draft4Validator BOOM = ValueError("Boom!") BANG = ZeroDivisionError("Bang!") def boom(thing): if thing == "bang": raise BANG raise BOOM class TestFormatChecker(TestCase): def test_it_can_validate_no_formats(self): checker = FormatChecker(formats=()) self.assertFalse(checker.checkers) def test_it_raises_a_key_error_for_unknown_formats(self): with self.assertRaises(KeyError): FormatChecker(formats=["o noes"]) def test_it_can_register_cls_checkers(self): original = dict(FormatChecker.checkers) self.addCleanup(FormatChecker.checkers.pop, "boom") with self.assertWarns(DeprecationWarning): FormatChecker.cls_checks("boom")(boom) self.assertEqual( FormatChecker.checkers, dict(original, boom=(boom, ())), ) def test_it_can_register_checkers(self): checker = FormatChecker() checker.checks("boom")(boom) self.assertEqual( checker.checkers, dict(FormatChecker.checkers, boom=(boom, ())), ) def test_it_catches_registered_errors(self): checker = FormatChecker() checker.checks("boom", raises=type(BOOM))(boom) with self.assertRaises(FormatError) as cm: checker.check(instance=12, format="boom") self.assertIs(cm.exception.cause, BOOM) self.assertIs(cm.exception.__cause__, BOOM) self.assertEqual(str(cm.exception), "12 is not a 'boom'") # Unregistered errors should not be caught with self.assertRaises(type(BANG)): checker.check(instance="bang", format="boom") def test_format_error_causes_become_validation_error_causes(self): checker = FormatChecker() checker.checks("boom", raises=ValueError)(boom) validator = Draft4Validator({"format": "boom"}, format_checker=checker) with self.assertRaises(ValidationError) as cm: validator.validate("BOOM") self.assertIs(cm.exception.cause, BOOM) self.assertIs(cm.exception.__cause__, BOOM) def test_format_checkers_come_with_defaults(self): # This is bad :/ but relied upon. # The docs for quite awhile recommended people do things like # validate(..., format_checker=FormatChecker()) # We should change that, but we can't without deprecation... checker = FormatChecker() with self.assertRaises(FormatError): checker.check(instance="not-an-ipv4", format="ipv4") def test_repr(self): checker = FormatChecker(formats=()) checker.checks("foo")(lambda thing: True) # pragma: no cover checker.checks("bar")(lambda thing: True) # pragma: no cover checker.checks("baz")(lambda thing: True) # pragma: no cover self.assertEqual( repr(checker), "", ) PK!!!#tests/test_jsonschema_test_suite.pynu[""" Test runner for the JSON Schema official test suite Tests comprehensive correctness of each draft's validator. See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details. 
""" import sys from jsonschema.tests._suite import Suite import jsonschema SUITE = Suite() DRAFT3 = SUITE.version(name="draft3") DRAFT4 = SUITE.version(name="draft4") DRAFT6 = SUITE.version(name="draft6") DRAFT7 = SUITE.version(name="draft7") DRAFT201909 = SUITE.version(name="draft2019-09") DRAFT202012 = SUITE.version(name="draft2020-12") def skip(message, **kwargs): def skipper(test): if all(value == getattr(test, attr) for attr, value in kwargs.items()): return message return skipper def missing_format(Validator): def missing_format(test): # pragma: no cover schema = test.schema if ( schema is True or schema is False or "format" not in schema or schema["format"] in Validator.FORMAT_CHECKER.checkers or test.valid ): return return f"Format checker {schema['format']!r} not found." return missing_format def complex_email_validation(test): if test.subject != "email": return message = "Complex email validation is (intentionally) unsupported." return skip( message=message, description="an invalid domain", )(test) or skip( message=message, description="an invalid IPv4-address-literal", )(test) or skip( message=message, description="dot after local part is not valid", )(test) or skip( message=message, description="dot before local part is not valid", )(test) or skip( message=message, description="two subsequent dots inside local part are not valid", )(test) if sys.version_info < (3, 9): # pragma: no cover message = "Rejecting leading zeros is 3.9+" allowed_leading_zeros = skip( message=message, subject="ipv4", description="invalid leading zeroes, as they are treated as octals", ) else: def allowed_leading_zeros(test): # pragma: no cover return def leap_second(test): message = "Leap seconds are unsupported." return skip( message=message, subject="time", description="a valid time string with leap second", )(test) or skip( message=message, subject="time", description="a valid time string with leap second, Zulu", )(test) or skip( message=message, subject="time", description="a valid time string with leap second with offset", )(test) or skip( message=message, subject="time", description="valid leap second, positive time-offset", )(test) or skip( message=message, subject="time", description="valid leap second, negative time-offset", )(test) or skip( message=message, subject="time", description="valid leap second, large positive time-offset", )(test) or skip( message=message, subject="time", description="valid leap second, large negative time-offset", )(test) or skip( message=message, subject="time", description="valid leap second, zero time-offset", )(test) or skip( message=message, subject="date-time", description="a valid date-time with a leap second, UTC", )(test) or skip( message=message, subject="date-time", description="a valid date-time with a leap second, with minus offset", )(test) TestDraft3 = DRAFT3.to_unittest_testcase( DRAFT3.cases(), DRAFT3.format_cases(), DRAFT3.optional_cases_of(name="bignum"), DRAFT3.optional_cases_of(name="non-bmp-regex"), DRAFT3.optional_cases_of(name="zeroTerminatedFloats"), Validator=jsonschema.Draft3Validator, format_checker=jsonschema.Draft3Validator.FORMAT_CHECKER, skip=lambda test: ( missing_format(jsonschema.Draft3Validator)(test) or complex_email_validation(test) ), ) TestDraft4 = DRAFT4.to_unittest_testcase( DRAFT4.cases(), DRAFT4.format_cases(), DRAFT4.optional_cases_of(name="bignum"), DRAFT4.optional_cases_of(name="float-overflow"), DRAFT4.optional_cases_of(name="id"), DRAFT4.optional_cases_of(name="non-bmp-regex"), 
DRAFT4.optional_cases_of(name="zeroTerminatedFloats"), Validator=jsonschema.Draft4Validator, format_checker=jsonschema.Draft4Validator.FORMAT_CHECKER, skip=lambda test: ( allowed_leading_zeros(test) or leap_second(test) or missing_format(jsonschema.Draft4Validator)(test) or complex_email_validation(test) ), ) TestDraft6 = DRAFT6.to_unittest_testcase( DRAFT6.cases(), DRAFT6.format_cases(), DRAFT6.optional_cases_of(name="bignum"), DRAFT6.optional_cases_of(name="float-overflow"), DRAFT6.optional_cases_of(name="id"), DRAFT6.optional_cases_of(name="non-bmp-regex"), Validator=jsonschema.Draft6Validator, format_checker=jsonschema.Draft6Validator.FORMAT_CHECKER, skip=lambda test: ( allowed_leading_zeros(test) or leap_second(test) or missing_format(jsonschema.Draft6Validator)(test) or complex_email_validation(test) ), ) TestDraft7 = DRAFT7.to_unittest_testcase( DRAFT7.cases(), DRAFT7.format_cases(), DRAFT7.optional_cases_of(name="bignum"), DRAFT7.optional_cases_of(name="cross-draft"), DRAFT7.optional_cases_of(name="float-overflow"), DRAFT6.optional_cases_of(name="id"), DRAFT7.optional_cases_of(name="non-bmp-regex"), DRAFT7.optional_cases_of(name="unknownKeyword"), Validator=jsonschema.Draft7Validator, format_checker=jsonschema.Draft7Validator.FORMAT_CHECKER, skip=lambda test: ( allowed_leading_zeros(test) or leap_second(test) or missing_format(jsonschema.Draft7Validator)(test) or complex_email_validation(test) ), ) TestDraft201909 = DRAFT201909.to_unittest_testcase( DRAFT201909.cases(), DRAFT201909.optional_cases_of(name="anchor"), DRAFT201909.optional_cases_of(name="bignum"), DRAFT201909.optional_cases_of(name="cross-draft"), DRAFT201909.optional_cases_of(name="float-overflow"), DRAFT201909.optional_cases_of(name="id"), DRAFT201909.optional_cases_of(name="no-schema"), DRAFT201909.optional_cases_of(name="non-bmp-regex"), DRAFT201909.optional_cases_of(name="refOfUnknownKeyword"), DRAFT201909.optional_cases_of(name="unknownKeyword"), Validator=jsonschema.Draft201909Validator, skip=skip( message="Vocabulary support is still in-progress.", subject="vocabulary", description=( "no validation: invalid number, but it still validates" ), ), ) TestDraft201909Format = DRAFT201909.to_unittest_testcase( DRAFT201909.format_cases(), name="TestDraft201909Format", Validator=jsonschema.Draft201909Validator, format_checker=jsonschema.Draft201909Validator.FORMAT_CHECKER, skip=lambda test: ( complex_email_validation(test) or allowed_leading_zeros(test) or leap_second(test) or missing_format(jsonschema.Draft201909Validator)(test) or complex_email_validation(test) ), ) TestDraft202012 = DRAFT202012.to_unittest_testcase( DRAFT202012.cases(), DRAFT201909.optional_cases_of(name="anchor"), DRAFT202012.optional_cases_of(name="bignum"), DRAFT202012.optional_cases_of(name="cross-draft"), DRAFT202012.optional_cases_of(name="float-overflow"), DRAFT202012.optional_cases_of(name="id"), DRAFT202012.optional_cases_of(name="no-schema"), DRAFT202012.optional_cases_of(name="non-bmp-regex"), DRAFT202012.optional_cases_of(name="refOfUnknownKeyword"), DRAFT202012.optional_cases_of(name="unknownKeyword"), Validator=jsonschema.Draft202012Validator, skip=skip( message="Vocabulary support is still in-progress.", subject="vocabulary", description=( "no validation: invalid number, but it still validates" ), ), ) TestDraft202012Format = DRAFT202012.to_unittest_testcase( DRAFT202012.format_cases(), name="TestDraft202012Format", Validator=jsonschema.Draft202012Validator, format_checker=jsonschema.Draft202012Validator.FORMAT_CHECKER, skip=lambda 
test: ( complex_email_validation(test) or allowed_leading_zeros(test) or leap_second(test) or missing_format(jsonschema.Draft202012Validator)(test) or complex_email_validation(test) ), ) PK!ٸnAAtests/test_types.pynu[""" Tests for the `TypeChecker`-based type interface. The actual correctness of the type checking is handled in `test_jsonschema_test_suite`; these tests check that TypeChecker functions correctly at a more granular level. """ from collections import namedtuple from unittest import TestCase from jsonschema import ValidationError, _keywords from jsonschema._types import TypeChecker from jsonschema.exceptions import UndefinedTypeCheck, UnknownType from jsonschema.validators import Draft202012Validator, extend def equals_2(checker, instance): return instance == 2 def is_namedtuple(instance): return isinstance(instance, tuple) and getattr(instance, "_fields", None) def is_object_or_named_tuple(checker, instance): if Draft202012Validator.TYPE_CHECKER.is_type(instance, "object"): return True return is_namedtuple(instance) class TestTypeChecker(TestCase): def test_is_type(self): checker = TypeChecker({"two": equals_2}) self.assertEqual( ( checker.is_type(instance=2, type="two"), checker.is_type(instance="bar", type="two"), ), (True, False), ) def test_is_unknown_type(self): with self.assertRaises(UndefinedTypeCheck) as e: TypeChecker().is_type(4, "foobar") self.assertIn( "'foobar' is unknown to this type checker", str(e.exception), ) self.assertTrue( e.exception.__suppress_context__, msg="Expected the internal KeyError to be hidden.", ) def test_checks_can_be_added_at_init(self): checker = TypeChecker({"two": equals_2}) self.assertEqual(checker, TypeChecker().redefine("two", equals_2)) def test_redefine_existing_type(self): self.assertEqual( TypeChecker().redefine("two", object()).redefine("two", equals_2), TypeChecker().redefine("two", equals_2), ) def test_remove(self): self.assertEqual( TypeChecker({"two": equals_2}).remove("two"), TypeChecker(), ) def test_remove_unknown_type(self): with self.assertRaises(UndefinedTypeCheck) as context: TypeChecker().remove("foobar") self.assertIn("foobar", str(context.exception)) def test_redefine_many(self): self.assertEqual( TypeChecker().redefine_many({"foo": int, "bar": str}), TypeChecker().redefine("foo", int).redefine("bar", str), ) def test_remove_multiple(self): self.assertEqual( TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"), TypeChecker(), ) def test_type_check_can_raise_key_error(self): """ Make sure no one writes: try: self._type_checkers[type](...) except KeyError: ignoring the fact that the function itself can raise that. 
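
        A safe lookup keeps the two failure modes apart, roughly (an
        illustrative sketch, not the package's actual source):

            try:
                fn = self._type_checkers[type]
            except KeyError:
                raise UndefinedTypeCheck(type) from None
            return fn(self, instance)  # a KeyError raised inside fn propagates

        so only an unknown type *name* becomes UndefinedTypeCheck, while a
        KeyError raised by the checker function itself is re-raised as-is.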
""" error = KeyError("Stuff") def raises_keyerror(checker, instance): raise error with self.assertRaises(KeyError) as context: TypeChecker({"foo": raises_keyerror}).is_type(4, "foo") self.assertIs(context.exception, error) def test_repr(self): checker = TypeChecker({"foo": is_namedtuple, "bar": is_namedtuple}) self.assertEqual(repr(checker), "") class TestCustomTypes(TestCase): def test_simple_type_can_be_extended(self): def int_or_str_int(checker, instance): if not isinstance(instance, (int, str)): return False try: int(instance) except ValueError: return False return True CustomValidator = extend( Draft202012Validator, type_checker=Draft202012Validator.TYPE_CHECKER.redefine( "integer", int_or_str_int, ), ) validator = CustomValidator({"type": "integer"}) validator.validate(4) validator.validate("4") with self.assertRaises(ValidationError): validator.validate(4.4) with self.assertRaises(ValidationError): validator.validate("foo") def test_object_can_be_extended(self): schema = {"type": "object"} Point = namedtuple("Point", ["x", "y"]) type_checker = Draft202012Validator.TYPE_CHECKER.redefine( "object", is_object_or_named_tuple, ) CustomValidator = extend( Draft202012Validator, type_checker=type_checker, ) validator = CustomValidator(schema) validator.validate(Point(x=4, y=5)) def test_object_extensions_require_custom_validators(self): schema = {"type": "object", "required": ["x"]} type_checker = Draft202012Validator.TYPE_CHECKER.redefine( "object", is_object_or_named_tuple, ) CustomValidator = extend( Draft202012Validator, type_checker=type_checker, ) validator = CustomValidator(schema) Point = namedtuple("Point", ["x", "y"]) # Cannot handle required with self.assertRaises(ValidationError): validator.validate(Point(x=4, y=5)) def test_object_extensions_can_handle_custom_validators(self): schema = { "type": "object", "required": ["x"], "properties": {"x": {"type": "integer"}}, } type_checker = Draft202012Validator.TYPE_CHECKER.redefine( "object", is_object_or_named_tuple, ) def coerce_named_tuple(fn): def coerced(validator, value, instance, schema): if is_namedtuple(instance): instance = instance._asdict() return fn(validator, value, instance, schema) return coerced required = coerce_named_tuple(_keywords.required) properties = coerce_named_tuple(_keywords.properties) CustomValidator = extend( Draft202012Validator, type_checker=type_checker, validators={"required": required, "properties": properties}, ) validator = CustomValidator(schema) Point = namedtuple("Point", ["x", "y"]) # Can now process required and properties validator.validate(Point(x=4, y=5)) with self.assertRaises(ValidationError): validator.validate(Point(x="not an integer", y=5)) # As well as still handle objects. 
validator.validate({"x": 4, "y": 5}) with self.assertRaises(ValidationError): validator.validate({"x": "not an integer", "y": 5}) def test_unknown_type(self): with self.assertRaises(UnknownType) as e: Draft202012Validator({}).is_type(12, "some unknown type") self.assertIn("'some unknown type'", str(e.exception)) PK!{'CCtests/test_utils.pynu[from math import nan from unittest import TestCase from jsonschema._utils import equal class TestEqual(TestCase): def test_none(self): self.assertTrue(equal(None, None)) def test_nan(self): self.assertTrue(equal(nan, nan)) class TestDictEqual(TestCase): def test_equal_dictionaries(self): dict_1 = {"a": "b", "c": "d"} dict_2 = {"c": "d", "a": "b"} self.assertTrue(equal(dict_1, dict_2)) def test_equal_dictionaries_with_nan(self): dict_1 = {"a": nan, "c": "d"} dict_2 = {"c": "d", "a": nan} self.assertTrue(equal(dict_1, dict_2)) def test_missing_key(self): dict_1 = {"a": "b", "c": "d"} dict_2 = {"c": "d", "x": "b"} self.assertFalse(equal(dict_1, dict_2)) def test_additional_key(self): dict_1 = {"a": "b", "c": "d"} dict_2 = {"c": "d", "a": "b", "x": "x"} self.assertFalse(equal(dict_1, dict_2)) def test_missing_value(self): dict_1 = {"a": "b", "c": "d"} dict_2 = {"c": "d", "a": "x"} self.assertFalse(equal(dict_1, dict_2)) def test_empty_dictionaries(self): dict_1 = {} dict_2 = {} self.assertTrue(equal(dict_1, dict_2)) def test_one_none(self): dict_1 = None dict_2 = {"a": "b", "c": "d"} self.assertFalse(equal(dict_1, dict_2)) def test_same_item(self): dict_1 = {"a": "b", "c": "d"} self.assertTrue(equal(dict_1, dict_1)) def test_nested_equal(self): dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} dict_2 = {"c": "d", "a": {"a": "b", "c": "d"}} self.assertTrue(equal(dict_1, dict_2)) def test_nested_dict_unequal(self): dict_1 = {"a": {"a": "b", "c": "d"}, "c": "d"} dict_2 = {"c": "d", "a": {"a": "b", "c": "x"}} self.assertFalse(equal(dict_1, dict_2)) def test_mixed_nested_equal(self): dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} dict_2 = {"c": "d", "a": ["a", "b", "c", "d"]} self.assertTrue(equal(dict_1, dict_2)) def test_nested_list_unequal(self): dict_1 = {"a": ["a", "b", "c", "d"], "c": "d"} dict_2 = {"c": "d", "a": ["b", "c", "d", "a"]} self.assertFalse(equal(dict_1, dict_2)) class TestListEqual(TestCase): def test_equal_lists(self): list_1 = ["a", "b", "c"] list_2 = ["a", "b", "c"] self.assertTrue(equal(list_1, list_2)) def test_equal_lists_with_nan(self): list_1 = ["a", nan, "c"] list_2 = ["a", nan, "c"] self.assertTrue(equal(list_1, list_2)) def test_unsorted_lists(self): list_1 = ["a", "b", "c"] list_2 = ["b", "b", "a"] self.assertFalse(equal(list_1, list_2)) def test_first_list_larger(self): list_1 = ["a", "b", "c"] list_2 = ["a", "b"] self.assertFalse(equal(list_1, list_2)) def test_second_list_larger(self): list_1 = ["a", "b"] list_2 = ["a", "b", "c"] self.assertFalse(equal(list_1, list_2)) def test_list_with_none_unequal(self): list_1 = ["a", "b", None] list_2 = ["a", "b", "c"] self.assertFalse(equal(list_1, list_2)) list_1 = ["a", "b", None] list_2 = [None, "b", "c"] self.assertFalse(equal(list_1, list_2)) def test_list_with_none_equal(self): list_1 = ["a", None, "c"] list_2 = ["a", None, "c"] self.assertTrue(equal(list_1, list_2)) def test_empty_list(self): list_1 = [] list_2 = [] self.assertTrue(equal(list_1, list_2)) def test_one_none(self): list_1 = None list_2 = [] self.assertFalse(equal(list_1, list_2)) def test_same_list(self): list_1 = ["a", "b", "c"] self.assertTrue(equal(list_1, list_1)) def test_equal_nested_lists(self): list_1 = ["a", 
["b", "c"], "d"] list_2 = ["a", ["b", "c"], "d"] self.assertTrue(equal(list_1, list_2)) def test_unequal_nested_lists(self): list_1 = ["a", ["b", "c"], "d"] list_2 = ["a", [], "c"] self.assertFalse(equal(list_1, list_2)) PK!$RGWWtests/test_validators.pynu[from __future__ import annotations from collections import deque, namedtuple from contextlib import contextmanager from decimal import Decimal from io import BytesIO from typing import Any from unittest import TestCase, mock from urllib.request import pathname2url import json import os import sys import tempfile import warnings from attrs import define, field from referencing.jsonschema import DRAFT202012 import referencing.exceptions from jsonschema import ( FormatChecker, TypeChecker, exceptions, protocols, validators, ) def fail(validator, errors, instance, schema): for each in errors: each.setdefault("message", "You told me to fail!") yield exceptions.ValidationError(**each) class TestCreateAndExtend(TestCase): def setUp(self): self.addCleanup( self.assertEqual, validators._META_SCHEMAS, dict(validators._META_SCHEMAS), ) self.addCleanup( self.assertEqual, validators._VALIDATORS, dict(validators._VALIDATORS), ) self.meta_schema = {"$id": "some://meta/schema"} self.validators = {"fail": fail} self.type_checker = TypeChecker() self.Validator = validators.create( meta_schema=self.meta_schema, validators=self.validators, type_checker=self.type_checker, ) def test_attrs(self): self.assertEqual( ( self.Validator.VALIDATORS, self.Validator.META_SCHEMA, self.Validator.TYPE_CHECKER, ), ( self.validators, self.meta_schema, self.type_checker, ), ) def test_init(self): schema = {"fail": []} self.assertEqual(self.Validator(schema).schema, schema) def test_iter_errors_successful(self): schema = {"fail": []} validator = self.Validator(schema) errors = list(validator.iter_errors("hello")) self.assertEqual(errors, []) def test_iter_errors_one_error(self): schema = {"fail": [{"message": "Whoops!"}]} validator = self.Validator(schema) expected_error = exceptions.ValidationError( "Whoops!", instance="goodbye", schema=schema, validator="fail", validator_value=[{"message": "Whoops!"}], schema_path=deque(["fail"]), ) errors = list(validator.iter_errors("goodbye")) self.assertEqual(len(errors), 1) self.assertEqual(errors[0]._contents(), expected_error._contents()) def test_iter_errors_multiple_errors(self): schema = { "fail": [ {"message": "First"}, {"message": "Second!", "validator": "asdf"}, {"message": "Third"}, ], } validator = self.Validator(schema) errors = list(validator.iter_errors("goodbye")) self.assertEqual(len(errors), 3) def test_if_a_version_is_provided_it_is_registered(self): Validator = validators.create( meta_schema={"$id": "something"}, version="my version", ) self.addCleanup(validators._META_SCHEMAS.pop, "something") self.addCleanup(validators._VALIDATORS.pop, "my version") self.assertEqual(Validator.__name__, "MyVersionValidator") self.assertEqual(Validator.__qualname__, "MyVersionValidator") def test_repr(self): Validator = validators.create( meta_schema={"$id": "something"}, version="my version", ) self.addCleanup(validators._META_SCHEMAS.pop, "something") self.addCleanup(validators._VALIDATORS.pop, "my version") self.assertEqual( repr(Validator({})), "MyVersionValidator(schema={}, format_checker=None)", ) def test_long_repr(self): Validator = validators.create( meta_schema={"$id": "something"}, version="my version", ) self.addCleanup(validators._META_SCHEMAS.pop, "something") self.addCleanup(validators._VALIDATORS.pop, "my version") 
self.assertEqual( repr(Validator({"a": list(range(1000))})), ( "MyVersionValidator(schema={'a': [0, 1, 2, 3, 4, 5, ...]}, " "format_checker=None)" ), ) def test_repr_no_version(self): Validator = validators.create(meta_schema={}) self.assertEqual( repr(Validator({})), "Validator(schema={}, format_checker=None)", ) def test_dashes_are_stripped_from_validator_names(self): Validator = validators.create( meta_schema={"$id": "something"}, version="foo-bar", ) self.addCleanup(validators._META_SCHEMAS.pop, "something") self.addCleanup(validators._VALIDATORS.pop, "foo-bar") self.assertEqual(Validator.__qualname__, "FooBarValidator") def test_if_a_version_is_not_provided_it_is_not_registered(self): original = dict(validators._META_SCHEMAS) validators.create(meta_schema={"id": "id"}) self.assertEqual(validators._META_SCHEMAS, original) def test_validates_registers_meta_schema_id(self): meta_schema_key = "meta schema id" my_meta_schema = {"id": meta_schema_key} validators.create( meta_schema=my_meta_schema, version="my version", id_of=lambda s: s.get("id", ""), ) self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) self.addCleanup(validators._VALIDATORS.pop, "my version") self.assertIn(meta_schema_key, validators._META_SCHEMAS) def test_validates_registers_meta_schema_draft6_id(self): meta_schema_key = "meta schema $id" my_meta_schema = {"$id": meta_schema_key} validators.create( meta_schema=my_meta_schema, version="my version", ) self.addCleanup(validators._META_SCHEMAS.pop, meta_schema_key) self.addCleanup(validators._VALIDATORS.pop, "my version") self.assertIn(meta_schema_key, validators._META_SCHEMAS) def test_create_default_types(self): Validator = validators.create(meta_schema={}, validators=()) self.assertTrue( all( Validator({}).is_type(instance=instance, type=type) for type, instance in [ ("array", []), ("boolean", True), ("integer", 12), ("null", None), ("number", 12.0), ("object", {}), ("string", "foo"), ] ), ) def test_check_schema_with_different_metaschema(self): """ One can create a validator class whose metaschema uses a different dialect than itself. """ NoEmptySchemasValidator = validators.create( meta_schema={ "$schema": validators.Draft202012Validator.META_SCHEMA["$id"], "not": {"const": {}}, }, ) NoEmptySchemasValidator.check_schema({"foo": "bar"}) with self.assertRaises(exceptions.SchemaError): NoEmptySchemasValidator.check_schema({}) NoEmptySchemasValidator({"foo": "bar"}).validate("foo") def test_check_schema_with_different_metaschema_defaults_to_self(self): """ A validator whose metaschema doesn't declare $schema defaults to its own validation behavior, not the latest "normal" specification. """ NoEmptySchemasValidator = validators.create( meta_schema={"fail": [{"message": "Meta schema whoops!"}]}, validators={"fail": fail}, ) with self.assertRaises(exceptions.SchemaError): NoEmptySchemasValidator.check_schema({}) def test_extend(self): original = dict(self.Validator.VALIDATORS) new = object() Extended = validators.extend( self.Validator, validators={"new": new}, ) self.assertEqual( ( Extended.VALIDATORS, Extended.META_SCHEMA, Extended.TYPE_CHECKER, self.Validator.VALIDATORS, ), ( dict(original, new=new), self.Validator.META_SCHEMA, self.Validator.TYPE_CHECKER, original, ), ) def test_extend_idof(self): """ Extending a validator preserves its notion of schema IDs. 
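
        Roughly, with illustrative names that are not part of this suite:

            def by_test_key(schema):
                return schema.get("__test__", "")

            Original = validators.create(meta_schema={}, id_of=by_test_key)
            Derived = validators.extend(Original)
            Derived.ID_OF({"__test__": "urn:example"})  # -> "urn:example"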
""" def id_of(schema): return schema.get("__test__", self.Validator.ID_OF(schema)) correct_id = "the://correct/id/" meta_schema = { "$id": "the://wrong/id/", "__test__": correct_id, } Original = validators.create( meta_schema=meta_schema, validators=self.validators, type_checker=self.type_checker, id_of=id_of, ) self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id) Derived = validators.extend(Original) self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id) def test_extend_applicable_validators(self): """ Extending a validator preserves its notion of applicable validators. """ schema = { "$defs": {"test": {"type": "number"}}, "$ref": "#/$defs/test", "maximum": 1, } draft4 = validators.Draft4Validator(schema) self.assertTrue(draft4.is_valid(37)) # as $ref ignores siblings Derived = validators.extend(validators.Draft4Validator) self.assertTrue(Derived(schema).is_valid(37)) class TestValidationErrorMessages(TestCase): def message_for(self, instance, schema, *args, **kwargs): cls = kwargs.pop("cls", validators._LATEST_VERSION) cls.check_schema(schema) validator = cls(schema, *args, **kwargs) errors = list(validator.iter_errors(instance)) self.assertTrue(errors, msg=f"No errors were raised for {instance!r}") self.assertEqual( len(errors), 1, msg=f"Expected exactly one error, found {errors!r}", ) return errors[0].message def test_single_type_failure(self): message = self.message_for(instance=1, schema={"type": "string"}) self.assertEqual(message, "1 is not of type 'string'") def test_single_type_list_failure(self): message = self.message_for(instance=1, schema={"type": ["string"]}) self.assertEqual(message, "1 is not of type 'string'") def test_multiple_type_failure(self): types = "string", "object" message = self.message_for(instance=1, schema={"type": list(types)}) self.assertEqual(message, "1 is not of type 'string', 'object'") def test_object_with_named_type_failure(self): schema = {"type": [{"name": "Foo", "minimum": 3}]} message = self.message_for( instance=1, schema=schema, cls=validators.Draft3Validator, ) self.assertEqual(message, "1 is not of type 'Foo'") def test_minimum(self): message = self.message_for(instance=1, schema={"minimum": 2}) self.assertEqual(message, "1 is less than the minimum of 2") def test_maximum(self): message = self.message_for(instance=1, schema={"maximum": 0}) self.assertEqual(message, "1 is greater than the maximum of 0") def test_dependencies_single_element(self): depend, on = "bar", "foo" schema = {"dependencies": {depend: on}} message = self.message_for( instance={"bar": 2}, schema=schema, cls=validators.Draft3Validator, ) self.assertEqual(message, "'foo' is a dependency of 'bar'") def test_object_without_title_type_failure_draft3(self): type = {"type": [{"minimum": 3}]} message = self.message_for( instance=1, schema={"type": [type]}, cls=validators.Draft3Validator, ) self.assertEqual( message, "1 is not of type {'type': [{'minimum': 3}]}", ) def test_dependencies_list_draft3(self): depend, on = "bar", "foo" schema = {"dependencies": {depend: [on]}} message = self.message_for( instance={"bar": 2}, schema=schema, cls=validators.Draft3Validator, ) self.assertEqual(message, "'foo' is a dependency of 'bar'") def test_dependencies_list_draft7(self): depend, on = "bar", "foo" schema = {"dependencies": {depend: [on]}} message = self.message_for( instance={"bar": 2}, schema=schema, cls=validators.Draft7Validator, ) self.assertEqual(message, "'foo' is a dependency of 'bar'") def test_additionalItems_single_failure(self): message = 
self.message_for( instance=[2], schema={"items": [], "additionalItems": False}, cls=validators.Draft3Validator, ) self.assertIn("(2 was unexpected)", message) def test_additionalItems_multiple_failures(self): message = self.message_for( instance=[1, 2, 3], schema={"items": [], "additionalItems": False}, cls=validators.Draft3Validator, ) self.assertIn("(1, 2, 3 were unexpected)", message) def test_additionalProperties_single_failure(self): additional = "foo" schema = {"additionalProperties": False} message = self.message_for(instance={additional: 2}, schema=schema) self.assertIn("('foo' was unexpected)", message) def test_additionalProperties_multiple_failures(self): schema = {"additionalProperties": False} message = self.message_for( instance=dict.fromkeys(["foo", "bar"]), schema=schema, ) self.assertIn(repr("foo"), message) self.assertIn(repr("bar"), message) self.assertIn("were unexpected)", message) def test_const(self): schema = {"const": 12} message = self.message_for( instance={"foo": "bar"}, schema=schema, ) self.assertIn("12 was expected", message) def test_contains_draft_6(self): schema = {"contains": {"const": 12}} message = self.message_for( instance=[2, {}, []], schema=schema, cls=validators.Draft6Validator, ) self.assertEqual( message, "None of [2, {}, []] are valid under the given schema", ) def test_invalid_format_default_message(self): checker = FormatChecker(formats=()) checker.checks("thing")(lambda value: False) schema = {"format": "thing"} message = self.message_for( instance="bla", schema=schema, format_checker=checker, ) self.assertIn(repr("bla"), message) self.assertIn(repr("thing"), message) self.assertIn("is not a", message) def test_additionalProperties_false_patternProperties(self): schema = {"type": "object", "additionalProperties": False, "patternProperties": { "^abc$": {"type": "string"}, "^def$": {"type": "string"}, }} message = self.message_for( instance={"zebra": 123}, schema=schema, cls=validators.Draft4Validator, ) self.assertEqual( message, "{} does not match any of the regexes: {}, {}".format( repr("zebra"), repr("^abc$"), repr("^def$"), ), ) message = self.message_for( instance={"zebra": 123, "fish": 456}, schema=schema, cls=validators.Draft4Validator, ) self.assertEqual( message, "{}, {} do not match any of the regexes: {}, {}".format( repr("fish"), repr("zebra"), repr("^abc$"), repr("^def$"), ), ) def test_False_schema(self): message = self.message_for( instance="something", schema=False, ) self.assertEqual(message, "False schema does not allow 'something'") def test_multipleOf(self): message = self.message_for( instance=3, schema={"multipleOf": 2}, ) self.assertEqual(message, "3 is not a multiple of 2") def test_minItems(self): message = self.message_for(instance=[], schema={"minItems": 2}) self.assertEqual(message, "[] is too short") def test_maxItems(self): message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 2}) self.assertEqual(message, "[1, 2, 3] is too long") def test_minItems_1(self): message = self.message_for(instance=[], schema={"minItems": 1}) self.assertEqual(message, "[] should be non-empty") def test_maxItems_0(self): message = self.message_for(instance=[1, 2, 3], schema={"maxItems": 0}) self.assertEqual(message, "[1, 2, 3] is expected to be empty") def test_minLength(self): message = self.message_for( instance="", schema={"minLength": 2}, ) self.assertEqual(message, "'' is too short") def test_maxLength(self): message = self.message_for( instance="abc", schema={"maxLength": 2}, ) self.assertEqual(message, "'abc' is too 
long") def test_minLength_1(self): message = self.message_for(instance="", schema={"minLength": 1}) self.assertEqual(message, "'' should be non-empty") def test_maxLength_0(self): message = self.message_for(instance="abc", schema={"maxLength": 0}) self.assertEqual(message, "'abc' is expected to be empty") def test_minProperties(self): message = self.message_for(instance={}, schema={"minProperties": 2}) self.assertEqual(message, "{} does not have enough properties") def test_maxProperties(self): message = self.message_for( instance={"a": {}, "b": {}, "c": {}}, schema={"maxProperties": 2}, ) self.assertEqual( message, "{'a': {}, 'b': {}, 'c': {}} has too many properties", ) def test_minProperties_1(self): message = self.message_for(instance={}, schema={"minProperties": 1}) self.assertEqual(message, "{} should be non-empty") def test_maxProperties_0(self): message = self.message_for( instance={1: 2}, schema={"maxProperties": 0}, ) self.assertEqual(message, "{1: 2} is expected to be empty") def test_prefixItems_with_items(self): message = self.message_for( instance=[1, 2, "foo"], schema={"items": False, "prefixItems": [{}, {}]}, ) self.assertEqual( message, "Expected at most 2 items but found 1 extra: 'foo'", ) def test_prefixItems_with_multiple_extra_items(self): message = self.message_for( instance=[1, 2, "foo", 5], schema={"items": False, "prefixItems": [{}, {}]}, ) self.assertEqual( message, "Expected at most 2 items but found 2 extra: ['foo', 5]", ) def test_pattern(self): message = self.message_for( instance="bbb", schema={"pattern": "^a*$"}, ) self.assertEqual(message, "'bbb' does not match '^a*$'") def test_does_not_contain(self): message = self.message_for( instance=[], schema={"contains": {"type": "string"}}, ) self.assertEqual( message, "[] does not contain items matching the given schema", ) def test_contains_too_few(self): message = self.message_for( instance=["foo", 1], schema={"contains": {"type": "string"}, "minContains": 2}, ) self.assertEqual( message, "Too few items match the given schema " "(expected at least 2 but only 1 matched)", ) def test_contains_too_few_both_constrained(self): message = self.message_for( instance=["foo", 1], schema={ "contains": {"type": "string"}, "minContains": 2, "maxContains": 4, }, ) self.assertEqual( message, "Too few items match the given schema (expected at least 2 but " "only 1 matched)", ) def test_contains_too_many(self): message = self.message_for( instance=["foo", "bar", "baz"], schema={"contains": {"type": "string"}, "maxContains": 2}, ) self.assertEqual( message, "Too many items match the given schema (expected at most 2)", ) def test_contains_too_many_both_constrained(self): message = self.message_for( instance=["foo"] * 5, schema={ "contains": {"type": "string"}, "minContains": 2, "maxContains": 4, }, ) self.assertEqual( message, "Too many items match the given schema (expected at most 4)", ) def test_exclusiveMinimum(self): message = self.message_for( instance=3, schema={"exclusiveMinimum": 5}, ) self.assertEqual( message, "3 is less than or equal to the minimum of 5", ) def test_exclusiveMaximum(self): message = self.message_for(instance=3, schema={"exclusiveMaximum": 2}) self.assertEqual( message, "3 is greater than or equal to the maximum of 2", ) def test_required(self): message = self.message_for(instance={}, schema={"required": ["foo"]}) self.assertEqual(message, "'foo' is a required property") def test_dependentRequired(self): message = self.message_for( instance={"foo": {}}, schema={"dependentRequired": {"foo": ["bar"]}}, ) 
self.assertEqual(message, "'bar' is a dependency of 'foo'") def test_oneOf_matches_none(self): message = self.message_for(instance={}, schema={"oneOf": [False]}) self.assertEqual( message, "{} is not valid under any of the given schemas", ) def test_oneOf_matches_too_many(self): message = self.message_for(instance={}, schema={"oneOf": [True, True]}) self.assertEqual(message, "{} is valid under each of True, True") def test_unevaluated_items(self): schema = {"type": "array", "unevaluatedItems": False} message = self.message_for(instance=["foo", "bar"], schema=schema) self.assertIn( message, "Unevaluated items are not allowed ('foo', 'bar' were unexpected)", ) def test_unevaluated_items_on_invalid_type(self): schema = {"type": "array", "unevaluatedItems": False} message = self.message_for(instance="foo", schema=schema) self.assertEqual(message, "'foo' is not of type 'array'") def test_unevaluated_properties_invalid_against_subschema(self): schema = { "properties": {"foo": {"type": "string"}}, "unevaluatedProperties": {"const": 12}, } message = self.message_for( instance={ "foo": "foo", "bar": "bar", "baz": 12, }, schema=schema, ) self.assertEqual( message, "Unevaluated properties are not valid under the given schema " "('bar' was unevaluated and invalid)", ) def test_unevaluated_properties_disallowed(self): schema = {"type": "object", "unevaluatedProperties": False} message = self.message_for( instance={ "foo": "foo", "bar": "bar", }, schema=schema, ) self.assertEqual( message, "Unevaluated properties are not allowed " "('bar', 'foo' were unexpected)", ) def test_unevaluated_properties_on_invalid_type(self): schema = {"type": "object", "unevaluatedProperties": False} message = self.message_for(instance="foo", schema=schema) self.assertEqual(message, "'foo' is not of type 'object'") def test_single_item(self): schema = {"prefixItems": [{}], "items": False} message = self.message_for( instance=["foo", "bar", "baz"], schema=schema, ) self.assertEqual( message, "Expected at most 1 item but found 2 extra: ['bar', 'baz']", ) def test_heterogeneous_additionalItems_with_Items(self): schema = {"items": [{}], "additionalItems": False} message = self.message_for( instance=["foo", "bar", 37], schema=schema, cls=validators.Draft7Validator, ) self.assertEqual( message, "Additional items are not allowed ('bar', 37 were unexpected)", ) def test_heterogeneous_items_prefixItems(self): schema = {"prefixItems": [{}], "items": False} message = self.message_for( instance=["foo", "bar", 37], schema=schema, ) self.assertEqual( message, "Expected at most 1 item but found 2 extra: ['bar', 37]", ) def test_heterogeneous_unevaluatedItems_prefixItems(self): schema = {"prefixItems": [{}], "unevaluatedItems": False} message = self.message_for( instance=["foo", "bar", 37], schema=schema, ) self.assertEqual( message, "Unevaluated items are not allowed ('bar', 37 were unexpected)", ) def test_heterogeneous_properties_additionalProperties(self): """ Not valid deserialized JSON, but this should not blow up. """ schema = {"properties": {"foo": {}}, "additionalProperties": False} message = self.message_for( instance={"foo": {}, "a": "baz", 37: 12}, schema=schema, ) self.assertEqual( message, "Additional properties are not allowed (37, 'a' were unexpected)", ) def test_heterogeneous_properties_unevaluatedProperties(self): """ Not valid deserialized JSON, but this should not blow up. 
""" schema = {"properties": {"foo": {}}, "unevaluatedProperties": False} message = self.message_for( instance={"foo": {}, "a": "baz", 37: 12}, schema=schema, ) self.assertEqual( message, "Unevaluated properties are not allowed (37, 'a' were unexpected)", ) class TestValidationErrorDetails(TestCase): # TODO: These really need unit tests for each individual keyword, rather # than just these higher level tests. def test_anyOf(self): instance = 5 schema = { "anyOf": [ {"minimum": 20}, {"type": "string"}, ], } validator = validators.Draft4Validator(schema) errors = list(validator.iter_errors(instance)) self.assertEqual(len(errors), 1) e = errors[0] self.assertEqual(e.validator, "anyOf") self.assertEqual(e.validator_value, schema["anyOf"]) self.assertEqual(e.instance, instance) self.assertEqual(e.schema, schema) self.assertIsNone(e.parent) self.assertEqual(e.path, deque([])) self.assertEqual(e.relative_path, deque([])) self.assertEqual(e.absolute_path, deque([])) self.assertEqual(e.json_path, "$") self.assertEqual(e.schema_path, deque(["anyOf"])) self.assertEqual(e.relative_schema_path, deque(["anyOf"])) self.assertEqual(e.absolute_schema_path, deque(["anyOf"])) self.assertEqual(len(e.context), 2) e1, e2 = sorted_errors(e.context) self.assertEqual(e1.validator, "minimum") self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"]) self.assertEqual(e1.instance, instance) self.assertEqual(e1.schema, schema["anyOf"][0]) self.assertIs(e1.parent, e) self.assertEqual(e1.path, deque([])) self.assertEqual(e1.absolute_path, deque([])) self.assertEqual(e1.relative_path, deque([])) self.assertEqual(e1.json_path, "$") self.assertEqual(e1.schema_path, deque([0, "minimum"])) self.assertEqual(e1.relative_schema_path, deque([0, "minimum"])) self.assertEqual( e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]), ) self.assertFalse(e1.context) self.assertEqual(e2.validator, "type") self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"]) self.assertEqual(e2.instance, instance) self.assertEqual(e2.schema, schema["anyOf"][1]) self.assertIs(e2.parent, e) self.assertEqual(e2.path, deque([])) self.assertEqual(e2.relative_path, deque([])) self.assertEqual(e2.absolute_path, deque([])) self.assertEqual(e2.json_path, "$") self.assertEqual(e2.schema_path, deque([1, "type"])) self.assertEqual(e2.relative_schema_path, deque([1, "type"])) self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"])) self.assertEqual(len(e2.context), 0) def test_type(self): instance = {"foo": 1} schema = { "type": [ {"type": "integer"}, { "type": "object", "properties": {"foo": {"enum": [2]}}, }, ], } validator = validators.Draft3Validator(schema) errors = list(validator.iter_errors(instance)) self.assertEqual(len(errors), 1) e = errors[0] self.assertEqual(e.validator, "type") self.assertEqual(e.validator_value, schema["type"]) self.assertEqual(e.instance, instance) self.assertEqual(e.schema, schema) self.assertIsNone(e.parent) self.assertEqual(e.path, deque([])) self.assertEqual(e.relative_path, deque([])) self.assertEqual(e.absolute_path, deque([])) self.assertEqual(e.json_path, "$") self.assertEqual(e.schema_path, deque(["type"])) self.assertEqual(e.relative_schema_path, deque(["type"])) self.assertEqual(e.absolute_schema_path, deque(["type"])) self.assertEqual(len(e.context), 2) e1, e2 = sorted_errors(e.context) self.assertEqual(e1.validator, "type") self.assertEqual(e1.validator_value, schema["type"][0]["type"]) self.assertEqual(e1.instance, instance) self.assertEqual(e1.schema, schema["type"][0]) 
self.assertIs(e1.parent, e) self.assertEqual(e1.path, deque([])) self.assertEqual(e1.relative_path, deque([])) self.assertEqual(e1.absolute_path, deque([])) self.assertEqual(e1.json_path, "$") self.assertEqual(e1.schema_path, deque([0, "type"])) self.assertEqual(e1.relative_schema_path, deque([0, "type"])) self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"])) self.assertFalse(e1.context) self.assertEqual(e2.validator, "enum") self.assertEqual(e2.validator_value, [2]) self.assertEqual(e2.instance, 1) self.assertEqual(e2.schema, {"enum": [2]}) self.assertIs(e2.parent, e) self.assertEqual(e2.path, deque(["foo"])) self.assertEqual(e2.relative_path, deque(["foo"])) self.assertEqual(e2.absolute_path, deque(["foo"])) self.assertEqual(e2.json_path, "$.foo") self.assertEqual( e2.schema_path, deque([1, "properties", "foo", "enum"]), ) self.assertEqual( e2.relative_schema_path, deque([1, "properties", "foo", "enum"]), ) self.assertEqual( e2.absolute_schema_path, deque(["type", 1, "properties", "foo", "enum"]), ) self.assertFalse(e2.context) def test_single_nesting(self): instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"} schema = { "properties": { "foo": {"type": "string"}, "bar": {"minItems": 2}, "baz": {"maximum": 10, "enum": [2, 4, 6, 8]}, }, } validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2, e3, e4 = sorted_errors(errors) self.assertEqual(e1.path, deque(["bar"])) self.assertEqual(e2.path, deque(["baz"])) self.assertEqual(e3.path, deque(["baz"])) self.assertEqual(e4.path, deque(["foo"])) self.assertEqual(e1.relative_path, deque(["bar"])) self.assertEqual(e2.relative_path, deque(["baz"])) self.assertEqual(e3.relative_path, deque(["baz"])) self.assertEqual(e4.relative_path, deque(["foo"])) self.assertEqual(e1.absolute_path, deque(["bar"])) self.assertEqual(e2.absolute_path, deque(["baz"])) self.assertEqual(e3.absolute_path, deque(["baz"])) self.assertEqual(e4.absolute_path, deque(["foo"])) self.assertEqual(e1.json_path, "$.bar") self.assertEqual(e2.json_path, "$.baz") self.assertEqual(e3.json_path, "$.baz") self.assertEqual(e4.json_path, "$.foo") self.assertEqual(e1.validator, "minItems") self.assertEqual(e2.validator, "enum") self.assertEqual(e3.validator, "maximum") self.assertEqual(e4.validator, "type") def test_multiple_nesting(self): instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"] schema = { "type": "string", "items": { "type": ["string", "object"], "properties": { "foo": {"enum": [1, 3]}, "bar": { "type": "array", "properties": { "bar": {"required": True}, "baz": {"minItems": 2}, }, }, }, }, } validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2, e3, e4, e5, e6 = sorted_errors(errors) self.assertEqual(e1.path, deque([])) self.assertEqual(e2.path, deque([0])) self.assertEqual(e3.path, deque([1, "bar"])) self.assertEqual(e4.path, deque([1, "bar", "bar"])) self.assertEqual(e5.path, deque([1, "bar", "baz"])) self.assertEqual(e6.path, deque([1, "foo"])) self.assertEqual(e1.json_path, "$") self.assertEqual(e2.json_path, "$[0]") self.assertEqual(e3.json_path, "$[1].bar") self.assertEqual(e4.json_path, "$[1].bar.bar") self.assertEqual(e5.json_path, "$[1].bar.baz") self.assertEqual(e6.json_path, "$[1].foo") self.assertEqual(e1.schema_path, deque(["type"])) self.assertEqual(e2.schema_path, deque(["items", "type"])) self.assertEqual( list(e3.schema_path), ["items", "properties", "bar", "type"], ) self.assertEqual( list(e4.schema_path), ["items", "properties", "bar", "properties", 
"bar", "required"], ) self.assertEqual( list(e5.schema_path), ["items", "properties", "bar", "properties", "baz", "minItems"], ) self.assertEqual( list(e6.schema_path), ["items", "properties", "foo", "enum"], ) self.assertEqual(e1.validator, "type") self.assertEqual(e2.validator, "type") self.assertEqual(e3.validator, "type") self.assertEqual(e4.validator, "required") self.assertEqual(e5.validator, "minItems") self.assertEqual(e6.validator, "enum") def test_recursive(self): schema = { "definitions": { "node": { "anyOf": [{ "type": "object", "required": ["name", "children"], "properties": { "name": { "type": "string", }, "children": { "type": "object", "patternProperties": { "^.*$": { "$ref": "#/definitions/node", }, }, }, }, }], }, }, "type": "object", "required": ["root"], "properties": {"root": {"$ref": "#/definitions/node"}}, } instance = { "root": { "name": "root", "children": { "a": { "name": "a", "children": { "ab": { "name": "ab", # missing "children" }, }, }, }, }, } validator = validators.Draft4Validator(schema) e, = validator.iter_errors(instance) self.assertEqual(e.absolute_path, deque(["root"])) self.assertEqual( e.absolute_schema_path, deque(["properties", "root", "anyOf"]), ) self.assertEqual(e.json_path, "$.root") e1, = e.context self.assertEqual(e1.absolute_path, deque(["root", "children", "a"])) self.assertEqual( e1.absolute_schema_path, deque( [ "properties", "root", "anyOf", 0, "properties", "children", "patternProperties", "^.*$", "anyOf", ], ), ) self.assertEqual(e1.json_path, "$.root.children.a") e2, = e1.context self.assertEqual( e2.absolute_path, deque( ["root", "children", "a", "children", "ab"], ), ) self.assertEqual( e2.absolute_schema_path, deque( [ "properties", "root", "anyOf", 0, "properties", "children", "patternProperties", "^.*$", "anyOf", 0, "properties", "children", "patternProperties", "^.*$", "anyOf", ], ), ) self.assertEqual(e2.json_path, "$.root.children.a.children.ab") def test_additionalProperties(self): instance = {"bar": "bar", "foo": 2} schema = {"additionalProperties": {"type": "integer", "minimum": 5}} validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2 = sorted_errors(errors) self.assertEqual(e1.path, deque(["bar"])) self.assertEqual(e2.path, deque(["foo"])) self.assertEqual(e1.json_path, "$.bar") self.assertEqual(e2.json_path, "$.foo") self.assertEqual(e1.validator, "type") self.assertEqual(e2.validator, "minimum") def test_patternProperties(self): instance = {"bar": 1, "foo": 2} schema = { "patternProperties": { "bar": {"type": "string"}, "foo": {"minimum": 5}, }, } validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2 = sorted_errors(errors) self.assertEqual(e1.path, deque(["bar"])) self.assertEqual(e2.path, deque(["foo"])) self.assertEqual(e1.json_path, "$.bar") self.assertEqual(e2.json_path, "$.foo") self.assertEqual(e1.validator, "type") self.assertEqual(e2.validator, "minimum") def test_additionalItems(self): instance = ["foo", 1] schema = { "items": [], "additionalItems": {"type": "integer", "minimum": 5}, } validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2 = sorted_errors(errors) self.assertEqual(e1.path, deque([0])) self.assertEqual(e2.path, deque([1])) self.assertEqual(e1.json_path, "$[0]") self.assertEqual(e2.json_path, "$[1]") self.assertEqual(e1.validator, "type") self.assertEqual(e2.validator, "minimum") def test_additionalItems_with_items(self): instance = ["foo", "bar", 1] schema = { "items": [{}], 
"additionalItems": {"type": "integer", "minimum": 5}, } validator = validators.Draft3Validator(schema) errors = validator.iter_errors(instance) e1, e2 = sorted_errors(errors) self.assertEqual(e1.path, deque([1])) self.assertEqual(e2.path, deque([2])) self.assertEqual(e1.json_path, "$[1]") self.assertEqual(e2.json_path, "$[2]") self.assertEqual(e1.validator, "type") self.assertEqual(e2.validator, "minimum") def test_propertyNames(self): instance = {"foo": 12} schema = {"propertyNames": {"not": {"const": "foo"}}} validator = validators.Draft7Validator(schema) error, = validator.iter_errors(instance) self.assertEqual(error.validator, "not") self.assertEqual( error.message, "'foo' should not be valid under {'const': 'foo'}", ) self.assertEqual(error.path, deque([])) self.assertEqual(error.json_path, "$") self.assertEqual(error.schema_path, deque(["propertyNames", "not"])) def test_if_then(self): schema = { "if": {"const": 12}, "then": {"const": 13}, } validator = validators.Draft7Validator(schema) error, = validator.iter_errors(12) self.assertEqual(error.validator, "const") self.assertEqual(error.message, "13 was expected") self.assertEqual(error.path, deque([])) self.assertEqual(error.json_path, "$") self.assertEqual(error.schema_path, deque(["then", "const"])) def test_if_else(self): schema = { "if": {"const": 12}, "else": {"const": 13}, } validator = validators.Draft7Validator(schema) error, = validator.iter_errors(15) self.assertEqual(error.validator, "const") self.assertEqual(error.message, "13 was expected") self.assertEqual(error.path, deque([])) self.assertEqual(error.json_path, "$") self.assertEqual(error.schema_path, deque(["else", "const"])) def test_boolean_schema_False(self): validator = validators.Draft7Validator(False) error, = validator.iter_errors(12) self.assertEqual( ( error.message, error.validator, error.validator_value, error.instance, error.schema, error.schema_path, error.json_path, ), ( "False schema does not allow 12", None, None, 12, False, deque([]), "$", ), ) def test_ref(self): ref, schema = "someRef", {"additionalProperties": {"type": "integer"}} validator = validators.Draft7Validator( {"$ref": ref}, resolver=validators._RefResolver("", {}, store={ref: schema}), ) error, = validator.iter_errors({"foo": "notAnInteger"}) self.assertEqual( ( error.message, error.validator, error.validator_value, error.instance, error.absolute_path, error.schema, error.schema_path, error.json_path, ), ( "'notAnInteger' is not of type 'integer'", "type", "integer", "notAnInteger", deque(["foo"]), {"type": "integer"}, deque(["additionalProperties", "type"]), "$.foo", ), ) def test_prefixItems(self): schema = {"prefixItems": [{"type": "string"}, {}, {}, {"maximum": 3}]} validator = validators.Draft202012Validator(schema) type_error, min_error = validator.iter_errors([1, 2, "foo", 5]) self.assertEqual( ( type_error.message, type_error.validator, type_error.validator_value, type_error.instance, type_error.absolute_path, type_error.schema, type_error.schema_path, type_error.json_path, ), ( "1 is not of type 'string'", "type", "string", 1, deque([0]), {"type": "string"}, deque(["prefixItems", 0, "type"]), "$[0]", ), ) self.assertEqual( ( min_error.message, min_error.validator, min_error.validator_value, min_error.instance, min_error.absolute_path, min_error.schema, min_error.schema_path, min_error.json_path, ), ( "5 is greater than the maximum of 3", "maximum", 3, 5, deque([3]), {"maximum": 3}, deque(["prefixItems", 3, "maximum"]), "$[3]", ), ) def test_prefixItems_with_items(self): schema 
= { "items": {"type": "string"}, "prefixItems": [{}], } validator = validators.Draft202012Validator(schema) e1, e2 = validator.iter_errors(["foo", 2, "bar", 4, "baz"]) self.assertEqual( ( e1.message, e1.validator, e1.validator_value, e1.instance, e1.absolute_path, e1.schema, e1.schema_path, e1.json_path, ), ( "2 is not of type 'string'", "type", "string", 2, deque([1]), {"type": "string"}, deque(["items", "type"]), "$[1]", ), ) self.assertEqual( ( e2.message, e2.validator, e2.validator_value, e2.instance, e2.absolute_path, e2.schema, e2.schema_path, e2.json_path, ), ( "4 is not of type 'string'", "type", "string", 4, deque([3]), {"type": "string"}, deque(["items", "type"]), "$[3]", ), ) def test_contains_too_many(self): """ `contains` + `maxContains` produces only one error, even if there are many more incorrectly matching elements. """ schema = {"contains": {"type": "string"}, "maxContains": 2} validator = validators.Draft202012Validator(schema) error, = validator.iter_errors(["foo", 2, "bar", 4, "baz", "quux"]) self.assertEqual( ( error.message, error.validator, error.validator_value, error.instance, error.absolute_path, error.schema, error.schema_path, error.json_path, ), ( "Too many items match the given schema (expected at most 2)", "maxContains", 2, ["foo", 2, "bar", 4, "baz", "quux"], deque([]), {"contains": {"type": "string"}, "maxContains": 2}, deque(["contains"]), "$", ), ) def test_contains_too_few(self): schema = {"contains": {"type": "string"}, "minContains": 2} validator = validators.Draft202012Validator(schema) error, = validator.iter_errors(["foo", 2, 4]) self.assertEqual( ( error.message, error.validator, error.validator_value, error.instance, error.absolute_path, error.schema, error.schema_path, error.json_path, ), ( ( "Too few items match the given schema " "(expected at least 2 but only 1 matched)" ), "minContains", 2, ["foo", 2, 4], deque([]), {"contains": {"type": "string"}, "minContains": 2}, deque(["contains"]), "$", ), ) def test_contains_none(self): schema = {"contains": {"type": "string"}, "minContains": 2} validator = validators.Draft202012Validator(schema) error, = validator.iter_errors([2, 4]) self.assertEqual( ( error.message, error.validator, error.validator_value, error.instance, error.absolute_path, error.schema, error.schema_path, error.json_path, ), ( "[2, 4] does not contain items matching the given schema", "contains", {"type": "string"}, [2, 4], deque([]), {"contains": {"type": "string"}, "minContains": 2}, deque(["contains"]), "$", ), ) def test_ref_sibling(self): schema = { "$defs": {"foo": {"required": ["bar"]}}, "properties": { "aprop": { "$ref": "#/$defs/foo", "required": ["baz"], }, }, } validator = validators.Draft202012Validator(schema) e1, e2 = validator.iter_errors({"aprop": {}}) self.assertEqual( ( e1.message, e1.validator, e1.validator_value, e1.instance, e1.absolute_path, e1.schema, e1.schema_path, e1.relative_schema_path, e1.json_path, ), ( "'bar' is a required property", "required", ["bar"], {}, deque(["aprop"]), {"required": ["bar"]}, deque(["properties", "aprop", "required"]), deque(["properties", "aprop", "required"]), "$.aprop", ), ) self.assertEqual( ( e2.message, e2.validator, e2.validator_value, e2.instance, e2.absolute_path, e2.schema, e2.schema_path, e2.relative_schema_path, e2.json_path, ), ( "'baz' is a required property", "required", ["baz"], {}, deque(["aprop"]), {"$ref": "#/$defs/foo", "required": ["baz"]}, deque(["properties", "aprop", "required"]), deque(["properties", "aprop", "required"]), "$.aprop", ), ) class 
MetaSchemaTestsMixin: # TODO: These all belong upstream def test_invalid_properties(self): with self.assertRaises(exceptions.SchemaError): self.Validator.check_schema({"properties": 12}) def test_minItems_invalid_string(self): with self.assertRaises(exceptions.SchemaError): # needs to be an integer self.Validator.check_schema({"minItems": "1"}) def test_enum_allows_empty_arrays(self): """ Technically, all the spec says is they SHOULD have elements, not MUST. (As of Draft 6. Previous drafts do say MUST). See #529. """ if self.Validator in { validators.Draft3Validator, validators.Draft4Validator, }: with self.assertRaises(exceptions.SchemaError): self.Validator.check_schema({"enum": []}) else: self.Validator.check_schema({"enum": []}) def test_enum_allows_non_unique_items(self): """ Technically, all the spec says is they SHOULD be unique, not MUST. (As of Draft 6. Previous drafts do say MUST). See #529. """ if self.Validator in { validators.Draft3Validator, validators.Draft4Validator, }: with self.assertRaises(exceptions.SchemaError): self.Validator.check_schema({"enum": [12, 12]}) else: self.Validator.check_schema({"enum": [12, 12]}) def test_schema_with_invalid_regex(self): with self.assertRaises(exceptions.SchemaError): self.Validator.check_schema({"pattern": "*notaregex"}) def test_schema_with_invalid_regex_with_disabled_format_validation(self): self.Validator.check_schema( {"pattern": "*notaregex"}, format_checker=None, ) class ValidatorTestMixin(MetaSchemaTestsMixin): def test_it_implements_the_validator_protocol(self): self.assertIsInstance(self.Validator({}), protocols.Validator) def test_valid_instances_are_valid(self): schema, instance = self.valid self.assertTrue(self.Validator(schema).is_valid(instance)) def test_invalid_instances_are_not_valid(self): schema, instance = self.invalid self.assertFalse(self.Validator(schema).is_valid(instance)) def test_non_existent_properties_are_ignored(self): self.Validator({object(): object()}).validate(instance=object()) def test_evolve(self): schema, format_checker = {"type": "integer"}, FormatChecker() original = self.Validator( schema, format_checker=format_checker, ) new = original.evolve( schema={"type": "string"}, format_checker=self.Validator.FORMAT_CHECKER, ) expected = self.Validator( {"type": "string"}, format_checker=self.Validator.FORMAT_CHECKER, _resolver=new._resolver, ) self.assertEqual(new, expected) self.assertNotEqual(new, original) def test_evolve_with_subclass(self): """ Subclassing validators isn't supported public API, but some users have done it, because we don't actually error entirely when it's done :/ We need to deprecate doing so first to help as many of these users ensure they can move to supported APIs, but this test ensures that in the interim, we haven't broken those users. 
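
        The attrs @define subclass created below is exactly that unsupported
        pattern: instantiating it should emit a DeprecationWarning rather than
        fail outright, and evolve() should still carry the extra fields across.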
""" with self.assertWarns(DeprecationWarning): @define class OhNo(self.Validator): foo = field(factory=lambda: [1, 2, 3]) _bar = field(default=37) validator = OhNo({}, bar=12) self.assertEqual(validator.foo, [1, 2, 3]) new = validator.evolve(schema={"type": "integer"}) self.assertEqual(new.foo, [1, 2, 3]) self.assertEqual(new._bar, 12) def test_is_type_is_true_for_valid_type(self): self.assertTrue(self.Validator({}).is_type("foo", "string")) def test_is_type_is_false_for_invalid_type(self): self.assertFalse(self.Validator({}).is_type("foo", "array")) def test_is_type_evades_bool_inheriting_from_int(self): self.assertFalse(self.Validator({}).is_type(True, "integer")) self.assertFalse(self.Validator({}).is_type(True, "number")) def test_it_can_validate_with_decimals(self): schema = {"items": {"type": "number"}} Validator = validators.extend( self.Validator, type_checker=self.Validator.TYPE_CHECKER.redefine( "number", lambda checker, thing: isinstance( thing, (int, float, Decimal), ) and not isinstance(thing, bool), ), ) validator = Validator(schema) validator.validate([1, 1.1, Decimal(1) / Decimal(8)]) invalid = ["foo", {}, [], True, None] self.assertEqual( [error.instance for error in validator.iter_errors(invalid)], invalid, ) def test_it_returns_true_for_formats_it_does_not_know_about(self): validator = self.Validator( {"format": "carrot"}, format_checker=FormatChecker(), ) validator.validate("bugs") def test_it_does_not_validate_formats_by_default(self): validator = self.Validator({}) self.assertIsNone(validator.format_checker) def test_it_validates_formats_if_a_checker_is_provided(self): checker = FormatChecker() bad = ValueError("Bad!") @checker.checks("foo", raises=ValueError) def check(value): if value == "good": return True elif value == "bad": raise bad else: # pragma: no cover self.fail(f"What is {value}? [Baby Don't Hurt Me]") validator = self.Validator( {"format": "foo"}, format_checker=checker, ) validator.validate("good") with self.assertRaises(exceptions.ValidationError) as cm: validator.validate("bad") # Make sure original cause is attached self.assertIs(cm.exception.cause, bad) def test_non_string_custom_type(self): non_string_type = object() schema = {"type": [non_string_type]} Crazy = validators.extend( self.Validator, type_checker=self.Validator.TYPE_CHECKER.redefine( non_string_type, lambda checker, thing: isinstance(thing, int), ), ) Crazy(schema).validate(15) def test_it_properly_formats_tuples_in_errors(self): """ A tuple instance properly formats validation errors for uniqueItems. 
See #224 """ TupleValidator = validators.extend( self.Validator, type_checker=self.Validator.TYPE_CHECKER.redefine( "array", lambda checker, thing: isinstance(thing, tuple), ), ) with self.assertRaises(exceptions.ValidationError) as e: TupleValidator({"uniqueItems": True}).validate((1, 1)) self.assertIn("(1, 1) has non-unique elements", str(e.exception)) def test_check_redefined_sequence(self): """ Allow array to validate against another defined sequence type """ schema = {"type": "array", "uniqueItems": True} MyMapping = namedtuple("MyMapping", "a, b") Validator = validators.extend( self.Validator, type_checker=self.Validator.TYPE_CHECKER.redefine_many( { "array": lambda checker, thing: isinstance( thing, (list, deque), ), "object": lambda checker, thing: isinstance( thing, (dict, MyMapping), ), }, ), ) validator = Validator(schema) valid_instances = [ deque(["a", None, "1", "", True]), deque([[False], [0]]), [deque([False]), deque([0])], [[deque([False])], [deque([0])]], [[[[[deque([False])]]]], [[[[deque([0])]]]]], [deque([deque([False])]), deque([deque([0])])], [MyMapping("a", 0), MyMapping("a", False)], [ MyMapping("a", [deque([0])]), MyMapping("a", [deque([False])]), ], [ MyMapping("a", [MyMapping("a", deque([0]))]), MyMapping("a", [MyMapping("a", deque([False]))]), ], [deque(deque(deque([False]))), deque(deque(deque([0])))], ] for instance in valid_instances: validator.validate(instance) invalid_instances = [ deque(["a", "b", "a"]), deque([[False], [False]]), [deque([False]), deque([False])], [[deque([False])], [deque([False])]], [[[[[deque([False])]]]], [[[[deque([False])]]]]], [deque([deque([False])]), deque([deque([False])])], [MyMapping("a", False), MyMapping("a", False)], [ MyMapping("a", [deque([False])]), MyMapping("a", [deque([False])]), ], [ MyMapping("a", [MyMapping("a", deque([False]))]), MyMapping("a", [MyMapping("a", deque([False]))]), ], [deque(deque(deque([False]))), deque(deque(deque([False])))], ] for instance in invalid_instances: with self.assertRaises(exceptions.ValidationError): validator.validate(instance) def test_it_creates_a_ref_resolver_if_not_provided(self): with self.assertWarns(DeprecationWarning): resolver = self.Validator({}).resolver self.assertIsInstance(resolver, validators._RefResolver) def test_it_upconverts_from_deprecated_RefResolvers(self): ref, schema = "someCoolRef", {"type": "integer"} resolver = validators._RefResolver("", {}, store={ref: schema}) validator = self.Validator({"$ref": ref}, resolver=resolver) with self.assertRaises(exceptions.ValidationError): validator.validate(None) def test_it_upconverts_from_yet_older_deprecated_legacy_RefResolvers(self): """ Legacy RefResolvers support only the context manager form of resolution. """ class LegacyRefResolver: @contextmanager def resolving(this, ref): self.assertEqual(ref, "the ref") yield {"type": "integer"} resolver = LegacyRefResolver() schema = {"$ref": "the ref"} with self.assertRaises(exceptions.ValidationError): self.Validator(schema, resolver=resolver).validate(None) class AntiDraft6LeakMixin: """ Make sure functionality from draft 6 doesn't leak backwards in time. 
""" def test_True_is_not_a_schema(self): with self.assertRaises(exceptions.SchemaError) as e: self.Validator.check_schema(True) self.assertIn("True is not of type", str(e.exception)) def test_False_is_not_a_schema(self): with self.assertRaises(exceptions.SchemaError) as e: self.Validator.check_schema(False) self.assertIn("False is not of type", str(e.exception)) def test_True_is_not_a_schema_even_if_you_forget_to_check(self): with self.assertRaises(Exception) as e: self.Validator(True).validate(12) self.assertNotIsInstance(e.exception, exceptions.ValidationError) def test_False_is_not_a_schema_even_if_you_forget_to_check(self): with self.assertRaises(Exception) as e: self.Validator(False).validate(12) self.assertNotIsInstance(e.exception, exceptions.ValidationError) class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): Validator = validators.Draft3Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" def test_any_type_is_valid_for_type_any(self): validator = self.Validator({"type": "any"}) validator.validate(object()) def test_any_type_is_redefinable(self): """ Sigh, because why not. """ Crazy = validators.extend( self.Validator, type_checker=self.Validator.TYPE_CHECKER.redefine( "any", lambda checker, thing: isinstance(thing, int), ), ) validator = Crazy({"type": "any"}) validator.validate(12) with self.assertRaises(exceptions.ValidationError): validator.validate("foo") def test_is_type_is_true_for_any_type(self): self.assertTrue(self.Validator({"type": "any"}).is_valid(object())) def test_is_type_does_not_evade_bool_if_it_is_being_tested(self): self.assertTrue(self.Validator({}).is_type(True, "boolean")) self.assertTrue(self.Validator({"type": "any"}).is_valid(True)) class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase): Validator = validators.Draft4Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" class TestDraft6Validator(ValidatorTestMixin, TestCase): Validator = validators.Draft6Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" class TestDraft7Validator(ValidatorTestMixin, TestCase): Validator = validators.Draft7Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" class TestDraft201909Validator(ValidatorTestMixin, TestCase): Validator = validators.Draft201909Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" class TestDraft202012Validator(ValidatorTestMixin, TestCase): Validator = validators.Draft202012Validator valid: tuple[dict, dict] = ({}, {}) invalid = {"type": "integer"}, "foo" class TestLatestValidator(TestCase): """ These really apply to multiple versions but are easiest to test on one. 
""" def test_ref_resolvers_may_have_boolean_schemas_stored(self): ref = "someCoolRef" schema = {"$ref": ref} resolver = validators._RefResolver("", {}, store={ref: False}) validator = validators._LATEST_VERSION(schema, resolver=resolver) with self.assertRaises(exceptions.ValidationError): validator.validate(None) class TestValidatorFor(TestCase): def test_draft_3(self): schema = {"$schema": "http://json-schema.org/draft-03/schema"} self.assertIs( validators.validator_for(schema), validators.Draft3Validator, ) schema = {"$schema": "http://json-schema.org/draft-03/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft3Validator, ) def test_draft_4(self): schema = {"$schema": "http://json-schema.org/draft-04/schema"} self.assertIs( validators.validator_for(schema), validators.Draft4Validator, ) schema = {"$schema": "http://json-schema.org/draft-04/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft4Validator, ) def test_draft_6(self): schema = {"$schema": "http://json-schema.org/draft-06/schema"} self.assertIs( validators.validator_for(schema), validators.Draft6Validator, ) schema = {"$schema": "http://json-schema.org/draft-06/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft6Validator, ) def test_draft_7(self): schema = {"$schema": "http://json-schema.org/draft-07/schema"} self.assertIs( validators.validator_for(schema), validators.Draft7Validator, ) schema = {"$schema": "http://json-schema.org/draft-07/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft7Validator, ) def test_draft_201909(self): schema = {"$schema": "https://json-schema.org/draft/2019-09/schema"} self.assertIs( validators.validator_for(schema), validators.Draft201909Validator, ) schema = {"$schema": "https://json-schema.org/draft/2019-09/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft201909Validator, ) def test_draft_202012(self): schema = {"$schema": "https://json-schema.org/draft/2020-12/schema"} self.assertIs( validators.validator_for(schema), validators.Draft202012Validator, ) schema = {"$schema": "https://json-schema.org/draft/2020-12/schema#"} self.assertIs( validators.validator_for(schema), validators.Draft202012Validator, ) def test_True(self): self.assertIs( validators.validator_for(True), validators._LATEST_VERSION, ) def test_False(self): self.assertIs( validators.validator_for(False), validators._LATEST_VERSION, ) def test_custom_validator(self): Validator = validators.create( meta_schema={"id": "meta schema id"}, version="12", id_of=lambda s: s.get("id", ""), ) schema = {"$schema": "meta schema id"} self.assertIs( validators.validator_for(schema), Validator, ) def test_custom_validator_draft6(self): Validator = validators.create( meta_schema={"$id": "meta schema $id"}, version="13", ) schema = {"$schema": "meta schema $id"} self.assertIs( validators.validator_for(schema), Validator, ) def test_validator_for_jsonschema_default(self): self.assertIs(validators.validator_for({}), validators._LATEST_VERSION) def test_validator_for_custom_default(self): self.assertIs(validators.validator_for({}, default=None), None) def test_warns_if_meta_schema_specified_was_not_found(self): with self.assertWarns(DeprecationWarning) as cm: validators.validator_for(schema={"$schema": "unknownSchema"}) self.assertEqual(cm.filename, __file__) self.assertEqual( str(cm.warning), "The metaschema specified by $schema was not found. 
" "Using the latest draft to validate, but this will raise " "an error in the future.", ) def test_does_not_warn_if_meta_schema_is_unspecified(self): with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") validators.validator_for(schema={}, default={}) self.assertFalse(w) def test_validator_for_custom_default_with_schema(self): schema, default = {"$schema": "mailto:foo@example.com"}, object() self.assertIs(validators.validator_for(schema, default), default) class TestValidate(TestCase): def assertUses(self, schema, Validator): result = [] with mock.patch.object(Validator, "check_schema", result.append): validators.validate({}, schema) self.assertEqual(result, [schema]) def test_draft3_validator_is_chosen(self): self.assertUses( schema={"$schema": "http://json-schema.org/draft-03/schema#"}, Validator=validators.Draft3Validator, ) # Make sure it works without the empty fragment self.assertUses( schema={"$schema": "http://json-schema.org/draft-03/schema"}, Validator=validators.Draft3Validator, ) def test_draft4_validator_is_chosen(self): self.assertUses( schema={"$schema": "http://json-schema.org/draft-04/schema#"}, Validator=validators.Draft4Validator, ) # Make sure it works without the empty fragment self.assertUses( schema={"$schema": "http://json-schema.org/draft-04/schema"}, Validator=validators.Draft4Validator, ) def test_draft6_validator_is_chosen(self): self.assertUses( schema={"$schema": "http://json-schema.org/draft-06/schema#"}, Validator=validators.Draft6Validator, ) # Make sure it works without the empty fragment self.assertUses( schema={"$schema": "http://json-schema.org/draft-06/schema"}, Validator=validators.Draft6Validator, ) def test_draft7_validator_is_chosen(self): self.assertUses( schema={"$schema": "http://json-schema.org/draft-07/schema#"}, Validator=validators.Draft7Validator, ) # Make sure it works without the empty fragment self.assertUses( schema={"$schema": "http://json-schema.org/draft-07/schema"}, Validator=validators.Draft7Validator, ) def test_draft202012_validator_is_chosen(self): self.assertUses( schema={ "$schema": "https://json-schema.org/draft/2020-12/schema#", }, Validator=validators.Draft202012Validator, ) # Make sure it works without the empty fragment self.assertUses( schema={ "$schema": "https://json-schema.org/draft/2020-12/schema", }, Validator=validators.Draft202012Validator, ) def test_draft202012_validator_is_the_default(self): self.assertUses(schema={}, Validator=validators.Draft202012Validator) def test_validation_error_message(self): with self.assertRaises(exceptions.ValidationError) as e: validators.validate(12, {"type": "string"}) self.assertRegex( str(e.exception), "(?s)Failed validating '.*' in schema.*On instance", ) def test_schema_error_message(self): with self.assertRaises(exceptions.SchemaError) as e: validators.validate(12, {"type": 12}) self.assertRegex( str(e.exception), "(?s)Failed validating '.*' in metaschema.*On schema", ) def test_it_uses_best_match(self): schema = { "oneOf": [ {"type": "number", "minimum": 20}, {"type": "array"}, ], } with self.assertRaises(exceptions.ValidationError) as e: validators.validate(12, schema) self.assertIn("12 is less than the minimum of 20", str(e.exception)) class TestThreading(TestCase): """ Threading-related functionality tests. 
    jsonschema doesn't promise thread safety, and its validation behavior
    across multiple threads may change at any time, but that means it isn't
    safe to share *validators* across threads, not that anytime one has
    multiple threads that jsonschema won't work (it certainly is intended to).

    These tests ensure that this minimal level of functionality continues to
    work.
    """

    def test_validation_across_a_second_thread(self):
        failed = []

        def validate():
            try:
                validators.validate(instance=37, schema=True)
            except:  # pragma: no cover  # noqa: E722
                failed.append(sys.exc_info())

        validate()  # just verify it succeeds

        from threading import Thread
        thread = Thread(target=validate)
        thread.start()
        thread.join()
        self.assertEqual((thread.is_alive(), failed), (False, []))


class TestReferencing(TestCase):
    def test_registry_with_retrieve(self):
        def retrieve(uri):
            return DRAFT202012.create_resource({"type": "integer"})

        registry = referencing.Registry(retrieve=retrieve)
        schema = {"$ref": "https://example.com/"}
        validator = validators.Draft202012Validator(schema, registry=registry)
        self.assertEqual(
            (validator.is_valid(12), validator.is_valid("foo")),
            (True, False),
        )

    def test_custom_registries_do_not_autoretrieve_remote_resources(self):
        registry = referencing.Registry()
        schema = {"$ref": "https://example.com/"}
        validator = validators.Draft202012Validator(schema, registry=registry)
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            with self.assertRaises(referencing.exceptions.Unresolvable):
                validator.validate(12)
        self.assertFalse(w)


class TestRefResolver(TestCase):

    base_uri = ""
    stored_uri = "foo://stored"
    stored_schema = {"stored": "schema"}

    def setUp(self):
        self.referrer = {}
        self.store = {self.stored_uri: self.stored_schema}
        self.resolver = validators._RefResolver(
            self.base_uri, self.referrer, self.store,
        )

    def test_it_does_not_retrieve_schema_urls_from_the_network(self):
        ref = validators.Draft3Validator.META_SCHEMA["id"]
        with mock.patch.object(self.resolver, "resolve_remote") as patched:  # noqa: SIM117
            with self.resolver.resolving(ref) as resolved:
                pass
        self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA)
        self.assertFalse(patched.called)

    def test_it_resolves_local_refs(self):
        ref = "#/properties/foo"
        self.referrer["properties"] = {"foo": object()}
        with self.resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, self.referrer["properties"]["foo"])

    def test_it_resolves_local_refs_with_id(self):
        schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}}
        resolver = validators._RefResolver.from_schema(
            schema, id_of=lambda schema: schema.get("id", ""),
        )
        with resolver.resolving("#/a") as resolved:
            self.assertEqual(resolved, schema["a"])
        with resolver.resolving("http://bar/schema#/a") as resolved:
            self.assertEqual(resolved, schema["a"])

    def test_it_retrieves_stored_refs(self):
        with self.resolver.resolving(self.stored_uri) as resolved:
            self.assertIs(resolved, self.stored_schema)

        self.resolver.store["cached_ref"] = {"foo": 12}
        with self.resolver.resolving("cached_ref#/foo") as resolved:
            self.assertEqual(resolved, 12)

    def test_it_retrieves_unstored_refs_via_requests(self):
        ref = "http://bar#baz"
        schema = {"baz": 12}

        if "requests" in sys.modules:  # pragma: no cover
            self.addCleanup(
                sys.modules.__setitem__, "requests", sys.modules["requests"],
            )
        sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema})

        with self.resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, 12)

    def test_it_retrieves_unstored_refs_via_urlopen(self):
        ref = "http://bar#baz"
{"baz": 12} if "requests" in sys.modules: # pragma: no cover self.addCleanup( sys.modules.__setitem__, "requests", sys.modules["requests"], ) sys.modules["requests"] = None @contextmanager def fake_urlopen(url): self.assertEqual(url, "http://bar") yield BytesIO(json.dumps(schema).encode("utf8")) self.addCleanup(setattr, validators, "urlopen", validators.urlopen) validators.urlopen = fake_urlopen with self.resolver.resolving(ref) as resolved: pass self.assertEqual(resolved, 12) def test_it_retrieves_local_refs_via_urlopen(self): with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf: self.addCleanup(os.remove, tempf.name) json.dump({"foo": "bar"}, tempf) ref = f"file://{pathname2url(tempf.name)}#foo" with self.resolver.resolving(ref) as resolved: self.assertEqual(resolved, "bar") def test_it_can_construct_a_base_uri_from_a_schema(self): schema = {"id": "foo"} resolver = validators._RefResolver.from_schema( schema, id_of=lambda schema: schema.get("id", ""), ) self.assertEqual(resolver.base_uri, "foo") self.assertEqual(resolver.resolution_scope, "foo") with resolver.resolving("") as resolved: self.assertEqual(resolved, schema) with resolver.resolving("#") as resolved: self.assertEqual(resolved, schema) with resolver.resolving("foo") as resolved: self.assertEqual(resolved, schema) with resolver.resolving("foo#") as resolved: self.assertEqual(resolved, schema) def test_it_can_construct_a_base_uri_from_a_schema_without_id(self): schema = {} resolver = validators._RefResolver.from_schema(schema) self.assertEqual(resolver.base_uri, "") self.assertEqual(resolver.resolution_scope, "") with resolver.resolving("") as resolved: self.assertEqual(resolved, schema) with resolver.resolving("#") as resolved: self.assertEqual(resolved, schema) def test_custom_uri_scheme_handlers(self): def handler(url): self.assertEqual(url, ref) return schema schema = {"foo": "bar"} ref = "foo://bar" resolver = validators._RefResolver("", {}, handlers={"foo": handler}) with resolver.resolving(ref) as resolved: self.assertEqual(resolved, schema) def test_cache_remote_on(self): response = [object()] def handler(url): try: return response.pop() except IndexError: # pragma: no cover self.fail("Response must not have been cached!") ref = "foo://bar" resolver = validators._RefResolver( "", {}, cache_remote=True, handlers={"foo": handler}, ) with resolver.resolving(ref): pass with resolver.resolving(ref): pass def test_cache_remote_off(self): response = [object()] def handler(url): try: return response.pop() except IndexError: # pragma: no cover self.fail("Handler called twice!") ref = "foo://bar" resolver = validators._RefResolver( "", {}, cache_remote=False, handlers={"foo": handler}, ) with resolver.resolving(ref): pass def test_if_you_give_it_junk_you_get_a_resolution_error(self): error = ValueError("Oh no! What's this?") def handler(url): raise error ref = "foo://bar" resolver = validators._RefResolver("", {}, handlers={"foo": handler}) with self.assertRaises(exceptions._RefResolutionError) as err: # noqa: SIM117 with resolver.resolving(ref): self.fail("Shouldn't get this far!") # pragma: no cover self.assertEqual(err.exception, exceptions._RefResolutionError(error)) def test_helpful_error_message_on_failed_pop_scope(self): resolver = validators._RefResolver("", {}) resolver.pop_scope() with self.assertRaises(exceptions._RefResolutionError) as exc: resolver.pop_scope() self.assertIn("Failed to pop the scope", str(exc.exception)) def test_pointer_within_schema_with_different_id(self): """ See #1085. 
""" schema = validators.Draft7Validator.META_SCHEMA one = validators._RefResolver("", schema) validator = validators.Draft7Validator(schema, resolver=one) self.assertFalse(validator.is_valid({"maxLength": "foo"})) another = { "allOf": [{"$ref": validators.Draft7Validator.META_SCHEMA["$id"]}], } two = validators._RefResolver("", another) validator = validators.Draft7Validator(another, resolver=two) self.assertFalse(validator.is_valid({"maxLength": "foo"})) def test_newly_created_validator_with_ref_resolver(self): """ See https://github.com/python-jsonschema/jsonschema/issues/1061#issuecomment-1624266555. """ def handle(uri): self.assertEqual(uri, "http://example.com/foo") return {"type": "integer"} resolver = validators._RefResolver("", {}, handlers={"http": handle}) Validator = validators.create( meta_schema={}, validators=validators.Draft4Validator.VALIDATORS, ) schema = {"$id": "http://example.com/bar", "$ref": "foo"} validator = Validator(schema, resolver=resolver) self.assertEqual( (validator.is_valid({}), validator.is_valid(37)), (False, True), ) def test_refresolver_with_pointer_in_schema_with_no_id(self): """ See https://github.com/python-jsonschema/jsonschema/issues/1124#issuecomment-1632574249. """ schema = { "properties": {"x": {"$ref": "#/definitions/x"}}, "definitions": {"x": {"type": "integer"}}, } validator = validators.Draft202012Validator( schema, resolver=validators._RefResolver("", schema), ) self.assertEqual( (validator.is_valid({"x": "y"}), validator.is_valid({"x": 37})), (False, True), ) def sorted_errors(errors): def key(error): return ( [str(e) for e in error.path], [str(e) for e in error.schema_path], ) return sorted(errors, key=key) @define class ReallyFakeRequests: _responses: dict[str, Any] def get(self, url): response = self._responses.get(url) if url is None: # pragma: no cover raise ValueError("Unknown URL: " + repr(url)) return _ReallyFakeJSONResponse(json.dumps(response)) @define class _ReallyFakeJSONResponse: _response: str def json(self): return json.loads(self._response) PK! n:)tests/__pycache__/__init__.cpython-38.pycnu[U af@sdS)NrrrJ/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/__init__.pyPK!mE"E"'tests/__pycache__/_suite.cpython-38.pycnu[U af@s8dZddlmZddlmZddlmZddlmZddl m Z m Z ddl Z ddl Z ddlZddlZddlZddlZddlmZmZdd lmZddlZe rdd lmZmZmZddlZdd lmZddlZe d Z!d dZ"eGdddZ#eGdddZ$eGdddZ%eddGdddZ&ddZ'dS)z= Python representations of the JSON Schema Test Suite tests. ) annotations)suppress)partial)Path) TYPE_CHECKINGAnyN)fieldfrozen)Registry)IterableMappingSequence) _VALIDATORSz[\W\- ]+cCsBtjd}|dk rt|Sttjjjd}|s>td|S)NZJSON_SCHEMA_TEST_SUITEjsonzCan't find the JSON-Schema-Test-Suite directory. Set the 'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from alongside a checkout of the suite.) 
osenvirongetr jsonschema__file__parentis_dir ValueError)rootrH/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/_suite.py _find_suite s rc@sVeZdZUeedZded<eddZded<dd Zd d d d Z ddddZ dS)Suite)factoryr_rootF)init%referencing.jsonschema.SchemaRegistry_remotescCs|jdd}tjt|dg}t|d}t |}d}d}t |t j jj||df|t j jj||dfgj|t j jd}t|d |dS) Nbinjsonschema_suiteremotesutf-8z?http://localhost:1234/locationIndependentIdentifierPre2019.jsonz>http://localhost:1234/locationIndependentIdentifierDraft4.json)contents)Zdefault_specificationr!)rjoinpathsys executablestr subprocess check_outputdecoderloadsr Zwith_resources referencingrZDRAFT7Zcreate_resourcepopZDRAFT4Z with_contentsitemsZ DRAFT202012object __setattr__)selfr#argvr$ resourcesliZli4registryrrr__attrs_post_init__8s. zSuite.__attrs_post_init__ pyperf.RunnerrunnercCs,tD]\}}|j|dj||dqdS)N)name)r< Validator)rr1version benchmark)r4r<r=r>rrrr@Ws  zSuite.benchmarkVersionreturncCst||jd||jdS)Ntests)r=pathr$)rArr!r4r=rrrr?^s  z Suite.versionN) __name__ __module__ __qualname__rrr__annotations__r!r9r@r?rrrrr2s rc@sreZdZUded<ded<ded<ddZd d d d Zd d d dZdd dddZddZdd dddZ dS)rAr_pathr r!r*r=cKs|D]}|jf|qdSN)casesr@)r4kwargscaserrrr@ns zVersion.benchmarkzIterable[_Case]rBcCs|j|jddS)Nz*.jsonpaths _cases_inrKglobr4rrrrMrsz Version.casescCs|j|jddS)Nzoptional/format/*.jsonrPrRrUrrr format_casesuszVersion.format_cases)r=rCcCs|j|jd|dgdS)Noptionalz.jsonrP)rSrKrFrrroptional_cases_ofxszVersion.optional_cases_ofc sjdd|jdd}ddfdd|DD}t|tjf|}ttt |_ W5QRX|S) Nr=ZTest-cSsi|] }|j|qSr)rG).0methodrrr }sz0Version.to_unittest_testcase..c3s0|](}|D]}|jD]}|jfVqq qdSrL)rDto_unittest_method)r[grouprOtestrNrr s z/Version.to_unittest_testcase..) r0r=titlereplacetypeunittestZTestCaser Exception)_someone_save_us_the_module_of_the_callerrH)r4groupsrNr=methodsclsrrarto_unittest_testcase{s   zVersion.to_unittest_testcasezIterable[Path])rQrCccs>|D]4}t|jddD]}tj|||j|jdVqqdS)Nr%)encoding)r?subjectr$)rr. read_text_Case from_dictstemr!)r4rQrErOrrrrSszVersion._cases_inN) rGrHrIrJr@rMrVrXrlrSrrrrrAfs rAc@sheZdZUded<ded<ded<ded<ded <d Zd ed <d Zded<eddZddddZd S)rprAr?r*rn descriptionMapping[str, Any] | boolschemaz list[_Test]rDN str | NonecommentrzSequence[dict[str, str]] specificationc s6|fdddD}|fd|iS)Nc s6g|].}tfddddd|qS)r?rnrsru)r?rncase_descriptionrur$)_Test)r[r`datar$rr sz#_Case.from_dict..rD)updater0)rkr|r$rNrDrr{rrqs    z_Case.from_dictr:r;cKs(|jD]}||jt|jf|qdSrL)rDZ bench_funcfully_qualified_namervalidate_ignoring_errors)r4r<rNr`rrrr@s   z_Case.benchmark) rGrHrIrJrwrx classmethodrqr@rrrrrps    rpF)reprc@seZdZUded<ded<ded<ded<ded<d ed <d ed <d ed<dZded<ddZeddZddfddZddZ ddZ dS)rzrAr?r*rnryrsrr|rtruboolvalidr r!NrvrwcCsd|jdS)Nz)rrUrrr__repr__sz_Test.__repr__cCsd|jj|j|j|jgS)Nz > )joinr?r=rnryrsrUrrrrsz_Test.fully_qualified_namecCsdSrLr)r`rrrz_Test.c sjrfdd}nfdd}ddtdjtdjtdjg|_|}|dksxtj dddkr||Stj dddkrt |St ||SdS) NcsjfdSrL)validatethisrNr4rrfnsz$_Test.to_unittest_method..fnc s(|tjjfW5QRXdSrL) assertRaisesrValidationErrorrrrrrrs_r`JSON_SCHEMA_DEBUG0ZJSON_SCHEMA_EXPECTED_FAILURES)rr _DELIMITERSsubrnryrsrGrrrrfZexpectedFailureskip)r4rrNrreasonrrrr^s     z_Test.to_unittest_methodcKsN||j|f|j|jd|}tjdddkrrN validatorrrrrs z_Test.validatec Cs&ttj|j|dW5QRXdS)N)r>)rrrr)r4r>rrrrs z_Test.validate_ignoring_errors) rGrHrIrJrwrpropertyrr^rrrrrrrzs    rzcCstdjdS)a The FQON of the module 2nd stack frames up from here. This is intended to allow us to dynamically return test case classes that are indistinguishable from being defined in the module that wants them. 
Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run the class that really is running. Save us all, this is all so so so so so terrible. rG)r( _getframe f_globalsrrrrrhs rh)(__doc__ __future__r contextlibr functoolsrpathlibrtypingrrrrrer+r(rfattrsrr r/r Zreferencing.jsonschemacollections.abcr r r ZpyperfZjsonschema.validatorsrrcompilerrrrArprzrhrrrrs<       34"HPK!H3KK.tests/__pycache__/fuzz_validate.cpython-38.pycnu[U afZ@sdZddlZddlmZmZddlZeeeej ddde Z ej eee e e dddZeeed d d Zd d ZedkrddlZedS)z Fuzzing setup for OSS-Fuzz. See https://github.com/google/oss-fuzz/tree/master/projects/jsonschema for the other half of the setup here. N)given strategiesF) allow_nanZallow_infinitycCstt|S)N)r dictionariestext)innerrO/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/fuzz_validate.pyr )baseextendZobj1obj2cCsFztj||dWn.tjjk r*Yntjjk r@YnXdS)N)instanceZschema) jsonschemavalidate exceptionsZValidationErrorZ SchemaErrorrrrr test_schemass rcCs*ttjtjtjjddtdS)NT)Zenable_python_coverage) atherisZinstrument_allSetupsysargvr hypothesisZfuzz_one_inputZFuzzrrrr main&sr__main__)__doc__rrrrrZone_ofZbooleansZintegersZfloatsrZPRIM recursiverDICTrr__name__rrrrr s,    PK!IH ]])tests/__pycache__/test_cli.cpython-38.pycnu[U afo @s2ddlmZmZddlmZddlmZddlmZddl m Z ddl m Z ddl mZddlZddlZddlZddlZddlZddlZdd lmZmZdd lmZmZmZdd lmZmZeed dd lm Z W5QRXddZ!ddZ"ddZ#GdddeZ$GdddeZ%GdddeZ&dS))redirect_stderrredirect_stdout)metadata)StringIO)JSONDecodeError)Path)dedent)TestCaseN)Draft4ValidatorDraft202012Validator) SchemaErrorValidationError_RefResolutionError)_LATEST_VERSIONvalidateignore)clics"ttGfddd}|S)Ncs,eZdZddZfddZeddZdS)z%fake_validator..FakeValidatorc_sdSN)selfargskwargsrrJ/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/test_cli.py__init__ sz.fake_validator..FakeValidator.__init__csr SgSr)poprinstanceerrorsrr iter_errors#sz1fake_validator..FakeValidator.iter_errorscSsdSrrrschemarrr check_schema(sz2fake_validator..FakeValidator.check_schemaN)__name__ __module__ __qualname__rr classmethodr"rrrr FakeValidators r')listreversed)rr'rrrfake_validators  r*csfdd}|S)Ncs"|}|dkrt|t|Sr)getFileNotFoundErrorr)pathcontents all_contentsrropen0s zfake_open..openr)r0r1rr/r fake_open/s r2c CsJzt|Wn.tk r<}zt|WYSd}~XYn XtddS)Nz.Tried and failed to capture a JSON dump error.)jsonloadsrstr RuntimeError)Znon_jsonerrorrrr _message_for8s r8c@seZdZdedfddZd^ddZdd Zd d Zd d ZddZ ddZ ddZ ddZ ddZ ddZddZddZddZd d!Zd"d#Zd$d%Zd&d'Zd(d)Zd*d+Zd,d-Zd.d/Zd0d1Zd2d3Zd4d5Zd6d7Zd8d9Zd:d;Zdd?Z!d@dAZ"dBdCZ#dDdEZ$dFdGZ%dHdIZ&dJdKZ'dLdMZ(dNdOZ)dPdQZ*dRdSZ+dTdUZ,dVdWZ-dXdYZ.dZd[Z/d\d]Z0dS)_TestCLINrc Kst|}|||ttdt|p,it_z$tt}}tj||||d} W5t`X|j | |t d|d| d| d| d d| | fS) Nr1)stdinstdoutstderrz. Expected an exit code of z != z. stdout: z stderr: z )msg) r parse_argsupdate assertFalsehasattrr2r1rrun assertEqualrgetvalue) rargvfilesr: exit_codeoverride argumentsr;r<Zactual_exit_coderrrrun_cliBs:    zTestCLI.run_clicKs$||jf|t|t|fdSr)rCrJr)rr;r<rrrr assertOutputsbs zTestCLI.assertOutputscCs@tddd}|jtdt|jdt|gdddgd d d dS) NI am an error! r*{"does not": "matter since it is stubbed"} some_schema some_instance-irSrR12: I am an error! rF validatorrErGr<r rLdictr3dumpsrr*rr7rrrtest_invalid_instancehs  zTestCLI.test_invalid_instancecCsDtddd}|jtdt|jdt|gdddd d gd d d dS)NrMrNrOrPrQrTrS--outputprettyrRrUz ===[ValidationError]===(some_instance)=== I am an error! 
----------------------------- rWrYr\rrr#test_invalid_instance_pretty_outputws   z+TestCLI.test_invalid_instance_pretty_outputcCsDtddd}|jtdt|jdt|gdddd d gd d d dS)NrMrNrOrPrQr^plainrTrSrRrUrVrWrYr\rrr+test_invalid_instance_explicit_plain_outputs   z3TestCLI.test_invalid_instance_explicit_plain_outputcCsPd}td|d}td|d}|jtdt|dt||gddd gd d d dS) NrN First errorrO Second errorrPrQrTrSrRrUzM 12: First error 12: Second error rWr rLrZr3r[r*rrfirstsecondrrr%test_invalid_instance_multiple_errorss   z-TestCLI.test_invalid_instance_multiple_errorscCsTd}td|d}td|d}|jtdt|dt||gddd d d gd d ddS)NrNrcrOrdrPrQrTrSr^r_rRrUa ===[ValidationError]===(some_instance)=== First error ----------------------------- ===[ValidationError]===(some_instance)=== Second error ----------------------------- rWrerfrrr3test_invalid_instance_multiple_errors_pretty_outputs    z;TestCLI.test_invalid_instance_multiple_errors_pretty_outputcCsld}td|dtd|dg}d}td|dg}|jtdt|t|dt||d d d d d gd dddS)NrNAn errorrO Another errorfooBOOMrPrRsome_first_instancesome_second_instancerTrprqrRrUze 12: An error 12: Another error foo: BOOM rWrerfirst_instanceZ first_errorssecond_instanceZ second_errorsrrrtest_multiple_invalid_instancess,  z'TestCLI.test_multiple_invalid_instancesc Cspd}td|dtd|dg}d}td|dg}|jtdt|t|dt||d d d d d d dgddddS)NrNrkrOrlrmrnrPror^r_rTrprqrRrUa ===[ValidationError]===(some_first_instance)=== An error ----------------------------- ===[ValidationError]===(some_first_instance)=== Another error ----------------------------- ===[ValidationError]===(some_second_instance)=== BOOM ----------------------------- rWrerrrrr-test_multiple_invalid_instances_pretty_outputs0  z5TestCLI.test_multiple_invalid_instances_pretty_outputc Cspd}td|dtd|dg}d}td|dg}|jtdt|t|dt||d d d d d d dgddddS)NrNrkrOrlrmrnrPro--error-format&:{error.message}._-_.{error.instance}:rTrprqrRrUz5:An error._-_.12::Another error._-_.12::BOOM._-_.foo:rWrerrrrrtest_custom_error_formats0  z TestCLI.test_custom_error_formatcCs|jtdddgddddS)Nz {"type": 12}rRrRrUO 12: 12 is not valid under any of the given schemas rFrErGr<rLrZrrrrtest_invalid_schema.s zTestCLI.test_invalid_schemac Cshddi}|t}t|ddW5QRXt|j}|jtt|ddddgd d t|d d dS) NtyperNrKr!rrzr^r_rRrU%===[SchemaError]===(some_schema)===  ----------------------------- r| assertRaisesr rr5 exceptionrLrZr3r[rr!er7rrr!test_invalid_schema_pretty_output9s  z)TestCLI.test_invalid_schema_pretty_outputcCs|jtdddgddddS)Nz{"type": 12, "items": 57}rzrRrUzF 57: 57 is not of type 'object', 'boolean' r|r}r~rrr#test_invalid_schema_multiple_errorsLs z+TestCLI.test_invalid_schema_multiple_errorsc Csjddd}|t}t|ddW5QRXt|j}|jtt|dddd gd d t|d d dS)NrN9)ritemsrKrrzr^r_rRrUrrr|rrrrr1test_invalid_schema_multiple_errors_pretty_outputWs   z9TestCLI.test_invalid_schema_multiple_errors_pretty_outputcCs$|jtddddddgddd d S) zv "Validating" an instance that's invalid under an invalid schema just shows the schema error. 
z{"type": 12, "minimum": 30}Z13rQrTrSrRrUr{r|Nr}r~rrr)test_invalid_schema_with_invalid_instancejsz1TestCLI.test_invalid_schema_with_invalid_instancec Cs|dddd}}|t}t||dW5QRXt|j}|jtt|t|dddd d d gd d t|dddS)N rNrZminimumrrQr^r_rTrSrRrUrrr|r)rrr!rr7rrr7test_invalid_schema_with_invalid_instance_pretty_output|s"   z?TestCLI.test_invalid_schema_with_invalid_instance_pretty_outputcCs4|jtdddddddddgd d tdd dS) Nz{"minimum": 30}not valid JSON!Z12)rRrsrtrTrsrtrRrUzw Failed to parse 'first_instance': {} 12: 12 is less than the minimum of 30 r|)rLrZformatr8r~rrr-test_invalid_instance_continues_with_the_rests"z5TestCLI.test_invalid_instance_continues_with_the_restc CsZdddd}}|tt||dW5QRX|jtt|dddd gd d d dS) NrrNrrrrzrwrxrRrUz7:12 is not valid under any of the given schemas._-_.12:r|)rr rrLrZr3r[)rrr!rrr1test_custom_error_format_applies_to_schema_errorss z9TestCLI.test_custom_error_format_applies_to_schema_errorscCs4d}|jtd|ddddgddt|d d dS) Nr{}rQrTrSrRrUz1 Failed to parse 'some_instance': r|rLrZr8rrrrtest_instance_is_invalid_JSONs z%TestCLI.test_instance_is_invalid_JSONcCsL|jtddddddddgd d \}}|||d ||d|dS) NrrrQr^r_rTrSrRrUrFrErGz7(some_instance)=== Traceback (most recent call last): rJrZr@assertIn assertNotInrr;r<rrr+test_instance_is_invalid_JSON_pretty_outputs  z3TestCLI.test_instance_is_invalid_JSON_pretty_outputcCs4d}|jtddt|dgddt|dddS) NrrrzrRrUz) Failed to parse : r)rFr:rErGr<)rLrZrr8rrrr&test_instance_is_invalid_JSON_on_stdinsz.TestCLI.test_instance_is_invalid_JSON_on_stdincCsL|jtddtddddgdd\}}|||d ||d|dS) Nrrzrr^r_rRrU)rFr:rErGz1()=== Traceback (most recent call last): )rJrZrr@rrrrrr4test_instance_is_invalid_JSON_on_stdin_pretty_outputs  z)=== rKrrr~rrr1test_successful_validation_of_stdin_pretty_outputsz9TestCLI.test_successful_validation_of_stdin_pretty_outputcCs$|jtddddddgddddSrr}r~rrr-test_successful_validation_of_just_the_schemas  z5TestCLI.test_successful_validation_of_just_the_schemacCs(|jtddddddddgdd d dSrr}r~rrr;test_successful_validation_of_just_the_schema_pretty_outputs   zCTestCLI.test_successful_validation_of_just_the_schema_pretty_outputcCsxtjdd}||tj|jt|j}|dd|jd}|j t |dddd d |j d d gd d ddS)NFdelete-{"definitions": {"num": {"type": "integer"}}} {"$ref": "#/definitions/num"}1rQrTrS --base-uri/rRrKr tempfileNamedTemporaryFilecloseZ addCleanuposremovenamer write_textrLrZparentas_urirZref_schema_fileZref_pathr!rrr0test_successful_validation_via_explicit_base_uris"     z8TestCLI.test_successful_validation_via_explicit_base_uricCsztjdd}||tj|jt|j}|dd|jd}|j t |dddd d |j d d gd ddddS)NFrrrrz"1"rQrTrSrrrRrUrKz 1: '1' is not of type 'integer' rFrErGr;r<rrrrr2test_unsuccessful_validation_via_explicit_base_uris$     z:TestCLI.test_unsuccessful_validation_via_explicit_base_uric Cshd}d}|t.}|jt||ddddtdgdW5QRXt|j}| t j d |dS) Nz4{"$ref": "someNonexistentFile.json#definitions/num"}rrQrTrSrrRrFrEzsomeNonexistentFile.json') rrrLrZrcwdrr5rrrseprr!rrr7rrr,test_nonexistent_file_with_explicit_base_uris"   z4TestCLI.test_nonexistent_file_with_explicit_base_uric CsXd}d}|t&}|jt||ddddddgd W5QRXt|j}||d dS) Nz${"$ref": "foo.json#definitions/num"}rrQrTrSrznot@UR1rRrzunknown url type: 'foo.json')rrrLrZr5rrCrrrrtest_invalid_explicit_base_uris(  z&TestCLI.test_invalid_explicit_base_uricCs2|tt|jtddddddgddd d dS) Nz{"const": "check"}z"a"rQrTrSrRrUrKza: 'check' was expected r)assertIsr rrLrZr~rrr=test_it_validates_using_the_latest_validator_when_unspecifieds  zETestCLI.test_it_validates_using_the_latest_validator_when_unspecifiedcCs.d}d}|jt||ddddgddd d d S) zG Specifically, `const` validation applies for Draft 7. 
z { "$schema": "http://json-schema.org/draft-07/schema#", "const": "check" } "foo"rQrTrSrRrUrKzfoo: 'check' was expected rNr}rrrr-test_it_validates_using_draft7_when_specifieds z5TestCLI.test_it_validates_using_draft7_when_specifiedcCs,d}d}|jt||ddddgdddd S) zP Specifically, `const` validation *does not* apply for Draft 4. z { "$schema": "http://json-schema.org/draft-04/schema#", "const": "check" } rrQrTrSrRrKrNr}rrrr-test_it_validates_using_draft4_when_specified*s z5TestCLI.test_it_validates_using_draft4_when_specified)rKrK)1r#r$r%rrJrLr]r`rbrirjrurvryrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr9As` )       r9c@s:eZdZeZddZddZddZddZd d Z d S) TestParsercCs*tdddddg}||d|jdS)N --validatorz2jsonschema.tests.test_cli.TestParser.FakeValidator --instancemem://some/instancemem://some/schemarX)rr>rr'rrIrrr2test_find_validator_by_fully_qualified_object_nameAsz=TestParser.test_find_validator_by_fully_qualified_object_namecCs(tdddddg}||dtdS)Nrr rrrrX)rr>rr rrrr!test_find_validator_in_jsonschemaLsz,TestParser.test_find_validator_in_jsonschemac Gsftt}}t|:t|&|tt|W5QRXW5QRXW5QRX||fSr)rrrr SystemExitrr>rD)rrEr;r<rrrcli_output_forVs  (zTestParser.cli_output_forcCs,|ddd\}}|d|||dS)Nr^rmrzinvalid choice: 'foo'rrr@rrrrtest_unknown_output]s zTestParser.test_unknown_outputcCs0|ddddd\}}|d|||dS)Nr^r_rwrmrz3--error-format can only be used with --output plainrrrrrtest_useless_error_formatesz$TestParser.test_useless_error_formatN) r#r$r%r*r'rrrrrrrrrr=s   rc@s$eZdZddZddZddZdS)TestCLIIntegrationcCs,tjtjddddgtjd}|d|dS)N-mpipshow jsonschemar<s License: MIT) subprocess check_outputsys executableSTDOUTr)routputrrr test_licensess zTestCLIIntegration.test_licensecCsBtjtjdddddgtjd}|d}||t ddS)Nz-Wrrrz --versionrzutf-8) rrrrrdecodestriprCrversion)rrrrr test_versionzs zTestCLIIntegration.test_versioncCsBtjtjddgtjd}tjtjdddgtjd}|||dS)Nrrrz--help)rrrrrrC)rrZoutput_for_helprrr#test_no_arguments_shows_usage_notess  z6TestCLIIntegration.test_no_arguments_shows_usage_notesN)r#r$r%rrrrrrrrrsr)' contextlibrr importlibriorr3rpathlibrtextwraprunittestr rrrrwarningsrr r Zjsonschema.exceptionsr r rZjsonschema.validatorsrrcatch_warnings simplefilterrr*r2r8r9rrrrrrs<          5PK!=992tests/__pycache__/test_deprecations.cpython-38.pycnu[U af=@sddlmZddlmZddlmZmZddlZddl Z ddl Z ddl Z ddl Z ddlZddlmZmZmZmZGdddeZdS))contextmanager)BytesIO)TestCasemockN) FormatChecker exceptions protocols validatorsc@seZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ ddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Zd+d,Zd-d.Zd/S)0TestDeprecationsc CsNd}|t|}ddlm}W5QRX||tjd||jt dS)zY As of v4.0.0, __version__ is deprecated in favor of importlib.metadata. z.Accessing jsonschema.__version__ is deprecatedr) __version__ jsonschemaN) assertWarnsRegexDeprecationWarningr r assertEqual importlibmetadataversionfilename__file__)selfmessagewr rS/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/test_deprecations.py test_versions zTestDeprecations.test_versionc CsHd}|t|}ddlm}W5QRX||tj||jtdS)z As of v4.0.0, importing ErrorTree from jsonschema.validators is deprecated in favor of doing so from jsonschema.exceptions. z2Importing ErrorTree from jsonschema.validators is r ErrorTreeN)r rjsonschema.validatorsrrrrrrrrrrrrtest_validators_ErrorTrees z*TestDeprecations.test_validators_ErrorTreec CsHd}|t|}ddlm}W5QRX||tj||jtdS)z As of v4.18.0, importing ErrorTree from the package root is deprecated in favor of doing so from jsonschema.exceptions. 
z9Importing ErrorTree directly from the jsonschema package rrN)r rr rrrrrrrrrtest_import_ErrorTree)s z&TestDeprecations.test_import_ErrorTreec Csltjddgd}t}tj|gd}d}|t|}||d<W5QRX||d|||jtdS)zM As of v4.20.0, setting items on an ErrorTree is deprecated. z some errorfoo)path)errorszErrorTree.__setitem__ is N)rZValidationErrorrr rrrr)retreeZsubtreerrrrrtest_ErrorTree_setitem6sz'TestDeprecations.test_ErrorTree_setitemc CsHd}|t|}ddlm}W5QRX||tj||jtdS)z As of v4.18.0, importing FormatError from the package root is deprecated in favor of doing so from jsonschema.exceptions. z;Importing FormatError directly from the jsonschema package r) FormatErrorN)r rr r'rrrr)rrrr'rrrtest_import_FormatErrorFs z(TestDeprecations.test_import_FormatErrorc CsHd}|t|}ddlm}W5QRX||tj||jtdS)z As of v4.19.0, importing Validator from the package root is deprecated in favor of doing so from jsonschema.protocols. z9Importing Validator directly from the jsonschema package r) ValidatorN)r rr r)rrrr)rrrr)rrrtest_import_ValidatorSs z&TestDeprecations.test_import_Validatorc CsBd}|t| }tj}W5QRX||tj||jtdS)za As of v4.0.0, accessing jsonschema.validators.validators is deprecated. z8Accessing jsonschema.validators.validators is deprecatedN)r rr rZ _VALIDATORSrrrrrvaluerrrtest_validators_validators`s z+TestDeprecations.test_validators_validatorsc CsBd}|t| }tj}W5QRX||tj||jtdS)zc As of v4.0.0, accessing jsonschema.validators.meta_schemas is deprecated. z:Accessing jsonschema.validators.meta_schemas is deprecatedN)r rr Z meta_schemasrZ _META_SCHEMASrrr+rrrtest_validators_meta_schemasms z-TestDeprecations.test_validators_meta_schemasc CsPtji}d}|t|}|dW5QRXW5QRX||jtdS)zC As of v4.0.0, RefResolver.in_scope is deprecated. z.jsonschema.RefResolver.in_scope is deprecated r!N) r _RefResolverZ from_schemar rZin_scoperrr)rresolverrrrrrtest_RefResolver_in_scopezs   z*TestDeprecations.test_RefResolver_in_scopec CsRti}d}|t|}|dddi}W5QRX||||jtdS)zz As of v4.0.0, calling is_valid with two arguments (to provide a different schema) is deprecated. z5Passing a schema to Validator.is_valid is deprecated r!typenumberN) r Draft7Validatorr ris_validZ assertFalserrr)r validatorrrresultrrr%test_Validator_is_valid_two_argumentss   z6TestDeprecations.test_Validator_is_valid_two_argumentsc CsXti}d}|t|}|dddi\}W5QRX||jd||jtdS)z} As of v4.0.0, calling iter_errors with two arguments (to provide a different schema) is deprecated. z8Passing a schema to Validator.iter_errors is deprecated r!r2r3N) r r4r rZ iter_errorsrr6rr)rr6rrerrorrrr(test_Validator_iter_errors_two_argumentss  z9TestDeprecations.test_Validator_iter_errors_two_argumentsc CsHti}d}|t|}||jtjW5QRX||jt dS)zL As of v4.18.0, accessing Validator.resolver is deprecated. z&Accessing Draft7Validator.resolver is N) r r4r rassertIsInstancer0r/rrr)rr6rrrrrtest_Validator_resolvers  z(TestDeprecations.test_Validator_resolverc Csld}|t|}ddlm}W5QRX||jt|t|}ddlm}W5QRX||jtdS)zA As of v4.18.0, RefResolver is fully deprecated. z$jsonschema.RefResolver is deprecatedr) RefResolverN)r rr r=rrrr)rrrr=rrrtest_RefResolversz!TestDeprecations.test_RefResolverc Csd}|t|}ddlm}W5QRX||tj||jt|t|}ddl m}W5QRX||tj||jtdS)z As of v4.18.0, RefResolutionError is deprecated in favor of directly catching errors from the referencing library. z6jsonschema.exceptions.RefResolutionError is deprecatedrRefResolutionErrorN) r rr r@rrZ_RefResolutionErrorrrZjsonschema.exceptions)rrrr@rrrtest_RefResolutionErrorsz(TestDeprecations.test_RefResolutionErrorc Cs`tddi}|tjj}|dW5QRXtjjdd}||jt |jf|dfdS)a This behavior is the intended behavior (i.e. 
it's not deprecated), but given we do "tricksy" things in the iterim to wrap exceptions in a multiple inheritance subclass, we need to be extra sure it works and stays working. $ref urn:nothing )refUnresolvable: urn:nothingN) r Draft202012Validator assertRaises referencingr Unresolvablevalidater exceptionstr)rr6r$expectedrrr#test_catching_Unresolvable_directlysz4TestDeprecations.test_catching_Unresolvable_directlyc Cs|tddlm}W5QRXtddi}|tjj }| dW5QRX||}| dW5QRX| |j t |j f|j dfdS)z Until RefResolutionError is removed, it is still possible to catch exceptions from reference resolution using it, even though they may have been raised by referencing. rr?rBrCrDrFN) assertWarnsrr r@r rGrHrIrrJrKrrLrMrr@r6ur$rrr1test_catching_Unresolvable_via_RefResolutionErrors  zBTestDeprecations.test_catching_Unresolvable_via_RefResolutionErrorc Cs|tddlm}W5QRXtddi}|tjj }| dW5QRX||}| dW5QRX| |j |j h| |j |j hdS)zS Ensure the wrapped referencing errors are hashable when possible. rr?rBrCrDN) rPrr r@r rGrHrIrrJrKassertInrLrQrrr(test_WrappedReferencingError_hashabilitys  z9TestDeprecations.test_WrappedReferencingError_hashabilityc Cspd}|t|}Gdddtj}W5QRX||jt|t|}Gdddtjid}W5QRXdS)ac As of v4.12.0, subclassing a validator class produces an explicit deprecation warning. This was never intended to be public API (and some comments over the years in issues said so, but obviously that's not a great way to make sure it's followed). A future version will explicitly raise an error. z!Subclassing validator classes is c@s eZdZdS)z=TestDeprecations.test_Validator_subclassing..SubclassN__name__ __module__ __qualname__rrrrSubclasssrZc@s eZdZdS)zDTestDeprecations.test_Validator_subclassing..AnotherSubclassNrVrrrrAnotherSubclasssr[)Z meta_schemaN)r rr rGrrrcreate)rrrrZr[rrrtest_Validator_subclassing s z+TestDeprecations.test_Validator_subclassingc CsJ|tjjddd}|t|}tdW5QRX||jt dS)zd As of v4.14.0, FormatChecker.cls_checks is deprecated without replacement. ZboomNzFormatChecker.cls_checks ) addCleanuprZcheckerspopr rZ cls_checksrrr)rrrrrrtest_FormatChecker_cls_checks!s z.TestDeprecations.test_FormatChecker_cls_checksc Csd}|t|}ddlm}W5QRX||tjj||j t d}|t|}ddlm }W5QRX||tj j||j t d}|t|}ddlm }W5QRX||tjj||j t d}|t|}dd lm}W5QRX||tjj||j t d }|t|}dd lm}W5QRX||tjj||j t d }|t|}dd lm}W5QRX||tjj||j t |tddlm} W5QRXdS)z As of v4.16.0, accessing jsonschema.draftn_format_checker is deprecated in favor of Validator.FORMAT_CHECKER. z3Accessing jsonschema.draft202012_format_checker is r)draft202012_format_checkerz3Accessing jsonschema.draft201909_format_checker is )draft201909_format_checkerz.Accessing jsonschema.draft7_format_checker is )draft7_format_checkerz.Accessing jsonschema.draft6_format_checker is )draft6_format_checkerz.Accessing jsonschema.draft4_format_checker is )draft4_format_checkerz.Accessing jsonschema.draft3_format_checker is )draft3_format_checker)draft1234_format_checkerN)r rr raZassertIsr rGZFORMAT_CHECKERrrrrbZDraft201909Validatorrcr4rdZDraft6ValidatorreZDraft4ValidatorrfZDraft3ValidatorrH ImportErrorrg) rrrrarbrcrdrerfrgrrrtest_draftN_format_checker/sd z+TestDeprecations.test_draftN_format_checkerc CsDd}|t|}ddl}t|jW5QRX||jtjdS)zH As of v4.17.0, importing jsonschema.cli is deprecated. z5The jsonschema CLI is deprecated and will be removed rN) r rZjsonschema.clirreloadclirrr)rrrr rrrtest_import_clits z TestDeprecations.test_import_clicCs*tjtjddgddd}|d|jdS)zB As of v4.17.0, the jsonschema CLI is deprecated. 
z-mr T)capture_outputchecks!The jsonschema CLI is deprecated N) subprocessrunsys executablerTstderr)rprocessrrrtest_clis  zTestDeprecations.test_clic sd}ddddiiidtjkr6tjjdtjddtjd<tfdd }td |i}d }tjj t j d |d }|: t |"|i|dfdW5QRXW5QRXdS)zW Automatic retrieval of remote references is deprecated as of v4.18.0. zhttp://bar#/$defs/bazz$defsZbazr2integerrequestsNc3sb|tjj|jd|\\}}|d|dtt  dVdS)Nz http://barz user-agentz.python-jsonschema (deprecated $ref resolution)utf8) r;urllibrequestRequestrfull_url header_itemslowerrjsondumpsencode)rzheaderr,Zschemarrr fake_urlopenszFTestDeprecations.test_automatic_remote_retrieval..fake_urlopenrBz+Automatically retrieving remote references urlopen)new%)FT)rqmodulesr^ __setitem__rr rGrpatchobjectryrzr rrr5)rrErr6rrrrrtest_automatic_remote_retrievals&   z0TestDeprecations.test_automatic_remote_retrievalN)rWrXrYrrr r&r(r*r-r.r1r8r:r<r>rArOrSrUr]r`rirlrurrrrrr s.         E  r ) contextlibriorunittestrrimportlib.metadatarrrorqurllib.requestryZreferencing.exceptionsrIr rrrr r rrrrs  PK! -dLULU0tests/__pycache__/test_exceptions.cpython-38.pycnu[U af?X@sddlmZddlZddlmZddlmZGdddeZGdddeZGd d d eZ Gd d d eZ Gd ddeZ dS))TestCaseN) exceptions)_LATEST_VERSIONc@seZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ ddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)S)* TestBestMatchcCsttt||}d|d|d}|j||dtt|}tt|}|| | d|d||S)NzNo errors found for z under !)msgzNo consistent best match! Got: z Then: ) listr iter_errors assertTruer best_matchiterreversed assertEqualZ _contents)selfinstanceschemaerrorsrbestZ reversed_bestrQ/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/tests/test_exceptions.py best_match_of szTestBestMatch.best_match_ofcCsBddddddiidii}|jddgii|d}||jd dS) N propertiesfoobartypeobject) minPropertiesrrrrrr validatorrrrrrr(test_shallower_errors_are_better_matchess z6TestBestMatch.test_shallower_errors_are_better_matchescCsDdddiddigddiddigd}|ji|d}||jddS) zn A property you *must* match is probably better than one you have to match a part of. rrstringnumber)ranyOfoneOfrrNrr!rrr%test_oneOf_and_anyOf_are_weak_matches$s z3TestBestMatch.test_oneOf_and_anyOf_are_weak_matchescCsLdddddiddddiiigiii}|jdddii|d }||jdd S) aL If the most relevant error is an anyOf, then we traverse its context and select the otherwise *least* relevant error, since in this case that means the most specific, deep, error inside the instance. I.e. since only one of the schemas must match, we look for the most relevant one. rrr%rr#rarray rNrrvalidator_valuer!rrr8test_if_the_most_relevant_error_is_anyOf_it_is_traversed2s  zFTestBestMatch.test_if_the_most_relevant_error_is_anyOf_it_is_traversedcCs:dddiddiddigi}|jg|d}||jddS)z We don't traverse into an anyOf (as above) if all of its context errors seem to be equally "wrong" against the instance. r%rr#integerrrNrr!rrr3test_no_anyOf_traversal_for_equally_relevant_errorsIszATestBestMatch.test_no_anyOf_traversal_for_equally_relevant_errorscCs.dddigi}|jg|d}||jddS)z We *do* traverse anyOf with a single nested error, even though it is vacuously equally relevant to itself. r%rr#rNrr!rrr6test_anyOf_traversal_for_single_equally_relevant_errorYs zDTestBestMatch.test_anyOf_traversal_for_single_equally_relevant_errorcCs6ddddiigi}|jddg|d}||jddS)z| We *do* traverse anyOf with a single subschema that fails multiple times (e.g. on multiple items). 
r%itemsconst%r)rNrr!rrr.test_anyOf_traversal_for_single_sibling_errorsgs  zrrr?rr@rrr(test_it_prioritizes_matching_union_types s,    z6TestBestMatch.test_it_prioritizes_matching_union_typescCs.dddii}|jddi|d}||jdS)NrrFrr)r assertIsNoner r!rrrtest_boolean_schemas;s z"TestBestMatch.test_boolean_schemascCs6tddi}|i\}|t|ijddS)Nrr)rr rrr r )rr errorrrrtest_one_error@s   zTestBestMatch.test_one_errorcCs"ti}|t|idS)N)rrDrr r )rr rrrtest_no_errorsHszTestBestMatch.test_no_errorsN)__name__ __module__ __qualname__rr"r'r,r.r/r3r4r5r6r7r8r9r:r;rBrCrErGrHrrrrrs( rc@s,eZdZddZddZddZddZd S) TestByRelevancecCsbtjddgd}tjdddgd}t||gtjd}|||t||gtjd}|||dS)NOh no!ZbazpathOh yes!rrkey)rValidationErrormax relevanceassertIs)rshallowdeepmatchrrr#test_short_paths_are_better_matchesNs  z3TestByRelevance.test_short_paths_are_better_matchescCs~tjdgd}tjddgd}t||gtjd}|dd|Ddgggt||gtjd}|dd|DdgggdS) NrMrNrPrrQcSsg|]}t|jqSrrrO.0rFrrr ]szNTestByRelevance.test_global_errors_are_even_better_matches..cSsg|]}t|jqSrr[r\rrrr^cs)rrSsortedrUr)rrWrXrrrr*test_global_errors_are_even_better_matchesWs  z:TestByRelevance.test_global_errors_are_even_better_matchescCshtjdgdd}tjdgdd}tjdd}t||g|d}|||t||g|d}|||dS)NrMarOr rPb)weakrQrrSZ by_relevancerTrV)rrdnormalr rYrrr%test_weak_keywords_are_lower_prioritygs  z5TestByRelevance.test_weak_keywords_are_lower_prioritycCs~tjdgdd}tjdgdd}tjdgdd}tjddd}t|||g|d }|||t|||g|d }|||dS) NrMrarbrPrczOh fine!c)rdstrongrQre)rrdrfrir rYrrr(test_strong_keywords_are_higher_priorityss z8TestByRelevance.test_strong_keywords_are_higher_priorityN)rIrJrKrZr`rgrjrrrrrLMs  rLc@steZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ ddZddZdS) TestErrorTreecCs.ddtdD}t|}||jddS)NcSsg|]}tjd|dqS)Z Somethingr )rrS)r]irrrr^szQTestErrorTree.test_it_knows_how_many_total_errors_it_contains..)ranger ErrorTreerZ total_errorsrrtreerrr/test_it_knows_how_many_total_errors_it_containss  z=TestErrorTree.test_it_knows_how_many_total_errors_it_containscCs,tjddgdg}t|}|d|dS)N a messagerrN)rrSrpassertInrqrrr1test_it_contains_an_item_if_the_item_had_an_errors z?TestErrorTree.test_it_contains_an_item_if_the_item_had_an_errorcCs,tjddgdg}t|}|d|dS)NrtrrNr)rrSrp assertNotInrqrrr9test_it_does_not_contain_an_item_if_the_item_had_no_errors zGTestErrorTree.test_it_does_not_contain_an_item_if_the_item_had_no_errorcCs0tjddd}t|g}||jd|idS)NrtrrlrrSrprrrrFrrrrr/test_keywords_that_failed_appear_in_errors_dicts z=TestErrorTree.test_keywords_that_failed_appear_in_errors_dictcCsPtjddgdtjdddgdg}t|}|d|d|d|ddS)Nz a bar messagerrNza bar -> 0 messager)rrSrprurwrqrrr1test_it_creates_a_child_tree_for_each_nested_paths  z?TestErrorTree.test_it_creates_a_child_tree_for_each_nested_pathcCsXtjddddgdtjddddgd}}t||g}||ddj||ddS) N1rrr)r rO2quux)rrryre1e2rrrrr+test_children_have_their_errors_dicts_builts z9TestErrorTree.test_children_have_their_errors_dicts_builtcCs@tjddddgddtjddd d gd d}}t||gdS Nr~rrbar2i1r rOrrrfoobarri2)rrSrp)rrrrrr"test_multiple_errors_with_instances z0TestErrorTree.test_multiple_errors_with_instancec Cs>tjddgd}t|g}|t|dW5QRXdS)NZ123r)r rr)rrSrp assertRaises IndexErrorrzrrr>test_it_does_not_contain_subtrees_that_are_not_in_the_instances  zLTestErrorTree.test_it_does_not_contain_subtrees_that_are_not_in_the_instancecCs6tjddidgd}t|g}||dtjdS)z If a keyword refers to a path that isn't in the instance, the tree still properly returns a subtree for that path. 
rtr)r rrON)rrSrpZassertIsInstancerzrrr9test_if_its_in_the_tree_anyhow_it_does_not_raise_an_errors zGTestErrorTree.test_if_its_in_the_tree_anyhow_it_does_not_raise_an_errorcCsTtjddddgddtjddd d gd d}}t||g}|t|dd hdSr)rrSrprsetrrrr test_iters zTestErrorTree.test_itercCs6tjddddgdd}t|g}|t|ddS)Nr~rrrrrzrrSrprreprrzrrrtest_repr_singles zTestErrorTree.test_repr_singlecCsPtjddddgddtjddd d gd d}}t||g}|t|d dS) Nr~rrrrrrrrrrzrrrrrtest_repr_multiples z TestErrorTree.test_repr_multiplecCstg}|t|ddS)Nz)rrprr)rrrrrrtest_repr_emptys zTestErrorTree.test_repr_emptyN)rIrJrKrsrvrxr{r}rrrrrrrrrrrrrks    rkc@sdeZdZddZddZddZddZd d Zd d Zd dZ ddZ ddZ ddZ ddZ dS)TestErrorInitReprStrcKs,tddddddid}||tjf|S)NZhellorr#)messager r+rr)dictupdaterrS)rkwargsdefaultsrrr make_errors zTestErrorInitReprStr.make_errorcKsNt|d}|jf|}t|d\}}}|||j|||dS)N )textwrapdedentrstriprstr partitionrr)rexpectedrrFZ message_line_restrrr assertShowss  z TestErrorInitReprStr.assertShowscCs|}|t|jddS)Nr|)rZ assertGreaterlenargs)rrFrrr!test_it_calls_super_and_sets_argssz6TestErrorInitReprStr.test_it_calls_super_and_sets_argscCs|ttjddddS)NzHello!)rz)rrrrSrrrr test_reprszTestErrorInitReprStr.test_reprcCsdtd}|t|ddddddid}|D].}t|}||=tjd|}|t|dq0dS)Nrrr#r)r r+rr)r)rrSrrr)rrFrattrkrrrtest_unset_errors  z%TestErrorInitReprStr.test_unset_errorcCs|jdggddS)Nz Failed validating 'type' in schema: {'type': 'string'} On instance: 5 rOZ schema_pathrrrrrtest_empty_paths+s z%TestErrorInitReprStr.test_empty_pathscCs|jddgdgddS)Nz Failed validating 'type' in schema: {'type': 'string'} On instance[0]: 5 rr0rrrrrrtest_one_item_paths8s z(TestErrorInitReprStr.test_one_item_pathscCs|jdddgdddgddS)Nz Failed validating 'type' in schema['items'][0]: {'type': 'string'} On instance[0]['a']: 5 rrar0r|rrrrrrtest_multiple_item_pathsEs z-TestErrorInitReprStr.test_multiple_item_pathscCs0|jdttdtttdtddddS)Na? Failed validating 'maxLength' in schema: {0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11, 12: 12, 13: 13, 14: 14, 15: 15, 16: 16, 17: 17, 18: 18, 19: 19} On instance: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24] Z maxLength)rrr )rrrorziprrrrtest_uses_pprintRs 2 z%TestErrorInitReprStr.test_uses_pprintcCs(|jdddddddddddddS) Nz Failed validating 'type' in schema: {'do': 3, 'not': 7, 'sort': 37, 'me': 73} On instance: {'here': 73, 'too': 37, 'no': 7, 'sorting': 3} r2I)donotsortme)hereZtoonoZsorting)rrrrrrrtest_does_not_reorder_dictssz0TestErrorInitReprStr.test_does_not_reorder_dictscsDGfddd}|}tjdd|ddd}t|t|dS) z Check for #164 which rendered exceptions unusable when a `ValidationError` involved instances with an `__eq__` method that returned truthy values. 
[Compiled CPython 3.8 bytecode: the binary contents of the following archive members are not representable as text. Only the member names and the module docstrings recoverable from the compiled data are listed.]

(continuation of the preceding compiled test module; recoverable identifiers include TestErrorInitReprStr.test_str_works_with_instances_having_overriden_eq_operator and TestHashable.test_hashable)

tests/__pycache__/test_format.cpython-38.pyc
    "Tests for the parts of jsonschema related to the :kw:`format` keyword."
    Recoverable test cases (TestFormatChecker.test_it_can_register_checkers,
    test_it_catches_registered_errors,
    test_format_error_causes_become_validation_error_causes,
    test_format_checkers_come_with_defaults) exercise the public
    ``FormatChecker`` API; a usage sketch follows below.

tests/__pycache__/test_jsonschema_test_suite.cpython-38.pyc
    "Test runner for the JSON Schema official test suite. Tests comprehensive
    correctness of each draft's validator. See
    https://github.com/json-schema-org/JSON-Schema-Test-Suite for details."
    The compiled data also shows the per-draft suite objects (draft3 through
    draft2020-12) and skip reasons for optional format tests such as leap
    seconds, leading zeroes in IPv4 addresses, and complex email validation.

tests/__pycache__/test_types.cpython-38.pyc
    "Tests for the `TypeChecker`-based type interface. The actual correctness
    of the type checking is handled in `test_jsonschema_test_suite`; these
    tests check that TypeChecker functions correctly at a more granular
    level."

tests/__pycache__/test_utils.cpython-38.pyc
    No module docstring; recoverable test classes are TestEqual,
    TestDictEqual and TestListEqual.
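The format tests above register a custom format on a ``FormatChecker`` and
pass the checker to a validator. A minimal sketch of that public API; the
``"even-number"`` format name and the check function are illustrative, not
part of the library:

from jsonschema import Draft202012Validator, FormatChecker

checker = FormatChecker()

@checker.checks("even-number", raises=ValueError)
def is_even(value):
    # Returning False marks the value as invalid; a raised ValueError is
    # attached to the resulting ValidationError as its ``cause``.
    return int(value) % 2 == 0

validator = Draft202012Validator(
    {"format": "even-number"},
    format_checker=checker,
)
validator.validate(12)                  # passes
error = next(validator.iter_errors(7))  # fails: 7 is not a 'even-number'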
Remaining compiled test and package members (binary contents likewise
omitted):

tests/__pycache__/test_validators.cpython-38.pyc
    No module docstring; recoverable test classes include
    ValidatorTestMixin, MetaSchemaTestsMixin, AntiDraft6LeakMixin, the
    per-draft TestDraft3Validator through TestDraft202012Validator classes,
    TestLatestValidator, TestValidatorFor, TestValidate, TestThreading,
    TestReferencing and TestRefResolver. They cover validator creation and
    extension, meta-schema checking, per-draft dispatch via
    ``validator_for``, the deprecated ``_RefResolver`` and
    ``referencing.Registry`` integration; an extension sketch follows below.

__pycache__/__init__.cpython-38.pyc
    Compiled form of the package ``__init__`` module.

__pycache__/__main__.cpython-38.pyc
    Compiled form of the ``python -m jsonschema`` entry point.
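The compiled ``test_types`` and ``test_validators`` members exercise
``jsonschema.validators.extend`` together with ``TypeChecker.redefine``
(for example ``TestCustomTypes.test_simple_type_can_be_extended``). A
minimal sketch of that pattern; the ``int_or_str_int`` checker is
illustrative:

from jsonschema import Draft202012Validator, validators

def int_or_str_int(checker, instance):
    # Treat strings that parse as integers as the JSON Schema "integer" type.
    if not isinstance(instance, (int, str)):
        return False
    try:
        int(instance)
    except ValueError:
        return False
    return True

CustomValidator = validators.extend(
    Draft202012Validator,
    type_checker=Draft202012Validator.TYPE_CHECKER.redefine(
        "integer", int_or_str_int,
    ),
)

validator = CustomValidator({"type": "integer"})
validator.validate(4)    # valid under the stock validator as well
validator.validate("4")  # also valid with the redefined type check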
)mainN)__doc__Zjsonschema.clirrrD/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/__main__.pys PK!-c4[/[/"__pycache__/_format.cpython-38.pycnu[U af8 @sUddlmZddlmZddlmZmZddlmZddlZddl Z ddl Z ddl Z ddl m Z e jegefZe jdedZe je jee je jed ffZe d e jZGd d d ZeZeZeZeZ eZ!eZ"e#eeee e!e"d Z$de%d<deddddZ&e&dde&dddddddZ'e&ddddddej(ddddd d!Z)e&d"ej(d#dddd$d%Z*ee+6dd&l,m-Z-e&d'd(d(d(d(d(d dddd)d*Z.W5QRXee+4ddl/Z/e&d+d+d+e/j0e1fd,dddd-d.Z2W5QRXz ddl3Z3Wnre+k r~ee+Ndd/l4m5Z5e&d0ddddd1d2Z6e&d3d3d3d3e7d4dddd5d6Z8W5QRXYnXe&d7d7d7e7d,dddd8d9Z9e&d:d:d:e7d,dddd;d<Z:e&d0e7d#dddd=d2Z6e&d3d3d3d3e7d4dddd>d6Z8ee+Jdd?l;mW5QRXe&dGe j?d#ddddHdIZ@e&dJdJdJdJe7dKddddLdMZAe&dCe7dNddddOdPZBee+.ddlCZCe&dQe7eDfdNddddRdSZEW5QRXee+TddlFZFe&dTdTdTdTeFjGd4ddddUdVZHe&dWdWdWeFjGd,ddddXdYZIW5QRXee+.ddlJZJe&dZdZdZdZd[dddd\d]ZKW5QRXee+.ddlLZLe&d^d^eLjMd_dddd`daZNW5QRXe&dbdbe7d_ddddcddZOdS)f) annotations)suppress)datedatetime)UUIDN) FormatError_F)bound.z^\d{4}-\d{2}-\d{2}$c@seZdZUdZiZded<dddddZd d Zdd d ddddZe d d d ddddZ e d!d d ddddZ dd ddddZ dd ddddZ dS)" FormatCheckeraz A ``format`` property checker. JSON Schema does not mandate that the ``format`` property actually do any validation. If validation is desired however, instances of this class can be hooked into validators to enable format validation. `FormatChecker` objects always return ``True`` when asked about formats that they do not know how to validate. To add a check for a custom format use the `FormatChecker.checks` decorator. Arguments: formats: The known formats to validate. This argument can be used to limit which formats will be used during validation. z3dict[str, tuple[_FormatCheckCallable, _RaisesType]]checkersNztyping.Iterable[str] | None)formatscs*|dkrj}fdd|D_dS)Ncsi|]}|j|qSr ).0kselfr C/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_format.py 6sz*FormatChecker.__init__..)r keys)rr r rr__init__3s zFormatChecker.__init__cCsdt|jdS)Nz)sortedr rr r r__repr__8szFormatChecker.__repr__r str _RaisesTypetyping.Callable[[_F], _F])formatraisesreturncsdddfdd }|S)a Register a decorated function as validating a new format. Arguments: format: The format that the decorated function will check. raises: The exception(s) raised by the decorated function when an invalid instance is found. The exception object will be accessible as the `jsonschema.exceptions.ValidationError.cause` attribute of the resulting validation error. rfuncrcs|fj<|SNrr!rrrr r_checksRsz%FormatChecker.checks.._checksr )rrrr%r r$rchecks;szFormatChecker.checkscCstjdtdd|j||dS)NzoFormatChecker.cls_checks is deprecated. Call FormatChecker.checks on a specific FormatChecker instance instead.) stacklevel)rr)warningswarnDeprecationWarning _cls_checksclsrrr r r cls_checksXs  zFormatChecker.cls_checkscsdddfdd }|S)Nrr cs|fj<|Sr"rr#r-r rr%ksz*FormatChecker._cls_checks.._checksr )r.rrr%r r-rr,gszFormatChecker._cls_checksobjectNone)instancerrc Csv||jkrdS|j|\}}d\}}z ||}Wn&|k rV}z|}W5d}~XYnX|srt|d||ddS)a Check whether the instance conforms to the given format. Arguments: instance (*any primitive type*, i.e. str, number, bool): The instance to check format: The format that instance should conform to Raises: FormatError: if the instance does not conform to ``format`` N)NNz is not a )cause)r r)rr2rr!rresultr3er r rcheckqs  zFormatChecker.checkboolcCs0z|||Wntk r&YdSXdSdS)aW Check whether the instance conforms to the given format. Arguments: instance (*any primitive type*, i.e. 
str, number, bool): The instance to check format: The format that instance should conform to Returns: bool: whether it conformed FTN)r6r)rr2rr r rconformss zFormatChecker.conforms)N)r )r )r )__name__ __module__ __qualname____doc__r __annotations__rrr& classmethodr/r,r6r8r r r rr s   !r )draft3draft4draft6draft7 draft201909 draft202012zdict[str, FormatChecker]_draft_checkersr r)rc sTp|p|p|p|p&|p.|dddfdd }|S)Nrr csrtd|}r0td|}rHtd|}r`td|}rxtd|}rtd|}tppppp||S)Nr?r@rArBrCrD)rEr&r r,r#rCrDr?r@rArBrr rwraps.z_checks_drafts..wrapr ) namer?r@rArBrCrDrrGr rFr_checks_draftss  rIz idn-email)rHemailr0r7)r2rcCst|tsdSd|kS)NT@) isinstancerr2r r ris_emails rNz ip-addressZipv4)r?r@rArBrCrDrcCst|tsdStt|SNT)rLrr7 ipaddress IPv4AddressrMr r ris_ipv4s rRZipv6)rHrcCs&t|tsdSt|}t|dd S)NTZscope_id)rLrrP IPv6Addressgetattr)r2addressr r ris_ipv6s  rW)FQDNz host-namehostnamecCst|tsdSt|ddjS)NT)Z min_labels)rLrrXZis_validrMr r r is_host_names r[z idn-hostname)rBrCrDrcCst|tsdSt|dSrO)rLridnaencoderMr r ris_idn_host_name s  r^)validate_rfc3986uricCst|tsdSt|ddSNTURIZrulerLrr_rMr r ris_uri3s rez uri-reference)rArBrCrDrcCst|tsdSt|ddSNTZ URI_referencercrdrMr r ris_uri_reference9s rgZiricCst|tsdStj|ddS)NTZIRIrcrLrrfc3987parserMr r ris_iriGs rkz iri-referencecCst|tsdStj|ddS)NTZ IRI_referencercrhrMr r ris_iri_referenceRs rlcCst|tsdStj|ddSrarhrMr r rre]s cCst|tsdStj|ddSrfrhrMr r rrgcs )validate_rfc3339z date-timecCst|tsdSt|SrO)rLrrmupperrMr r r is_datetimess rotime)rBrCrDcCst|tsdStd|S)NTz 1970-01-01T)rLrrorMr r ris_timeys rqregexcCst|tsdStt|SrO)rLrr7recompilerMr r ris_regexs rur)r?rBrCrDrcCs&t|tsdStt|o"t|SrO)rLrr7_RE_DATE fullmatchr fromisoformatrMr r ris_dates ry)r?rcCst|tsdStt|dS)NTz%H:%M:%S)rLrr7rstrptimerMr r ris_draft3_times r{colorcCs@t|trszis_uuid..) )rLrrallrMr rMris_uuids r)NNNNNNNr )P __future__r contextlibrrrrrrPrstypingr)Zjsonschema.exceptionsrCallabler0r7Z_FormatCheckCallableTypeVarrUnionType ExceptionTuplerrtASCIIrvr Zdraft3_format_checkerZdraft4_format_checkerZdraft6_format_checkerZdraft7_format_checkerZdraft201909_format_checkerZdraft202012_format_checkerdictrEr=rIrNAddressValueErrorrRrW ImportErrorZfqdnrXr[r\ IDNAError UnicodeErrorr^riZrfc3986_validatorr_rer~rgrkrlZrfc3339_validatorrmrorqerrorruryr{r} TypeErrorrrZJsonPointerExceptionrrrrrZDurationParsingExceptionrrr r r rsh     .        
"          PK!n!..$__pycache__/_keywords.cpython-38.pycnu[U afe:@slddlmZddlZddlmZmZmZmZmZm Z m Z ddl m Z m Z ddZddZd d Zd d Zd dZddZddZddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(Zd)d*Z d+d,Z!d-d.Z"d/d0Z#d1d2Z$d3d4Z%d5d6Z&d7d8Z'd9d:Z(d;d<Z)d=d>Z*d?d@Z+dAdBZ,dCdDZ-dEdFZ.dGdHZ/dIdJZ0dKdLZ1dS)M)FractionN) ensure_listequal extras_msgfind_additional_properties%find_evaluated_item_indexes_by_schema&find_evaluated_property_keys_by_schemauniq) FormatErrorValidationErrorccs\||dsdS|D]>\}}|D],\}}t||r(|j||||dEdHq(qdSNobjectpath schema_path)is_typeitemsresearchdescend) validatorpatternPropertiesinstanceschemapattern subschemakvrE/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_keywords.pyrs  rccs2||dsdS|D]}|j||dEdHqdS)Nr )rr)rr)r propertyNamesrrpropertyrrrr s r c cs||dsdStt||}||drP|D]}|j||||dEdHq.n|s|rd|krt|dkrpdnd}ddd t|D}dd d t|dD}|d |d |} t| Vnd } t| tt|t dVdS)Nr )rrZdoesdo, css|]}t|VqdSNrepr.0eachrrr 0sz'additionalProperties..css|]}t|VqdSr%r&r(rrrr+1s z not match any of the regexes: z8Additional properties are not allowed (%s %s unexpected)key) rsetrrlenjoinsortedr rstr) rZaPrrextrasextraverbZjoinedpatternserrorrrradditionalProperties$s"     r9c cs||dsdSt|dg}t|}||}|dkr.z is not one of )allr )renumsrrrrmrenum srpccs|j||dEdHdSN)refrZ_validate_reference)rrrrrrrrrrsrrccs|j||dEdHdSrqrs)r dynamicRefrrrrrrtsrtc#sLt|}tfdd|DsHddd|D}td|VdS)Nc3s|]}|VqdSr%)rr)typerrrrr+sztype..r$css|]}t|VqdSr%r&rurrrr+sz is not of type )ranyr1r )rtypesrrreprsrrwrrvsrvccsJ||dsdS|D],\}}||kr|j|||||dEdHqdSr rk)r propertiesrrr!rrrrr{"s r{ccs6||dsdS|D]}||krt|dVqdS)Nr z is a required propertyrK)rrequiredrrr!rrrr|0s  r|ccs@||drnszoneOf..r$css|]}t|VqdSr%r&)r)rrrrr+tszoneOf..z is valid under each of )rrrrr appendr1) roneOfrrZ subschemasrrBrrZ first_validZ more_validrzrrwrr_s&    rccs.|j|d|r*|d|}t|VdS)NrDz should not be valid under )rFrGr )rZ not_schemarrrPrrrnot_xsrccsd|j|d|r:d|kr`|d}|j||ddEdHn&d|kr`|d}|j||ddEdHdS)NrDthenrjelse)rFrGr)rZ if_schemarrrZelse_rrrif_~src#sP||dsdSt|||fddt|D}|rLd}t|t|VdS)Nr:csg|]\}}|kr|qSrr)r)rBr<Zevaluated_item_indexesrrrsz$unevaluatedItems..z4Unevaluated items are not allowed (%s %s unexpected))rrrr r)runevaluatedItemsrrZunevaluated_itemsr8rrrrs  rc cs||dsdSt|||}g}|D]2}||kr$|j|||||dD]}||qFq$|r|dkrd}t|td} t|t| Vnd}t|t|VdS)Nr rFz9Unevaluated properties are not allowed (%s %s unexpected)r-z[Unevaluated properties are not valid under the given schema (%s %s unevaluated and invalid))rrrrr2r3r r) runevaluatedPropertiesrrZevaluated_keysZunevaluated_keysr!rr8r4rrrrs2   rccsH||dsdStt||D]$\\}}}|j||||dEdHqdS)Nr:)rrrr)rziprr)rr;rrrBr<rrrrr;s r;)2Z fractionsrrZjsonschema._utilsrrrrrrr Zjsonschema.exceptionsr r rr r9rrCrHrMrOrLrNrWr\r_r`rrdrfrgrhrlrprrrtrvr{r|r~rrrrrrrrr;rrrrsN $  %          PK!7u$$+__pycache__/_legacy_keywords.cpython-38.pycnu[U afW;@sddlZddlmZddlmZddlmZddZddZd d Z d d Z d dZ ddZ ddZ ddZddZddZddZddZddZdd Zd!d"Zd#d$Zd%d&Zd'd(ZdS))N)lookup_recursive_ref)_utils)ValidationErrorcCs(|d}|dk rd|fgS|SdS)z Ignore siblings of ``$ref`` if it is present. Otherwise, return all keywords. Suitable for use with `create`'s ``applicable_validators`` argument. 
$refN)getitems)schemarefr L/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_legacy_keywords.pyignore_ref_siblings s  r ccs||dsdS|D]\}}||kr*q||drN|j|||dEdHq||dr|||kr|d|}t|Vq|D]$}||kr|d|}t|VqqdS)Nobject schema_pathstring is a dependency of )is_typerdescendr) validator dependenciesinstancerproperty dependencymessageeachr r r dependencies_draft3s&    rccs~||dsdS|D]`\}}||kr*q||drb|D]$}||kr:|d|}t|Vq:q|j|||dEdHqdS)z Support for the ``dependencies`` keyword from pre-draft 2019-09. In later drafts, the keyword was split into separate ``dependentRequired`` and ``dependentSchemas`` validators. r Narrayrr)rrrr)rrrrrrrrr r r !dependencies_draft4_draft6_draft7/s  rccsDt|D]4}|jd|gid|r |d|}t|Vq dS)Ntyperz is disallowed for )r ensure_listevolveis_validr)rZdisallowrrZ disallowedrr r r disallow_draft3Msr#ccsN||dr"|||EdHdSt|D]\}}|j|||dEdHq*dS)Nr r)rr enumerate)rZextendsrrindex subschemar r r extends_draft3Ts  r'ccs~||dsdS||drFt|D]\}}|j|||dEdHq$n4tt||D]$\\}}}|j||||dEdHqTdS)Nrr pathr)r)rr$rziprrrrr%itemr&r r r items_draft3_draft4\s  r.c cs||dr ||didr$dSt|dg}||drvt||d|dD]\}}|j|||dEdHqTnH|st|t|dgkrd}t|t|t|dgdVdS)Nrrr )startr(z3Additional items are not allowed (%s %s unexpected))rrlenr$rrr extras_msg)rZaIrrZ len_itemsr%r-errorr r r additionalItemsjs   r3ccs~||dsdS||drRtt||D]$\\}}}|j||||dEdHq*n(t|D]\}}|j|||dEdHqZdS)Nrr*r()rr+r$rr,r r r items_draft6_draft7_draft201909|s  r4ccs\||dsdS|ddr*||k}d}n ||k}d}|rX|d|d|}t|VdS)NnumberZexclusiveMinimumFzless than or equal toz less than is z the minimum of rrr)rZminimumrrfailedcmprr r r minimum_draft3_draft4s  r:ccs\||dsdS|ddr*||k}d}n ||k}d}|rX|d|d|}t|VdS)Nr5ZexclusiveMaximumFzgreater than or equal toz greater thanr6z the maximum of r7)rmaximumrrr8r9rr r r maximum_draft3_draft4s  r<ccs||dsdS|D]\}}||krF|j|||||dEdHq|ddrt|d}|jd|d||d|j||j |dg|VqdS)Nr r*requiredFz is a required property)rZvalidator_valuerr) rrrrrZ_setr) appendleftrextend)r propertiesrrrr&r2r r r properties_draft3s*   rAc cst|}g}t|D]P\}}||drTt|j|||d}|sHdS||q|||rdSqg}|D]>}z|t|dWqpt k r|t|YqpXqpt |dd ||dVdS)Nr rnamez is not of type z, )context) rr r$rlistrr?appendrepr Exceptionrjoin) rtypesrr all_errorsr%rerrorsZreprsr r r type_draft3s(    rLc#s>|dsdStfdd|Ds:td|dVdS)Nrc3s |]}jd|VqdS)rN)r!r").0elementcontainsrr r sz)contains_draft6_draft7..zNone of z! are valid under the given schema)ranyr)rrPrrr rOr contains_draft6_draft7s  rSccs(t|j}|j||j|jdEdHdS)N)resolver)r _resolverrcontentsrT)r recursiveRefrrresolvedr r r rWs  rWc Cs||drgSg}|d}|dk rT|j|}|t|j|j|jd||jd|krt |j}|t|j|j|jd||jd|krd|krt t t |S||ddrt t t |S|t t t |d7}d |krT|j|d d  |r6|t|||d 7}d |krT|t|||d 7}nd |krT|t|||d 7}d D]F}||krXt|D],\}}|j||d  |rn||qnqXdD]L}||kr||D]2} t||| d} | dkr|t||| 7}qq|S)z Get all indexes of items that get evaluated under the current schema. 
Covers all keywords related to unevaluatedItems: items, prefixItems, if, then, else, contains, unevaluatedItems, allOf, oneOf, anyOf booleanrNrrU $recursiveRefrr3r ifrthenelse)rPunevaluatedItemsZallOfoneOfZanyOf)rrrUlookupr?%find_evaluated_item_indexes_by_schemar!rVrTrrDranger0r"r$rEnextr) rrrZevaluated_indexesr rXkeywordkvr&errsr r r rcs             rcc#sR||dsdSt|||fddt|D}|rNd}t|t|VdS)Nrcsg|]\}}|kr|qSr r )rMr%r-Zevaluated_item_indexesr r Fsz.unevaluatedItems_draft2019..z4Unevaluated items are not allowed (%s %s unexpected))rrcr$rrr1)rr_rrZunevaluated_itemsr2r rjr unevaluatedItems_draft2019@s  rlc Cs.||drgSg}|d}|dk rT|j|}|t|j|j|jd||jd|krt |j}|t|j|j|jd||jdD]Z}||kr||}||dr|r|| 7}q||dr|D]}||kr| |qqd|kr$|D],}|dD]} t | |r| |qqd|krd|dD](\}} ||krPq:|t||| 7}q:d D]L}||krh||D]2} t||| d} | dkr~|t||| 7}q~qhd |kr*|j|d d |r |t|||d 7}d |kr*|t|||d 7}nd |kr*|t|||d 7}|S)NrYrrZr[)r@ZadditionalPropertiesZunevaluatedPropertiesr ZpatternPropertiesZdependentSchemasr`r\rr]r^)rrrUrbr?&find_evaluated_property_keys_by_schemar!rVrTrkeysrEresearchrrerr") rrrevaluated_keysr rXrfZ schema_valuerpatternr&rir r r rmOs                  rmc cs||dsdSt|||}g}|D]2}||kr$|j|||||dD]}||qFq$|r|dkrd}t|td} t|t| Vnd}t|t|VdS)Nr r*Fz9Unevaluated properties are not allowed (%s %s unexpected))keyz[Unevaluated properties are not valid under the given schema (%s %s unevaluated and invalid)) rrmrrEsortedstrrrr1) rZuPrrrqZunevaluated_keysr_r2extrasr r r unevaluatedProperties_draft2019s2   rx)roZreferencing.jsonschemarZ jsonschemarZjsonschema.exceptionsrr rrr#r'r.r3r4r:r<rArLrSrWrcrlrmrxr r r r s*     NUPK!0Ue77!__pycache__/_types.cpython-38.pycnu[U af @sddlmZddlmZmZmZddlZddlmZm Z m Z ddl m Z ddl mZddd d d Zd d ZddZddZddZddZddZddZddZe ddGdddZeeeeeeeeed Zed!Zed"d#d$ZeZeZeZ dS)%) annotations)AnyCallableMappingN)evolvefieldfrozen) HashTrieMap)UndefinedTypeCheckz0Mapping[str, Callable[[TypeChecker, Any], bool]]4HashTrieMap[str, Callable[[TypeChecker, Any], bool]])init_valreturncCs t|SN)r convert)r rB/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_types.py_typed_map_convertersrcCs t|tSr) isinstancelistcheckerinstancerrris_arraysrcCs t|tSr)rboolrrrris_boolsrcCst|trdSt|tSNF)rrintrrrr is_integers rcCs|dkSrrrrrris_null%srcCst|trdSt|tjSr)rrnumbersNumberrrrr is_number)s r!cCs t|tSr)rdictrrrr is_object0sr#cCs t|tSr)rstrrrrr is_string4sr%cCsdS)NTrrrrris_any8sr&Freprc@sneZdZUdZeeedZded<ddZ ddd d d Z ddd d d Z dddddZ ddddZ dS) TypeCheckeram A :kw:`type` property checker. A `TypeChecker` performs type checking for a `Validator`, converting between the defined JSON Schema types and some associated Python types or objects. Modifying the behavior just mentioned by redefining which Python objects are considered to be of which JSON Schema types can be done using `TypeChecker.redefine` or `TypeChecker.redefine_many`, and types can be removed via `TypeChecker.remove`. Each of these return a new `TypeChecker`. Arguments: type_checkers: The initial mapping of types to their checking functions. )default converterr _type_checkerscCs0dddt|jD}d|jjd|dS)Nz, css|]}t|VqdSrr').0krrr Wsz'TypeChecker.__repr__..)joinsortedr, __class____name__)selftypesrrr__repr__VszTypeChecker.__repr__r$r)typer cCs8z|j|}Wntk r,t|dYnX|||S)ac Check if the instance is of the appropriate type. Arguments: instance: The instance to check type: The name of the type that is expected. Raises: `jsonschema.exceptions.UndefinedTypeCheck`: if ``type`` is unknown to this object. N)r,KeyErrorr )r5rr8fnrrris_typeZs zTypeChecker.is_typecCs|||iS)a Produce a new checker with the given type redefined. 
Arguments: type: The name of the type to check. fn (collections.abc.Callable): A callable taking exactly two parameters - the type checker calling the function and the instance to check. The function should return true if instance is of this type and false otherwise. ) redefine_many)r5r8r:rrrredefinevszTypeChecker.redefiner)r cCs|j|}t||dS)z Produce a new checker with the given types redefined. Arguments: definitions (dict): A dictionary mapping types to their checking functions.  type_checkers)r,updater)r5 definitionsr?rrrr<s zTypeChecker.redefine_manyc GsJ|j}|D]2}z||}Wq tk r:t|dYq Xq t||dS)a* Produce a new checker with the given types forgotten. Arguments: types: the names of the types to remove. Raises: `jsonschema.exceptions.UndefinedTypeCheck`: if any given type is unknown to this object Nr>)r,remover9r r)r5r6r?ZeachrrrrBszTypeChecker.removeN)r)r4 __module__ __qualname____doc__rr rr,__annotations__r7r;r=r<rBrrrrr)<s  r))anyarraybooleanintegerobjectnullnumberstringrGrJcCst||pt|to|Sr)rrfloatrrrrs rP)! __future__rtypingrrrrattrsrrrZrpdsr Zjsonschema.exceptionsr rrrrrr!r#r%r&r)Zdraft3_type_checkerrBZdraft4_type_checkerr=Zdraft6_type_checkerZdraft7_type_checkerZdraft201909_type_checkerZdraft202012_type_checkerrrrrsF   u PK!{6"__pycache__/_typing.cpython-38.pycnu[U afb@sdZddlmZmZmZmZmZmZddlZ ddl m Z GdddeZ ee j jgeedffZee j jgeeeeffZdS)zA Some (initially private) typing helpers for jsonschema's types. )AnyCallableIterableProtocolTupleUnionN) Validatorc@s&eZdZeeeejjddddZdS)SchemaKeywordValidatorN) validatorvalueinstanceschemareturncCsdS)N)selfr r r r rrC/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_typing.py__call__ szSchemaKeywordValidator.__call__) __name__ __module__ __qualname__rr referencing jsonschemaSchemarrrrrr s r )__doc__typingrrrrrrZreferencing.jsonschemarZjsonschema.protocolsrr rrstrZid_ofZApplicableValidatorsrrrrs   PK!!%{""!__pycache__/_utils.cpython-38.pycnu[U af)@sddlmZmZmZddlmZddlZddlZGdddeZGdddZ dd Z d d Z d d Z ddZ ddZddZddZeefddZddZddZddZdS))MappingMutableMappingSequence)urlsplitNc@sPeZdZdZddZddZddZdd Zd d Zd d Z ddZ ddZ dS)URIDictz8 Dictionary which uses normalized URIs as keys. cCs t|SN)rgeturlselfurir B/opt/nydus/tmp/pip-target-53d1vnqk/lib/python/jsonschema/_utils.py normalize szURIDict.normalizecOst|_|jj||dSr)dictstoreupdate)r argskwargsr r r __init__szURIDict.__init__cCs|j||Srrrr r r r __getitem__szURIDict.__getitem__cCs||j||<dSrr)r r valuer r r __setitem__szURIDict.__setitem__cCs|j||=dSrrr r r r __delitem__szURIDict.__delitem__cCs t|jSr)iterrr r r r __iter__szURIDict.__iter__cCs t|jSr)lenrrr r r __len__szURIDict.__len__cCs t|jSr)reprrrr r r __repr__"szURIDict.__repr__N) __name__ __module__ __qualname____doc__rrrrrrrr r r r r rsrc@seZdZdZddZdS)UnsetzG An as-of-yet unset attribute or unprovided default parameter. cCsdS)Nzr rr r r r +szUnset.__repr__N)r!r"r#r$r r r r r r%&sr%cCs(|s|S|dddd|DdS)aB Construct a single string containing indexing operations for the indices. For example for a container ``bar``, [1, 2, "foo"] -> bar[1][2]["foo"] Arguments: container (str): A word to use for the thing being indexed indices (sequence): The indices to format. [z][css|]}t|VqdSrr).0indexr r r Bsz"format_as_index..])join) containerindicesr r r format_as_index/sr/ccsL|di}d|di}|D]$}||kr"|r@t||r@q"|Vq"dS)z Return the set of additional properties for the given ``instance``. Weeds out properties that should have been validated by ``properties`` and / or ``patternProperties``. Assumes ``instance`` is dict-like already. 
Further _utils helpers:
extras_msg(extras): create an error message for extra items or properties.
ensure_list(thing): wrap ``thing`` in a list if it is a single str, otherwise return it unchanged.
equal(one, two): check if two things are equal while evading some Python type hierarchy semantics, specifically evading `bool` inheriting from `int` and recursing into sequences to do the same; _sequence_equal and _mapping_equal apply the same semantics to sequences and mappings.
unbool(element): a hack to make True and 1 and False and 0 unique for ``uniq``.
uniq(container): check if all of a container's elements are unique, trying to rely on the container being recursively sortable and otherwise falling back on (slow) brute force.
find_evaluated_item_indexes_by_schema(validator, instance, schema): get all indexes of items that get evaluated under the current schema, covering all keywords related to unevaluatedItems: items, prefixItems, if, then, else, contains, unevaluatedItems, allOf, oneOf, anyOf.
find_evaluated_property_keys_by_schema(validator, instance, schema): get all keys of items that get evaluated under the current schema, covering all keywords related to unevaluatedProperties: properties, additionalProperties, unevaluatedProperties, patternProperties, dependentSchemas, allOf, oneOf, anyOf, if, then, else.

__pycache__/cli.cpython-38.pyc (compiled bytecode; recoverable content follows). Module docstring: "The ``jsonschema`` command line." Importing the module warns that the jsonschema CLI is deprecated and will be removed in a future version; check-jsonschema, installable from https://pypi.org/project/check-jsonschema/, is the suggested replacement. The module defines _Outputter with plain and pretty formatters and an argument parser ("JSON Schema Validation CLI") with these options:
-i/--instance: a path to a JSON instance (i.e. filename.json) to validate; may be specified multiple times, and if no instances are provided one will be expected on standard input.
-F/--error-format: the format to use for each validation error message, specified in a form suitable for str.format; the string is passed one formatted object named 'error' for each ValidationError, and the option is only valid together with --output=plain, which is the default.
-o/--output: the output format to use, 'plain' (default, minimal text with one line for each error) or 'pretty' (more detailed, human-readable output on multiple lines).
-V/--validator: the fully qualified object name of a validator to use, or, for validators registered with jsonschema, simply the name of the class.
--base-uri: a base URI to assign to the provided schema even if it does not declare one (via e.g. $id), so that relative references resolve against a particular URI or local path.
--version: print the jsonschema version.
schema (positional): the path to a JSON Schema to validate with (i.e. schema.json).
When parsing arguments, --error-format may only be used with --output plain, and the default plain error format is "{error.instance}: {error.message}" followed by a newline. run() loads the schema, checks it with the chosen validator class, then validates each instance (or standard input), reporting file-not-found, parsing and validation errors through the selected outputter and returning a non-zero exit code when anything fails; main() exits with that code.

__pycache__/exceptions.cpython-38.pyc (compiled bytecode; recoverable content follows). Module docstring: "Validation errors, and some surrounding helpers."
WEAK_MATCHES holds the keywords anyOf and oneOf; STRONG_MATCHES is empty. _pretty() formats something for an error message as prettily as we currently can, and accessing the removed RefResolutionError attribute emits a DeprecationWarning.
_Error is the shared base class carrying message, validator, validator_value, instance, schema, cause, parent and a context of child errors; it exposes relative and absolute paths on both the instance and schema side, a json_path property in $.foo[0] form, and a __str__ that renders the message followed by "Failed validating <keyword> in <schema index>" and "On <instance index>" sections.
ValidationError: an instance was invalid under a provided schema.
SchemaError: a schema was invalid under its corresponding metaschema.
_RefResolutionError: a ref could not be resolved; jsonschema.exceptions.RefResolutionError is deprecated as of version 4.18.0, and potential reference resolution errors should instead be caught as referencing.exceptions.Unresolvable.
UndefinedTypeCheck: a type checker was asked to check a type it did not have registered.
UnknownType: a validator was asked to validate an instance against an unknown type.
FormatError: validating a format failed.
ErrorTree: ErrorTrees make it easier to check which validations failed. ``index in tree`` checks whether ``instance[index]`` has any errors, and ``tree[index]`` retrieves the child tree one level down at the given ``index``.
If the index is not in the instance that this tree corresponds to and is not known by this tree, whatever error would be raised by ``instance.__getitem__`` will be propagated (usually some subclass of `LookupError`). Setting items on an ErrorTree is deprecated without replacement as of v4.20.0; to populate a tree, provide all of its sub-errors when constructing it. Iterating the tree yields (non-recursively) the indices in the instance with errors, ``len(tree)`` returns ``total_errors``, and ``total_errors`` is the total number of errors in the entire tree, including children.
by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): create a key function that can be used to sort errors by relevance. ``weak`` is a collection of validation keywords to consider "weak": if there are two errors at the same level of the instance and one is in the set of weak keywords, the other error takes priority; by default anyOf and oneOf are considered weak and are superseded by other same-level validation errors. ``strong`` is a collection of keywords to consider "strong".
best_match(errors, key=relevance): try to find an error that appears to be the best match among the given errors. In general, errors that are higher up in the instance (i.e. for which `ValidationError.path` is shorter) are considered better matches, since they indicate "more" is wrong with the instance. If the resulting match is either oneOf or anyOf, the opposite assumption is made and the deepest error is picked, since these keywords only need to match once and other errors may not be relevant. Do not provide a mixture of errors from different validation attempts (i.e. from different instances or schemas), since it won't produce sensical output. Returns the best matching error, or ``None`` if the iterable was empty. This function is a heuristic; its return value may change for a given set of inputs from version to version if better heuristics are added.

__pycache__/protocols.cpython-38.pyc (compiled bytecode; recoverable content follows). Module docstring: "typing.Protocol classes for jsonschema interfaces." It defines the runtime-checkable Validator protocol, the protocol to which all validator classes adhere. Construction arguments: ``schema``, the schema the validator object will validate with (assumed to be valid; providing an invalid schema can lead to undefined behavior, see `Validator.check_schema`); ``registry``, a schema registry used for looking up JSON references; ``resolver``, a resolver for :kw:`$ref` properties, deprecated as of v4.18.0 in favor of `referencing`; and ``format_checker``, a checker used to assert about :kw:`format` properties (if unprovided, no format validation is done and the presence of format within schemas is strictly informational). Class attributes include META_SCHEMA, VALIDATORS, TYPE_CHECKER, FORMAT_CHECKER and ID_OF. Methods:
check_schema(schema): validate the given schema against the validator's `META_SCHEMA`, raising `jsonschema.exceptions.SchemaError` if it is invalid.
is_type(instance, type): check if the instance is of the given (JSON Schema) type, raising `jsonschema.exceptions.UnknownType` if ``type`` is not a known type.
is_valid(instance): check if the instance is valid under the current schema; for example, Draft202012Validator({"maxItems": 2}).is_valid([2, 3, 4]) is False.
iter_errors(instance): lazily yield each of the validation errors in the given instance; calling it with a second schema argument is deprecated since v4.0.0, use `Validator.evolve` instead.
validate(instance): check the instance and raise `jsonschema.exceptions.ValidationError` if it is invalid; for example, Draft202012Validator({"maxItems": 2}).validate([2, 3, 4]) raises "[2, 3, 4] is too long".
evolve(**changes): create a new validator like this one but with the given changes, preserving all other attributes; the returned object satisfies the validator protocol but may not be of the same concrete class, in particular when a :kw:`$ref` points to a schema with a different :kw:`$schema` than this one.

__pycache__/validators.cpython-38.pyc (compiled bytecode; recoverable content follows). Module docstring: "Creation and extension of validators, with implementations for existing drafts." Importing ErrorTree from jsonschema.validators, and accessing jsonschema.validators.validators or .meta_schemas, is deprecated; automatic retrieval of remote references warns that it can be a security vulnerability, is discouraged by the JSON Schema specifications, and should be replaced by referencing.Registry.
validates(version): register the decorated validator for a ``version`` of the specification; registered validators and their meta schemas will be considered when parsing :kw:`$schema` keywords' URIs. Returns a class decorator.
create(meta_schema, validators=(), version=None, type_checker=..., format_checker=..., id_of=..., applicable_validators=...): create a new validator class. ``meta_schema`` is the meta schema for the new validator class. ``validators`` is a mapping from names to callables, where each callable validates the schema property with the given name and takes 4 arguments: the validator instance, the value of the property being validated within the instance, the instance, and the schema. ``version`` is an identifier for the version this class will validate; if provided, the returned class has its ``__name__`` set to include the version and `jsonschema.validators.validates` is called automatically for it. ``type_checker`` is used when applying the :kw:`type` keyword and ``format_checker`` when applying :kw:`format`; defaults typical of JSON Schema drafts are created if unprovided. ``id_of`` is a function that, given a schema, returns its ID. ``applicable_validators`` is a function that, given a schema, returns the applicable keyword/value pairs; it mostly exists to support pre-draft-7 behavior of ignoring keywords that are siblings of ``$ref`` and can typically be left at its default.
Returns: a new `jsonschema.protocols.Validator` class. The generated class warns that subclassing validator classes is not intended to be part of their public API and will become an error (prefer composition); its ``resolver`` property is deprecated as of v4.18.0 in favor of referencing; and passing a schema to ``iter_errors`` is deprecated, call ``validator.evolve(schema=new_schema).iter_errors(...)`` instead.
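A minimal sketch of the create() API summarized above, building a throwaway validator class with a single keyword. The "must-be-positive" keyword and the empty meta-schema are invented for illustration; no meta-schema checking is attempted here.

# Sketch only: a tiny validator class built with validators.create().
# "must-be-positive" is an invented keyword, not part of any JSON Schema draft.
from jsonschema import validators
from jsonschema.exceptions import ValidationError

def must_be_positive(validator, value, instance, schema):
    # Keyword callables receive (validator, keyword value, instance, schema)
    # and yield a ValidationError for anything they reject.
    if value and validator.is_type(instance, "number") and instance <= 0:
        yield ValidationError(f"{instance!r} is not a positive number")

TinyValidator = validators.create(
    meta_schema={},  # no meta-schema of its own in this sketch
    validators={"must-be-positive": must_be_positive},
)

print(TinyValidator({"must-be-positive": True}).is_valid(5))    # True
print(TinyValidator({"must-be-positive": True}).is_valid(-3))   # False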
Passing a schema to ``is_valid`` is deprecated in the same way.
extend(validator, validators=(), version=None, type_checker=None, format_checker=None): create a new validator class by extending an existing one. ``validator`` is an existing validator class and ``validators`` is a mapping of new validator callables, structured as in `create`; any callable with the same name as an existing one silently replaces the old one entirely, so to extend rather than override the parent's behavior, delegate to it by retrieving it via ``OldValidator.VALIDATORS["validation_keyword_name"]``. ``version`` names the new class, and ``type_checker`` and ``format_checker`` default to those of the extended validator. Returns a new `jsonschema.protocols.Validator` class extending the one provided. The new class keeps its parent's meta schema; to change or extend it, modify ``META_SCHEMA`` directly on the returned class, making a copy first since no implicit copying is done.
The module then builds the concrete draft classes with `create`: Draft3Validator, Draft4Validator, Draft6Validator, Draft7Validator, Draft201909Validator and Draft202012Validator, each registered against its meta-schema URI (http://json-schema.org/draft-03/schema# through https://json-schema.org/draft/2020-12/schema) with the keyword set, type checker and format checker appropriate to that draft.
_RefResolver (jsonschema.RefResolver) resolves JSON references and is deprecated as of v4.18.0 in favor of the https://github.com/python-jsonschema/referencing library, which provides more compliant referencing behavior as well as more flexible APIs for customization. Arguments: ``base_uri``, the URI of the referring document; ``referrer``, the actual referring document; ``store``, a mapping from URIs to documents to cache; ``cache_remote``, whether remote refs should be cached after first resolution; ``handlers``, a mapping from URI schemes to functions used to retrieve them; and ``urljoin_cache``/``remote_cache``, lru_caches for joining the resolution scope to subscopes and for resolved remote URLs. from_schema() constructs a resolver from a referring schema; push_scope() enters a given sub-scope so further dereferences are performed underneath it, pop_scope() exits the most recently entered scope (and should be called exactly once per push_scope()), and resolution_scope retrieves the current resolution scope.
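A sketch of extend() as described above, adding one extra keyword on top of Draft202012Validator; the "even" keyword is invented here purely for illustration.

# Sketch only: extend Draft202012Validator with an invented "even" keyword.
from jsonschema import validators
from jsonschema.exceptions import ValidationError
from jsonschema.validators import Draft202012Validator

def even(validator, value, instance, schema):
    if value and validator.is_type(instance, "integer") and instance % 2:
        yield ValidationError(f"{instance!r} is not an even integer")

EvenValidator = validators.extend(
    Draft202012Validator,
    validators={"even": even},
)

schema = {"type": "integer", "even": True}
print(EvenValidator(schema).is_valid(4))  # True
print(EvenValidator(schema).is_valid(3))  # False: 3 is not an even integer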
base_uri retrieves the current base URI, not including any fragment; in_scope() is a deprecated (since v4.0.0) context manager that temporarily enters a scope; resolving(ref) is a context manager that resolves the given ``ref`` and enters its resolution scope, exiting the scope when the context exits. resolve() and resolve_fragment() walk JSON pointers within the referenced document, raising a resolution error for an unresolvable JSON pointer.
resolve_remote(uri): resolve a remote ``uri``. If called directly it does not check the store first, but after retrieving the document at the specified URI it will be saved in the store if ``cache_remote`` is True. If the requests library (https://pypi.org/project/requests/) is present, jsonschema will use it to request http/https URIs so that the correct encoding is detected and used; otherwise UTF-8 is assumed.
validate(instance, schema, cls=None, *args, **kwargs): validate an instance under the given schema; for example, validate([2, 3, 4], {"maxItems": 2}) raises ValidationError("[2, 3, 4] is too long"). It first verifies that the provided schema is itself valid, since not doing so can lead to less obvious error messages and failures; if you already know the schema is valid, especially when validating multiple instances with the same schema, prefer calling `jsonschema.protocols.Validator.validate` directly on a specific validator (e.g. ``Draft202012Validator.validate``). If ``cls`` is not provided and the schema has a :kw:`$schema` keyword containing a known meta-schema, the proper validator will be used (the specification recommends that all schemas contain :kw:`$schema` for this reason); if no :kw:`$schema` is found, the default validator class is the latest released draft. Any other positional and keyword arguments are passed on when instantiating ``cls``. Raises `jsonschema.exceptions.ValidationError` if the instance is invalid and `jsonschema.exceptions.SchemaError` if the schema itself is invalid.
validator_for(schema, default=...): retrieve the validator class appropriate for validating the given schema, using its :kw:`$schema` keyword: a draft 2020-12 ``$schema`` returns Draft202012Validator, a draft-07 one returns Draft7Validator, and so on. Schemas with no ``$schema`` keyword fall back to the ``default`` argument, or to the latest supported version if none is provided; always including the keyword when authoring schemas is highly recommended. An unknown ``$schema`` warns that the specified metaschema was not found and that the latest draft is used to validate, which will raise an error in a future release.
The archive closes with the ZIP central directory, listing the package contents already seen above: the jsonschema modules (__init__.py, __main__.py, _format.py, _keywords.py, _legacy_keywords.py, _types.py, _typing.py, _utils.py, cli.py, exceptions.py, protocols.py, validators.py), the benchmarks/ and tests/ packages, and their __pycache__ bytecode.