path: root/e/lib/python3.11/site-packages/setuptools/config
Diffstat (limited to 'e/lib/python3.11/site-packages/setuptools/config')
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__init__.py  35
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc  bin 0 -> 2029 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc  bin 0 -> 22532 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc  bin 0 -> 28243 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc  bin 0 -> 27441 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc  bin 0 -> 33025 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_apply_pyprojecttoml.py  377
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__init__.py  34
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-311.pyc  bin 0 -> 2317 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-311.pyc  bin 0 -> 20208 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-311.pyc  bin 0 -> 1866 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-311.pyc  bin 0 -> 3241 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-311.pyc  bin 0 -> 192639 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-311.pyc  bin 0 -> 14356 bytes
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/error_reporting.py  318
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/extra_validations.py  36
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py  51
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py  1035
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/formats.py  259
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/expand.py  462
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/pyprojecttoml.py  493
-rw-r--r--  e/lib/python3.11/site-packages/setuptools/config/setupcfg.py  762
22 files changed, 3862 insertions, 0 deletions
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__init__.py b/e/lib/python3.11/site-packages/setuptools/config/__init__.py
new file mode 100644
index 0000000..1a5153a
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__init__.py
@@ -0,0 +1,35 @@
+"""For backward compatibility, expose main functions from
+``setuptools.config.setupcfg``
+"""
+import warnings
+from functools import wraps
+from textwrap import dedent
+from typing import Callable, TypeVar, cast
+
+from .._deprecation_warning import SetuptoolsDeprecationWarning
+from . import setupcfg
+
+Fn = TypeVar("Fn", bound=Callable)
+
+__all__ = ('parse_configuration', 'read_configuration')
+
+
+def _deprecation_notice(fn: Fn) -> Fn:
+ @wraps(fn)
+ def _wrapper(*args, **kwargs):
+ msg = f"""\
+ As setuptools moves its configuration towards `pyproject.toml`,
+ `{__name__}.{fn.__name__}` is deprecated.
+
+ For the time being, you can use the `{setupcfg.__name__}` module
+ to access a backward compatible API, but this module is provisional
+ and might be removed in the future.
+ """
+ warnings.warn(dedent(msg), SetuptoolsDeprecationWarning, stacklevel=2)
+ return fn(*args, **kwargs)
+
+ return cast(Fn, _wrapper)
+
+
+read_configuration = _deprecation_notice(setupcfg.read_configuration)
+parse_configuration = _deprecation_notice(setupcfg.parse_configuration)
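For orientation, a hedged usage sketch of the deprecation shim above (illustration only; it assumes a setup.cfg exists in the current directory). The wrapper emits SetuptoolsDeprecationWarning and then delegates to setupcfg.read_configuration:

    import warnings

    from setuptools.config import read_configuration  # deprecated shim shown above

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Delegates to setupcfg.read_configuration("setup.cfg") after warning
        config = read_configuration("setup.cfg")

    # Expect 'SetuptoolsDeprecationWarning' among the recorded warning categories
    print([w.category.__name__ for w in caught])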
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..8f4af28
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/__init__.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc
new file mode 100644
index 0000000..06804b4
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/_apply_pyprojecttoml.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc
new file mode 100644
index 0000000..7188ef3
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/expand.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc
new file mode 100644
index 0000000..e5e5c74
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/pyprojecttoml.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc
new file mode 100644
index 0000000..58e20e9
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/__pycache__/setupcfg.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_apply_pyprojecttoml.py b/e/lib/python3.11/site-packages/setuptools/config/_apply_pyprojecttoml.py
new file mode 100644
index 0000000..8af5561
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_apply_pyprojecttoml.py
@@ -0,0 +1,377 @@
+"""Translation layer between pyproject config and setuptools distribution and
+metadata objects.
+
+The distribution and metadata objects are modeled after (an old version of)
+core metadata; therefore, configs in the format specified for ``pyproject.toml``
+need to be processed before being applied.
+
+**PRIVATE MODULE**: API reserved for setuptools internal usage only.
+"""
+import logging
+import os
+import warnings
+from collections.abc import Mapping
+from email.headerregistry import Address
+from functools import partial, reduce
+from itertools import chain
+from types import MappingProxyType
+from typing import (TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple,
+ Type, Union)
+
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
+
+if TYPE_CHECKING:
+ from setuptools._importlib import metadata # noqa
+ from setuptools.dist import Distribution # noqa
+
+EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
+_Path = Union[os.PathLike, str]
+_DictOrStr = Union[dict, str]
+_CorrespFn = Callable[["Distribution", Any, _Path], None]
+_Correspondence = Union[str, _CorrespFn]
+
+_logger = logging.getLogger(__name__)
+
+
+def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution":
+ """Apply configuration dict read with :func:`read_configuration`"""
+
+ if not config:
+ return dist # short-circuit unrelated pyproject.toml file
+
+ root_dir = os.path.dirname(filename) or "."
+
+ _apply_project_table(dist, config, root_dir)
+ _apply_tool_table(dist, config, filename)
+
+ current_directory = os.getcwd()
+ os.chdir(root_dir)
+ try:
+ dist._finalize_requires()
+ dist._finalize_license_files()
+ finally:
+ os.chdir(current_directory)
+
+ return dist
+
+
+def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path):
+ project_table = config.get("project", {}).copy()
+ if not project_table:
+ return # short-circuit
+
+ _handle_missing_dynamic(dist, project_table)
+ _unify_entry_points(project_table)
+
+ for field, value in project_table.items():
+ norm_key = json_compatible_key(field)
+ corresp = PYPROJECT_CORRESPONDENCE.get(norm_key, norm_key)
+ if callable(corresp):
+ corresp(dist, value, root_dir)
+ else:
+ _set_config(dist, corresp, value)
+
+
+def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path):
+ tool_table = config.get("tool", {}).get("setuptools", {})
+ if not tool_table:
+ return # short-circuit
+
+ for field, value in tool_table.items():
+ norm_key = json_compatible_key(field)
+
+ if norm_key in TOOL_TABLE_DEPRECATIONS:
+ suggestion = TOOL_TABLE_DEPRECATIONS[norm_key]
+ msg = f"The parameter `{norm_key}` is deprecated, {suggestion}"
+ warnings.warn(msg, SetuptoolsDeprecationWarning)
+
+ norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
+ _set_config(dist, norm_key, value)
+
+ _copy_command_options(config, dist, filename)
+
+
+def _handle_missing_dynamic(dist: "Distribution", project_table: dict):
+ """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
+ # TODO: Set fields back to `None` once the feature stabilizes
+ dynamic = set(project_table.get("dynamic", []))
+ for field, getter in _PREVIOUSLY_DEFINED.items():
+ if not (field in project_table or field in dynamic):
+ value = getter(dist)
+ if value:
+ msg = _WouldIgnoreField.message(field, value)
+ warnings.warn(msg, _WouldIgnoreField)
+
+
+def json_compatible_key(key: str) -> str:
+ """As defined in :pep:`566#json-compatible-metadata`"""
+ return key.lower().replace("-", "_")
+
+
+def _set_config(dist: "Distribution", field: str, value: Any):
+ setter = getattr(dist.metadata, f"set_{field}", None)
+ if setter:
+ setter(value)
+ elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
+ setattr(dist.metadata, field, value)
+ else:
+ setattr(dist, field, value)
+
+
+_CONTENT_TYPES = {
+ ".md": "text/markdown",
+ ".rst": "text/x-rst",
+ ".txt": "text/plain",
+}
+
+
+def _guess_content_type(file: str) -> Optional[str]:
+ _, ext = os.path.splitext(file.lower())
+ if not ext:
+ return None
+
+ if ext in _CONTENT_TYPES:
+ return _CONTENT_TYPES[ext]
+
+ valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items())
+ msg = f"only the following file extensions are recognized: {valid}."
+ raise ValueError(f"Undefined content type for {file}, {msg}")
+
+
+def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path):
+ from setuptools.config import expand
+
+ if isinstance(val, str):
+ text = expand.read_files(val, root_dir)
+ ctype = _guess_content_type(val)
+ else:
+ text = val.get("text") or expand.read_files(val.get("file", []), root_dir)
+ ctype = val["content-type"]
+
+ _set_config(dist, "long_description", text)
+ if ctype:
+ _set_config(dist, "long_description_content_type", ctype)
+
+
+def _license(dist: "Distribution", val: dict, root_dir: _Path):
+ from setuptools.config import expand
+
+ if "file" in val:
+ _set_config(dist, "license", expand.read_files([val["file"]], root_dir))
+ else:
+ _set_config(dist, "license", val["text"])
+
+
+def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str):
+ field = []
+ email_field = []
+ for person in val:
+ if "name" not in person:
+ email_field.append(person["email"])
+ elif "email" not in person:
+ field.append(person["name"])
+ else:
+ addr = Address(display_name=person["name"], addr_spec=person["email"])
+ email_field.append(str(addr))
+
+ if field:
+ _set_config(dist, kind, ", ".join(field))
+ if email_field:
+ _set_config(dist, f"{kind}_email", ", ".join(email_field))
+
+
+def _project_urls(dist: "Distribution", val: dict, _root_dir):
+ _set_config(dist, "project_urls", val)
+
+
+def _python_requires(dist: "Distribution", val: dict, _root_dir):
+ from setuptools.extern.packaging.specifiers import SpecifierSet
+
+ _set_config(dist, "python_requires", SpecifierSet(val))
+
+
+def _dependencies(dist: "Distribution", val: list, _root_dir):
+ if getattr(dist, "install_requires", []):
+ msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
+ warnings.warn(msg)
+ _set_config(dist, "install_requires", val)
+
+
+def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
+ existing = getattr(dist, "extras_require", {})
+ _set_config(dist, "extras_require", {**existing, **val})
+
+
+def _unify_entry_points(project_table: dict):
+ project = project_table
+ entry_points = project.pop("entry-points", project.pop("entry_points", {}))
+ renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
+ for key, value in list(project.items()): # eager to allow modifications
+ norm_key = json_compatible_key(key)
+ if norm_key in renaming and value:
+ entry_points[renaming[norm_key]] = project.pop(key)
+
+ if entry_points:
+ project["entry-points"] = {
+ name: [f"{k} = {v}" for k, v in group.items()]
+ for name, group in entry_points.items()
+ }
+
+
+def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path):
+ tool_table = pyproject.get("tool", {})
+ cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
+ valid_options = _valid_command_options(cmdclass)
+
+ cmd_opts = dist.command_options
+ for cmd, config in pyproject.get("tool", {}).get("distutils", {}).items():
+ cmd = json_compatible_key(cmd)
+ valid = valid_options.get(cmd, set())
+ cmd_opts.setdefault(cmd, {})
+ for key, value in config.items():
+ key = json_compatible_key(key)
+ cmd_opts[cmd][key] = (str(filename), value)
+ if key not in valid:
+ # To avoid removing options that are specified dynamically we
+ # just log a warning...
+ _logger.warning(f"Command option {cmd}.{key} is not defined")
+
+
+def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
+ from .._importlib import metadata
+ from setuptools.dist import Distribution
+
+ valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
+
+ unloaded_entry_points = metadata.entry_points(group='distutils.commands')
+ loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points)
+ entry_points = (ep for ep in loaded_entry_points if ep)
+ for cmd, cmd_class in chain(entry_points, cmdclass.items()):
+ opts = valid_options.get(cmd, set())
+ opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
+ valid_options[cmd] = opts
+
+ return valid_options
+
+
+def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
+ # Ignore all the errors
+ try:
+ return (ep.name, ep.load())
+ except Exception as ex:
+ msg = f"{ex.__class__.__name__} while trying to load entry-point {ep.name}"
+ _logger.warning(f"{msg}: {ex}")
+ return None
+
+
+def _normalise_cmd_option_key(name: str) -> str:
+ return json_compatible_key(name).strip("_=")
+
+
+def _normalise_cmd_options(desc: List[Tuple[str, Optional[str], str]]) -> Set[str]:
+ return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
+
+
+def _attrgetter(attr):
+ """
+ Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found
+ >>> from types import SimpleNamespace
+ >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
+ >>> _attrgetter("a")(obj)
+ 42
+ >>> _attrgetter("b.c")(obj)
+ 13
+ >>> _attrgetter("d")(obj) is None
+ True
+ """
+ return partial(reduce, lambda acc, x: getattr(acc, x, None), attr.split("."))
+
+
+def _some_attrgetter(*items):
+ """
+ Return the first "truth-y" attribute or None
+ >>> from types import SimpleNamespace
+ >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
+ >>> _some_attrgetter("d", "a", "b.c")(obj)
+ 42
+ >>> _some_attrgetter("d", "e", "b.c", "a")(obj)
+ 13
+ >>> _some_attrgetter("d", "e", "f")(obj) is None
+ True
+ """
+ def _acessor(obj):
+ values = (_attrgetter(i)(obj) for i in items)
+ return next((i for i in values if i is not None), None)
+ return _acessor
+
+
+PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
+ "readme": _long_description,
+ "license": _license,
+ "authors": partial(_people, kind="author"),
+ "maintainers": partial(_people, kind="maintainer"),
+ "urls": _project_urls,
+ "dependencies": _dependencies,
+ "optional_dependencies": _optional_dependencies,
+ "requires_python": _python_requires,
+}
+
+TOOL_TABLE_RENAMES = {"script_files": "scripts"}
+TOOL_TABLE_DEPRECATIONS = {
+ "namespace_packages": "consider using implicit namespaces instead (PEP 420)."
+}
+
+SETUPTOOLS_PATCHES = {"long_description_content_type", "project_urls",
+ "provides_extras", "license_file", "license_files"}
+
+_PREVIOUSLY_DEFINED = {
+ "name": _attrgetter("metadata.name"),
+ "version": _attrgetter("metadata.version"),
+ "description": _attrgetter("metadata.description"),
+ "readme": _attrgetter("metadata.long_description"),
+ "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
+ "license": _attrgetter("metadata.license"),
+ "authors": _some_attrgetter("metadata.author", "metadata.author_email"),
+ "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
+ "keywords": _attrgetter("metadata.keywords"),
+ "classifiers": _attrgetter("metadata.classifiers"),
+ "urls": _attrgetter("metadata.project_urls"),
+ "entry-points": _attrgetter("entry_points"),
+ "dependencies": _some_attrgetter("_orig_install_requires", "install_requires"),
+ "optional-dependencies": _some_attrgetter("_orig_extras_require", "extras_require"),
+}
+
+
+class _WouldIgnoreField(UserWarning):
+ """Inform users that ``pyproject.toml`` would overwrite previous metadata."""
+
+ MESSAGE = """\
+ {field!r} defined outside of `pyproject.toml` would be ignored.
+ !!\n\n
+ ##########################################################################
+ # configuration would be ignored/result in error due to `pyproject.toml` #
+ ##########################################################################
+
+ The following seems to be defined outside of `pyproject.toml`:
+
+ `{field} = {value!r}`
+
+ According to the spec (see the link below), however, setuptools CANNOT
+ consider this value unless {field!r} is listed as `dynamic`.
+
+ https://packaging.python.org/en/latest/specifications/declaring-project-metadata/
+
+ For the time being, `setuptools` will still consider the given value (as a
+ **transitional** measure), but please note that future releases of setuptools will
+ strictly follow the standard.
+
+ To prevent this warning, you can list {field!r} under `dynamic` or alternatively
+ remove the `[project]` table from your file and rely entirely on other means of
+ configuration.
+ \n\n!!
+ """
+
+ @classmethod
+ def message(cls, field, value):
+ from inspect import cleandoc
+ return cleandoc(cls.MESSAGE.format(field=field, value=value))
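Two of the private helpers above can be illustrated in isolation: json_compatible_key normalizes pyproject keys as described in PEP 566, and _unify_entry_points folds the [project.scripts] and [project.gui-scripts] tables into the classic entry-point groups. A hedged sketch using a hypothetical, already-parsed project table:

    from setuptools.config._apply_pyprojecttoml import json_compatible_key, _unify_entry_points

    assert json_compatible_key("Requires-Python") == "requires_python"

    project = {  # hypothetical [project] table content
        "scripts": {"mycmd": "pkg.cli:main"},
        "gui-scripts": {"mygui": "pkg.gui:run"},
        "entry-points": {"myplugin.group": {"hook": "pkg.plugins:impl"}},
    }
    _unify_entry_points(project)
    # scripts/gui-scripts are merged into the entry-points mapping, each group
    # rendered as a list of "name = value" strings:
    # {'myplugin.group': ['hook = pkg.plugins:impl'],
    #  'console_scripts': ['mycmd = pkg.cli:main'],
    #  'gui_scripts': ['mygui = pkg.gui:run']}
    print(project["entry-points"])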
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__init__.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__init__.py
new file mode 100644
index 0000000..dbe6cb4
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__init__.py
@@ -0,0 +1,34 @@
+from functools import reduce
+from typing import Any, Callable, Dict
+
+from . import formats
+from .error_reporting import detailed_errors, ValidationError
+from .extra_validations import EXTRA_VALIDATIONS
+from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException
+from .fastjsonschema_validations import validate as _validate
+
+__all__ = [
+ "validate",
+ "FORMAT_FUNCTIONS",
+ "EXTRA_VALIDATIONS",
+ "ValidationError",
+ "JsonSchemaException",
+ "JsonSchemaValueException",
+]
+
+
+FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
+ fn.__name__.replace("_", "-"): fn
+ for fn in formats.__dict__.values()
+ if callable(fn) and not fn.__name__.startswith("_")
+}
+
+
+def validate(data: Any) -> bool:
+ """Validate the given ``data`` object using JSON Schema
+ This function raises ``ValidationError`` if ``data`` is invalid.
+ """
+ with detailed_errors():
+ _validate(data, custom_formats=FORMAT_FUNCTIONS)
+ reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
+ return True
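A minimal sketch of the validate entry point above, using hypothetical in-memory pyproject structures; valid input returns True, invalid input raises ValidationError:

    from setuptools.config._validate_pyproject import ValidationError, validate

    good = {"project": {"name": "mypkg", "version": "0.1.0"}}
    bad = {"project": {"name": "mypkg"}}  # no `version` and not listed under `dynamic`

    assert validate(good) is True
    try:
        validate(bad)
    except ValidationError as ex:
        print(ex.summary)  # condensed description of the violated rule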
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..3f9637a
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/__init__.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-311.pyc
new file mode 100644
index 0000000..0fb3517
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/error_reporting.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-311.pyc
new file mode 100644
index 0000000..5c55a81
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/extra_validations.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-311.pyc
new file mode 100644
index 0000000..ebc5f05
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_exceptions.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-311.pyc
new file mode 100644
index 0000000..c7462a8
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/fastjsonschema_validations.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-311.pyc b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-311.pyc
new file mode 100644
index 0000000..d883e1b
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/__pycache__/formats.cpython-311.pyc
Binary files differ
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/error_reporting.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/error_reporting.py
new file mode 100644
index 0000000..f78e483
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/error_reporting.py
@@ -0,0 +1,318 @@
+import io
+import json
+import logging
+import os
+import re
+from contextlib import contextmanager
+from textwrap import indent, wrap
+from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast
+
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+_logger = logging.getLogger(__name__)
+
+_MESSAGE_REPLACEMENTS = {
+ "must be named by propertyName definition": "keys must be named by",
+ "one of contains definition": "at least one item that matches",
+ " same as const definition:": "",
+ "only specified items": "only items matching the definition",
+}
+
+_SKIP_DETAILS = (
+ "must not be empty",
+ "is always invalid",
+ "must not be there",
+)
+
+_NEED_DETAILS = {"anyOf", "oneOf", "contains", "propertyNames", "not", "items"}
+
+_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
+_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)
+
+_TOML_JARGON = {
+ "object": "table",
+ "property": "key",
+ "properties": "keys",
+ "property names": "keys",
+}
+
+
+class ValidationError(JsonSchemaValueException):
+ """Report violations of a given JSON schema.
+
+ This class extends :exc:`~fastjsonschema.JsonSchemaValueException`
+ by adding the following properties:
+
+ - ``summary``: an improved version of the ``JsonSchemaValueException`` error message
+ (with only the necessary information)
+
+ - ``details``: more contextual information about the error like the failing schema
+ itself and the value that violates the schema.
+
+ Depending on the verbosity level of the ``logging`` configuration
+ the exception message will be only ``summary`` (default) or a combination of
+ ``summary`` and ``details`` (when the logging level is set to :obj:`logging.DEBUG`).
+ """
+
+ summary = ""
+ details = ""
+ _original_message = ""
+
+ @classmethod
+ def _from_jsonschema(cls, ex: JsonSchemaValueException):
+ formatter = _ErrorFormatting(ex)
+ obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
+ debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
+ if debug_code != "false": # pragma: no cover
+ obj.__cause__, obj.__traceback__ = ex.__cause__, ex.__traceback__
+ obj._original_message = ex.message
+ obj.summary = formatter.summary
+ obj.details = formatter.details
+ return obj
+
+
+@contextmanager
+def detailed_errors():
+ try:
+ yield
+ except JsonSchemaValueException as ex:
+ raise ValidationError._from_jsonschema(ex) from None
+
+
+class _ErrorFormatting:
+ def __init__(self, ex: JsonSchemaValueException):
+ self.ex = ex
+ self.name = f"`{self._simplify_name(ex.name)}`"
+ self._original_message = self.ex.message.replace(ex.name, self.name)
+ self._summary = ""
+ self._details = ""
+
+ def __str__(self) -> str:
+ if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
+ return f"{self.summary}\n\n{self.details}"
+
+ return self.summary
+
+ @property
+ def summary(self) -> str:
+ if not self._summary:
+ self._summary = self._expand_summary()
+
+ return self._summary
+
+ @property
+ def details(self) -> str:
+ if not self._details:
+ self._details = self._expand_details()
+
+ return self._details
+
+ def _simplify_name(self, name):
+ x = len("data.")
+ return name[x:] if name.startswith("data.") else name
+
+ def _expand_summary(self):
+ msg = self._original_message
+
+ for bad, repl in _MESSAGE_REPLACEMENTS.items():
+ msg = msg.replace(bad, repl)
+
+ if any(substring in msg for substring in _SKIP_DETAILS):
+ return msg
+
+ schema = self.ex.rule_definition
+ if self.ex.rule in _NEED_DETAILS and schema:
+ summary = _SummaryWriter(_TOML_JARGON)
+ return f"{msg}:\n\n{indent(summary(schema), ' ')}"
+
+ return msg
+
+ def _expand_details(self) -> str:
+ optional = []
+ desc_lines = self.ex.definition.pop("$$description", [])
+ desc = self.ex.definition.pop("description", None) or " ".join(desc_lines)
+ if desc:
+ description = "\n".join(
+ wrap(
+ desc,
+ width=80,
+ initial_indent=" ",
+ subsequent_indent=" ",
+ break_long_words=False,
+ )
+ )
+ optional.append(f"DESCRIPTION:\n{description}")
+ schema = json.dumps(self.ex.definition, indent=4)
+ value = json.dumps(self.ex.value, indent=4)
+ defaults = [
+ f"GIVEN VALUE:\n{indent(value, ' ')}",
+ f"OFFENDING RULE: {self.ex.rule!r}",
+ f"DEFINITION:\n{indent(schema, ' ')}",
+ ]
+ return "\n\n".join(optional + defaults)
+
+
+class _SummaryWriter:
+ _IGNORE = {"description", "default", "title", "examples"}
+
+ def __init__(self, jargon: Optional[Dict[str, str]] = None):
+ self.jargon: Dict[str, str] = jargon or {}
+ # Clarify confusing terms
+ self._terms = {
+ "anyOf": "at least one of the following",
+ "oneOf": "exactly one of the following",
+ "allOf": "all of the following",
+ "not": "(*NOT* the following)",
+ "prefixItems": f"{self._jargon('items')} (in order)",
+ "items": "items",
+ "contains": "contains at least one of",
+ "propertyNames": (
+ f"non-predefined acceptable {self._jargon('property names')}"
+ ),
+ "patternProperties": f"{self._jargon('properties')} named via pattern",
+ "const": "predefined value",
+ "enum": "one of",
+ }
+ # Attributes that indicate that the definition is easy and can be done
+ # inline (e.g. string and number)
+ self._guess_inline_defs = [
+ "enum",
+ "const",
+ "maxLength",
+ "minLength",
+ "pattern",
+ "format",
+ "minimum",
+ "maximum",
+ "exclusiveMinimum",
+ "exclusiveMaximum",
+ "multipleOf",
+ ]
+
+ def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
+ if isinstance(term, list):
+ return [self.jargon.get(t, t) for t in term]
+ return self.jargon.get(term, term)
+
+ def __call__(
+ self,
+ schema: Union[dict, List[dict]],
+ prefix: str = "",
+ *,
+ _path: Sequence[str] = (),
+ ) -> str:
+ if isinstance(schema, list):
+ return self._handle_list(schema, prefix, _path)
+
+ filtered = self._filter_unecessary(schema, _path)
+ simple = self._handle_simple_dict(filtered, _path)
+ if simple:
+ return f"{prefix}{simple}"
+
+ child_prefix = self._child_prefix(prefix, " ")
+ item_prefix = self._child_prefix(prefix, "- ")
+ indent = len(prefix) * " "
+ with io.StringIO() as buffer:
+ for i, (key, value) in enumerate(filtered.items()):
+ child_path = [*_path, key]
+ line_prefix = prefix if i == 0 else indent
+ buffer.write(f"{line_prefix}{self._label(child_path)}:")
+ # ^ just the first item should receive the complete prefix
+ if isinstance(value, dict):
+ filtered = self._filter_unecessary(value, child_path)
+ simple = self._handle_simple_dict(filtered, child_path)
+ buffer.write(
+ f" {simple}"
+ if simple
+ else f"\n{self(value, child_prefix, _path=child_path)}"
+ )
+ elif isinstance(value, list) and (
+ key != "type" or self._is_property(child_path)
+ ):
+ children = self._handle_list(value, item_prefix, child_path)
+ sep = " " if children.startswith("[") else "\n"
+ buffer.write(f"{sep}{children}")
+ else:
+ buffer.write(f" {self._value(value, child_path)}\n")
+ return buffer.getvalue()
+
+ def _is_unecessary(self, path: Sequence[str]) -> bool:
+ if self._is_property(path) or not path: # empty path => instruction @ root
+ return False
+ key = path[-1]
+ return any(key.startswith(k) for k in "$_") or key in self._IGNORE
+
+ def _filter_unecessary(self, schema: dict, path: Sequence[str]):
+ return {
+ key: value
+ for key, value in schema.items()
+ if not self._is_unecessary([*path, key])
+ }
+
+ def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
+ inline = any(p in value for p in self._guess_inline_defs)
+ simple = not any(isinstance(v, (list, dict)) for v in value.values())
+ if inline or simple:
+ return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
+ return None
+
+ def _handle_list(
+ self, schemas: list, prefix: str = "", path: Sequence[str] = ()
+ ) -> str:
+ if self._is_unecessary(path):
+ return ""
+
+ repr_ = repr(schemas)
+ if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
+ return f"{repr_}\n"
+
+ item_prefix = self._child_prefix(prefix, "- ")
+ return "".join(
+ self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
+ )
+
+ def _is_property(self, path: Sequence[str]):
+ """Check if the given path can correspond to an arbitrarily named property"""
+ counter = 0
+ for key in path[-2::-1]:
+ if key not in {"properties", "patternProperties"}:
+ break
+ counter += 1
+
+ # If the counter is even, the path corresponds to a JSON Schema keyword
+ # otherwise it can be any arbitrary string naming a property
+ return counter % 2 == 1
+
+ def _label(self, path: Sequence[str]) -> str:
+ *parents, key = path
+ if not self._is_property(path):
+ norm_key = _separate_terms(key)
+ return self._terms.get(key) or " ".join(self._jargon(norm_key))
+
+ if parents[-1] == "patternProperties":
+ return f"(regex {key!r})"
+ return repr(key) # property name
+
+ def _value(self, value: Any, path: Sequence[str]) -> str:
+ if path[-1] == "type" and not self._is_property(path):
+ type_ = self._jargon(value)
+ return (
+ f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_)
+ )
+ return repr(value)
+
+ def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
+ for key, value in schema.items():
+ child_path = [*path, key]
+ yield f"{self._label(child_path)}: {self._value(value, child_path)}"
+
+ def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str:
+ return len(parent_prefix) * " " + child_prefix
+
+
+def _separate_terms(word: str) -> List[str]:
+ """
+ >>> _separate_terms("FooBar-foo")
+ ['foo', 'bar', 'foo']
+ """
+ return [w.lower() for w in _CAMEL_CASE_SPLITTER.split(word) if w]
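A hedged sketch of the error-reporting flow above: the summary/details split surfaces on the ValidationError raised by the package's validate helper, and the exception message itself additionally includes the details when this module's logger is at DEBUG level (see _ErrorFormatting.__str__). The input below is hypothetical:

    import logging

    from setuptools.config._validate_pyproject import ValidationError, validate

    # With DEBUG verbosity, the ValidationError message combines summary and details
    logging.getLogger(
        "setuptools.config._validate_pyproject.error_reporting"
    ).setLevel(logging.DEBUG)

    try:
        validate({"project": {"name": "mypkg", "version": "0.1", "dynamic": ["not-a-field"]}})
    except ValidationError as ex:
        print(ex.summary)   # e.g. which enum/rule was violated, using TOML jargon
        print(ex.details)   # offending rule, given value, and the schema definition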
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/extra_validations.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/extra_validations.py
new file mode 100644
index 0000000..4130a42
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/extra_validations.py
@@ -0,0 +1,36 @@
+"""The purpose of this module is implement PEP 621 validations that are
+difficult to express as a JSON Schema (or that are not supported by the current
+JSON Schema library).
+"""
+
+from typing import Mapping, TypeVar
+
+from .error_reporting import ValidationError
+
+T = TypeVar("T", bound=Mapping)
+
+
+class RedefiningStaticFieldAsDynamic(ValidationError):
+ """According to PEP 621:
+
+ Build back-ends MUST raise an error if the metadata specifies a field
+ statically as well as being listed in dynamic.
+ """
+
+
+def validate_project_dynamic(pyproject: T) -> T:
+ project_table = pyproject.get("project", {})
+ dynamic = project_table.get("dynamic", [])
+
+ for field in dynamic:
+ if field in project_table:
+ msg = f"You cannot provide a value for `project.{field}` and "
+ msg += "list it under `project.dynamic` at the same time"
+ name = f"data.project.{field}"
+ value = {field: project_table[field], "...": " # ...", "dynamic": dynamic}
+ raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621")
+
+ return pyproject
+
+
+EXTRA_VALIDATIONS = (validate_project_dynamic,)
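A small sketch of the extra validation above, using a hypothetical table that lists version both statically and under project.dynamic:

    from setuptools.config._validate_pyproject.extra_validations import (
        RedefiningStaticFieldAsDynamic,
        validate_project_dynamic,
    )

    pyproject = {"project": {"name": "mypkg", "version": "1.0", "dynamic": ["version"]}}
    try:
        validate_project_dynamic(pyproject)
    except RedefiningStaticFieldAsDynamic as ex:
        print(ex.name)  # 'data.project.version'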
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
new file mode 100644
index 0000000..d2dddd6
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_exceptions.py
@@ -0,0 +1,51 @@
+import re
+
+
+SPLIT_RE = re.compile(r'[\.\[\]]+')
+
+
+class JsonSchemaException(ValueError):
+ """
+ Base exception of ``fastjsonschema`` library.
+ """
+
+
+class JsonSchemaValueException(JsonSchemaException):
+ """
+ Exception raised by validation function. Available properties:
+
+ * ``message`` containing human-readable information about what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
+ * invalid ``value`` (e.g. ``60``),
+ * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
+ * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
+ * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
+ * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
+ * and ``rule_definition`` (e.g. ``42``).
+
+ .. versionchanged:: 2.14.0
+ Added all extra properties.
+ """
+
+ def __init__(self, message, value=None, name=None, definition=None, rule=None):
+ super().__init__(message)
+ self.message = message
+ self.value = value
+ self.name = name
+ self.definition = definition
+ self.rule = rule
+
+ @property
+ def path(self):
+ return [item for item in SPLIT_RE.split(self.name) if item != '']
+
+ @property
+ def rule_definition(self):
+ if not self.rule or not self.definition:
+ return None
+ return self.definition.get(self.rule)
+
+
+class JsonSchemaDefinitionException(JsonSchemaException):
+ """
+ Exception raised by generator of validation function.
+ """
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
new file mode 100644
index 0000000..ad5ee31
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/fastjsonschema_validations.py
@@ -0,0 +1,1035 @@
+# noqa
+# type: ignore
+# flake8: noqa
+# pylint: skip-file
+# mypy: ignore-errors
+# yapf: disable
+# pylama:skip=1
+
+
+# *** PLEASE DO NOT MODIFY DIRECTLY: Automatically generated code ***
+
+
+VERSION = "2.15.3"
+import re
+from .fastjsonschema_exceptions import JsonSchemaValueException
+
+
+REGEX_PATTERNS = {
+ '^.*$': re.compile('^.*$'),
+ '.+': re.compile('.+'),
+ '^.+$': re.compile('^.+$'),
+ 'idn-email_re_pattern': re.compile('^[^@]+@[^@]+\\.[^@]+\\Z')
+}
+
+NoneType = type(None)
+
+def validate(data, custom_formats={}, name_prefix=None):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats, (name_prefix or "data") + "")
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_build_dependencies(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 
'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 
'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "build-system" in data_keys:
+ data_keys.remove("build-system")
+ data__buildsystem = data["build-system"]
+ if not isinstance(data__buildsystem, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must be object", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='type')
+ data__buildsystem_is_dict = isinstance(data__buildsystem, dict)
+ if data__buildsystem_is_dict:
+ data__buildsystem_len = len(data__buildsystem)
+ if not all(prop in data__buildsystem for prop in ['requires']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must contain ['requires'] properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='required')
+ data__buildsystem_keys = set(data__buildsystem.keys())
+ if "requires" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("requires")
+ data__buildsystem__requires = data__buildsystem["requires"]
+ if not isinstance(data__buildsystem__requires, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires must be array", value=data__buildsystem__requires, name="" + (name_prefix or "data") + ".build-system.requires", definition={'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, rule='type')
+ data__buildsystem__requires_is_list = isinstance(data__buildsystem__requires, (list, tuple))
+ if data__buildsystem__requires_is_list:
+ data__buildsystem__requires_len = len(data__buildsystem__requires)
+ for data__buildsystem__requires_x, data__buildsystem__requires_item in enumerate(data__buildsystem__requires):
+ if not isinstance(data__buildsystem__requires_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + " must be string", value=data__buildsystem__requires_item, name="" + (name_prefix or "data") + ".build-system.requires[{data__buildsystem__requires_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "build-backend" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("build-backend")
+ data__buildsystem__buildbackend = data__buildsystem["build-backend"]
+ if not isinstance(data__buildsystem__buildbackend, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be string", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='type')
+ if isinstance(data__buildsystem__buildbackend, str):
+ if not custom_formats["pep517-backend-reference"](data__buildsystem__buildbackend):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.build-backend must be pep517-backend-reference", value=data__buildsystem__buildbackend, name="" + (name_prefix or "data") + ".build-system.build-backend", definition={'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, rule='format')
+ if "backend-path" in data__buildsystem_keys:
+ data__buildsystem_keys.remove("backend-path")
+ data__buildsystem__backendpath = data__buildsystem["backend-path"]
+ if not isinstance(data__buildsystem__backendpath, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path must be array", value=data__buildsystem__backendpath, name="" + (name_prefix or "data") + ".build-system.backend-path", definition={'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}, rule='type')
+ data__buildsystem__backendpath_is_list = isinstance(data__buildsystem__backendpath, (list, tuple))
+ if data__buildsystem__backendpath_is_list:
+ data__buildsystem__backendpath_len = len(data__buildsystem__backendpath)
+ for data__buildsystem__backendpath_x, data__buildsystem__backendpath_item in enumerate(data__buildsystem__backendpath):
+ if not isinstance(data__buildsystem__backendpath_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + " must be string", value=data__buildsystem__backendpath_item, name="" + (name_prefix or "data") + ".build-system.backend-path[{data__buildsystem__backendpath_x}]".format(**locals()) + "", definition={'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}, rule='type')
+ if data__buildsystem_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".build-system must not contain "+str(data__buildsystem_keys)+" properties", value=data__buildsystem, name="" + (name_prefix or "data") + ".build-system", definition={'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, rule='additionalProperties')
+ if "project" in data_keys:
+ data_keys.remove("project")
+ data__project = data["project"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data__project, custom_formats, (name_prefix or "data") + ".project")
+ if "tool" in data_keys:
+ data_keys.remove("tool")
+ data__tool = data["tool"]
+ if not isinstance(data__tool, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".tool must be object", value=data__tool, name="" + (name_prefix or "data") + ".tool", definition={'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of 
Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}, rule='type')
+ data__tool_is_dict = isinstance(data__tool, dict)
+ if data__tool_is_dict:
+ data__tool_keys = set(data__tool.keys())
+ if "distutils" in data__tool_keys:
+ data__tool_keys.remove("distutils")
+ data__tool__distutils = data__tool["distutils"]
+ validate_https___docs_python_org_3_install(data__tool__distutils, custom_formats, (name_prefix or "data") + ".tool.distutils")
+ if "setuptools" in data__tool_keys:
+ data__tool_keys.remove("setuptools")
+ data__tool__setuptools = data__tool["setuptools"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data__tool__setuptools, custom_formats, (name_prefix or "data") + ".tool.setuptools")
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-build-dependencies/', 'title': 'Data structure for ``pyproject.toml`` files', '$$description': ['File format containing build-time configurations for the Python ecosystem. ', ':pep:`517` initially defined a build-system independent format for source trees', 'which was complemented by :pep:`518` to provide a way of specifying dependencies ', 'for building Python projects.', 'Please notice the ``project`` table (as initially defined in :pep:`621`) is not included', 'in this schema and should be considered separately.'], 'type': 'object', 'additionalProperties': False, 'properties': {'build-system': {'type': 'object', 'description': 'Table used to store build-related data', 'additionalProperties': False, 'properties': {'requires': {'type': 'array', '$$description': ['List of dependencies in the :pep:`508` format required to execute the build', 'system. Please notice that the resulting dependency graph', '**MUST NOT contain cycles**'], 'items': {'type': 'string'}}, 'build-backend': {'type': 'string', 'description': 'Python object that will be used to perform the build according to :pep:`517`', 'format': 'pep517-backend-reference'}, 'backend-path': {'type': 'array', '$$description': ['List of directories to be prepended to ``sys.path`` when loading the', 'back-end, and running its hooks'], 'items': {'type': 'string', '$comment': 'Should be a path (TODO: enforce it with format?)'}}}, 'required': ['requires']}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. 
If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. 
original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, 'tool': {'type': 'object', 'properties': {'distutils': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, 'setuptools': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 
'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$ref': '#/definitions/find-directive'}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 
'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'$ref': '#/definitions/attr-directive'}, {'$ref': '#/definitions/file-directive'}]}, 'classifiers': {'$ref': '#/definitions/file-directive'}, 'description': {'$ref': '#/definitions/file-directive'}, 'dependencies': {'$ref': '#/definitions/file-directive'}, 'entry-points': {'$ref': '#/definitions/file-directive'}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$ref': '#/definitions/file-directive'}}}, 'readme': {'anyOf': [{'$ref': '#/definitions/file-directive'}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. 
Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}}}}, 'project': {'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). 
The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$ref': '#/definitions/author'}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create command-line wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.']}, 'gui-scripts': {'$ref': '#/definitions/entry-point-group', '$$description': ['Instruct the installer to create GUI wrappers for the given', '`entry points <https://packaging.python.org/specifications/entry-points/>`_.', 'The difference between ``scripts`` and ``gui-scripts`` is only relevant in', 'Windows.']}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$ref': '#/definitions/entry-point-group'}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$ref': '#/definitions/dependency'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$ref': '#/definitions/dependency'}}}}, 'dynamic': {'type': 'array', 
'$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+ return data
+
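The generated ``validate_*`` functions above are not meant to be called directly: as the ``custom_formats["pep517-backend-reference"](...)`` check shows, they expect a ``custom_formats`` mapping that supplies the format callables, so calling them with the default empty dict would fail as soon as a format is checked. A minimal usage sketch, assuming the ``validate`` wrapper exported by ``setuptools.config._validate_pyproject`` (added elsewhere in this changeset, and assumed to inject the format functions) and Python 3.11's ``tomllib``; this is an illustrative sketch, not part of the diff:

    # Sketch only. Assumptions: ``setuptools.config._validate_pyproject.validate``
    # wraps the generated validators with the proper custom_formats, and the
    # wrapper re-raises (a subclass of) JsonSchemaValueException on failure.
    import tomllib

    from setuptools.config._validate_pyproject import validate
    from setuptools.config._validate_pyproject.fastjsonschema_exceptions import (
        JsonSchemaValueException,
    )

    with open("pyproject.toml", "rb") as f:
        config = tomllib.load(f)  # plain dicts/lists/strings, as the validators expect

    try:
        validate(config)  # drives the generated validate_* functions shown above
    except JsonSchemaValueException as exc:
        # exc.name / exc.rule / exc.value mirror the arguments passed to the
        # raise statements in the generated code
        print(f"invalid pyproject.toml: {exc}")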
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 
'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='type')
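+ # Example (illustrative): the ``data`` argument is the already-parsed ``[tool.setuptools]``
+ # table, i.e. a plain ``dict`` such as ``{"zip-safe": False, "packages": {"find": {"where": ["src"]}}}``;
+ # anything that is not a ``dict`` is rejected by the ``type`` check above.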
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "platforms" in data_keys:
+ data_keys.remove("platforms")
+ data__platforms = data["platforms"]
+ if not isinstance(data__platforms, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms must be array", value=data__platforms, name="" + (name_prefix or "data") + ".platforms", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__platforms_is_list = isinstance(data__platforms, (list, tuple))
+ if data__platforms_is_list:
+ data__platforms_len = len(data__platforms)
+ for data__platforms_x, data__platforms_item in enumerate(data__platforms):
+ if not isinstance(data__platforms_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + " must be string", value=data__platforms_item, name="" + (name_prefix or "data") + ".platforms[{data__platforms_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "provides" in data_keys:
+ data_keys.remove("provides")
+ data__provides = data["provides"]
+ if not isinstance(data__provides, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides must be array", value=data__provides, name="" + (name_prefix or "data") + ".provides", definition={'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
+ data__provides_is_list = isinstance(data__provides, (list, tuple))
+ if data__provides_is_list:
+ data__provides_len = len(data__provides)
+ for data__provides_x, data__provides_item in enumerate(data__provides):
+ if not isinstance(data__provides_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be string", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__provides_item, str):
+ if not custom_formats["pep508-identifier"](data__provides_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + " must be pep508-identifier", value=data__provides_item, name="" + (name_prefix or "data") + ".provides[{data__provides_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
+ if "obsoletes" in data_keys:
+ data_keys.remove("obsoletes")
+ data__obsoletes = data["obsoletes"]
+ if not isinstance(data__obsoletes, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes must be array", value=data__obsoletes, name="" + (name_prefix or "data") + ".obsoletes", definition={'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, rule='type')
+ data__obsoletes_is_list = isinstance(data__obsoletes, (list, tuple))
+ if data__obsoletes_is_list:
+ data__obsoletes_len = len(data__obsoletes)
+ for data__obsoletes_x, data__obsoletes_item in enumerate(data__obsoletes):
+ if not isinstance(data__obsoletes_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be string", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__obsoletes_item, str):
+ if not custom_formats["pep508-identifier"](data__obsoletes_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + " must be pep508-identifier", value=data__obsoletes_item, name="" + (name_prefix or "data") + ".obsoletes[{data__obsoletes_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'pep508-identifier'}, rule='format')
+ if "zip-safe" in data_keys:
+ data_keys.remove("zip-safe")
+ data__zipsafe = data["zip-safe"]
+ if not isinstance(data__zipsafe, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".zip-safe must be boolean", value=data__zipsafe, name="" + (name_prefix or "data") + ".zip-safe", definition={'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, rule='type')
+ if "script-files" in data_keys:
+ data_keys.remove("script-files")
+ data__scriptfiles = data["script-files"]
+ if not isinstance(data__scriptfiles, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files must be array", value=data__scriptfiles, name="" + (name_prefix or "data") + ".script-files", definition={'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, rule='type')
+ data__scriptfiles_is_list = isinstance(data__scriptfiles, (list, tuple))
+ if data__scriptfiles_is_list:
+ data__scriptfiles_len = len(data__scriptfiles)
+ for data__scriptfiles_x, data__scriptfiles_item in enumerate(data__scriptfiles):
+ if not isinstance(data__scriptfiles_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + " must be string", value=data__scriptfiles_item, name="" + (name_prefix or "data") + ".script-files[{data__scriptfiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "eager-resources" in data_keys:
+ data_keys.remove("eager-resources")
+ data__eagerresources = data["eager-resources"]
+ if not isinstance(data__eagerresources, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources must be array", value=data__eagerresources, name="" + (name_prefix or "data") + ".eager-resources", definition={'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__eagerresources_is_list = isinstance(data__eagerresources, (list, tuple))
+ if data__eagerresources_is_list:
+ data__eagerresources_len = len(data__eagerresources)
+ for data__eagerresources_x, data__eagerresources_item in enumerate(data__eagerresources):
+ if not isinstance(data__eagerresources_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + " must be string", value=data__eagerresources_item, name="" + (name_prefix or "data") + ".eager-resources[{data__eagerresources_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "packages" in data_keys:
+ data_keys.remove("packages")
+ data__packages = data["packages"]
+ data__packages_one_of_count1 = 0
+ if data__packages_one_of_count1 < 2:
+ try:
+ if not isinstance(data__packages, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be array", value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, rule='type')
+ data__packages_is_list = isinstance(data__packages, (list, tuple))
+ if data__packages_is_list:
+ data__packages_len = len(data__packages)
+ for data__packages_x, data__packages_item in enumerate(data__packages):
+ if not isinstance(data__packages_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be string", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__packages_item, str):
+ if not custom_formats["python-module-name"](data__packages_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + " must be python-module-name", value=data__packages_item, name="" + (name_prefix or "data") + ".packages[{data__packages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ data__packages_one_of_count1 += 1
+ except JsonSchemaValueException: pass
+ if data__packages_one_of_count1 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data__packages, custom_formats, (name_prefix or "data") + ".packages")
+ data__packages_one_of_count1 += 1
+ except JsonSchemaValueException: pass
+ if data__packages_one_of_count1 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".packages must be valid exactly by one definition" + (" (" + str(data__packages_one_of_count1) + " matches found)"), value=data__packages, name="" + (name_prefix or "data") + ".packages", definition={'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, rule='oneOf')
+ if "package-dir" in data_keys:
+ data_keys.remove("package-dir")
+ data__packagedir = data["package-dir"]
+ if not isinstance(data__packagedir, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be object", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='type')
+ data__packagedir_is_dict = isinstance(data__packagedir, dict)
+ if data__packagedir_is_dict:
+ data__packagedir_keys = set(data__packagedir.keys())
+ for data__packagedir_key, data__packagedir_val in data__packagedir.items():
+ if REGEX_PATTERNS['^.*$'].search(data__packagedir_key):
+ if data__packagedir_key in data__packagedir_keys:
+ data__packagedir_keys.remove(data__packagedir_key)
+ if not isinstance(data__packagedir_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + " must be string", value=data__packagedir_val, name="" + (name_prefix or "data") + ".package-dir.{data__packagedir_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__packagedir_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must not contain "+str(data__packagedir_keys)+" properties", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='additionalProperties')
+ data__packagedir_len = len(data__packagedir)
+ if data__packagedir_len != 0:
+ data__packagedir_property_names = True
+ for data__packagedir_key in data__packagedir:
+ try:
+ data__packagedir_key_one_of_count2 = 0
+ if data__packagedir_key_one_of_count2 < 2:
+ try:
+ if isinstance(data__packagedir_key, str):
+ if not custom_formats["python-module-name"](data__packagedir_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be python-module-name", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'format': 'python-module-name'}, rule='format')
+ data__packagedir_key_one_of_count2 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedir_key_one_of_count2 < 2:
+ try:
+ if data__packagedir_key != "":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be same as const definition: ", value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'const': ''}, rule='const')
+ data__packagedir_key_one_of_count2 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedir_key_one_of_count2 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be valid exactly by one definition" + (" (" + str(data__packagedir_key_one_of_count2) + " matches found)"), value=data__packagedir_key, name="" + (name_prefix or "data") + ".package-dir", definition={'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__packagedir_property_names = False
+ if not data__packagedir_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-dir must be named by propertyName definition", value=data__packagedir, name="" + (name_prefix or "data") + ".package-dir", definition={'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, rule='propertyNames')
+ if "package-data" in data_keys:
+ data_keys.remove("package-data")
+ data__packagedata = data["package-data"]
+ if not isinstance(data__packagedata, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be object", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__packagedata_is_dict = isinstance(data__packagedata, dict)
+ if data__packagedata_is_dict:
+ data__packagedata_keys = set(data__packagedata.keys())
+ for data__packagedata_key, data__packagedata_val in data__packagedata.items():
+ if REGEX_PATTERNS['^.*$'].search(data__packagedata_key):
+ if data__packagedata_key in data__packagedata_keys:
+ data__packagedata_keys.remove(data__packagedata_key)
+ if not isinstance(data__packagedata_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + " must be array", value=data__packagedata_val, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__packagedata_val_is_list = isinstance(data__packagedata_val, (list, tuple))
+ if data__packagedata_val_is_list:
+ data__packagedata_val_len = len(data__packagedata_val)
+ for data__packagedata_val_x, data__packagedata_val_item in enumerate(data__packagedata_val):
+ if not isinstance(data__packagedata_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + " must be string", value=data__packagedata_val_item, name="" + (name_prefix or "data") + ".package-data.{data__packagedata_key}[{data__packagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__packagedata_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must not contain "+str(data__packagedata_keys)+" properties", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+ data__packagedata_len = len(data__packagedata)
+ if data__packagedata_len != 0:
+ data__packagedata_property_names = True
+ for data__packagedata_key in data__packagedata:
+ try:
+ data__packagedata_key_one_of_count3 = 0
+ if data__packagedata_key_one_of_count3 < 2:
+ try:
+ if isinstance(data__packagedata_key, str):
+ if not custom_formats["python-module-name"](data__packagedata_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be python-module-name", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'format': 'python-module-name'}, rule='format')
+ data__packagedata_key_one_of_count3 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedata_key_one_of_count3 < 2:
+ try:
+ if data__packagedata_key != "*":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be same as const definition: *", value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'const': '*'}, rule='const')
+ data__packagedata_key_one_of_count3 += 1
+ except JsonSchemaValueException: pass
+ if data__packagedata_key_one_of_count3 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be valid exactly by one definition" + (" (" + str(data__packagedata_key_one_of_count3) + " matches found)"), value=data__packagedata_key, name="" + (name_prefix or "data") + ".package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__packagedata_property_names = False
+ if not data__packagedata_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".package-data must be named by propertyName definition", value=data__packagedata, name="" + (name_prefix or "data") + ".package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+ if "include-package-data" in data_keys:
+ data_keys.remove("include-package-data")
+ data__includepackagedata = data["include-package-data"]
+ if not isinstance(data__includepackagedata, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".include-package-data must be boolean", value=data__includepackagedata, name="" + (name_prefix or "data") + ".include-package-data", definition={'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, rule='type')
+ if "exclude-package-data" in data_keys:
+ data_keys.remove("exclude-package-data")
+ data__excludepackagedata = data["exclude-package-data"]
+ if not isinstance(data__excludepackagedata, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be object", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__excludepackagedata_is_dict = isinstance(data__excludepackagedata, dict)
+ if data__excludepackagedata_is_dict:
+ data__excludepackagedata_keys = set(data__excludepackagedata.keys())
+ for data__excludepackagedata_key, data__excludepackagedata_val in data__excludepackagedata.items():
+ if REGEX_PATTERNS['^.*$'].search(data__excludepackagedata_key):
+ if data__excludepackagedata_key in data__excludepackagedata_keys:
+ data__excludepackagedata_keys.remove(data__excludepackagedata_key)
+ if not isinstance(data__excludepackagedata_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + " must be array", value=data__excludepackagedata_val, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__excludepackagedata_val_is_list = isinstance(data__excludepackagedata_val, (list, tuple))
+ if data__excludepackagedata_val_is_list:
+ data__excludepackagedata_val_len = len(data__excludepackagedata_val)
+ for data__excludepackagedata_val_x, data__excludepackagedata_val_item in enumerate(data__excludepackagedata_val):
+ if not isinstance(data__excludepackagedata_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + " must be string", value=data__excludepackagedata_val_item, name="" + (name_prefix or "data") + ".exclude-package-data.{data__excludepackagedata_key}[{data__excludepackagedata_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if data__excludepackagedata_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must not contain "+str(data__excludepackagedata_keys)+" properties", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='additionalProperties')
+ data__excludepackagedata_len = len(data__excludepackagedata)
+ if data__excludepackagedata_len != 0:
+ data__excludepackagedata_property_names = True
+ for data__excludepackagedata_key in data__excludepackagedata:
+ try:
+ data__excludepackagedata_key_one_of_count4 = 0
+ if data__excludepackagedata_key_one_of_count4 < 2:
+ try:
+ if isinstance(data__excludepackagedata_key, str):
+ if not custom_formats["python-module-name"](data__excludepackagedata_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be python-module-name", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'format': 'python-module-name'}, rule='format')
+ data__excludepackagedata_key_one_of_count4 += 1
+ except JsonSchemaValueException: pass
+ if data__excludepackagedata_key_one_of_count4 < 2:
+ try:
+ if data__excludepackagedata_key != "*":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be same as const definition: *", value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'const': '*'}, rule='const')
+ data__excludepackagedata_key_one_of_count4 += 1
+ except JsonSchemaValueException: pass
+ if data__excludepackagedata_key_one_of_count4 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be valid exactly by one definition" + (" (" + str(data__excludepackagedata_key_one_of_count4) + " matches found)"), value=data__excludepackagedata_key, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, rule='oneOf')
+ except JsonSchemaValueException:
+ data__excludepackagedata_property_names = False
+ if not data__excludepackagedata_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".exclude-package-data must be named by propertyName definition", value=data__excludepackagedata, name="" + (name_prefix or "data") + ".exclude-package-data", definition={'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='propertyNames')
+ if "namespace-packages" in data_keys:
+ data_keys.remove("namespace-packages")
+ data__namespacepackages = data["namespace-packages"]
+ if not isinstance(data__namespacepackages, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages must be array", value=data__namespacepackages, name="" + (name_prefix or "data") + ".namespace-packages", definition={'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, rule='type')
+ data__namespacepackages_is_list = isinstance(data__namespacepackages, (list, tuple))
+ if data__namespacepackages_is_list:
+ data__namespacepackages_len = len(data__namespacepackages)
+ for data__namespacepackages_x, data__namespacepackages_item in enumerate(data__namespacepackages):
+ if not isinstance(data__namespacepackages_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be string", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__namespacepackages_item, str):
+ if not custom_formats["python-module-name"](data__namespacepackages_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + " must be python-module-name", value=data__namespacepackages_item, name="" + (name_prefix or "data") + ".namespace-packages[{data__namespacepackages_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ if "py-modules" in data_keys:
+ data_keys.remove("py-modules")
+ data__pymodules = data["py-modules"]
+ if not isinstance(data__pymodules, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules must be array", value=data__pymodules, name="" + (name_prefix or "data") + ".py-modules", definition={'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, rule='type')
+ data__pymodules_is_list = isinstance(data__pymodules, (list, tuple))
+ if data__pymodules_is_list:
+ data__pymodules_len = len(data__pymodules)
+ for data__pymodules_x, data__pymodules_item in enumerate(data__pymodules):
+ if not isinstance(data__pymodules_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be string", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='type')
+ if isinstance(data__pymodules_item, str):
+ if not custom_formats["python-module-name"](data__pymodules_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + " must be python-module-name", value=data__pymodules_item, name="" + (name_prefix or "data") + ".py-modules[{data__pymodules_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'python-module-name'}, rule='format')
+ if "data-files" in data_keys:
+ data_keys.remove("data-files")
+ data__datafiles = data["data-files"]
+ if not isinstance(data__datafiles, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files must be object", value=data__datafiles, name="" + (name_prefix or "data") + ".data-files", definition={'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type')
+ data__datafiles_is_dict = isinstance(data__datafiles, dict)
+ if data__datafiles_is_dict:
+ data__datafiles_keys = set(data__datafiles.keys())
+ for data__datafiles_key, data__datafiles_val in data__datafiles.items():
+ if REGEX_PATTERNS['^.*$'].search(data__datafiles_key):
+ if data__datafiles_key in data__datafiles_keys:
+ data__datafiles_keys.remove(data__datafiles_key)
+ if not isinstance(data__datafiles_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + " must be array", value=data__datafiles_val, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__datafiles_val_is_list = isinstance(data__datafiles_val, (list, tuple))
+ if data__datafiles_val_is_list:
+ data__datafiles_val_len = len(data__datafiles_val)
+ for data__datafiles_val_x, data__datafiles_val_item in enumerate(data__datafiles_val):
+ if not isinstance(data__datafiles_val_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + " must be string", value=data__datafiles_val_item, name="" + (name_prefix or "data") + ".data-files.{data__datafiles_key}[{data__datafiles_val_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "cmdclass" in data_keys:
+ data_keys.remove("cmdclass")
+ data__cmdclass = data["cmdclass"]
+ if not isinstance(data__cmdclass, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass must be object", value=data__cmdclass, name="" + (name_prefix or "data") + ".cmdclass", definition={'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, rule='type')
+ data__cmdclass_is_dict = isinstance(data__cmdclass, dict)
+ if data__cmdclass_is_dict:
+ data__cmdclass_keys = set(data__cmdclass.keys())
+ for data__cmdclass_key, data__cmdclass_val in data__cmdclass.items():
+ if REGEX_PATTERNS['^.*$'].search(data__cmdclass_key):
+ if data__cmdclass_key in data__cmdclass_keys:
+ data__cmdclass_keys.remove(data__cmdclass_key)
+ if not isinstance(data__cmdclass_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be string", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='type')
+ if isinstance(data__cmdclass_val, str):
+ if not custom_formats["python-qualified-identifier"](data__cmdclass_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + " must be python-qualified-identifier", value=data__cmdclass_val, name="" + (name_prefix or "data") + ".cmdclass.{data__cmdclass_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'python-qualified-identifier'}, rule='format')
+ if "license-files" in data_keys:
+ data_keys.remove("license-files")
+ data__licensefiles = data["license-files"]
+ if not isinstance(data__licensefiles, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files must be array", value=data__licensefiles, name="" + (name_prefix or "data") + ".license-files", definition={'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, rule='type')
+ data__licensefiles_is_list = isinstance(data__licensefiles, (list, tuple))
+ if data__licensefiles_is_list:
+ data__licensefiles_len = len(data__licensefiles)
+ for data__licensefiles_x, data__licensefiles_item in enumerate(data__licensefiles):
+ if not isinstance(data__licensefiles_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + " must be string", value=data__licensefiles_item, name="" + (name_prefix or "data") + ".license-files[{data__licensefiles_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ else: data["license-files"] = ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*']
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ if not isinstance(data__dynamic, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be object", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='type')
+ data__dynamic_is_dict = isinstance(data__dynamic, dict)
+ if data__dynamic_is_dict:
+ data__dynamic_keys = set(data__dynamic.keys())
+ if "version" in data__dynamic_keys:
+ data__dynamic_keys.remove("version")
+ data__dynamic__version = data__dynamic["version"]
+ data__dynamic__version_one_of_count5 = 0
+ if data__dynamic__version_one_of_count5 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+ data__dynamic__version_one_of_count5 += 1
+ except JsonSchemaValueException: pass
+ if data__dynamic__version_one_of_count5 < 2:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__version, custom_formats, (name_prefix or "data") + ".dynamic.version")
+ data__dynamic__version_one_of_count5 += 1
+ except JsonSchemaValueException: pass
+ if data__dynamic__version_one_of_count5 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.version must be valid exactly by one definition" + (" (" + str(data__dynamic__version_one_of_count5) + " matches found)"), value=data__dynamic__version, name="" + (name_prefix or "data") + ".dynamic.version", definition={'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, rule='oneOf')
+ if "classifiers" in data__dynamic_keys:
+ data__dynamic_keys.remove("classifiers")
+ data__dynamic__classifiers = data__dynamic["classifiers"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__classifiers, custom_formats, (name_prefix or "data") + ".dynamic.classifiers")
+ if "description" in data__dynamic_keys:
+ data__dynamic_keys.remove("description")
+ data__dynamic__description = data__dynamic["description"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__description, custom_formats, (name_prefix or "data") + ".dynamic.description")
+ if "dependencies" in data__dynamic_keys:
+ data__dynamic_keys.remove("dependencies")
+ data__dynamic__dependencies = data__dynamic["dependencies"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__dependencies, custom_formats, (name_prefix or "data") + ".dynamic.dependencies")
+ if "entry-points" in data__dynamic_keys:
+ data__dynamic_keys.remove("entry-points")
+ data__dynamic__entrypoints = data__dynamic["entry-points"]
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__entrypoints, custom_formats, (name_prefix or "data") + ".dynamic.entry-points")
+ if "optional-dependencies" in data__dynamic_keys:
+ data__dynamic_keys.remove("optional-dependencies")
+ data__dynamic__optionaldependencies = data__dynamic["optional-dependencies"]
+ if not isinstance(data__dynamic__optionaldependencies, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be object", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='type')
+ data__dynamic__optionaldependencies_is_dict = isinstance(data__dynamic__optionaldependencies, dict)
+ if data__dynamic__optionaldependencies_is_dict:
+ data__dynamic__optionaldependencies_keys = set(data__dynamic__optionaldependencies.keys())
+ for data__dynamic__optionaldependencies_key, data__dynamic__optionaldependencies_val in data__dynamic__optionaldependencies.items():
+ if REGEX_PATTERNS['.+'].search(data__dynamic__optionaldependencies_key):
+ if data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies_keys:
+ data__dynamic__optionaldependencies_keys.remove(data__dynamic__optionaldependencies_key)
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__optionaldependencies_val, custom_formats, (name_prefix or "data") + ".dynamic.optional-dependencies.{data__dynamic__optionaldependencies_key}")
+ if data__dynamic__optionaldependencies_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must not contain "+str(data__dynamic__optionaldependencies_keys)+" properties", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='additionalProperties')
+ data__dynamic__optionaldependencies_len = len(data__dynamic__optionaldependencies)
+ if data__dynamic__optionaldependencies_len != 0:
+ data__dynamic__optionaldependencies_property_names = True
+ for data__dynamic__optionaldependencies_key in data__dynamic__optionaldependencies:
+ try:
+ if isinstance(data__dynamic__optionaldependencies_key, str):
+ if not custom_formats["python-identifier"](data__dynamic__optionaldependencies_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be python-identifier", value=data__dynamic__optionaldependencies_key, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'format': 'python-identifier'}, rule='format')
+ except JsonSchemaValueException:
+ data__dynamic__optionaldependencies_property_names = False
+ if not data__dynamic__optionaldependencies_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.optional-dependencies must be named by propertyName definition", value=data__dynamic__optionaldependencies, name="" + (name_prefix or "data") + ".dynamic.optional-dependencies", definition={'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, rule='propertyNames')
+ if "readme" in data__dynamic_keys:
+ data__dynamic_keys.remove("readme")
+ data__dynamic__readme = data__dynamic["readme"]
+ data__dynamic__readme_any_of_count6 = 0
+ if not data__dynamic__readme_any_of_count6:
+ try:
+ validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data__dynamic__readme, custom_formats, (name_prefix or "data") + ".dynamic.readme")
+ data__dynamic__readme_any_of_count6 += 1
+ except JsonSchemaValueException: pass
+ if not data__dynamic__readme_any_of_count6:
+ try:
+ data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
+ if data__dynamic__readme_is_dict:
+ data__dynamic__readme_keys = set(data__dynamic__readme.keys())
+ if "content-type" in data__dynamic__readme_keys:
+ data__dynamic__readme_keys.remove("content-type")
+ data__dynamic__readme__contenttype = data__dynamic__readme["content-type"]
+ if not isinstance(data__dynamic__readme__contenttype, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme.content-type must be string", value=data__dynamic__readme__contenttype, name="" + (name_prefix or "data") + ".dynamic.readme.content-type", definition={'type': 'string'}, rule='type')
+ data__dynamic__readme_any_of_count6 += 1
+ except JsonSchemaValueException: pass
+ if not data__dynamic__readme_any_of_count6:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme cannot be validated by any definition", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='anyOf')
+ data__dynamic__readme_is_dict = isinstance(data__dynamic__readme, dict)
+ if data__dynamic__readme_is_dict:
+ data__dynamic__readme_len = len(data__dynamic__readme)
+ if not all(prop in data__dynamic__readme for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic.readme must contain ['file'] properties", value=data__dynamic__readme, name="" + (name_prefix or "data") + ".dynamic.readme", definition={'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}, rule='required')
+ if data__dynamic_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must not contain "+str(data__dynamic_keys)+" properties", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}, rule='additionalProperties')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://setuptools.pypa.io/en/latest/references/keywords.html', 'title': '``tool.setuptools`` table', '$$description': ['Please notice for the time being the ``setuptools`` project does not specify', 'a way of configuring builds via ``pyproject.toml``.', 'Therefore this schema should be taken just as a *"thought experiment"* on how', 'this *might be done*, by following the principles established in', '`ini2toml <https://ini2toml.readthedocs.io/en/latest/setuptools_pep621.html>`_.', 'It considers only ``setuptools`` `parameters', '<https://setuptools.pypa.io/en/latest/userguide/declarative_config.html>`_', 'that can currently be configured via ``setup.cfg`` and are not covered by :pep:`621`', 'but intentionally excludes ``dependency_links`` and ``setup_requires``.', 'NOTE: ``scripts`` was renamed to ``script-files`` to avoid confusion with', 'entry-point based scripts (defined in :pep:`621`).'], 'type': 'object', 'additionalProperties': False, 'properties': {'platforms': {'type': 'array', 'items': {'type': 'string'}}, 'provides': {'$$description': ['Package and virtual package names contained within this package', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'obsoletes': {'$$description': ['Packages which this package renders obsolete', '**(not supported by pip)**'], 'type': 'array', 'items': {'type': 'string', 'format': 'pep508-identifier'}}, 'zip-safe': {'description': 'Whether the project can be safely installed and run from a zip file.', 'type': 'boolean'}, 'script-files': {'description': 'Legacy way of defining scripts (entry-points are preferred).', 'type': 'array', 'items': {'type': 'string'}, '$comment': 'TODO: is this field deprecated/should be removed?'}, 'eager-resources': {'$$description': ['Resources that should be extracted together, if any of them is needed,', 'or if any C extensions included in the project are imported.'], 'type': 'array', 'items': {'type': 'string'}}, 'packages': {'$$description': ['Packages that should be included in the distribution.', 'It can be given either as a list of package identifiers', 'or as a ``dict``-like structure with a single key ``find``', 'which corresponds to a dynamic call to', '``setuptools.config.expand.find_packages`` function.', 'The ``find`` key is associated with a nested ``dict``-like structure that can', 'contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,', 'mimicking the keyword arguments of the associated function.'], 'oneOf': [{'title': 'Array of Python package identifiers', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}}, {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. 
``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}]}, 'package-dir': {'$$description': [':class:`dict`-like structure mapping from package names to directories where their', 'code can be found.', 'The empty string (as key) means that all packages are contained inside', 'the given directory will be included in the distribution.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': ''}]}, 'patternProperties': {'^.*$': {'type': 'string'}}}, 'package-data': {'$$description': ['Mapping from package names to lists of glob patterns.', 'Usually this option is not needed when using ``include-package-data = true``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'include-package-data': {'$$description': ['Automatically include any data files inside the package directories', 'that are specified by ``MANIFEST.in``', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'boolean'}, 'exclude-package-data': {'$$description': ['Mapping from package names to lists of glob patterns that should be excluded', 'For more information on how to include data files, check ``setuptools`` `docs', '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_.'], 'type': 'object', 'additionalProperties': False, 'propertyNames': {'oneOf': [{'format': 'python-module-name'}, {'const': '*'}]}, 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'namespace-packages': {'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'https://setuptools.pypa.io/en/latest/userguide/package_discovery.html'}, 'py-modules': {'description': 'Modules that setuptools will manipulate', 'type': 'array', 'items': {'type': 'string', 'format': 'python-module-name'}, '$comment': 'TODO: clarify the relationship with ``packages``'}, 'data-files': {'$$description': ['**DEPRECATED**: dict-like structure where each key represents a directory and', 'the value is a list of glob patterns that should be installed in them.', "Please notice this don't work with wheels. 
See `data files support", '<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'array', 'items': {'type': 'string'}}}}, 'cmdclass': {'$$description': ['Mapping of distutils-style command names to ``setuptools.Command`` subclasses', 'which in turn should be represented by strings with a qualified class name', '(i.e., "dotted" form with module), e.g.::\n\n', ' cmdclass = {mycmd = "pkg.subpkg.module.CommandClass"}\n\n', 'The command class should be a directly defined at the top-level of the', 'containing module (no class nesting).'], 'type': 'object', 'patternProperties': {'^.*$': {'type': 'string', 'format': 'python-qualified-identifier'}}}, 'license-files': {'type': 'array', 'items': {'type': 'string'}, '$$description': ['PROVISIONAL: List of glob patterns for all license files being distributed.', '(might become standard with PEP 639).'], 'default': ['LICEN[CS]E*', ' COPYING*', ' NOTICE*', 'AUTHORS*'], '$comment': 'TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?'}, 'dynamic': {'type': 'object', 'description': 'Instructions for loading :pep:`621`-related metadata dynamically', 'additionalProperties': False, 'properties': {'version': {'$$description': ['A version dynamically loaded via either the ``attr:`` or ``file:``', 'directives. Please make sure the given file or attribute respects :pep:`440`.'], 'oneOf': [{'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}]}, 'classifiers': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'description': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'dependencies': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'entry-points': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'optional-dependencies': {'type': 'object', 'propertyNames': {'format': 
'python-identifier'}, 'additionalProperties': False, 'patternProperties': {'.+': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}}}, 'readme': {'anyOf': [{'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, {'properties': {'content-type': {'type': 'string'}}}], 'required': ['file']}}}}, 'definitions': {'file-directive': {'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, 'attr-directive': {'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, 'find-directive': {'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}}}, rule='additionalProperties')
+ return data
+
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['file'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='required')
+ data_keys = set(data.keys())
+ if "file" in data_keys:
+ data_keys.remove("file")
+ data__file = data["file"]
+ data__file_one_of_count7 = 0
+ if data__file_one_of_count7 < 2:
+ try:
+ if not isinstance(data__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be string", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'string'}, rule='type')
+ data__file_one_of_count7 += 1
+ except JsonSchemaValueException: pass
+ if data__file_one_of_count7 < 2:
+ try:
+ if not isinstance(data__file, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be array", value=data__file, name="" + (name_prefix or "data") + ".file", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__file_is_list = isinstance(data__file, (list, tuple))
+ if data__file_is_list:
+ data__file_len = len(data__file)
+ for data__file_x, data__file_item in enumerate(data__file):
+ if not isinstance(data__file_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + " must be string", value=data__file_item, name="" + (name_prefix or "data") + ".file[{data__file_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ data__file_one_of_count7 += 1
+ except JsonSchemaValueException: pass
+ if data__file_one_of_count7 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".file must be valid exactly by one definition" + (" (" + str(data__file_one_of_count7) + " matches found)"), value=data__file, name="" + (name_prefix or "data") + ".file", definition={'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}, rule='oneOf')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/file-directive', 'title': "'file:' directive", 'description': 'Value is read from a file (or list of files and then concatenated)', 'type': 'object', 'additionalProperties': False, 'properties': {'file': {'oneOf': [{'type': 'string'}, {'type': 'array', 'items': {'type': 'string'}}]}}, 'required': ['file']}, rule='additionalProperties')
+ return data
+
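The short sketch below is added for illustration only and is not part of the vendored file shown in this hunk. It demonstrates how the generated ``file:``-directive validator just above behaves, assuming the package layout given by this diff's paths and that ``JsonSchemaValueException`` is reachable as an attribute of the generated module (the function bodies above reference it at module level):

    from setuptools.config._validate_pyproject import fastjsonschema_validations as v

    # The required "file" key may be a single string or a list of strings;
    # on success the validator returns the data it was given, unchanged.
    directive = {"file": ["README.rst", "CHANGELOG.rst"]}
    assert v.validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive(directive) == directive

    # Anything else fails the oneOf check above and raises JsonSchemaValueException.
    try:
        v.validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_file_directive({"file": 42})
    except v.JsonSchemaValueException as exc:
        print(exc)  # data.file must be valid exactly by one definition (0 matches found)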
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_attr_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['attr']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['attr'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='required')
+ data_keys = set(data.keys())
+ if "attr" in data_keys:
+ data_keys.remove("attr")
+ data__attr = data["attr"]
+ if not isinstance(data__attr, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".attr must be string", value=data__attr, name="" + (name_prefix or "data") + ".attr", definition={'type': 'string'}, rule='type')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'title': "'attr:' directive", '$id': '#/definitions/attr-directive', '$$description': ['Value is read from a module attribute. Supports callables and iterables;', 'unsupported types are cast via ``str()``'], 'type': 'object', 'additionalProperties': False, 'properties': {'attr': {'type': 'string'}}, 'required': ['attr']}, rule='additionalProperties')
+ return data
+
+def validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "find" in data_keys:
+ data_keys.remove("find")
+ data__find = data["find"]
+ if not isinstance(data__find, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must be object", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='type')
+ data__find_is_dict = isinstance(data__find, dict)
+ if data__find_is_dict:
+ data__find_keys = set(data__find.keys())
+ if "where" in data__find_keys:
+ data__find_keys.remove("where")
+ data__find__where = data__find["where"]
+ if not isinstance(data__find__where, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where must be array", value=data__find__where, name="" + (name_prefix or "data") + ".find.where", definition={'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, rule='type')
+ data__find__where_is_list = isinstance(data__find__where, (list, tuple))
+ if data__find__where_is_list:
+ data__find__where_len = len(data__find__where)
+ for data__find__where_x, data__find__where_item in enumerate(data__find__where):
+ if not isinstance(data__find__where_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + " must be string", value=data__find__where_item, name="" + (name_prefix or "data") + ".find.where[{data__find__where_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "exclude" in data__find_keys:
+ data__find_keys.remove("exclude")
+ data__find__exclude = data__find["exclude"]
+ if not isinstance(data__find__exclude, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude must be array", value=data__find__exclude, name="" + (name_prefix or "data") + ".find.exclude", definition={'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
+ data__find__exclude_is_list = isinstance(data__find__exclude, (list, tuple))
+ if data__find__exclude_is_list:
+ data__find__exclude_len = len(data__find__exclude)
+ for data__find__exclude_x, data__find__exclude_item in enumerate(data__find__exclude):
+ if not isinstance(data__find__exclude_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + " must be string", value=data__find__exclude_item, name="" + (name_prefix or "data") + ".find.exclude[{data__find__exclude_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "include" in data__find_keys:
+ data__find_keys.remove("include")
+ data__find__include = data__find["include"]
+ if not isinstance(data__find__include, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include must be array", value=data__find__include, name="" + (name_prefix or "data") + ".find.include", definition={'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, rule='type')
+ data__find__include_is_list = isinstance(data__find__include, (list, tuple))
+ if data__find__include_is_list:
+ data__find__include_len = len(data__find__include)
+ for data__find__include_x, data__find__include_item in enumerate(data__find__include):
+ if not isinstance(data__find__include_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + " must be string", value=data__find__include_item, name="" + (name_prefix or "data") + ".find.include[{data__find__include_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "namespaces" in data__find_keys:
+ data__find_keys.remove("namespaces")
+ data__find__namespaces = data__find["namespaces"]
+ if not isinstance(data__find__namespaces, (bool)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find.namespaces must be boolean", value=data__find__namespaces, name="" + (name_prefix or "data") + ".find.namespaces", definition={'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}, rule='type')
+ if data__find_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".find must not contain "+str(data__find_keys)+" properties", value=data__find, name="" + (name_prefix or "data") + ".find", definition={'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}, rule='additionalProperties')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/find-directive', 'title': "'find:' directive", 'type': 'object', 'additionalProperties': False, 'properties': {'find': {'type': 'object', '$$description': ['Dynamic `package discovery', '<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_.'], 'additionalProperties': False, 'properties': {'where': {'description': 'Directories to be searched for packages (Unix-style relative path)', 'type': 'array', 'items': {'type': 'string'}}, 'exclude': {'type': 'array', '$$description': ['Exclude packages that match the values listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'include': {'type': 'array', '$$description': ['Restrict the found packages to just the ones listed in this field.', "Can container shell-style wildcards (e.g. ``'pkg.*'``)"], 'items': {'type': 'string'}}, 'namespaces': {'type': 'boolean', '$$description': ['When ``True``, directories without a ``__init__.py`` file will also', 'be scanned for :pep:`420`-style implicit namespaces']}}}}}, rule='additionalProperties')
+ return data
+
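Likewise, a reader-added sketch (same assumptions as above, not part of the vendored file) for the ``find:``-directive validator, which the schema uses for the ``find`` form of the ``packages`` field:

    from setuptools.config._validate_pyproject import fastjsonschema_validations as v

    # Roughly the data a ``[tool.setuptools.packages.find]`` table would provide.
    v.validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(
        {"find": {"where": ["src"], "namespaces": True}}
    )

    # "where" must be an array of strings, so a bare string is rejected.
    try:
        v.validate_https___setuptools_pypa_io_en_latest_references_keywords_html__definitions_find_directive(
            {"find": {"where": "src"}}
        )
    except v.JsonSchemaValueException as exc:
        print(exc)  # data.find.where must be array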
+def validate_https___docs_python_org_3_install(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://docs.python.org/3/install/', 'title': '``tool.distutils`` table', '$$description': ['Originally, ``distutils`` allowed developers to configure arguments for', '``setup.py`` scripts via `distutils configuration files', '<https://docs.python.org/3/install/#distutils-configuration-files>`_.', '``tool.distutils`` subtables could be used with the same purpose', '(NOT CURRENTLY IMPLEMENTED).'], 'type': 'object', 'properties': {'global': {'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}}, 'patternProperties': {'.+': {'type': 'object'}}, '$comment': 'TODO: Is there a practical way of making this schema more specific?'}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "global" in data_keys:
+ data_keys.remove("global")
+ data__global = data["global"]
+ if not isinstance(data__global, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".global must be object", value=data__global, name="" + (name_prefix or "data") + ".global", definition={'type': 'object', 'description': 'Global options applied to all ``distutils`` commands'}, rule='type')
+ for data_key, data_val in data.items():
+ if REGEX_PATTERNS['.+'].search(data_key):
+ if data_key in data_keys:
+ data_keys.remove(data_key)
+ if not isinstance(data_val, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be object", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'object'}, rule='type')
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['name']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. 
whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. 
It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. 
_core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='required')
+ data_keys = set(data.keys())
+ if "name" in data_keys:
+ data_keys.remove("name")
+ data__name = data["name"]
+ if not isinstance(data__name, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='type')
+ if isinstance(data__name, str):
+ if not custom_formats["pep508-identifier"](data__name):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be pep508-identifier", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, rule='format')
+ if "version" in data_keys:
+ data_keys.remove("version")
+ data__version = data["version"]
+ if not isinstance(data__version, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='type')
+ if isinstance(data__version, str):
+ if not custom_formats["pep440"](data__version):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be pep440", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, rule='format')
+ if "description" in data_keys:
+ data_keys.remove("description")
+ data__description = data["description"]
+ if not isinstance(data__description, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, rule='type')
+ if "readme" in data_keys:
+ data_keys.remove("readme")
+ data__readme = data["readme"]
+ data__readme_one_of_count8 = 0
+ if data__readme_one_of_count8 < 2:
+ try:
+ if not isinstance(data__readme, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be string", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, rule='type')
+ data__readme_one_of_count8 += 1
+ except JsonSchemaValueException: pass
+ if data__readme_one_of_count8 < 2:
+ try:
+ if not isinstance(data__readme, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be object", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}, rule='type')
+ data__readme_any_of_count9 = 0
+ if not data__readme_any_of_count9:
+ try:
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['file'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "file" in data__readme_keys:
+ data__readme_keys.remove("file")
+ data__readme__file = data__readme["file"]
+ if not isinstance(data__readme__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.file must be string", value=data__readme__file, name="" + (name_prefix or "data") + ".readme.file", definition={'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}, rule='type')
+ data__readme_any_of_count9 += 1
+ except JsonSchemaValueException: pass
+ if not data__readme_any_of_count9:
+ try:
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['text']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['text'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "text" in data__readme_keys:
+ data__readme_keys.remove("text")
+ data__readme__text = data__readme["text"]
+ if not isinstance(data__readme__text, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.text must be string", value=data__readme__text, name="" + (name_prefix or "data") + ".readme.text", definition={'type': 'string', 'description': 'Full text describing the project.'}, rule='type')
+ data__readme_any_of_count9 += 1
+ except JsonSchemaValueException: pass
+ if not data__readme_any_of_count9:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme cannot be validated by any definition", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, rule='anyOf')
+ data__readme_is_dict = isinstance(data__readme, dict)
+ if data__readme_is_dict:
+ data__readme_len = len(data__readme)
+ if not all(prop in data__readme for prop in ['content-type']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must contain ['content-type'] properties", value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}, rule='required')
+ data__readme_keys = set(data__readme.keys())
+ if "content-type" in data__readme_keys:
+ data__readme_keys.remove("content-type")
+ data__readme__contenttype = data__readme["content-type"]
+ if not isinstance(data__readme__contenttype, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme.content-type must be string", value=data__readme__contenttype, name="" + (name_prefix or "data") + ".readme.content-type", definition={'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}, rule='type')
+ data__readme_one_of_count8 += 1
+ except JsonSchemaValueException: pass
+ if data__readme_one_of_count8 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".readme must be valid exactly by one definition" + (" (" + str(data__readme_one_of_count8) + " matches found)"), value=data__readme, name="" + (name_prefix or "data") + ".readme", definition={'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, rule='oneOf')
+ if "requires-python" in data_keys:
+ data_keys.remove("requires-python")
+ data__requirespython = data["requires-python"]
+ if not isinstance(data__requirespython, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be string", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='type')
+ if isinstance(data__requirespython, str):
+ if not custom_formats["pep508-versionspec"](data__requirespython):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".requires-python must be pep508-versionspec", value=data__requirespython, name="" + (name_prefix or "data") + ".requires-python", definition={'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, rule='format')
+ if "license" in data_keys:
+ data_keys.remove("license")
+ data__license = data["license"]
+ data__license_one_of_count10 = 0
+ if data__license_one_of_count10 < 2:
+ try:
+ data__license_is_dict = isinstance(data__license, dict)
+ if data__license_is_dict:
+ data__license_len = len(data__license)
+ if not all(prop in data__license for prop in ['file']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['file'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, rule='required')
+ data__license_keys = set(data__license.keys())
+ if "file" in data__license_keys:
+ data__license_keys.remove("file")
+ data__license__file = data__license["file"]
+ if not isinstance(data__license__file, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.file must be string", value=data__license__file, name="" + (name_prefix or "data") + ".license.file", definition={'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}, rule='type')
+ data__license_one_of_count10 += 1
+ except JsonSchemaValueException: pass
+ if data__license_one_of_count10 < 2:
+ try:
+ data__license_is_dict = isinstance(data__license, dict)
+ if data__license_is_dict:
+ data__license_len = len(data__license)
+ if not all(prop in data__license for prop in ['text']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must contain ['text'] properties", value=data__license, name="" + (name_prefix or "data") + ".license", definition={'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}, rule='required')
+ data__license_keys = set(data__license.keys())
+ if "text" in data__license_keys:
+ data__license_keys.remove("text")
+ data__license__text = data__license["text"]
+ if not isinstance(data__license__text, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license.text must be string", value=data__license__text, name="" + (name_prefix or "data") + ".license.text", definition={'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}, rule='type')
+ data__license_one_of_count10 += 1
+ except JsonSchemaValueException: pass
+ if data__license_one_of_count10 != 1:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".license must be valid exactly by one definition" + (" (" + str(data__license_one_of_count10) + " matches found)"), value=data__license, name="" + (name_prefix or "data") + ".license", definition={'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, rule='oneOf')
+ if "authors" in data_keys:
+ data_keys.remove("authors")
+ data__authors = data["authors"]
+ if not isinstance(data__authors, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".authors must be array", value=data__authors, name="" + (name_prefix or "data") + ".authors", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, rule='type')
+ data__authors_is_list = isinstance(data__authors, (list, tuple))
+ if data__authors_is_list:
+ data__authors_len = len(data__authors)
+ for data__authors_x, data__authors_item in enumerate(data__authors):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__authors_item, custom_formats, (name_prefix or "data") + ".authors[{data__authors_x}]")
+ if "maintainers" in data_keys:
+ data_keys.remove("maintainers")
+ data__maintainers = data["maintainers"]
+ if not isinstance(data__maintainers, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".maintainers must be array", value=data__maintainers, name="" + (name_prefix or "data") + ".maintainers", definition={'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, rule='type')
+ data__maintainers_is_list = isinstance(data__maintainers, (list, tuple))
+ if data__maintainers_is_list:
+ data__maintainers_len = len(data__maintainers)
+ for data__maintainers_x, data__maintainers_item in enumerate(data__maintainers):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data__maintainers_item, custom_formats, (name_prefix or "data") + ".maintainers[{data__maintainers_x}]")
+ if "keywords" in data_keys:
+ data_keys.remove("keywords")
+ data__keywords = data["keywords"]
+ if not isinstance(data__keywords, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords must be array", value=data__keywords, name="" + (name_prefix or "data") + ".keywords", definition={'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, rule='type')
+ data__keywords_is_list = isinstance(data__keywords, (list, tuple))
+ if data__keywords_is_list:
+ data__keywords_len = len(data__keywords)
+ for data__keywords_x, data__keywords_item in enumerate(data__keywords):
+ if not isinstance(data__keywords_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + " must be string", value=data__keywords_item, name="" + (name_prefix or "data") + ".keywords[{data__keywords_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+ if "classifiers" in data_keys:
+ data_keys.remove("classifiers")
+ data__classifiers = data["classifiers"]
+ if not isinstance(data__classifiers, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers must be array", value=data__classifiers, name="" + (name_prefix or "data") + ".classifiers", definition={'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, rule='type')
+ data__classifiers_is_list = isinstance(data__classifiers, (list, tuple))
+ if data__classifiers_is_list:
+ data__classifiers_len = len(data__classifiers)
+ for data__classifiers_x, data__classifiers_item in enumerate(data__classifiers):
+ if not isinstance(data__classifiers_item, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be string", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='type')
+ if isinstance(data__classifiers_item, str):
+ if not custom_formats["trove-classifier"](data__classifiers_item):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + " must be trove-classifier", value=data__classifiers_item, name="" + (name_prefix or "data") + ".classifiers[{data__classifiers_x}]".format(**locals()) + "", definition={'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, rule='format')
+ if "urls" in data_keys:
+ data_keys.remove("urls")
+ data__urls = data["urls"]
+ if not isinstance(data__urls, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must be object", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='type')
+ data__urls_is_dict = isinstance(data__urls, dict)
+ if data__urls_is_dict:
+ data__urls_keys = set(data__urls.keys())
+ for data__urls_key, data__urls_val in data__urls.items():
+ if REGEX_PATTERNS['^.+$'].search(data__urls_key):
+ if data__urls_key in data__urls_keys:
+ data__urls_keys.remove(data__urls_key)
+ if not isinstance(data__urls_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be string", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='type')
+ if isinstance(data__urls_val, str):
+ if not custom_formats["url"](data__urls_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + " must be url", value=data__urls_val, name="" + (name_prefix or "data") + ".urls.{data__urls_key}".format(**locals()) + "", definition={'type': 'string', 'format': 'url'}, rule='format')
+ if data__urls_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".urls must not contain "+str(data__urls_keys)+" properties", value=data__urls, name="" + (name_prefix or "data") + ".urls", definition={'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, rule='additionalProperties')
+ if "scripts" in data_keys:
+ data_keys.remove("scripts")
+ data__scripts = data["scripts"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__scripts, custom_formats, (name_prefix or "data") + ".scripts")
+ if "gui-scripts" in data_keys:
+ data_keys.remove("gui-scripts")
+ data__guiscripts = data["gui-scripts"]
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__guiscripts, custom_formats, (name_prefix or "data") + ".gui-scripts")
+ if "entry-points" in data_keys:
+ data_keys.remove("entry-points")
+ data__entrypoints = data["entry-points"]
+ data__entrypoints_is_dict = isinstance(data__entrypoints, dict)
+ if data__entrypoints_is_dict:
+ data__entrypoints_keys = set(data__entrypoints.keys())
+ for data__entrypoints_key, data__entrypoints_val in data__entrypoints.items():
+ if REGEX_PATTERNS['^.+$'].search(data__entrypoints_key):
+ if data__entrypoints_key in data__entrypoints_keys:
+ data__entrypoints_keys.remove(data__entrypoints_key)
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data__entrypoints_val, custom_formats, (name_prefix or "data") + ".entry-points.{data__entrypoints_key}")
+ if data__entrypoints_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must not contain "+str(data__entrypoints_keys)+" properties", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='additionalProperties')
+ data__entrypoints_len = len(data__entrypoints)
+ if data__entrypoints_len != 0:
+ data__entrypoints_property_names = True
+ for data__entrypoints_key in data__entrypoints:
+ try:
+ if isinstance(data__entrypoints_key, str):
+ if not custom_formats["python-entrypoint-group"](data__entrypoints_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be python-entrypoint-group", value=data__entrypoints_key, name="" + (name_prefix or "data") + ".entry-points", definition={'format': 'python-entrypoint-group'}, rule='format')
+ except JsonSchemaValueException:
+ data__entrypoints_property_names = False
+ if not data__entrypoints_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".entry-points must be named by propertyName definition", value=data__entrypoints, name="" + (name_prefix or "data") + ".entry-points", definition={'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, rule='propertyNames')
+ if "dependencies" in data_keys:
+ data_keys.remove("dependencies")
+ data__dependencies = data["dependencies"]
+ if not isinstance(data__dependencies, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dependencies must be array", value=data__dependencies, name="" + (name_prefix or "data") + ".dependencies", definition={'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
+ data__dependencies_is_list = isinstance(data__dependencies, (list, tuple))
+ if data__dependencies_is_list:
+ data__dependencies_len = len(data__dependencies)
+ for data__dependencies_x, data__dependencies_item in enumerate(data__dependencies):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__dependencies_item, custom_formats, (name_prefix or "data") + ".dependencies[{data__dependencies_x}]")
+ if "optional-dependencies" in data_keys:
+ data_keys.remove("optional-dependencies")
+ data__optionaldependencies = data["optional-dependencies"]
+ if not isinstance(data__optionaldependencies, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be object", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='type')
+ data__optionaldependencies_is_dict = isinstance(data__optionaldependencies, dict)
+ if data__optionaldependencies_is_dict:
+ data__optionaldependencies_keys = set(data__optionaldependencies.keys())
+ for data__optionaldependencies_key, data__optionaldependencies_val in data__optionaldependencies.items():
+ if REGEX_PATTERNS['^.+$'].search(data__optionaldependencies_key):
+ if data__optionaldependencies_key in data__optionaldependencies_keys:
+ data__optionaldependencies_keys.remove(data__optionaldependencies_key)
+ if not isinstance(data__optionaldependencies_val, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + " must be array", value=data__optionaldependencies_val, name="" + (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}".format(**locals()) + "", definition={'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, rule='type')
+ data__optionaldependencies_val_is_list = isinstance(data__optionaldependencies_val, (list, tuple))
+ if data__optionaldependencies_val_is_list:
+ data__optionaldependencies_val_len = len(data__optionaldependencies_val)
+ for data__optionaldependencies_val_x, data__optionaldependencies_val_item in enumerate(data__optionaldependencies_val):
+ validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data__optionaldependencies_val_item, custom_formats, (name_prefix or "data") + ".optional-dependencies.{data__optionaldependencies_key}[{data__optionaldependencies_val_x}]")
+ if data__optionaldependencies_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must not contain "+str(data__optionaldependencies_keys)+" properties", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='additionalProperties')
+ data__optionaldependencies_len = len(data__optionaldependencies)
+ if data__optionaldependencies_len != 0:
+ data__optionaldependencies_property_names = True
+ for data__optionaldependencies_key in data__optionaldependencies:
+ try:
+ if isinstance(data__optionaldependencies_key, str):
+ if not custom_formats["pep508-identifier"](data__optionaldependencies_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be pep508-identifier", value=data__optionaldependencies_key, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'format': 'pep508-identifier'}, rule='format')
+ except JsonSchemaValueException:
+ data__optionaldependencies_property_names = False
+ if not data__optionaldependencies_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".optional-dependencies must be named by propertyName definition", value=data__optionaldependencies, name="" + (name_prefix or "data") + ".optional-dependencies", definition={'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, rule='propertyNames')
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ if not isinstance(data__dynamic, (list, tuple)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be array", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}, rule='type')
+ data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
+ if data__dynamic_is_list:
+ data__dynamic_len = len(data__dynamic)
+ for data__dynamic_x, data__dynamic_item in enumerate(data__dynamic):
+ if data__dynamic_item not in ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + " must be one of ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']", value=data__dynamic_item, name="" + (name_prefix or "data") + ".dynamic[{data__dynamic_x}]".format(**locals()) + "", definition={'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}, rule='enum')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$schema': 'http://json-schema.org/draft-07/schema', '$id': 'https://packaging.python.org/en/latest/specifications/declaring-project-metadata/', 'title': 'Package metadata stored in the ``project`` table', '$$description': ['Data structure for the **project** table inside ``pyproject.toml``', '(as initially defined in :pep:`621`)'], 'type': 'object', 'properties': {'name': {'type': 'string', 'description': 'The name (primary identifier) of the project. MUST be statically defined.', 'format': 'pep508-identifier'}, 'version': {'type': 'string', 'description': 'The version of the project as supported by :pep:`440`.', 'format': 'pep440'}, 'description': {'type': 'string', '$$description': ['The `summary description of the project', '<https://packaging.python.org/specifications/core-metadata/#summary>`_']}, 'readme': {'$$description': ['`Full/detailed description of the project in the form of a README', '<https://www.python.org/dev/peps/pep-0621/#readme>`_', "with meaning similar to the one defined in `core metadata's Description", '<https://packaging.python.org/specifications/core-metadata/#description>`_'], 'oneOf': [{'type': 'string', '$$description': ['Relative path to a text file (UTF-8) containing the full description', 'of the project. If the file path ends in case-insensitive ``.md`` or', '``.rst`` suffixes, then the content-type is respectively', '``text/markdown`` or ``text/x-rst``']}, {'type': 'object', 'allOf': [{'anyOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to a text file containing the full description', 'of the project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', 'description': 'Full text describing the project.'}}, 'required': ['text']}]}, {'properties': {'content-type': {'type': 'string', '$$description': ['Content-type (:rfc:`1341`) of the full description', '(e.g. ``text/markdown``). The ``charset`` parameter is assumed', 'UTF-8 when not present.'], '$comment': 'TODO: add regex pattern or format?'}}, 'required': ['content-type']}]}]}, 'requires-python': {'type': 'string', 'format': 'pep508-versionspec', '$$description': ['`The Python version requirements of the project', '<https://packaging.python.org/specifications/core-metadata/#requires-python>`_.']}, 'license': {'description': '`Project license <https://www.python.org/dev/peps/pep-0621/#license>`_.', 'oneOf': [{'properties': {'file': {'type': 'string', '$$description': ['Relative path to the file (UTF-8) which contains the license for the', 'project.']}}, 'required': ['file']}, {'properties': {'text': {'type': 'string', '$$description': ['The license of the project whose meaning is that of the', '`License field from the core metadata', '<https://packaging.python.org/specifications/core-metadata/#license>`_.']}}, 'required': ['text']}]}, 'authors': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'authors' of the project.", 'The exact meaning is open to interpretation (e.g. original or primary authors,', 'current maintainers, or owners of the package).']}, 'maintainers': {'type': 'array', 'items': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, '$$description': ["The people or organizations considered to be the 'maintainers' of the project.", 'Similarly to ``authors``, the exact meaning is open to interpretation.']}, 'keywords': {'type': 'array', 'items': {'type': 'string'}, 'description': 'List of keywords to assist searching for the distribution in a larger catalog.'}, 'classifiers': {'type': 'array', 'items': {'type': 'string', 'format': 'trove-classifier', 'description': '`PyPI classifier <https://pypi.org/classifiers/>`_.'}, '$$description': ['`Trove classifiers <https://pypi.org/classifiers/>`_', 'which apply to the project.']}, 'urls': {'type': 'object', 'description': 'URLs associated with the project in the form ``label => value``.', 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', 'format': 'url'}}}, 'scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'gui-scripts': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'entry-points': {'$$description': ['Instruct the installer to expose the given modules/functions via', '``entry-point`` discovery mechanism (useful for plugins).', 'More information available in the `Python packaging guide', '<https://packaging.python.org/specifications/entry-points/>`_.'], 'propertyNames': {'format': 'python-entrypoint-group'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}}}, 'dependencies': {'type': 'array', 'description': 'Project (mandatory) dependencies.', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}, 'optional-dependencies': {'type': 'object', 'description': 'Optional dependency for the project', 'propertyNames': {'format': 'pep508-identifier'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'array', 'items': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}}, 'dynamic': {'type': 'array', '$$description': ['Specifies which fields are intentionally unspecified and expected to be', 'dynamically provided by build tools'], 'items': {'enum': ['version', 'description', 'readme', 'requires-python', 'license', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies']}}}, 'required': ['name'], 'additionalProperties': False, 'if': {'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, 'then': {'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, 'definitions': {'author': {'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, 'entry-point-group': {'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, 'dependency': {'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}}}, rule='additionalProperties')
+ try:
+ try:
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['dynamic']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['dynamic'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, rule='required')
+ data_keys = set(data.keys())
+ if "dynamic" in data_keys:
+ data_keys.remove("dynamic")
+ data__dynamic = data["dynamic"]
+ data__dynamic_is_list = isinstance(data__dynamic, (list, tuple))
+ if data__dynamic_is_list:
+ data__dynamic_contains = False
+ for data__dynamic_key in data__dynamic:
+ try:
+ if data__dynamic_key != "version":
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must be same as const definition: version", value=data__dynamic_key, name="" + (name_prefix or "data") + ".dynamic", definition={'const': 'version'}, rule='const')
+ data__dynamic_contains = True
+ break
+ except JsonSchemaValueException: pass
+ if not data__dynamic_contains:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".dynamic must contain one of contains definition", value=data__dynamic, name="" + (name_prefix or "data") + ".dynamic", definition={'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}, rule='contains')
+ except JsonSchemaValueException: pass
+ else:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must NOT match a disallowed definition", value=data, name="" + (name_prefix or "data") + "", definition={'not': {'required': ['dynamic'], 'properties': {'dynamic': {'contains': {'const': 'version'}, '$$description': ['version is listed in ``dynamic``']}}}, '$$comment': ['According to :pep:`621`:', ' If the core metadata specification lists a field as "Required", then', ' the metadata MUST specify the field statically or list it in dynamic', 'In turn, `core metadata`_ defines:', ' The required fields are: Metadata-Version, Name, Version.', ' All the other fields are optional.', 'Since ``Metadata-Version`` is defined by the build back-end, ``name`` and', '``version`` are the only mandatory information in ``pyproject.toml``.', '.. _core metadata: https://packaging.python.org/specifications/core-metadata/']}, rule='not')
+ except JsonSchemaValueException:
+ pass
+ else:
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_len = len(data)
+ if not all(prop in data for prop in ['version']):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['version'], '$$description': ['version should be statically defined in the ``version`` field']}, rule='required')
+ return data
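+# Illustrative example (hypothetical data, given format callables that accept the
+# values): {"name": "pkg", "dynamic": ["version"]} satisfies the if/then logic above
+# because "version" is listed in ``dynamic``, while {"name": "pkg"} alone raises
+# JsonSchemaValueException with "must contain ['version'] properties".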
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_dependency(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='type')
+ if isinstance(data, str):
+ if not custom_formats["pep508"](data):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be pep508", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/dependency', 'title': 'Dependency', 'type': 'string', 'description': 'Project dependency specification according to PEP 508', 'format': 'pep508'}, rule='format')
+ return data
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_entry_point_group(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ for data_key, data_val in data.items():
+ if REGEX_PATTERNS['^.+$'].search(data_key):
+ if data_key in data_keys:
+ data_keys.remove(data_key)
+ if not isinstance(data_val, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='type')
+ if isinstance(data_val, str):
+ if not custom_formats["python-entrypoint-reference"](data_val):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be python-entrypoint-reference", value=data_val, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}, rule='format')
+ if data_keys:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must not contain "+str(data_keys)+" properties", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='additionalProperties')
+ data_len = len(data)
+ if data_len != 0:
+ data_property_names = True
+ for data_key in data:
+ try:
+ if isinstance(data_key, str):
+ if not custom_formats["python-entrypoint-name"](data_key):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be python-entrypoint-name", value=data_key, name="" + (name_prefix or "data") + "", definition={'format': 'python-entrypoint-name'}, rule='format')
+ except JsonSchemaValueException:
+ data_property_names = False
+ if not data_property_names:
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be named by propertyName definition", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/entry-point-group', 'title': 'Entry-points', 'type': 'object', '$$description': ['Entry-points are grouped together to indicate what sort of capabilities they', 'provide.', 'See the `packaging guides', '<https://packaging.python.org/specifications/entry-points/>`_', 'and `setuptools docs', '<https://setuptools.pypa.io/en/latest/userguide/entry_point.html>`_', 'for more information.'], 'propertyNames': {'format': 'python-entrypoint-name'}, 'additionalProperties': False, 'patternProperties': {'^.+$': {'type': 'string', '$$description': ['Reference to a Python object. It is either in the form', '``importable.module``, or ``importable.module:object.attr``.'], 'format': 'python-entrypoint-reference', '$comment': 'https://packaging.python.org/specifications/entry-points/'}}}, rule='propertyNames')
+ return data
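+# Illustrative example (hypothetical data): with format callables that accept
+# "my-tool" as an entry-point name and "my_pkg.cli:main" as an entry-point
+# reference, {"my-tool": "my_pkg.cli:main"} passes this validator, whereas a
+# non-string value such as {"my-tool": 42} raises JsonSchemaValueException.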
+
+def validate_https___packaging_python_org_en_latest_specifications_declaring_project_metadata___definitions_author(data, custom_formats={}, name_prefix=None):
+ if not isinstance(data, (dict)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'$id': '#/definitions/author', 'title': 'Author or Maintainer', '$comment': 'https://www.python.org/dev/peps/pep-0621/#authors-maintainers', 'type': 'object', 'properties': {'name': {'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, 'email': {'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}}}, rule='type')
+ data_is_dict = isinstance(data, dict)
+ if data_is_dict:
+ data_keys = set(data.keys())
+ if "name" in data_keys:
+ data_keys.remove("name")
+ data__name = data["name"]
+ if not isinstance(data__name, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string', '$$description': ['MUST be a valid email name, i.e. whatever can be put as a name, before an', 'email, in :rfc:`822`.']}, rule='type')
+ if "email" in data_keys:
+ data_keys.remove("email")
+ data__email = data["email"]
+ if not isinstance(data__email, (str)):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='type')
+ if isinstance(data__email, str):
+ if not REGEX_PATTERNS["idn-email_re_pattern"].match(data__email):
+ raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be idn-email", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string', 'format': 'idn-email', 'description': 'MUST be a valid email address'}, rule='format')
+ return data
\ No newline at end of file
diff --git a/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/formats.py b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/formats.py
new file mode 100644
index 0000000..638ac11
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/_validate_pyproject/formats.py
@@ -0,0 +1,259 @@
+import logging
+import os
+import re
+import string
+import typing
+from itertools import chain as _chain
+
+_logger = logging.getLogger(__name__)
+
+# -------------------------------------------------------------------------------------
+# PEP 440
+
+VERSION_PATTERN = r"""
+ v?
+ (?:
+ (?:(?P<epoch>[0-9]+)!)? # epoch
+ (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
+ (?P<pre> # pre-release
+ [-_\.]?
+ (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ [-_\.]?
+ (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post> # post release
+ (?:-(?P<post_n1>[0-9]+))
+ |
+ (?:
+ [-_\.]?
+ (?P<post_l>post|rev|r)
+ [-_\.]?
+ (?P<post_n2>[0-9]+)?
+ )
+ )?
+ (?P<dev> # dev release
+ [-_\.]?
+ (?P<dev_l>dev)
+ [-_\.]?
+ (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
+"""
+
+VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
+
+
+def pep440(version: str) -> bool:
+ return VERSION_REGEX.match(version) is not None
+
+
+# -------------------------------------------------------------------------------------
+# PEP 508
+
+PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
+PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
+
+
+def pep508_identifier(name: str) -> bool:
+ return PEP508_IDENTIFIER_REGEX.match(name) is not None
+
+
+try:
+ try:
+ from packaging import requirements as _req
+ except ImportError: # pragma: no cover
+ # let's try setuptools vendored version
+ from setuptools._vendor.packaging import requirements as _req # type: ignore
+
+ def pep508(value: str) -> bool:
+ try:
+ _req.Requirement(value)
+ return True
+ except _req.InvalidRequirement:
+ return False
+
+except ImportError: # pragma: no cover
+ _logger.warning(
+ "Could not find an installation of `packaging`. Requirements, dependencies and "
+ "versions might not be validated. "
+ "To enforce validation, please install `packaging`."
+ )
+
+ def pep508(value: str) -> bool:
+ return True
+
+
+def pep508_versionspec(value: str) -> bool:
+ """Expression that can be used to specify/lock versions (including ranges)"""
+ if any(c in value for c in (";", "]", "@")):
+ # In PEP 508:
+ # conditional markers, extras and URL specs are not included in the
+ # versionspec
+ return False
+ # Let's pretend we have a dependency called `requirement` with the given
+ # version spec, then we can re-use the pep508 function for validation:
+ return pep508(f"requirement{value}")
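+
+# Illustrative behaviour of the helpers above (assuming ``packaging`` is
+# installed; these examples are not part of the spec):
+#
+#     pep440("1.2.3.dev4")                        # True
+#     pep440("not-a-version")                     # False
+#     pep508("requests>=2.0,<3")                  # True
+#     pep508_versionspec(">=2.0,<3")              # True
+#     pep508_versionspec('==1.0; os_name=="nt"')  # False (markers are not allowed here)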
+
+
+# -------------------------------------------------------------------------------------
+# PEP 517
+
+
+def pep517_backend_reference(value: str) -> bool:
+ module, _, obj = value.partition(":")
+ identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
+ return all(python_identifier(i) for i in identifiers if i)
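+
+# For illustration (``mypkg`` is a hypothetical package):
+#
+#     pep517_backend_reference("setuptools.build_meta")       # True
+#     pep517_backend_reference("mypkg.builder:BuildBackend")  # True
+#     pep517_backend_reference("1nvalid.module")              # False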
+
+
+# -------------------------------------------------------------------------------------
+# Classifiers - PEP 301
+
+
+def _download_classifiers() -> str:
+ import ssl
+ from email.message import Message
+ from urllib.request import urlopen
+
+ url = "https://pypi.org/pypi?:action=list_classifiers"
+ context = ssl.create_default_context()
+ with urlopen(url, context=context) as response:
+ headers = Message()
+ headers["content_type"] = response.getheader("content-type", "text/plain")
+ return response.read().decode(headers.get_param("charset", "utf-8"))
+
+
+class _TroveClassifier:
+ """The ``trove_classifiers`` package is the official way of validating classifiers,
+ however this package might not be always available.
+ As a workaround we can still download a list from PyPI.
+ We also don't want to be over strict about it, so simply skipping silently is an
+ option (classifiers will be validated anyway during the upload to PyPI).
+ """
+
+ def __init__(self):
+ # ``downloaded``: None => not cached yet, False => cache not available
+ self.downloaded: typing.Union[None, "typing.Literal[False]", typing.Set[str]] = None
+ self._skip_download = False
+ self.__name__ = "trove_classifier" # Emulate a public function
+
+ def _disable_download(self):
+ # This is a private API. Only setuptools is allowed to use it.
+ self._skip_download = True
+
+ def __call__(self, value: str) -> bool:
+ if self.downloaded is False or self._skip_download is True:
+ return True
+
+ if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
+ self.downloaded = False
+ msg = (
+ "Install ``trove-classifiers`` to ensure proper validation. "
+ "Skipping download of classifiers list from PyPI (NO_NETWORK)."
+ )
+ _logger.debug(msg)
+ return True
+
+ if self.downloaded is None:
+ msg = (
+ "Install ``trove-classifiers`` to ensure proper validation. "
+ "Meanwhile a list of classifiers will be downloaded from PyPI."
+ )
+ _logger.debug(msg)
+ try:
+ self.downloaded = set(_download_classifiers().splitlines())
+ except Exception:
+ self.downloaded = False
+ _logger.debug("Problem with download, skipping validation")
+ return True
+
+ return value in self.downloaded or value.lower().startswith("private ::")
+
+
+try:
+ from trove_classifiers import classifiers as _trove_classifiers
+
+ def trove_classifier(value: str) -> bool:
+ return value in _trove_classifiers or value.lower().startswith("private ::")
+
+except ImportError: # pragma: no cover
+ trove_classifier = _TroveClassifier()
+
+
+# -------------------------------------------------------------------------------------
+# Non-PEP related
+
+
+def url(value: str) -> bool:
+ from urllib.parse import urlparse
+
+ try:
+ parts = urlparse(value)
+ if not parts.scheme:
+ _logger.warning(
+ "For maximum compatibility please make sure to include a "
+ "`scheme` prefix in your URL (e.g. 'http://'). "
+ f"Given value: {value}"
+ )
+ if not (value.startswith("/") or value.startswith("\\") or "@" in value):
+ parts = urlparse(f"http://{value}")
+
+ return bool(parts.scheme and parts.netloc)
+ except Exception:
+ return False
+
+
+# https://packaging.python.org/specifications/entry-points/
+ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
+ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
+RECOMMENDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
+RECOMMENDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMENDED_ENTRYPOINT_PATTERN}$", re.I)
+ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
+ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
+
+
+def python_identifier(value: str) -> bool:
+ return value.isidentifier()
+
+
+def python_qualified_identifier(value: str) -> bool:
+ if value.startswith(".") or value.endswith("."):
+ return False
+ return all(python_identifier(m) for m in value.split("."))
+
+
+def python_module_name(value: str) -> bool:
+ return python_qualified_identifier(value)
+
+
+def python_entrypoint_group(value: str) -> bool:
+ return ENTRYPOINT_GROUP_REGEX.match(value) is not None
+
+
+def python_entrypoint_name(value: str) -> bool:
+ if not ENTRYPOINT_REGEX.match(value):
+ return False
+ if not RECOMMENDED_ENTRYPOINT_REGEX.match(value):
+ msg = f"Entry point `{value}` does not follow recommended pattern: "
+ msg += RECOMMENDED_ENTRYPOINT_PATTERN
+ _logger.warning(msg)
+ return True
+
+
+def python_entrypoint_reference(value: str) -> bool:
+ module, _, rest = value.partition(":")
+ if "[" in rest:
+ obj, _, extras_ = rest.partition("[")
+ if not extras_.strip().endswith("]"):
+ return False
+ extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
+ if not all(pep508_identifier(e) for e in extras):
+ return False
+ _logger.warning(f"`{value}` - using extras for entry points is not recommended")
+ else:
+ obj = rest
+
+ module_parts = module.split(".")
+ identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
+ return all(python_identifier(i.strip()) for i in identifiers)
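+
+# For illustration (``pkg`` is a hypothetical package):
+#
+#     python_entrypoint_reference("pkg.mod:func")          # True
+#     python_entrypoint_reference("pkg.mod:Cls.method")    # True
+#     python_entrypoint_reference("pkg.mod:func [extra]")  # True (logs a warning about extras)
+#     python_entrypoint_reference("pkg mod:func")          # False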
diff --git a/e/lib/python3.11/site-packages/setuptools/config/expand.py b/e/lib/python3.11/site-packages/setuptools/config/expand.py
new file mode 100644
index 0000000..c8db2c4
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/expand.py
@@ -0,0 +1,462 @@
+"""Utility functions to expand configuration directives or special values
+(such glob patterns).
+
+We can split the process of interpreting configuration files into 2 steps:
+
+1. The parsing the file contents from strings to value objects
+ that can be understand by Python (for example a string with a comma
+ separated list of keywords into an actual Python list of strings).
+
+2. The expansion (or post-processing) of these values according to the
+ semantics ``setuptools`` assign to them (for example a configuration field
+ with the ``file:`` directive should be expanded from a list of file paths to
+ a single string with the contents of those files concatenated)
+
+This module focus on the second step, and therefore allow sharing the expansion
+functions among several configuration file formats.
+
+**PRIVATE MODULE**: API reserved for setuptools internal usage only.
+"""
+import ast
+import importlib
+import io
+import os
+import pathlib
+import sys
+import warnings
+from glob import iglob
+from configparser import ConfigParser
+from importlib.machinery import ModuleSpec
+from itertools import chain
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+ cast
+)
+from pathlib import Path
+from types import ModuleType
+
+from distutils.errors import DistutilsOptionError
+
+from .._path import same_path as _same_path
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+ from setuptools.discovery import ConfigDiscovery # noqa
+ from distutils.dist import DistributionMetadata # noqa
+
+chain_iter = chain.from_iterable
+_Path = Union[str, os.PathLike]
+_K = TypeVar("_K")
+_V = TypeVar("_V", covariant=True)
+
+
+class StaticModule:
+ """Proxy to a module object that avoids executing arbitrary code."""
+
+ def __init__(self, name: str, spec: ModuleSpec):
+ module = ast.parse(pathlib.Path(spec.origin).read_bytes())
+ vars(self).update(locals())
+ del self.self
+
+ def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
+ for statement in self.module.body:
+ if isinstance(statement, ast.Assign):
+ yield from ((target, statement.value) for target in statement.targets)
+ elif isinstance(statement, ast.AnnAssign) and statement.value:
+ yield (statement.target, statement.value)
+
+ def __getattr__(self, attr):
+ """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
+ try:
+ return next(
+ ast.literal_eval(value)
+ for target, value in self._find_assignments()
+ if isinstance(target, ast.Name) and target.id == attr
+ )
+ except Exception as e:
+ raise AttributeError(f"{self.name} has no attribute {attr}") from e
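+
+# For illustration: given a hypothetical ``pkg/__init__.py`` containing only
+# ``__version__ = "1.2.3"``, the attribute can be retrieved without executing
+# the module, e.g.:
+#
+#     spec = importlib.util.spec_from_file_location("pkg", "pkg/__init__.py")
+#     StaticModule("pkg", spec).__version__  # -> "1.2.3"
+#
+# Anything that cannot be resolved via ``ast.literal_eval`` raises AttributeError.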
+
+
+def glob_relative(
+ patterns: Iterable[str], root_dir: Optional[_Path] = None
+) -> List[str]:
+ """Expand the list of glob patterns, but preserving relative paths.
+
+ :param list[str] patterns: List of glob patterns
+ :param str root_dir: Path to which globs should be relative
+ (current directory by default)
+ :rtype: list
+ """
+ glob_characters = {'*', '?', '[', ']', '{', '}'}
+ expanded_values = []
+ root_dir = root_dir or os.getcwd()
+ for value in patterns:
+
+ # Has globby characters?
+ if any(char in value for char in glob_characters):
+ # then expand the glob pattern while keeping paths *relative*:
+ glob_path = os.path.abspath(os.path.join(root_dir, value))
+ expanded_values.extend(sorted(
+ os.path.relpath(path, root_dir).replace(os.sep, "/")
+ for path in iglob(glob_path, recursive=True)))
+
+ else:
+ # take the value as-is
+ path = os.path.relpath(value, root_dir).replace(os.sep, "/")
+ expanded_values.append(path)
+
+ return expanded_values
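+
+# For illustration: if the current directory contains the hypothetical files
+# ``src/a.txt`` and ``src/b.txt``, then
+#
+#     glob_relative(["src/*.txt", "LICENSE"])
+#
+# returns ``["src/a.txt", "src/b.txt", "LICENSE"]`` (glob matches are sorted,
+# non-glob values are passed through, and ``/`` is always used as separator).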
+
+
+def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
+ """Return the content of the files concatenated using ``\n`` as str
+
+ This function is sandboxed and won't reach anything outside ``root_dir``
+
+ (By default ``root_dir`` is the current directory).
+ """
+ from setuptools.extern.more_itertools import always_iterable
+
+ root_dir = os.path.abspath(root_dir or os.getcwd())
+ _filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
+ return '\n'.join(
+ _read_file(path)
+ for path in _filter_existing_files(_filepaths)
+ if _assert_local(path, root_dir)
+ )
+
+
+def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
+ for path in filepaths:
+ if os.path.isfile(path):
+ yield path
+ else:
+ warnings.warn(f"File {path!r} cannot be found")
+
+
+def _read_file(filepath: Union[bytes, _Path]) -> str:
+ with io.open(filepath, encoding='utf-8') as f:
+ return f.read()
+
+
+def _assert_local(filepath: _Path, root_dir: str):
+ if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
+ msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
+ raise DistutilsOptionError(msg)
+
+ return True
+
+
+def read_attr(
+ attr_desc: str,
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+):
+ """Reads the value of an attribute from a module.
+
+ This function will first try to read the attribute statically
+ (via :func:`ast.literal_eval`), and only evaluate the module if it fails.
+
+ Examples:
+ read_attr("package.attr")
+ read_attr("package.module.attr")
+
+ :param str attr_desc: Dot-separated string describing how to reach the
+ attribute (see examples above)
+ :param dict[str, str] package_dir: Mapping of package names to their
+ location on disk (represented by paths relative to ``root_dir``).
+ :param str root_dir: Path to directory containing all the packages in
+ ``package_dir`` (current directory by default).
+ :rtype: str
+ """
+ root_dir = root_dir or os.getcwd()
+ attrs_path = attr_desc.strip().split('.')
+ attr_name = attrs_path.pop()
+ module_name = '.'.join(attrs_path)
+ module_name = module_name or '__init__'
+ _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+ spec = _find_spec(module_name, path)
+
+ try:
+ return getattr(StaticModule(module_name, spec), attr_name)
+ except Exception:
+ # fallback to evaluate module
+ module = _load_spec(spec, module_name)
+ return getattr(module, attr_name)
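+
+# For illustration: with a hypothetical layout ``src/mypkg/__init__.py``
+# defining ``__version__ = "0.1"``, the ``attr:`` style directive can be
+# resolved with
+#
+#     read_attr("mypkg.__version__", package_dir={"": "src"}, root_dir=".")
+#
+# which returns ``"0.1"`` (statically via ``ast.literal_eval`` when possible,
+# otherwise by importing the module).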
+
+
+def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
+ spec = importlib.util.spec_from_file_location(module_name, module_path)
+ spec = spec or importlib.util.find_spec(module_name)
+
+ if spec is None:
+ raise ModuleNotFoundError(module_name)
+
+ return spec
+
+
+def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
+ name = getattr(spec, "__name__", module_name)
+ if name in sys.modules:
+ return sys.modules[name]
+ module = importlib.util.module_from_spec(spec)
+ sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
+ spec.loader.exec_module(module) # type: ignore
+ return module
+
+
+def _find_module(
+ module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
+) -> Tuple[_Path, Optional[str], str]:
+ """Given a module (that could normally be imported by ``module_name``
+ after the build is complete), find the path to the parent directory where
+ it is contained and the canonical name that could be used to import it
+ considering the ``package_dir`` in the build configuration and ``root_dir``
+ """
+ parent_path = root_dir
+ module_parts = module_name.split('.')
+ if package_dir:
+ if module_parts[0] in package_dir:
+ # A custom path was specified for the module we want to import
+ custom_path = package_dir[module_parts[0]]
+ parts = custom_path.rsplit('/', 1)
+ if len(parts) > 1:
+ parent_path = os.path.join(root_dir, parts[0])
+ parent_module = parts[1]
+ else:
+ parent_module = custom_path
+ module_name = ".".join([parent_module, *module_parts[1:]])
+ elif '' in package_dir:
+ # A custom parent directory was specified for all root modules
+ parent_path = os.path.join(root_dir, package_dir[''])
+
+ path_start = os.path.join(parent_path, *module_name.split("."))
+ candidates = chain(
+ (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
+ iglob(f"{path_start}.*")
+ )
+ module_path = next((x for x in candidates if os.path.isfile(x)), None)
+ return parent_path, module_path, module_name
+
+
+def resolve_class(
+ qualified_class_name: str,
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+) -> Callable:
+ """Given a qualified class name, return the associated class object"""
+ root_dir = root_dir or os.getcwd()
+ idx = qualified_class_name.rfind('.')
+ class_name = qualified_class_name[idx + 1 :]
+ pkg_name = qualified_class_name[:idx]
+
+ _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
+ module = _load_spec(_find_spec(module_name, path), module_name)
+ return getattr(module, class_name)
+
+
+def cmdclass(
+ values: Dict[str, str],
+ package_dir: Optional[Mapping[str, str]] = None,
+ root_dir: Optional[_Path] = None
+) -> Dict[str, Callable]:
+ """Given a dictionary mapping command names to strings for qualified class
+ names, apply :func:`resolve_class` to the dict values.
+ """
+ return {k: resolve_class(v, package_dir, root_dir) for k, v in values.items()}
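+
+# For illustration: given a hypothetical ``custom_build.py`` at the project
+# root defining a ``BuildPy`` command class,
+#
+#     cmdclass({"build_py": "custom_build.BuildPy"})
+#
+# would return ``{"build_py": <class BuildPy>}``, locating and importing the
+# module via :func:`resolve_class`.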
+
+
+def find_packages(
+ *,
+ namespaces=True,
+ fill_package_dir: Optional[Dict[str, str]] = None,
+ root_dir: Optional[_Path] = None,
+ **kwargs
+) -> List[str]:
+ """Works similarly to :func:`setuptools.find_packages`, but with all
+ arguments given as keyword arguments. Moreover, ``where`` can be given
+ as a list (the results will be simply concatenated).
+
+ When the additional keyword argument ``namespaces`` is ``True``, it will
+ behave like :func:`setuptools.find_namespace_packages` (i.e. include
+ implicit namespaces as per :pep:`420`).
+
+ The ``where`` argument will be considered relative to ``root_dir`` (or the current
+ working directory when ``root_dir`` is not given).
+
+ If the ``fill_package_dir`` argument is passed, this function will consider it
+ as a data structure similar to the ``package_dir`` configuration parameter,
+ and will fill in any missing package locations.
+
+ :rtype: list
+ """
+ from setuptools.discovery import construct_package_dir
+ from setuptools.extern.more_itertools import unique_everseen, always_iterable
+
+ if namespaces:
+ from setuptools.discovery import PEP420PackageFinder as PackageFinder
+ else:
+ from setuptools.discovery import PackageFinder # type: ignore
+
+ root_dir = root_dir or os.curdir
+ where = kwargs.pop('where', ['.'])
+ packages: List[str] = []
+ fill_package_dir = {} if fill_package_dir is None else fill_package_dir
+ search = list(unique_everseen(always_iterable(where)))
+
+ if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
+ fill_package_dir.setdefault("", search[0])
+
+ for path in search:
+ package_path = _nest_path(root_dir, path)
+ pkgs = PackageFinder.find(package_path, **kwargs)
+ packages.extend(pkgs)
+ if pkgs and not (
+ fill_package_dir.get("") == path
+ or os.path.samefile(package_path, root_dir)
+ ):
+ fill_package_dir.update(construct_package_dir(pkgs, path))
+
+ return packages
+
+
+def _nest_path(parent: _Path, path: _Path) -> str:
+ path = parent if path in {".", ""} else os.path.join(parent, path)
+ return os.path.normpath(path)
+
+
+def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
+ """When getting the version directly from an attribute,
+ it should be normalised to string.
+ """
+ if callable(value):
+ value = value()
+
+ value = cast(Iterable[Union[str, int]], value)
+
+ if not isinstance(value, str):
+ if hasattr(value, '__iter__'):
+ value = '.'.join(map(str, value))
+ else:
+ value = '%s' % value
+
+ return value
+
+
+def canonic_package_data(package_data: dict) -> dict:
+ if "*" in package_data:
+ package_data[""] = package_data.pop("*")
+ return package_data
+
+
+def canonic_data_files(
+ data_files: Union[list, dict], root_dir: Optional[_Path] = None
+) -> List[Tuple[str, List[str]]]:
+ """For compatibility with ``setup.py``, ``data_files`` should be a list
+ of pairs instead of a dict.
+
+ This function also expands glob patterns.
+ """
+ if isinstance(data_files, list):
+ return data_files
+
+ return [
+ (dest, glob_relative(patterns, root_dir))
+ for dest, patterns in data_files.items()
+ ]
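+
+# For illustration: a dict such as ``{"share/mypkg": ["data/*.json"]}`` (with
+# hypothetical data files) is converted to the ``setup.py``-style list
+# ``[("share/mypkg", ["data/schema.json", ...])]``, with globs expanded
+# relative to ``root_dir``.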
+
+
+def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
+ """Given the contents of entry-points file,
+ process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
+ The first level keys are entry-point groups, the second level keys are
+ entry-point names, and the second level values are references to objects
+ (that correspond to the entry-point value).
+ """
+ parser = ConfigParser(default_section=None, delimiters=("=",)) # type: ignore
+ parser.optionxform = str # case sensitive
+ parser.read_string(text, text_source)
+ groups = {k: dict(v.items()) for k, v in parser.items()}
+ groups.pop(parser.default_section, None)
+ return groups
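+
+# For illustration:
+#
+#     entry_points("[console_scripts]\nmycmd = mypkg.cli:main\n")
+#     # -> {"console_scripts": {"mycmd": "mypkg.cli:main"}}
+#
+# (``mypkg`` and ``mycmd`` are hypothetical names.)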
+
+
+class EnsurePackagesDiscovered:
+ """Some expand functions require all the packages to already be discovered before
+ they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.
+
+ Therefore in some cases we will need to run autodiscovery during the evaluation of
+ the configuration. However, it is better to postpone calling package discovery as
+ much as possible, because some parameters can influence it (e.g. ``package_dir``),
+ and those might not have been processed yet.
+ """
+
+ def __init__(self, distribution: "Distribution"):
+ self._dist = distribution
+ self._called = False
+
+ def __call__(self):
+ """Trigger the automatic package discovery, if it is still necessary."""
+ if not self._called:
+ self._called = True
+ self._dist.set_defaults(name=False) # Skip name, we can still be parsing
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ if self._called:
+ self._dist.set_defaults.analyse_name() # Now we can set a default name
+
+ def _get_package_dir(self) -> Mapping[str, str]:
+ self()
+ pkg_dir = self._dist.package_dir
+ return {} if pkg_dir is None else pkg_dir
+
+ @property
+ def package_dir(self) -> Mapping[str, str]:
+ """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
+ return LazyMappingProxy(self._get_package_dir)
+
+
+class LazyMappingProxy(Mapping[_K, _V]):
+ """Mapping proxy that delays resolving the target object, until really needed.
+
+ >>> def obtain_mapping():
+ ... print("Running expensive function!")
+ ... return {"key": "value", "other key": "other value"}
+ >>> mapping = LazyMappingProxy(obtain_mapping)
+ >>> mapping["key"]
+ Running expensive function!
+ 'value'
+ >>> mapping["other key"]
+ 'other value'
+ """
+
+ def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
+ self._obtain = obtain_mapping_value
+ self._value: Optional[Mapping[_K, _V]] = None
+
+ def _target(self) -> Mapping[_K, _V]:
+ if self._value is None:
+ self._value = self._obtain()
+ return self._value
+
+ def __getitem__(self, key: _K) -> _V:
+ return self._target()[key]
+
+ def __len__(self) -> int:
+ return len(self._target())
+
+ def __iter__(self) -> Iterator[_K]:
+ return iter(self._target())
diff --git a/e/lib/python3.11/site-packages/setuptools/config/pyprojecttoml.py b/e/lib/python3.11/site-packages/setuptools/config/pyprojecttoml.py
new file mode 100644
index 0000000..d995f0b
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/pyprojecttoml.py
@@ -0,0 +1,493 @@
+"""
+Load setuptools configuration from ``pyproject.toml`` files.
+
+**PRIVATE MODULE**: API reserved for setuptools internal usage only.
+"""
+import logging
+import os
+import warnings
+from contextlib import contextmanager
+from functools import partial
+from typing import TYPE_CHECKING, Callable, Dict, Optional, Mapping, Union
+
+from setuptools.errors import FileError, OptionError
+
+from . import expand as _expand
+from ._apply_pyprojecttoml import apply as _apply
+from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+
+_Path = Union[str, os.PathLike]
+_logger = logging.getLogger(__name__)
+
+
+def load_file(filepath: _Path) -> dict:
+ from setuptools.extern import tomli # type: ignore
+
+ with open(filepath, "rb") as file:
+ return tomli.load(file)
+
+
+def validate(config: dict, filepath: _Path) -> bool:
+ from . import _validate_pyproject as validator
+
+ trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
+ if hasattr(trove_classifier, "_disable_download"):
+ # Improve reproducibility by default. See issue 31 for validate-pyproject.
+ trove_classifier._disable_download() # type: ignore
+
+ try:
+ return validator.validate(config)
+ except validator.ValidationError as ex:
+ summary = f"configuration error: {ex.summary}"
+ if ex.name.strip("`") != "project":
+ # Probably it is just a field missing/misnamed, not worth the verbosity...
+ _logger.debug(summary)
+ _logger.debug(ex.details)
+
+ error = f"invalid pyproject.toml config: {ex.name}."
+ raise ValueError(f"{error}\n{summary}") from None
+
+
+def apply_configuration(
+ dist: "Distribution",
+ filepath: _Path,
+ ignore_option_errors=False,
+) -> "Distribution":
+ """Apply the configuration from a ``pyproject.toml`` file into an existing
+ distribution object.
+ """
+ config = read_configuration(filepath, True, ignore_option_errors, dist)
+ return _apply(dist, config, filepath)
+
+
+def read_configuration(
+ filepath: _Path,
+ expand=True,
+ ignore_option_errors=False,
+ dist: Optional["Distribution"] = None,
+):
+ """Read given configuration file and returns options from it as a dict.
+
+ :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
+ format.
+
+ :param bool expand: Whether to expand directives and other computed values
+ (i.e. post-process the given configuration)
+
+ :param bool ignore_option_errors: Whether to silently ignore
+ options, values of which could not be resolved (e.g. due to exceptions
+ in directives such as file:, attr:, etc.).
+ If False exceptions are propagated as expected.
+
+ :param Distribution|None dist: Distribution object to which the configuration refers.
+ If not given a dummy object will be created and discarded after the
+ configuration is read. This is used for auto-discovery of packages in the case
+ a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded.
+ When ``expand=False`` this object is simply ignored.
+
+ :rtype: dict
+ """
+ filepath = os.path.abspath(filepath)
+
+ if not os.path.isfile(filepath):
+ raise FileError(f"Configuration file {filepath!r} does not exist.")
+
+ asdict = load_file(filepath) or {}
+ project_table = asdict.get("project", {})
+ tool_table = asdict.get("tool", {})
+ setuptools_table = tool_table.get("setuptools", {})
+ if not asdict or not (project_table or setuptools_table):
+ return {} # User is not using pyproject to configure setuptools
+
+ if setuptools_table:
+ # TODO: Remove the following once the feature stabilizes:
+ msg = "Support for `[tool.setuptools]` in `pyproject.toml` is still *beta*."
+ warnings.warn(msg, _BetaConfiguration)
+
+ # There is an overall sense in the community that making include_package_data=True
+ # the default would be an improvement.
+ # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
+ # therefore setting a default here is backwards compatible.
+ orig_setuptools_table = setuptools_table.copy()
+ if dist and getattr(dist, "include_package_data") is not None:
+ setuptools_table.setdefault("include-package-data", dist.include_package_data)
+ else:
+ setuptools_table.setdefault("include-package-data", True)
+ # Persist changes:
+ asdict["tool"] = tool_table
+ tool_table["setuptools"] = setuptools_table
+
+ try:
+ # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
+ subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
+ validate(subset, filepath)
+ except Exception as ex:
+ # TODO: Remove the following once the feature stabilizes:
+ if _skip_bad_config(project_table, orig_setuptools_table, dist):
+ return {}
+ # TODO: After the previous statement is removed the try/except can be replaced
+ # by the _ignore_errors context manager.
+ if ignore_option_errors:
+ _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+ else:
+ raise # re-raise exception
+
+ if expand:
+ root_dir = os.path.dirname(filepath)
+ return expand_configuration(asdict, root_dir, ignore_option_errors, dist)
+
+ return asdict
+
+
+def _skip_bad_config(
+ project_cfg: dict, setuptools_cfg: dict, dist: Optional["Distribution"]
+) -> bool:
+ """Be temporarily forgiving with invalid ``pyproject.toml``"""
+ # See pypa/setuptools#3199 and pypa/cibuildwheel#1064
+
+ if dist is None or (
+ dist.metadata.name is None
+ and dist.metadata.version is None
+ and dist.install_requires is None
+ ):
+ # It seems that the build is not getting any configuration from other places
+ return False
+
+ if setuptools_cfg:
+ # If `[tool.setuptools]` is set, then `pyproject.toml` config is intentional
+ return False
+
+ given_config = set(project_cfg.keys())
+ popular_subset = {"name", "version", "python_requires", "requires-python"}
+ if given_config <= popular_subset:
+ # It seems that the docs in cibuildwheel have been inadvertently encouraging users
+ # to create `pyproject.toml` files that are not compliant with the standards.
+ # Let's be forgiving for the time being.
+ warnings.warn(_InvalidFile.message(), _InvalidFile, stacklevel=2)
+ return True
+
+ return False
+
+
+def expand_configuration(
+ config: dict,
+ root_dir: Optional[_Path] = None,
+ ignore_option_errors: bool = False,
+ dist: Optional["Distribution"] = None,
+) -> dict:
+ """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
+ find their final values.
+
+ :param dict config: Dict containing the configuration for the distribution
+ :param str root_dir: Top-level directory for the distribution/project
+ (the same directory where ``pyproject.toml`` is placed)
+ :param bool ignore_option_errors: see :func:`read_configuration`
+ :param Distribution|None dist: Distribution object to which the configuration refers.
+ If not given a dummy object will be created and discarded after the
+ configuration is read. Used in the case a dynamic configuration
+ (e.g. ``attr`` or ``cmdclass``) needs to be expanded.
+
+ :rtype: dict
+ """
+ return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand()
+
+
+class _ConfigExpander:
+ def __init__(
+ self,
+ config: dict,
+ root_dir: Optional[_Path] = None,
+ ignore_option_errors: bool = False,
+ dist: Optional["Distribution"] = None,
+ ):
+ self.config = config
+ self.root_dir = root_dir or os.getcwd()
+ self.project_cfg = config.get("project", {})
+ self.dynamic = self.project_cfg.get("dynamic", [])
+ self.setuptools_cfg = config.get("tool", {}).get("setuptools", {})
+ self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
+ self.ignore_option_errors = ignore_option_errors
+ self._dist = dist
+
+ def _ensure_dist(self) -> "Distribution":
+ from setuptools.dist import Distribution
+
+ attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
+ return self._dist or Distribution(attrs)
+
+ def _process_field(self, container: dict, field: str, fn: Callable):
+ if field in container:
+ with _ignore_errors(self.ignore_option_errors):
+ container[field] = fn(container[field])
+
+ def _canonic_package_data(self, field="package-data"):
+ package_data = self.setuptools_cfg.get(field, {})
+ return _expand.canonic_package_data(package_data)
+
+ def expand(self):
+ self._expand_packages()
+ self._canonic_package_data()
+ self._canonic_package_data("exclude-package-data")
+
+ # A distribution object is required for discovering the correct package_dir
+ dist = self._ensure_dist()
+ ctx = _EnsurePackagesDiscovered(dist, self.project_cfg, self.setuptools_cfg)
+ with ctx as ensure_discovered:
+ package_dir = ensure_discovered.package_dir
+ self._expand_data_files()
+ self._expand_cmdclass(package_dir)
+ self._expand_all_dynamic(dist, package_dir)
+
+ return self.config
+
+ def _expand_packages(self):
+ packages = self.setuptools_cfg.get("packages")
+ if packages is None or isinstance(packages, (list, tuple)):
+ return
+
+ find = packages.get("find")
+ if isinstance(find, dict):
+ find["root_dir"] = self.root_dir
+ find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {})
+ with _ignore_errors(self.ignore_option_errors):
+ self.setuptools_cfg["packages"] = _expand.find_packages(**find)
+
+ def _expand_data_files(self):
+ data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir)
+ self._process_field(self.setuptools_cfg, "data-files", data_files)
+
+ def _expand_cmdclass(self, package_dir: Mapping[str, str]):
+ root_dir = self.root_dir
+ cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
+ self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
+
+ def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
+ special = ( # need special handling
+ "version",
+ "readme",
+ "entry-points",
+ "scripts",
+ "gui-scripts",
+ "classifiers",
+ "dependencies",
+ "optional-dependencies",
+ )
+ # `_obtain` functions are assumed to raise appropriate exceptions/warnings.
+ obtained_dynamic = {
+ field: self._obtain(dist, field, package_dir)
+ for field in self.dynamic
+ if field not in special
+ }
+ obtained_dynamic.update(
+ self._obtain_entry_points(dist, package_dir) or {},
+ version=self._obtain_version(dist, package_dir),
+ readme=self._obtain_readme(dist),
+ classifiers=self._obtain_classifiers(dist),
+ dependencies=self._obtain_dependencies(dist),
+ optional_dependencies=self._obtain_optional_dependencies(dist),
+ )
+ # `None` indicates there is nothing in `tool.setuptools.dynamic` but the value
+ # might have already been set by setup.py/extensions, so avoid overwriting.
+ updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
+ self.project_cfg.update(updates)
+
+ def _ensure_previously_set(self, dist: "Distribution", field: str):
+ previous = _PREVIOUSLY_DEFINED[field](dist)
+ if previous is None and not self.ignore_option_errors:
+ msg = (
+ f"No configuration found for dynamic {field!r}.\n"
+ "Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
+ "\nothers must be specified via the equivalent attribute in `setup.py`."
+ )
+ raise OptionError(msg)
+
+ def _expand_directive(
+ self, specifier: str, directive, package_dir: Mapping[str, str]
+ ):
+ with _ignore_errors(self.ignore_option_errors):
+ root_dir = self.root_dir
+ if "file" in directive:
+ return _expand.read_files(directive["file"], root_dir)
+ if "attr" in directive:
+ return _expand.read_attr(directive["attr"], package_dir, root_dir)
+ raise ValueError(f"invalid `{specifier}`: {directive!r}")
+ return None
+
+ def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
+ if field in self.dynamic_cfg:
+ return self._expand_directive(
+ f"tool.setuptools.dynamic.{field}",
+ self.dynamic_cfg[field],
+ package_dir,
+ )
+ self._ensure_previously_set(dist, field)
+ return None
+
+ def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
+ # Since plugins can set version, let's silently skip if it cannot be obtained
+ if "version" in self.dynamic and "version" in self.dynamic_cfg:
+ return _expand.version(self._obtain(dist, "version", package_dir))
+ return None
+
+ def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
+ if "readme" not in self.dynamic:
+ return None
+
+ dynamic_cfg = self.dynamic_cfg
+ if "readme" in dynamic_cfg:
+ return {
+ "text": self._obtain(dist, "readme", {}),
+ "content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
+ }
+
+ self._ensure_previously_set(dist, "readme")
+ return None
+
+ def _obtain_entry_points(
+ self, dist: "Distribution", package_dir: Mapping[str, str]
+ ) -> Optional[Dict[str, dict]]:
+ fields = ("entry-points", "scripts", "gui-scripts")
+ if not any(field in self.dynamic for field in fields):
+ return None
+
+ text = self._obtain(dist, "entry-points", package_dir)
+ if text is None:
+ return None
+
+ groups = _expand.entry_points(text)
+ expanded = {"entry-points": groups}
+
+ def _set_scripts(field: str, group: str):
+ if group in groups:
+ value = groups.pop(group)
+ if field not in self.dynamic:
+ msg = _WouldIgnoreField.message(field, value)
+ warnings.warn(msg, _WouldIgnoreField)
+ # TODO: Don't set field when support for pyproject.toml stabilizes
+ # instead raise an error as specified in PEP 621
+ expanded[field] = value
+
+ _set_scripts("scripts", "console_scripts")
+ _set_scripts("gui-scripts", "gui_scripts")
+
+ return expanded
+
+ def _obtain_classifiers(self, dist: "Distribution"):
+ if "classifiers" in self.dynamic:
+ value = self._obtain(dist, "classifiers", {})
+ if value:
+ return value.splitlines()
+ return None
+
+ def _obtain_dependencies(self, dist: "Distribution"):
+ if "dependencies" in self.dynamic:
+ value = self._obtain(dist, "dependencies", {})
+ if value:
+ return _parse_requirements_list(value)
+ return None
+
+ def _obtain_optional_dependencies(self, dist: "Distribution"):
+ if "optional-dependencies" not in self.dynamic:
+ return None
+ if "optional-dependencies" in self.dynamic_cfg:
+ optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
+ assert isinstance(optional_dependencies_map, dict)
+ return {
+ group: _parse_requirements_list(self._expand_directive(
+ f"tool.setuptools.dynamic.optional-dependencies.{group}",
+ directive,
+ {},
+ ))
+ for group, directive in optional_dependencies_map.items()
+ }
+ self._ensure_previously_set(dist, "optional-dependencies")
+ return None
+
+
+def _parse_requirements_list(value):
+ return [
+ line
+ for line in value.splitlines()
+ if line.strip() and not line.strip().startswith("#")
+ ]
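+
+# For illustration: comments and blank lines are discarded, e.g.
+# ``_parse_requirements_list("# deps\nrequests>=2\n\npackaging\n")``
+# returns ``["requests>=2", "packaging"]``.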
+
+
+@contextmanager
+def _ignore_errors(ignore_option_errors: bool):
+ if not ignore_option_errors:
+ yield
+ return
+
+ try:
+ yield
+ except Exception as ex:
+ _logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
+
+
+class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
+ def __init__(
+ self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
+ ):
+ super().__init__(distribution)
+ self._project_cfg = project_cfg
+ self._setuptools_cfg = setuptools_cfg
+
+ def __enter__(self):
+ """When entering the context, the values of ``packages``, ``py_modules`` and
+ ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
+ """
+ dist, cfg = self._dist, self._setuptools_cfg
+ package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
+ package_dir.update(dist.package_dir or {})
+ dist.package_dir = package_dir # needs to be the same object
+
+ dist.set_defaults._ignore_ext_modules() # pyproject.toml-specific behaviour
+
+ # Set `name`, `py_modules` and `packages` in dist to short-circuit
+ # auto-discovery, but avoid overwriting empty lists purposefully set by users.
+ if dist.metadata.name is None:
+ dist.metadata.name = self._project_cfg.get("name")
+ if dist.py_modules is None:
+ dist.py_modules = cfg.get("py-modules")
+ if dist.packages is None:
+ dist.packages = cfg.get("packages")
+
+ return super().__enter__()
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """When exiting the context, if values of ``packages``, ``py_modules`` and
+ ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
+ """
+ # If anything was discovered set them back, so they count in the final config.
+ self._setuptools_cfg.setdefault("packages", self._dist.packages)
+ self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
+ return super().__exit__(exc_type, exc_value, traceback)
+
+
+class _BetaConfiguration(UserWarning):
+ """Explicitly inform users that some `pyproject.toml` configuration is *beta*"""
+
+
+class _InvalidFile(UserWarning):
+ """The given `pyproject.toml` file is invalid and would be ignored.
+ !!\n\n
+ ############################
+ # Invalid `pyproject.toml` #
+ ############################
+
+ Any configurations in `pyproject.toml` will be ignored.
+ Please note that future releases of setuptools will halt the build process
+ if an invalid file is given.
+
+ To prevent setuptools from considering `pyproject.toml` please
+ DO NOT include the `[project]` or `[tool.setuptools]` tables in your file.
+ \n\n!!
+ """
+
+ @classmethod
+ def message(cls):
+ from inspect import cleandoc
+ return cleandoc(cls.__doc__)
diff --git a/e/lib/python3.11/site-packages/setuptools/config/setupcfg.py b/e/lib/python3.11/site-packages/setuptools/config/setupcfg.py
new file mode 100644
index 0000000..c2a974d
--- /dev/null
+++ b/e/lib/python3.11/site-packages/setuptools/config/setupcfg.py
@@ -0,0 +1,762 @@
+"""
+Load setuptools configuration from ``setup.cfg`` files.
+
+**API will be made private in the future**
+"""
+import os
+
+import contextlib
+import functools
+import warnings
+from collections import defaultdict
+from functools import partial
+from functools import wraps
+from typing import (TYPE_CHECKING, Callable, Any, Dict, Generic, Iterable, List,
+ Optional, Tuple, TypeVar, Union)
+
+from distutils.errors import DistutilsOptionError, DistutilsFileError
+from setuptools.extern.packaging.requirements import Requirement, InvalidRequirement
+from setuptools.extern.packaging.version import Version, InvalidVersion
+from setuptools.extern.packaging.specifiers import SpecifierSet
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
+
+from . import expand
+
+if TYPE_CHECKING:
+ from setuptools.dist import Distribution # noqa
+ from distutils.dist import DistributionMetadata # noqa
+
+_Path = Union[str, os.PathLike]
+SingleCommandOptions = Dict["str", Tuple["str", Any]]
+"""Dict that associate the name of the options of a particular command to a
+tuple. The first element of the tuple indicates the origin of the option value
+(e.g. the name of the configuration file where it was read from),
+while the second element of the tuple is the option value itself
+"""
+AllCommandOptions = Dict["str", SingleCommandOptions] # cmd name => its options
+Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
+
+
+def read_configuration(
+ filepath: _Path,
+ find_others=False,
+ ignore_option_errors=False
+) -> dict:
+ """Read given configuration file and returns options from it as a dict.
+
+ :param str|unicode filepath: Path to configuration file
+ to get options from.
+
+ :param bool find_others: Whether to search for other configuration files
+ which could be in various places.
+
+ :param bool ignore_option_errors: Whether to silently ignore
+ options, values of which could not be resolved (e.g. due to exceptions
+ in directives such as file:, attr:, etc.).
+ If False exceptions are propagated as expected.
+
+ :rtype: dict
+ """
+ from setuptools.dist import Distribution
+
+ dist = Distribution()
+ filenames = dist.find_config_files() if find_others else []
+ handlers = _apply(dist, filepath, filenames, ignore_option_errors)
+ return configuration_to_dict(handlers)
+
+
+def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution":
+ """Apply the configuration from a ``setup.cfg`` file into an existing
+ distribution object.
+ """
+ _apply(dist, filepath)
+ dist._finalize_requires()
+ return dist
+
+
+def _apply(
+ dist: "Distribution", filepath: _Path,
+ other_files: Iterable[_Path] = (),
+ ignore_option_errors: bool = False,
+) -> Tuple["ConfigHandler", ...]:
+ """Read configuration from ``filepath`` and applies to the ``dist`` object."""
+ from setuptools.dist import _Distribution
+
+ filepath = os.path.abspath(filepath)
+
+ if not os.path.isfile(filepath):
+ raise DistutilsFileError('Configuration file %s does not exist.' % filepath)
+
+ current_directory = os.getcwd()
+ os.chdir(os.path.dirname(filepath))
+ filenames = [*other_files, filepath]
+
+ try:
+ _Distribution.parse_config_files(dist, filenames=filenames)
+ handlers = parse_configuration(
+ dist, dist.command_options, ignore_option_errors=ignore_option_errors
+ )
+ dist._finalize_license_files()
+ finally:
+ os.chdir(current_directory)
+
+ return handlers
+
+
+def _get_option(target_obj: Target, key: str):
+ """
+ Given a target object and option key, get that option from
+ the target object, either through a get_{key} method or
+ from an attribute directly.
+ """
+ getter_name = 'get_{key}'.format(**locals())
+ by_attribute = functools.partial(getattr, target_obj, key)
+ getter = getattr(target_obj, getter_name, by_attribute)
+ return getter()
+
+
+def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
+ """Returns configuration data gathered by given handlers as a dict.
+
+ :param list[ConfigHandler] handlers: Handlers list,
+ usually from parse_configuration()
+
+ :rtype: dict
+ """
+ config_dict: dict = defaultdict(dict)
+
+ for handler in handlers:
+ for option in handler.set_options:
+ value = _get_option(handler.target_obj, option)
+ config_dict[handler.section_prefix][option] = value
+
+ return config_dict
+
+
+def parse_configuration(
+ distribution: "Distribution",
+ command_options: AllCommandOptions,
+ ignore_option_errors=False
+) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
+ """Performs additional parsing of configuration options
+ for a distribution.
+
+ Returns a list of used option handlers.
+
+ :param Distribution distribution:
+ :param dict command_options:
+ :param bool ignore_option_errors: Whether to silently ignore
+ options, values of which could not be resolved (e.g. due to exceptions
+ in directives such as file:, attr:, etc.).
+ If False exceptions are propagated as expected.
+ :rtype: list
+ """
+ with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
+ options = ConfigOptionsHandler(
+ distribution,
+ command_options,
+ ignore_option_errors,
+ ensure_discovered,
+ )
+
+ options.parse()
+ if not distribution.package_dir:
+ distribution.package_dir = options.package_dir # Filled by `find_packages`
+
+ meta = ConfigMetadataHandler(
+ distribution.metadata,
+ command_options,
+ ignore_option_errors,
+ ensure_discovered,
+ distribution.package_dir,
+ distribution.src_root,
+ )
+ meta.parse()
+
+ return meta, options
+
+
+def _warn_accidental_env_marker_misconfig(label: str, orig_value: str, parsed: list):
+ """Because users sometimes misinterpret this configuration:
+
+ [options.extras_require]
+ foo = bar;python_version<"4"
+
+ It looks like one requirement with an environment marker
+ but because there is no newline, it's parsed as two requirements
+ with a semicolon as separator.
+
+ Therefore, if:
+ * input string does not contain a newline AND
+ * parsed result contains two requirements AND
+ * parsing of the two parts from the result ("<first>;<second>")
+ results in a valid Requirement with a valid marker
+ a UserWarning is shown to inform the user about the possible problem.
+ """
+ if "\n" in orig_value or len(parsed) != 2:
+ return
+
+ with contextlib.suppress(InvalidRequirement):
+ original_requirements_str = ";".join(parsed)
+ req = Requirement(original_requirements_str)
+ if req.marker is not None:
+ msg = (
+ f"One of the parsed requirements in `{label}` "
+ f"looks like a valid environment marker: '{parsed[1]}'\n"
+ "Make sure that the config is correct and check "
+ "https://setuptools.pypa.io/en/latest/userguide/declarative_config.html#opt-2" # noqa: E501
+ )
+ warnings.warn(msg, UserWarning)
+
+
+class ConfigHandler(Generic[Target]):
+ """Handles metadata supplied in configuration files."""
+
+ section_prefix: str
+ """Prefix for config sections handled by this handler.
+ Must be provided by subclasses.
+
+ """
+
+ aliases: Dict[str, str] = {}
+ """Options aliases.
+ For compatibility with various packages. E.g.: d2to1 and pbr.
+ Note: `-` in keys is replaced with `_` by config parser.
+
+ """
+
+ def __init__(
+ self,
+ target_obj: Target,
+ options: AllCommandOptions,
+ ignore_option_errors,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ ):
+ sections: AllCommandOptions = {}
+
+ section_prefix = self.section_prefix
+ for section_name, section_options in options.items():
+ if not section_name.startswith(section_prefix):
+ continue
+
+ section_name = section_name.replace(section_prefix, '').strip('.')
+ sections[section_name] = section_options
+
+ self.ignore_option_errors = ignore_option_errors
+ self.target_obj = target_obj
+ self.sections = sections
+ self.set_options: List[str] = []
+ self.ensure_discovered = ensure_discovered
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ raise NotImplementedError(
+ '%s must provide .parsers property' % self.__class__.__name__
+ )
+
+ def __setitem__(self, option_name, value):
+ unknown = tuple()
+ target_obj = self.target_obj
+
+ # Translate alias into real name.
+ option_name = self.aliases.get(option_name, option_name)
+
+ current_value = getattr(target_obj, option_name, unknown)
+
+ if current_value is unknown:
+ raise KeyError(option_name)
+
+ if current_value:
+ # Already inhabited. Skipping.
+ return
+
+ skip_option = False
+ parser = self.parsers.get(option_name)
+ if parser:
+ try:
+ value = parser(value)
+
+ except Exception:
+ skip_option = True
+ if not self.ignore_option_errors:
+ raise
+
+ if skip_option:
+ return
+
+ setter = getattr(target_obj, 'set_%s' % option_name, None)
+ if setter is None:
+ setattr(target_obj, option_name, value)
+ else:
+ setter(value)
+
+ self.set_options.append(option_name)
+
+ @classmethod
+ def _parse_list(cls, value, separator=','):
+ """Represents value as a list.
+
+ Value is split either by separator (defaults to comma) or by lines.
+
+ :param value:
+ :param separator: List items separator character.
+ :rtype: list
+ """
+ if isinstance(value, list): # _get_parser_compound case
+ return value
+
+ if '\n' in value:
+ value = value.splitlines()
+ else:
+ value = value.split(separator)
+
+ return [chunk.strip() for chunk in value if chunk.strip()]
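+
+ # For illustration: ``_parse_list("foo, bar")`` and the multi-line value
+ # ``"foo\nbar"`` both yield ``["foo", "bar"]`` (newlines take precedence
+ # over the separator).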
+
+ @classmethod
+ def _parse_dict(cls, value):
+ """Represents value as a dict.
+
+ :param value:
+ :rtype: dict
+ """
+ separator = '='
+ result = {}
+ for line in cls._parse_list(value):
+ key, sep, val = line.partition(separator)
+ if sep != separator:
+ raise DistutilsOptionError(
+ 'Unable to parse option value to dict: %s' % value
+ )
+ result[key.strip()] = val.strip()
+
+ return result
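+
+ # For illustration: ``_parse_dict("where = src\ninclude = mypkg*")`` returns
+ # ``{"where": "src", "include": "mypkg*"}``; entries without ``=`` raise
+ # DistutilsOptionError.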
+
+ @classmethod
+ def _parse_bool(cls, value):
+ """Represents value as boolean.
+
+ :param value:
+ :rtype: bool
+ """
+ value = value.lower()
+ return value in ('1', 'true', 'yes')
+
+ @classmethod
+ def _exclude_files_parser(cls, key):
+ """Returns a parser function to make sure field inputs
+ are not files.
+
+ Parses a value after getting the key so error messages are
+ more informative.
+
+ :param key:
+ :rtype: callable
+ """
+
+ def parser(value):
+ exclude_directive = 'file:'
+ if value.startswith(exclude_directive):
+ raise ValueError(
+ 'Only strings are accepted for the {0} field, '
+ 'files are not accepted'.format(key)
+ )
+ return value
+
+ return parser
+
+ @classmethod
+ def _parse_file(cls, value, root_dir: _Path):
+ """Represents value as a string, allowing including text
+ from nearest files using `file:` directive.
+
+ Directive is sandboxed and won't reach anything outside
+ directory with setup.py.
+
+ Examples:
+ file: README.rst, CHANGELOG.md, src/file.txt
+
+ :param str value:
+ :rtype: str
+ """
+ include_directive = 'file:'
+
+ if not isinstance(value, str):
+ return value
+
+ if not value.startswith(include_directive):
+ return value
+
+ spec = value[len(include_directive) :]
+ filepaths = (path.strip() for path in spec.split(','))
+ return expand.read_files(filepaths, root_dir)
+
+ def _parse_attr(self, value, package_dir, root_dir: _Path):
+ """Represents value as a module attribute.
+
+ Examples:
+ attr: package.attr
+ attr: package.module.attr
+
+ :param str value:
+ :rtype: str
+ """
+ attr_directive = 'attr:'
+ if not value.startswith(attr_directive):
+ return value
+
+ attr_desc = value.replace(attr_directive, '')
+
+ # Make sure package_dir is populated correctly, so `attr:` directives can work
+ package_dir.update(self.ensure_discovered.package_dir)
+ return expand.read_attr(attr_desc, package_dir, root_dir)
+
+ @classmethod
+ def _get_parser_compound(cls, *parse_methods):
+ """Returns parser function to represents value as a list.
+
+ Parses a value applying given methods one after another.
+
+ :param parse_methods:
+ :rtype: callable
+ """
+
+ def parse(value):
+ parsed = value
+
+ for method in parse_methods:
+ parsed = method(parsed)
+
+ return parsed
+
+ return parse
+
+ @classmethod
+ def _parse_section_to_dict_with_key(cls, section_options, values_parser):
+ """Parses section options into a dictionary.
+
+ Applies a given parser to each option in a section.
+
+ :param dict section_options:
+ :param callable values_parser: function with 2 args corresponding to key, value
+ :rtype: dict
+ """
+ value = {}
+ for key, (_, val) in section_options.items():
+ value[key] = values_parser(key, val)
+ return value
+
+ @classmethod
+ def _parse_section_to_dict(cls, section_options, values_parser=None):
+ """Parses section options into a dictionary.
+
+ Optionally applies a given parser to each value.
+
+ :param dict section_options:
+ :param callable values_parser: function with 1 arg corresponding to option value
+ :rtype: dict
+ """
+ parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
+ return cls._parse_section_to_dict_with_key(section_options, parser)
+
+ def parse_section(self, section_options):
+ """Parses configuration file section.
+
+ :param dict section_options:
+ """
+ for (name, (_, value)) in section_options.items():
+ with contextlib.suppress(KeyError):
+ # Keep silent, as a new option may appear anytime.
+ self[name] = value
+
+ def parse(self):
+ """Parses configuration file items from one
+ or more related sections.
+
+ """
+ for section_name, section_options in self.sections.items():
+
+ method_postfix = ''
+ if section_name: # [section.option] variant
+ method_postfix = '_%s' % section_name
+
+ section_parser_method: Optional[Callable] = getattr(
+ self,
+ # Dots in section names are translated into dunderscores.
+ ('parse_section%s' % method_postfix).replace('.', '__'),
+ None,
+ )
+
+ if section_parser_method is None:
+ raise DistutilsOptionError(
+ 'Unsupported distribution option section: [%s.%s]'
+ % (self.section_prefix, section_name)
+ )
+
+ section_parser_method(section_options)
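+ # Illustrative sketch (not part of the original source): with
+ # ``section_prefix = 'options'``, the bare ``[options]`` section is handled
+ # by ``parse_section``, while ``[options.packages.find]`` is stored under
+ # the key ``packages.find`` and dispatched to
+ # ``parse_section_packages__find`` (dots become double underscores).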
+
+ def _deprecated_config_handler(self, func, msg, warning_class):
+ """this function will wrap around parameters that are deprecated
+
+ :param msg: deprecation message
+ :param warning_class: class of warning exception to be raised
+ :param func: function to be wrapped around
+ """
+
+ @wraps(func)
+ def config_handler(*args, **kwargs):
+ warnings.warn(msg, warning_class)
+ return func(*args, **kwargs)
+
+ return config_handler
+
+
+class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
+
+ section_prefix = 'metadata'
+
+ aliases = {
+ 'home_page': 'url',
+ 'summary': 'description',
+ 'classifier': 'classifiers',
+ 'platform': 'platforms',
+ }
+
+ strict_mode = False
+ """We need to keep it loose, to be partially compatible with
+ `pbr` and `d2to1` packages which also uses `metadata` section.
+
+ """
+
+ def __init__(
+ self,
+ target_obj: "DistributionMetadata",
+ options: AllCommandOptions,
+ ignore_option_errors: bool,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ package_dir: Optional[dict] = None,
+ root_dir: _Path = os.curdir
+ ):
+ super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
+ self.package_dir = package_dir
+ self.root_dir = root_dir
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ parse_list = self._parse_list
+ parse_file = partial(self._parse_file, root_dir=self.root_dir)
+ parse_dict = self._parse_dict
+ exclude_files_parser = self._exclude_files_parser
+
+ return {
+ 'platforms': parse_list,
+ 'keywords': parse_list,
+ 'provides': parse_list,
+ 'requires': self._deprecated_config_handler(
+ parse_list,
+ "The requires parameter is deprecated, please use "
+ "install_requires for runtime dependencies.",
+ SetuptoolsDeprecationWarning,
+ ),
+ 'obsoletes': parse_list,
+ 'classifiers': self._get_parser_compound(parse_file, parse_list),
+ 'license': exclude_files_parser('license'),
+ 'license_file': self._deprecated_config_handler(
+ exclude_files_parser('license_file'),
+ "The license_file parameter is deprecated, "
+ "use license_files instead.",
+ SetuptoolsDeprecationWarning,
+ ),
+ 'license_files': parse_list,
+ 'description': parse_file,
+ 'long_description': parse_file,
+ 'version': self._parse_version,
+ 'project_urls': parse_dict,
+ }
+
+ def _parse_version(self, value):
+ """Parses `version` option value.
+
+ :param value:
+ :rtype: str
+
+ """
+ version = self._parse_file(value, self.root_dir)
+
+ if version != value:
+ version = version.strip()
+ # Be strict about versions loaded from file because it's easy to
+ # accidentally include newlines and other unintended content
+ try:
+ Version(version)
+ except InvalidVersion:
+ tmpl = (
+ 'Version loaded from {value} does not '
+ 'comply with PEP 440: {version}'
+ )
+ raise DistutilsOptionError(tmpl.format(**locals()))
+
+ return version
+
+ return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
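+ # Illustrative sketch (hypothetical names): both directive forms reach this
+ # method, e.g.
+ #
+ #     version = file: VERSION.txt        # file contents, validated as PEP 440
+ #     version = attr: mypkg.__version__  # attribute value via expand.version()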
+
+
+class ConfigOptionsHandler(ConfigHandler["Distribution"]):
+
+ section_prefix = 'options'
+
+ def __init__(
+ self,
+ target_obj: "Distribution",
+ options: AllCommandOptions,
+ ignore_option_errors: bool,
+ ensure_discovered: expand.EnsurePackagesDiscovered,
+ ):
+ super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
+ self.root_dir = target_obj.src_root
+ self.package_dir: Dict[str, str] = {} # To be filled by `find_packages`
+
+ @classmethod
+ def _parse_list_semicolon(cls, value):
+ return cls._parse_list(value, separator=';')
+
+ def _parse_file_in_root(self, value):
+ return self._parse_file(value, root_dir=self.root_dir)
+
+ def _parse_requirements_list(self, label: str, value: str):
+ # Parse a requirements list, either read in via a `file:` directive
+ # or given inline as a list.
+ parsed = self._parse_list_semicolon(self._parse_file_in_root(value))
+ _warn_accidental_env_marker_misconfig(label, value, parsed)
+ # Filter it to only include lines that are not comments. `parse_list`
+ # will have stripped each line and filtered out empties.
+ return [line for line in parsed if not line.startswith("#")]
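+ # Illustrative sketch (hypothetical names): both of these forms are accepted,
+ # with '#'-prefixed comment lines dropped by the filter above:
+ #
+ #     install_requires = file: requirements.in
+ #
+ #     install_requires =
+ #         requests>=2.0
+ #         importlib-metadata; python_version < "3.8"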
+
+ @property
+ def parsers(self):
+ """Metadata item name to parser function mapping."""
+ parse_list = self._parse_list
+ parse_bool = self._parse_bool
+ parse_dict = self._parse_dict
+ parse_cmdclass = self._parse_cmdclass
+
+ return {
+ 'zip_safe': parse_bool,
+ 'include_package_data': parse_bool,
+ 'package_dir': parse_dict,
+ 'scripts': parse_list,
+ 'eager_resources': parse_list,
+ 'dependency_links': parse_list,
+ 'namespace_packages': self._deprecated_config_handler(
+ parse_list,
+ "The namespace_packages parameter is deprecated, "
+ "consider using implicit namespaces instead (PEP 420).",
+ SetuptoolsDeprecationWarning,
+ ),
+ 'install_requires': partial(
+ self._parse_requirements_list, "install_requires"
+ ),
+ 'setup_requires': self._parse_list_semicolon,
+ 'tests_require': self._parse_list_semicolon,
+ 'packages': self._parse_packages,
+ 'entry_points': self._parse_file_in_root,
+ 'py_modules': parse_list,
+ 'python_requires': SpecifierSet,
+ 'cmdclass': parse_cmdclass,
+ }
+
+ def _parse_cmdclass(self, value):
+ package_dir = self.ensure_discovered.package_dir
+ return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)
+
+ def _parse_packages(self, value):
+ """Parses `packages` option value.
+
+ :param value:
+ :rtype: list
+ """
+ find_directives = ['find:', 'find_namespace:']
+ trimmed_value = value.strip()
+
+ if trimmed_value not in find_directives:
+ return self._parse_list(value)
+
+ # Read function arguments from a dedicated section.
+ find_kwargs = self.parse_section_packages__find(
+ self.sections.get('packages.find', {})
+ )
+
+ find_kwargs.update(
+ namespaces=(trimmed_value == find_directives[1]),
+ root_dir=self.root_dir,
+ fill_package_dir=self.package_dir,
+ )
+
+ return expand.find_packages(**find_kwargs)
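+ # Illustrative sketch (not part of the original source): an explicit list
+ # such as ``packages = mypkg, mypkg.sub`` is handled by ``_parse_list``,
+ # whereas the directive form
+ #
+ #     packages = find:
+ #
+ # delegates discovery to ``expand.find_packages``, optionally configured by
+ # an ``[options.packages.find]`` section (see the method below).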
+
+ def parse_section_packages__find(self, section_options):
+ """Parses `packages.find` configuration file section.
+
+ To be used in conjunction with _parse_packages().
+
+ :param dict section_options:
+ """
+ section_data = self._parse_section_to_dict(section_options, self._parse_list)
+
+ valid_keys = ['where', 'include', 'exclude']
+
+ find_kwargs = dict(
+ [(k, v) for k, v in section_data.items() if k in valid_keys and v]
+ )
+
+ where = find_kwargs.get('where')
+ if where is not None:
+ find_kwargs['where'] = where[0]  # cast one-element list to a single value
+
+ return find_kwargs
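+ # Illustrative sketch (hypothetical values): a section such as
+ #
+ #     [options.packages.find]
+ #     where = src
+ #     exclude = tests*
+ #
+ # becomes ``{'where': 'src', 'exclude': ['tests*']}`` after the
+ # single-value cast above.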
+
+ def parse_section_entry_points(self, section_options):
+ """Parses `entry_points` configuration file section.
+
+ :param dict section_options:
+ """
+ parsed = self._parse_section_to_dict(section_options, self._parse_list)
+ self['entry_points'] = parsed
+
+ def _parse_package_data(self, section_options):
+ package_data = self._parse_section_to_dict(section_options, self._parse_list)
+ return expand.canonic_package_data(package_data)
+
+ def parse_section_package_data(self, section_options):
+ """Parses `package_data` configuration file section.
+
+ :param dict section_options:
+ """
+ self['package_data'] = self._parse_package_data(section_options)
+
+ def parse_section_exclude_package_data(self, section_options):
+ """Parses `exclude_package_data` configuration file section.
+
+ :param dict section_options:
+ """
+ self['exclude_package_data'] = self._parse_package_data(section_options)
+
+ def parse_section_extras_require(self, section_options):
+ """Parses `extras_require` configuration file section.
+
+ :param dict section_options:
+ """
+ parsed = self._parse_section_to_dict_with_key(
+ section_options,
+ lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v)
+ )
+
+ self['extras_require'] = parsed
+
+ def parse_section_data_files(self, section_options):
+ """Parses `data_files` configuration file section.
+
+ :param dict section_options:
+ """
+ parsed = self._parse_section_to_dict(section_options, self._parse_list)
+ self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
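+ # Illustrative sketch (hypothetical paths): a section such as
+ #
+ #     [options.data_files]
+ #     etc/myapp = config/app.cfg
+ #
+ # is normalized by ``expand.canonic_data_files`` into the familiar
+ # ``data_files=[("etc/myapp", ["config/app.cfg"])]`` form.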