diff --git a/tests/_metadata.py b/tests/_metadata.py
index dbfd66d3e..bce0e3580 100644
--- a/tests/_metadata.py
+++ b/tests/_metadata.py
@@ -131,7 +131,7 @@ class StubMetadata:
     """
 
     version: str
-    requires: Annotated[list[str], "The raw requirements as listed in METADATA.toml"]
+    requires: Annotated[list[Requirement], "The parsed requirements as listed in METADATA.toml"]
     extra_description: str | None
     stub_distribution: Annotated[str, "The name under which the distribution is uploaded to PyPI"]
     upstream_repository: Annotated[str, "The URL of the upstream repository"] | None
@@ -201,14 +201,9 @@ def read_metadata(distribution: str) -> StubMetadata:
     # Check that the version parses
     Version(version[:-2] if version.endswith(".*") else version)
 
-    requires: object = data.get("requires", [])
-    assert isinstance(requires, list)
-    for req in requires:
-        assert isinstance(req, str), f"Invalid requirement {req!r} for {distribution!r}"
-        for space in " \t\n":
-            assert space not in req, f"For consistency, requirement should not have whitespace: {req!r}"
-        # Check that the requirement parses
-        Requirement(req)
+    requires_s: object = data.get("requires", [])
+    assert isinstance(requires_s, list)
+    requires = [parse_requires(distribution, req) for req in requires_s]
 
     extra_description: object = data.get("extra_description")
     assert isinstance(extra_description, (str, type(None)))
@@ -292,9 +287,16 @@ def read_metadata(distribution: str) -> StubMetadata:
     )
 
 
+def parse_requires(distribution: str, req: object) -> Requirement:
+    assert isinstance(req, str), f"Invalid requirement {req!r} for {distribution!r}"
+    for space in " \t\n":
+        assert space not in req, f"For consistency, requirement should not have whitespace: {req!r}"
+    return Requirement(req)
+
+
 class PackageDependencies(NamedTuple):
-    typeshed_pkgs: tuple[str, ...]
-    external_pkgs: tuple[str, ...]
+    typeshed_pkgs: tuple[Requirement, ...]
+    external_pkgs: tuple[Requirement, ...]
 
 
 @cache
@@ -317,17 +319,15 @@ def read_dependencies(distribution: str) -> PackageDependencies:
     If a typeshed stub is removed, this function will consider it to be an external dependency.
     """
     pypi_name_to_typeshed_name_mapping = get_pypi_name_to_typeshed_name_mapping()
-    typeshed: list[str] = []
-    external: list[str] = []
+    typeshed: list[Requirement] = []
+    external: list[Requirement] = []
     for dependency in read_metadata(distribution).requires:
-        maybe_typeshed_dependency = Requirement(dependency).name
-        if maybe_typeshed_dependency in pypi_name_to_typeshed_name_mapping:
-            typeshed.append(pypi_name_to_typeshed_name_mapping[maybe_typeshed_dependency])
+        if dependency.name in pypi_name_to_typeshed_name_mapping:
+            req = Requirement(str(dependency))  # copy the requirement
+            req.name = pypi_name_to_typeshed_name_mapping[dependency.name]
+            typeshed.append(req)
         else:
-            # convert to Requirement and then back to str
-            # to make sure that the requirements all have a normalised string representation
-            # (This will also catch any malformed requirements early)
-            external.append(str(Requirement(dependency)))
+            external.append(dependency)
     return PackageDependencies(tuple(typeshed), tuple(external))
 
 
@@ -341,13 +341,13 @@ def get_recursive_requirements(package_name: str) -> PackageDependencies:
     `get_recursive_requirements("caldav")` will determine that the stubs
     for `caldav` have both `requests` and `urllib3` as typeshed-internal dependencies.
     """
-    typeshed: set[str] = set()
-    external: set[str] = set()
+    typeshed: set[Requirement] = set()
+    external: set[Requirement] = set()
     non_recursive_requirements = read_dependencies(package_name)
     typeshed.update(non_recursive_requirements.typeshed_pkgs)
    external.update(non_recursive_requirements.external_pkgs)
     for pkg in non_recursive_requirements.typeshed_pkgs:
-        reqs = get_recursive_requirements(pkg)
+        reqs = get_recursive_requirements(pkg.name)
         typeshed.update(reqs.typeshed_pkgs)
         external.update(reqs.external_pkgs)
-    return PackageDependencies(tuple(sorted(typeshed)), tuple(sorted(external)))
+    return PackageDependencies(tuple(typeshed), tuple(external))
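As background (not part of the patch), a minimal sketch of the `packaging.requirements.Requirement` behaviour the new `parse_requires` and `read_dependencies` code relies on; the distribution names below are illustrative only:

```python
from packaging.requirements import Requirement

# A whitespace-free requirement string, as parse_requires() enforces.
req = Requirement("types-docutils>=0.20")
assert req.name == "types-docutils"        # parsed project name, no re-parsing needed
assert str(req) == "types-docutils>=0.20"  # str() gives a normalised representation

# The "copy, then rename" pattern used in read_dependencies():
stub_req = Requirement(str(req))   # independent copy via the string round-trip
stub_req.name = "docutils"         # hypothetical typeshed directory name for the stubs
assert str(stub_req) == "docutils>=0.20"
```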
""" - typeshed: set[str] = set() - external: set[str] = set() + typeshed: set[Requirement] = set() + external: set[Requirement] = set() non_recursive_requirements = read_dependencies(package_name) typeshed.update(non_recursive_requirements.typeshed_pkgs) external.update(non_recursive_requirements.external_pkgs) for pkg in non_recursive_requirements.typeshed_pkgs: - reqs = get_recursive_requirements(pkg) + reqs = get_recursive_requirements(pkg.name) typeshed.update(reqs.typeshed_pkgs) external.update(reqs.external_pkgs) - return PackageDependencies(tuple(sorted(typeshed)), tuple(sorted(external))) + return PackageDependencies(tuple(typeshed), tuple(external)) diff --git a/tests/get_external_stub_requirements.py b/tests/get_external_stub_requirements.py index 5d910635f..420e668b5 100644 --- a/tests/get_external_stub_requirements.py +++ b/tests/get_external_stub_requirements.py @@ -1,18 +1,21 @@ #!/usr/bin/env python3 + from __future__ import annotations import os import sys +from packaging.requirements import Requirement + from _metadata import read_dependencies distributions = sys.argv[1:] if not distributions: distributions = os.listdir("stubs") -requirements = set[str]() +requirements = set[Requirement]() for distribution in distributions: requirements.update(read_dependencies(distribution).external_pkgs) -for requirement in sorted(requirements): +for requirement in sorted(requirements, key=str): print(requirement) diff --git a/tests/mypy_test.py b/tests/mypy_test.py index e1b10fc8a..db505161e 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -20,6 +20,7 @@ from typing import Any, NamedTuple from typing_extensions import Annotated, TypeAlias import tomli +from packaging.requirements import Requirement from _metadata import PackageDependencies, get_recursive_requirements, read_metadata from _utils import ( @@ -277,10 +278,11 @@ def run_mypy( def add_third_party_files( distribution: str, files: list[Path], args: TestConfig, configurations: list[MypyDistConf], seen_dists: set[str] ) -> None: + typeshed_reqs = get_recursive_requirements(distribution).typeshed_pkgs if distribution in seen_dists: return seen_dists.add(distribution) - seen_dists.update(get_recursive_requirements(distribution).typeshed_pkgs) + seen_dists.update(r.name for r in typeshed_reqs) root = Path("stubs", distribution) for name in os.listdir(root): if name.startswith("."): @@ -404,8 +406,8 @@ _DISTRIBUTION_TO_VENV_MAPPING: dict[str, Path | None] = {} def setup_venv_for_external_requirements_set( - requirements_set: frozenset[str], tempdir: Path, args: TestConfig -) -> tuple[frozenset[str], Path]: + requirements_set: frozenset[Requirement], tempdir: Path, args: TestConfig +) -> tuple[frozenset[Requirement], Path]: venv_dir = tempdir / f".venv-{hash(requirements_set)}" uv_command = ["uv", "venv", str(venv_dir)] if not args.verbose: @@ -414,9 +416,10 @@ def setup_venv_for_external_requirements_set( return requirements_set, venv_dir -def install_requirements_for_venv(venv_dir: Path, args: TestConfig, external_requirements: frozenset[str]) -> None: +def install_requirements_for_venv(venv_dir: Path, args: TestConfig, external_requirements: frozenset[Requirement]) -> None: + req_args = sorted(str(req) for req in external_requirements) # Use --no-cache-dir to avoid issues with concurrent read/writes to the cache - uv_command = ["uv", "pip", "install", get_mypy_req(), *sorted(external_requirements), "--no-cache-dir"] + uv_command = ["uv", "pip", "install", get_mypy_req(), *req_args, "--no-cache-dir"] if args.verbose: with 
diff --git a/tests/pytype_test.py b/tests/pytype_test.py
index f4c215a93..9c835a938 100755
--- a/tests/pytype_test.py
+++ b/tests/pytype_test.py
@@ -31,8 +31,6 @@ import os
 import traceback
 from collections.abc import Iterable, Sequence
 
-from packaging.requirements import Requirement
-
 # pytype is not py.typed https://github.com/google/pytype/issues/1325
 from pytype import config as pytype_config, load_pytd  # type: ignore[import]
 from pytype.imports import typeshed  # type: ignore[import]
@@ -200,8 +198,7 @@ def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]:
     missing_modules = set()
     for distribution in stub_distributions:
         for external_req in read_dependencies(distribution).external_pkgs:
-            req_name = Requirement(external_req).name
-            associated_packages = _get_pkgs_associated_with_requirement(req_name)
+            associated_packages = _get_pkgs_associated_with_requirement(external_req.name)
             missing_modules.update(associated_packages)
 
     test_dir = os.path.dirname(__file__)
diff --git a/tests/regr_test.py b/tests/regr_test.py
index b6569f008..67cd954c2 100755
--- a/tests/regr_test.py
+++ b/tests/regr_test.py
@@ -139,13 +139,15 @@ def setup_testcase_dir(package: DistributionTests, tempdir: Path, verbosity: Ver
     # mypy refuses to consider a directory a "valid typeshed directory"
     # unless there's a stubs/mypy-extensions path inside it,
     # so add that to the list of stubs to copy over to the new directory
-    for requirement in {package.name, *requirements.typeshed_pkgs, "mypy-extensions"}:
+    typeshed_requirements = [r.name for r in requirements.typeshed_pkgs]
+    for requirement in {package.name, *typeshed_requirements, "mypy-extensions"}:
         shutil.copytree(Path("stubs", requirement), new_typeshed / "stubs" / requirement)
 
     if requirements.external_pkgs:
         venv_location = str(tempdir / VENV_DIR)
         subprocess.run(["uv", "venv", venv_location], check=True, capture_output=True)
-        uv_command = ["uv", "pip", "install", get_mypy_req(), *requirements.external_pkgs]
+        ext_requirements = [str(r) for r in requirements.external_pkgs]
+        uv_command = ["uv", "pip", "install", get_mypy_req(), *ext_requirements]
         if sys.platform == "win32":
             # Reads/writes to the cache are threadsafe with uv generally...
             # but not on old Windows versions
diff --git a/tests/stubtest_third_party.py b/tests/stubtest_third_party.py
index 65877e9f7..c22c81732 100755
--- a/tests/stubtest_third_party.py
+++ b/tests/stubtest_third_party.py
@@ -82,7 +82,8 @@ def run_stubtest(
         # Hopefully mypy continues to not need too many dependencies
         # TODO: Maybe find a way to cache these in CI
         dists_to_install = [dist_req, get_mypy_req()]
-        dists_to_install.extend(requirements.external_pkgs)  # Internal requirements are added to MYPYPATH
+        # Internal requirements are added to MYPYPATH
+        dists_to_install.extend(str(r) for r in requirements.external_pkgs)
 
         # Since the "gdb" Python package is available only inside GDB, it is not
         # possible to install it through pip, so stub tests cannot install it.
@@ -113,7 +114,7 @@ def run_stubtest(
         ]
 
         stubs_dir = dist.parent
-        mypypath_items = [str(dist)] + [str(stubs_dir / pkg) for pkg in requirements.typeshed_pkgs]
+        mypypath_items = [str(dist)] + [str(stubs_dir / pkg.name) for pkg in requirements.typeshed_pkgs]
         mypypath = os.pathsep.join(mypypath_items)
         # For packages that need a display, we need to pass at least $DISPLAY
         # to stubtest. $DISPLAY is set by xvfb-run in CI.
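Taken together, the parsed objects stay internal and are converted back to plain strings only at the edges, roughly as sketched below (illustrative names, not code from the patch): full requirement strings go to `uv pip install`, bare project names become `stubs/<name>` paths for copying and for `MYPYPATH`.

```python
import os
from pathlib import Path

from packaging.requirements import Requirement

# Hypothetical dependencies for one stubs package.
typeshed_pkgs = (Requirement("docutils>=0.20"), Requirement("urllib3"))
external_pkgs = (Requirement("lxml>=4.9"),)

# Full requirement strings go to the installer command line...
install_args = [str(r) for r in external_pkgs]  # ['lxml>=4.9']

# ...while bare project names become stubs/<name> paths for MYPYPATH.
stubs_dir = Path("stubs")
mypypath = os.pathsep.join(str(stubs_dir / r.name) for r in typeshed_pkgs)

print(install_args)
print(mypypath)  # e.g. stubs/docutils:stubs/urllib3 on POSIX
```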