regr_test.py: Allow non-types dependencies (#9382)

Author: Alex Waygood
Date: 2022-12-23 21:55:54 +00:00
Committed by: GitHub
Parent: 4379a6a509
Commit: 8671fc5c0f
5 changed files with 250 additions and 102 deletions


@@ -4,13 +4,18 @@ from __future__ import annotations
import os
import re
import subprocess
import sys
import venv
from collections.abc import Mapping
from functools import cache
from itertools import filterfalse
from pathlib import Path
from typing import NamedTuple
from typing_extensions import Annotated

import pathspec  # type: ignore[import]
import tomli
from packaging.requirements import Requirement

# Used to install system-wide packages for different OS types:
METADATA_MAPPING = {"linux": "apt_dependencies", "darwin": "brew_dependencies", "win32": "choco_dependencies"}
@@ -45,25 +50,112 @@ def print_success_msg() -> None:
 # ====================================================================
+class PackageDependencies(NamedTuple):
+    typeshed_pkgs: tuple[str, ...]
+    external_pkgs: tuple[str, ...]
+
+
 @cache
-def read_dependencies(distribution: str) -> tuple[str, ...]:
+def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]:
+    stub_name_map = {}
+    for typeshed_name in os.listdir("stubs"):
+        with Path("stubs", typeshed_name, "METADATA.toml").open("rb") as f:
+            pypi_name = tomli.load(f).get("stub_distribution", f"types-{typeshed_name}")
+        assert isinstance(pypi_name, str)
+        stub_name_map[pypi_name] = typeshed_name
+    return stub_name_map
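
# Illustrative sketch (not part of the commit): what the mapping built above
# might contain. The stub packages named here are hypothetical examples; by
# default a directory "stubs/foo" is assumed to publish its stubs on PyPI as
# "types-foo" unless its METADATA.toml overrides this via `stub_distribution`.
#
#     mapping = get_pypi_name_to_typeshed_name_mapping()
#     mapping["types-docutils"]  # -> "docutils"
#     mapping["types-Pillow"]    # -> "Pillow"
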
+@cache
+def read_dependencies(distribution: str) -> PackageDependencies:
+    """Read the dependencies listed in a METADATA.toml file for a stubs package.
+
+    Once the dependencies have been read,
+    determine which dependencies are typeshed-internal dependencies,
+    and which dependencies are external (non-types) dependencies.
+    For typeshed dependencies, translate the "dependency name" into the "package name";
+    for external dependencies, leave them as they are in the METADATA.toml file.
+
+    Note that this function may consider things to be typeshed stubs
+    even if they haven't yet been uploaded to PyPI.
+    If a typeshed stub is removed, this function will consider it to be an external dependency.
+    """
+    pypi_name_to_typeshed_name_mapping = get_pypi_name_to_typeshed_name_mapping()
     with Path("stubs", distribution, "METADATA.toml").open("rb") as f:
-        data = tomli.load(f)
-    requires = data.get("requires", [])
-    assert isinstance(requires, list)
-    dependencies = []
-    for dependency in requires:
+        dependencies = tomli.load(f).get("requires", [])
+    assert isinstance(dependencies, list)
+    typeshed, external = [], []
+    for dependency in dependencies:
         assert isinstance(dependency, str)
-        assert dependency.startswith("types-"), f"unrecognized dependency {dependency!r}"
-        dependencies.append(dependency[6:].split("<")[0])
-    return tuple(dependencies)
+        maybe_typeshed_dependency = Requirement(dependency).name
+        if maybe_typeshed_dependency in pypi_name_to_typeshed_name_mapping:
+            typeshed.append(pypi_name_to_typeshed_name_mapping[maybe_typeshed_dependency])
+        else:
+            external.append(dependency)
+    return PackageDependencies(tuple(typeshed), tuple(external))
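
# Illustrative sketch (not part of the commit): suppose a hypothetical stubs
# package "somepkg" whose METADATA.toml declares
#     requires = ["types-requests", "lxml>=4.9"]
# and assume "requests" has stubs in typeshed while "lxml" does not. Then:
#
#     deps = read_dependencies("somepkg")
#     deps.typeshed_pkgs  # -> ("requests",)   translated to the stubs/ directory name
#     deps.external_pkgs  # -> ("lxml>=4.9",)  kept exactly as written in METADATA.toml
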
-def get_recursive_requirements(package_name: str, seen: set[str] | None = None) -> list[str]:
-    seen = seen if seen is not None else {package_name}
-    for dependency in filterfalse(seen.__contains__, read_dependencies(package_name)):
-        seen.update(get_recursive_requirements(dependency, seen))
-    return sorted(seen | {package_name})
+@cache
+def get_recursive_requirements(package_name: str) -> PackageDependencies:
+    """Recursively gather dependencies for a single stubs package.
+
+    For example, if the stubs for `caldav`
+    declare a dependency on typeshed's stubs for `requests`,
+    and the stubs for requests declare a dependency on typeshed's stubs for `urllib3`,
+    `get_recursive_requirements("caldav")` will determine that the stubs for `caldav`
+    have both `requests` and `urllib3` as typeshed-internal dependencies.
+    """
+    typeshed: set[str] = set()
+    external: set[str] = set()
+    non_recursive_requirements = read_dependencies(package_name)
+    typeshed.update(non_recursive_requirements.typeshed_pkgs)
+    external.update(non_recursive_requirements.external_pkgs)
+    for pkg in non_recursive_requirements.typeshed_pkgs:
+        reqs = get_recursive_requirements(pkg)
+        typeshed.update(reqs.typeshed_pkgs)
+        external.update(reqs.external_pkgs)
+    return PackageDependencies(tuple(sorted(typeshed)), tuple(sorted(external)))
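
# Illustrative sketch (not part of the commit), extending the docstring's
# caldav example: if caldav's stubs require types-requests, and the requests
# stubs in turn require types-urllib3 plus a hypothetical external package
# "extpkg", then:
#
#     reqs = get_recursive_requirements("caldav")
#     reqs.typeshed_pkgs  # -> ("requests", "urllib3")  sorted; caldav itself is not included
#     reqs.external_pkgs  # -> ("extpkg",)              gathered from every level of recursion
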
+# ====================================================================
+# Dynamic venv creation
+# ====================================================================
+
+
+class VenvInfo(NamedTuple):
+    pip_exe: Annotated[str, "A path to the venv's pip executable"]
+    python_exe: Annotated[str, "A path to the venv's python executable"]
+
+    @staticmethod
+    def of_existing_venv(venv_dir: Path) -> VenvInfo:
+        if sys.platform == "win32":
+            pip = venv_dir / "Scripts" / "pip.exe"
+            python = venv_dir / "Scripts" / "python.exe"
+        else:
+            pip = venv_dir / "bin" / "pip"
+            python = venv_dir / "bin" / "python"
+        return VenvInfo(str(pip), str(python))
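
# Illustrative sketch (not part of the commit): the executables VenvInfo
# resolves for a hypothetical venv directory.
#
#     info = VenvInfo.of_existing_venv(Path(".mypy_venv"))
#     # POSIX:   info.pip_exe == ".mypy_venv/bin/pip"
#     # Windows: info.pip_exe == ".mypy_venv\Scripts\pip.exe"
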
+
+
+def make_venv(venv_dir: Path) -> VenvInfo:
+    try:
+        venv.create(venv_dir, with_pip=True, clear=True)
+    except subprocess.CalledProcessError as e:
+        if "ensurepip" in e.cmd:
+            print_error(
+                "stubtest requires a Python installation with ensurepip. "
+                "If on Linux, you may need to install the python3-venv package."
+            )
+        raise
+    return VenvInfo.of_existing_venv(venv_dir)
+
+
+@cache
+def get_mypy_req() -> str:
+    with open("requirements-tests.txt", encoding="UTF-8") as f:
+        return next(line.strip() for line in f if "mypy" in line)
+
+
 # ====================================================================
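
# Illustrative sketch (not part of the commit): one plausible way the helpers
# above fit together. The calling code sits outside the hunks shown here, so
# the exact call site is an assumption and "caldav" is used purely as an example.
#
#     venv_info = make_venv(Path(".venv-caldav"))
#     to_install = [get_mypy_req(), *get_recursive_requirements("caldav").external_pkgs]
#     subprocess.run([venv_info.pip_exe, "install", *to_install], check=True)
#     # mypy can then be invoked through venv_info.python_exe, so the external
#     # (non-types) dependencies are importable while the test cases are checked.
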
@@ -83,6 +175,10 @@ class PackageInfo(NamedTuple):
     name: str
     test_case_directory: Path
 
+    @property
+    def is_stdlib(self) -> bool:
+        return self.name == "stdlib"
+
 
 def testcase_dir_from_package_name(package_name: str) -> Path:
     return Path("stubs", package_name, "@tests/test_cases")