From cdfe65afe31289b871e7528886ac6c9561da3b30 Mon Sep 17 00:00:00 2001 From: Avasam Date: Mon, 31 Mar 2025 18:00:33 -0400 Subject: [PATCH 1/7] Enable Ruff flake8-use-pathlib (PTH) --- lib/ts_utils/paths.py | 2 +- lib/ts_utils/requirements.py | 5 +- lib/ts_utils/utils.py | 2 +- pyproject.toml | 1 + scripts/create_baseline_stubs.py | 41 ++++----- scripts/sync_protobuf/_utils.py | 5 +- scripts/sync_protobuf/google_protobuf.py | 4 +- scripts/sync_protobuf/tensorflow.py | 7 +- tests/check_typeshed_structure.py | 14 ++- tests/mypy_test.py | 7 +- tests/pytype_test.py | 107 +++++++++++------------ tests/runtests.py | 20 ++--- 12 files changed, 104 insertions(+), 111 deletions(-) diff --git a/lib/ts_utils/paths.py b/lib/ts_utils/paths.py index 63119231720d..2894aa24b2d7 100644 --- a/lib/ts_utils/paths.py +++ b/lib/ts_utils/paths.py @@ -5,7 +5,7 @@ # installed into the user's virtual env, so we can't determine the path # to typeshed. Installing ts_utils editable would solve that, see # https://github.com/python/typeshed/pull/12806. 
-TS_BASE_PATH: Final = Path("") +TS_BASE_PATH: Final = Path() STDLIB_PATH: Final = TS_BASE_PATH / "stdlib" STUBS_PATH: Final = TS_BASE_PATH / "stubs" diff --git a/lib/ts_utils/requirements.py b/lib/ts_utils/requirements.py index e1af0f1ea290..3563c78d46fa 100644 --- a/lib/ts_utils/requirements.py +++ b/lib/ts_utils/requirements.py @@ -1,7 +1,6 @@ from __future__ import annotations import itertools -import os import sys from collections.abc import Iterable @@ -13,14 +12,14 @@ def get_external_stub_requirements(distributions: Iterable[str] = ()) -> set[Requirement]: if not distributions: - distributions = os.listdir(STUBS_PATH) + distributions = STUBS_PATH.iterdir() return set(itertools.chain.from_iterable([read_dependencies(distribution).external_pkgs for distribution in distributions])) def get_stubtest_system_requirements(distributions: Iterable[str] = (), platform: str = sys.platform) -> list[str]: if not distributions: - distributions = os.listdir(STUBS_PATH) + distributions = STUBS_PATH.iterdir() requirements: list[str] = [] for distribution in distributions: diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 522db807a29e..1bc083f6167c 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -208,7 +208,7 @@ def allowlists(distribution_name: str) -> list[str]: @cache def get_gitignore_spec() -> pathspec.PathSpec: - with open(".gitignore", encoding="UTF-8") as f: + with Path(".gitignore").open(encoding="UTF-8") as f: return pathspec.PathSpec.from_lines("gitwildmatch", f.readlines()) diff --git a/pyproject.toml b/pyproject.toml index 3cfcddda7b5f..137f173a728c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ select = [ "PLC", # Pylint Convention "PLE", # Pylint Error "PLR", # Pylint Refactor + "PTH", # flake8-use-pathlib "RUF", # Ruff-specific and unused-noqa "TRY", # tryceratops "UP", # pyupgrade diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 2aab6aea5710..c2140d19d065 100755 --- 
a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -12,19 +12,20 @@ import argparse import asyncio -import glob -import os.path import re import subprocess import sys import urllib.parse from http import HTTPStatus from importlib.metadata import distribution +from pathlib import Path import aiohttp import termcolor -PYRIGHT_CONFIG = "pyrightconfig.stricter.json" +from ts_utils.paths import STDLIB_PATH, STUBS_PATH + +PYRIGHT_CONFIG = Path("pyrightconfig.stricter.json") def search_pip_freeze_output(project: str, output: str) -> tuple[str, str] | None: @@ -52,22 +53,22 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None: return search_pip_freeze_output(project, r.stdout) -def run_stubgen(package: str, output: str) -> None: +def run_stubgen(package: str, output: Path) -> None: print(f"Running stubgen: stubgen -o {output} -p {package}") subprocess.run(["stubgen", "-o", output, "-p", package, "--export-less"], check=True) -def run_stubdefaulter(stub_dir: str) -> None: +def run_stubdefaulter(stub_dir: Path) -> None: print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}") subprocess.run(["stubdefaulter", "--packages", stub_dir]) -def run_black(stub_dir: str) -> None: +def run_black(stub_dir: Path) -> None: print(f"Running Black: black {stub_dir}") - subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")]) + subprocess.run(["pre-commit", "run", "black", "--files", *stub_dir.rglob("*.pyi")]) -def run_ruff(stub_dir: str) -> None: +def run_ruff(stub_dir: Path) -> None: print(f"Running Ruff: ruff check {stub_dir} --fix-only") subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"]) @@ -115,14 +116,14 @@ async def get_upstream_repo_url(project: str) -> str | None: return None -def create_metadata(project: str, stub_dir: str, version: str) -> None: +def create_metadata(project: str, stub_dir: Path, version: str) -> None: """Create a METADATA.toml file.""" 
match = re.match(r"[0-9]+.[0-9]+", version) if match is None: sys.exit(f"Error: Cannot parse version number: {version}") - filename = os.path.join(stub_dir, "METADATA.toml") + filename = stub_dir / "METADATA.toml" version = match.group(0) - if os.path.exists(filename): + if filename.exists(): return metadata = f'version = "{version}.*"\n' upstream_repo_url = asyncio.run(get_upstream_repo_url(project)) @@ -135,13 +136,13 @@ def create_metadata(project: str, stub_dir: str, version: str) -> None: else: metadata += f'upstream_repository = "{upstream_repo_url}"\n' print(f"Writing {filename}") - with open(filename, "w", encoding="UTF-8") as file: + with filename.open("w", encoding="UTF-8") as file: file.write(metadata) -def add_pyright_exclusion(stub_dir: str) -> None: +def add_pyright_exclusion(stub_dir: Path) -> None: """Exclude stub_dir from strict pyright checks.""" - with open(PYRIGHT_CONFIG, encoding="UTF-8") as f: + with PYRIGHT_CONFIG.open(encoding="UTF-8") as f: lines = f.readlines() i = 0 while i < len(lines) and not lines[i].strip().startswith('"exclude": ['): @@ -177,7 +178,7 @@ def add_pyright_exclusion(stub_dir: str) -> None: third_party_excludes.sort(key=str.lower) print(f"Updating {PYRIGHT_CONFIG}") - with open(PYRIGHT_CONFIG, "w", encoding="UTF-8") as f: + with PYRIGHT_CONFIG.open("w", encoding="UTF-8") as f: f.writelines(before_third_party_excludes) f.writelines(third_party_excludes) f.writelines(after_third_party_excludes) @@ -194,7 +195,7 @@ def main() -> None: parser.add_argument("--package", help="generate stubs for this Python package (default is autodetected)") args = parser.parse_args() project = args.project - package = args.package + package: str = args.package if not re.match(r"[a-zA-Z0-9-_.]+$", project): sys.exit(f"Invalid character in project name: {project!r}") @@ -214,7 +215,7 @@ def main() -> None: print(f'Using detected package "{package}" for project "{project}"', file=sys.stderr) print("Suggestion: Try again with --package argument if
that's not what you wanted", file=sys.stderr) - if not os.path.isdir("stubs") or not os.path.isdir("stdlib"): + if not STUBS_PATH.is_dir() or not STDLIB_PATH.is_dir(): sys.exit("Error: Current working directory must be the root of typeshed repository") # Get normalized project name and version of installed package. @@ -226,9 +227,9 @@ def main() -> None: sys.exit(1) project, version = info - stub_dir = os.path.join("stubs", project) - package_dir = os.path.join(stub_dir, package) - if os.path.exists(package_dir): + stub_dir = STUBS_PATH / project + package_dir = stub_dir / package + if package_dir.exists(): sys.exit(f"Error: {package_dir} already exists (delete it first)") run_stubgen(package, stub_dir) diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py index 0c49c5a6fa9a..9496bcd903d3 100644 --- a/scripts/sync_protobuf/_utils.py +++ b/scripts/sync_protobuf/_utils.py @@ -3,6 +3,7 @@ import subprocess import sys from http.client import HTTPResponse +from pathlib import Path from typing import TYPE_CHECKING, Iterable from urllib.request import urlopen from zipfile import ZipFile @@ -17,10 +18,10 @@ MYPY_PROTOBUF_VERSION = mypy_protobuf__version__ -def download_file(url: str, destination: StrPath) -> None: +def download_file(url: str, destination: Path) -> None: print(f"Downloading '{url}' to '{destination}'") resp: HTTPResponse - with urlopen(url) as resp, open(destination, "wb") as file: + with urlopen(url) as resp, destination.open("wb") as file: file.write(resp.read()) diff --git a/scripts/sync_protobuf/google_protobuf.py b/scripts/sync_protobuf/google_protobuf.py index ee238f82618d..3d43d50e962d 100755 --- a/scripts/sync_protobuf/google_protobuf.py +++ b/scripts/sync_protobuf/google_protobuf.py @@ -33,7 +33,7 @@ def extract_python_version(file_path: Path) -> str: """Extract the Python version from https://github.com/protocolbuffers/protobuf/blob/main/version.json .""" - with open(file_path) as file: + with file_path.open() as file: 
data: dict[str, Any] = json.load(file) # The root key will be the protobuf source code version version = next(iter(data.values()))["languages"]["python"] @@ -47,7 +47,7 @@ def extract_proto_file_paths(temp_dir: Path) -> list[str]: as described in py_proto_library calls in https://github.com/protocolbuffers/protobuf/blob/main/python/dist/BUILD.bazel . """ - with open(temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel") as file: + with (temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel").open() as file: matched_lines = filter(None, (re.search(PROTO_FILE_PATTERN, line) for line in file)) proto_files = [ EXTRACTED_PACKAGE_DIR + "/src/google/protobuf/" + match.group(1).replace("compiler_", "compiler/") + ".proto" diff --git a/scripts/sync_protobuf/tensorflow.py b/scripts/sync_protobuf/tensorflow.py index b26ee90ccabf..c67cc2a97eeb 100755 --- a/scripts/sync_protobuf/tensorflow.py +++ b/scripts/sync_protobuf/tensorflow.py @@ -6,7 +6,6 @@ from __future__ import annotations -import os import re import shutil import subprocess @@ -72,7 +71,7 @@ def post_creation() -> None: for path in STUBS_FOLDER.rglob("*_pb2.pyi"): print(f"Fixing imports in '{path}'") - with open(path) as file: + with path.open() as file: filedata = file.read() # Replace the target string @@ -80,13 +79,13 @@ def post_creation() -> None: filedata = re.sub(XLA_IMPORT_PATTERN, "\\1tensorflow.compiler.xla.", filedata) # Write the file out again - with open(path, "w") as file: + with path.open("w") as file: file.write(filedata) print() for to_remove in PROTOS_TO_REMOVE: file_path = STUBS_FOLDER / "tensorflow" / to_remove - os.remove(file_path) + file_path.unlink() print(f"Removed '{file_path}'") diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index 81adb8c74269..3e3901064557 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -7,7 +7,6 @@ from __future__ import annotations -import os import re import sys 
from pathlib import Path @@ -114,11 +113,10 @@ def check_test_cases() -> None: def check_no_symlinks() -> None: """Check that there are no symlinks in the typeshed repository.""" - files = [os.path.join(root, file) for root, _, files in os.walk(".") for file in files] + files = [(root / file) for root, _, files in Path().walk() for file in files] no_symlink = "You cannot use symlinks in typeshed, please copy {} to its link." for file in files: - _, ext = os.path.splitext(file) - if ext == ".pyi" and os.path.islink(file): + if file.suffix == ".pyi" and file.is_symlink(): raise ValueError(no_symlink.format(file)) @@ -140,20 +138,20 @@ def check_versions_file() -> None: def _find_stdlib_modules() -> set[str]: modules = set[str]() - for path, _, files in os.walk(STDLIB_PATH): + for path, _, files in STDLIB_PATH.walk(): for filename in files: - base_module = ".".join(os.path.normpath(path).split(os.sep)[1:]) + base_module = ".".join(path.parts[1:]) if filename == "__init__.pyi": modules.add(base_module) elif filename.endswith(".pyi"): - mod, _ = os.path.splitext(filename) + mod = filename[:-4] modules.add(f"{base_module}.{mod}" if base_module else mod) return modules def check_metadata() -> None: """Check that all METADATA.toml files are valid.""" - for distribution in os.listdir("stubs"): + for distribution in STUBS_PATH.iterdir(): # This function does various sanity checks for METADATA.toml files read_metadata(distribution) diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 0fea2d56b0a7..5a3323ac867a 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -61,7 +61,7 @@ def _named_temporary_file() -> Generator[tempfile._TemporaryFileWrapper[str]]: yield temp finally: temp.close() - os.remove(temp.name) + Path(temp.name).unlink() SUPPORTED_VERSIONS = ["3.13", "3.12", "3.11", "3.10", "3.9"] @@ -590,15 +590,14 @@ def test_third_party_stubs(args: TestConfig, tempdir: Path) -> TestSummary: def test_typeshed(args: TestConfig, tempdir: Path) -> TestSummary: 
print(f"*** Testing Python {args.version} on {args.platform}") - stdlib_dir, stubs_dir = Path("stdlib"), Path("stubs") summary = TestSummary() - if stdlib_dir in args.filter or any(stdlib_dir in path.parents for path in args.filter): + if STDLIB_PATH in args.filter or any(STDLIB_PATH in path.parents for path in args.filter): mypy_result, files_checked = test_stdlib(args) summary.register_result(mypy_result, files_checked) print() - if stubs_dir in args.filter or any(stubs_dir in path.parents for path in args.filter): + if STUBS_PATH in args.filter or any(STUBS_PATH in path.parents for path in args.filter): tp_results = test_third_party_stubs(args, tempdir) summary.merge(tp_results) print() diff --git a/tests/pytype_test.py b/tests/pytype_test.py index 7e3eeb7354bb..ac4736b02203 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -19,38 +19,40 @@ if TYPE_CHECKING: assert sys.platform != "win32", "pytype isn't yet installed in CI, but wheels can be built on Windows" + from _typeshed import StrPath if sys.version_info >= (3, 13): print("pytype does not support Python 3.13+ yet.", file=sys.stderr) sys.exit(1) - import argparse import importlib.metadata import inspect import os import traceback from collections.abc import Iterable, Sequence +from pathlib import Path # pytype is not py.typed https://github.com/google/pytype/issues/1325 from pytype import config as pytype_config, load_pytd # type: ignore[import] from pytype.imports import typeshed # type: ignore[import] from ts_utils.metadata import read_dependencies +from ts_utils.paths import STDLIB_PATH, STUBS_PATH, TS_BASE_PATH from ts_utils.utils import SupportedVersionsDict, parse_stdlib_versions_file, supported_versions_for_module -TYPESHED_SUBDIRS = ["stdlib", "stubs"] +TYPESHED_SUBDIRS = [STDLIB_PATH, STUBS_PATH] TYPESHED_HOME = "TYPESHED_HOME" +EXCLUDE_LIST = TS_BASE_PATH / "tests" / "pytype_exclude_list.txt" _LOADERS: dict[str, tuple[pytype_config.Options, load_pytd.Loader]] = {} def main() -> None: 
args = create_parser().parse_args() - typeshed_location = args.typeshed_location or os.getcwd() - subdir_paths = [os.path.join(typeshed_location, d) for d in TYPESHED_SUBDIRS] - check_subdirs_discoverable(subdir_paths) + typeshed_location = Path(args.typeshed_location) or Path.cwd() + check_subdirs_discoverable(TYPESHED_SUBDIRS) old_typeshed_home = os.environ.get(TYPESHED_HOME) - os.environ[TYPESHED_HOME] = typeshed_location - files_to_test = determine_files_to_test(paths=args.files or subdir_paths) + os.environ[TYPESHED_HOME] = str(typeshed_location) + files_to_test = determine_files_to_test(paths=[Path(file) for file in args.files] or TYPESHED_SUBDIRS) run_all_tests(files_to_test=files_to_test, print_stderr=args.print_stderr, dry_run=args.dry_run) if old_typeshed_home is None: del os.environ[TYPESHED_HOME] @@ -73,12 +75,12 @@ def create_parser() -> argparse.ArgumentParser: return parser -def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[str]) -> str | None: +def run_pytype(*, filename: StrPath, python_version: str, missing_modules: Iterable[str]) -> str | None: """Run pytype, returning the stderr if any.""" if python_version not in _LOADERS: options = pytype_config.Options.create("", parse_pyi=True, python_version=python_version) # For simplicity, pretends missing modules are part of the stdlib. 
- missing_modules = tuple(os.path.join("stdlib", m) for m in missing_modules) + missing_modules = tuple(str(STDLIB_PATH / m) for m in missing_modules) loader = load_pytd.create_loader(options, missing_modules) _LOADERS[python_version] = (options, loader) options, loader = _LOADERS[python_version] @@ -94,21 +96,19 @@ def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[ return stderr -def _get_relative(filename: str) -> str: - top = 0 +def _get_relative(filename: StrPath) -> Path: + filepath = Path(filename) for d in TYPESHED_SUBDIRS: try: - top = filename.index(d + os.path.sep) + return d / filepath.relative_to(d) except ValueError: continue - else: - break - return filename[top:] + raise ValueError(f"{filepath} not relative to {TYPESHED_SUBDIRS}") -def _get_module_name(filename: str) -> str: +def _get_module_name(filename: StrPath) -> str: """Convert a filename {subdir}/m.n/module/foo to module.foo.""" - parts = _get_relative(filename).split(os.path.sep) + parts = _get_relative(filename).parts if parts[0] == "stdlib": module_parts = parts[1:] else: @@ -117,13 +117,13 @@ def _get_module_name(filename: str) -> str: return ".".join(module_parts).replace(".pyi", "").replace(".__init__", "") -def check_subdirs_discoverable(subdir_paths: list[str]) -> None: +def check_subdirs_discoverable(subdir_paths: Iterable[Path]) -> None: for p in subdir_paths: - if not os.path.isdir(p): + if not p.is_dir(): raise SystemExit(f"Cannot find typeshed subdir at {p} (specify parent dir via --typeshed-location)") -def determine_files_to_test(*, paths: Sequence[str]) -> list[str]: +def determine_files_to_test(*, paths: Sequence[Path]) -> list[Path]: """Determine all files to test. Checks for files in the pytype exclude list and for the stdlib VERSIONS file. 
@@ -132,29 +132,26 @@ def determine_files_to_test(*, paths: Sequence[str]) -> list[str]: ts = typeshed.Typeshed() exclude_list = set(ts.read_blacklist()) stdlib_module_versions = parse_stdlib_versions_file() - files = [] - for f in sorted(filenames): - if _get_relative(f) in exclude_list: - continue - if not _is_supported_stdlib_version(stdlib_module_versions, f): - continue - files.append(f) - return files + return [ + f + for f in sorted(filenames) + if _get_relative(f) not in exclude_list and _is_supported_stdlib_version(stdlib_module_versions, f) + ] -def find_stubs_in_paths(paths: Sequence[str]) -> list[str]: - filenames: list[str] = [] +def find_stubs_in_paths(paths: Sequence[Path]) -> list[Path]: + filenames: list[Path] = [] for path in paths: - if os.path.isdir(path): - for root, _, fns in os.walk(path): - filenames.extend(os.path.join(root, fn) for fn in fns if fn.endswith(".pyi")) + if path.is_dir(): + for root, _, fns in path.walk(): + filenames.extend(root / fn for fn in fns if fn.endswith(".pyi")) else: filenames.append(path) return filenames -def _is_supported_stdlib_version(module_versions: SupportedVersionsDict, filename: str) -> bool: - parts = _get_relative(filename).split(os.path.sep) +def _is_supported_stdlib_version(module_versions: SupportedVersionsDict, filename: StrPath) -> bool: + parts = _get_relative(filename).parts if parts[0] != "stdlib": return True module_name = _get_module_name(filename) @@ -181,7 +178,7 @@ def _get_pkgs_associated_with_requirement(req_name: str) -> list[str]: return sorted({package.removesuffix("-stubs") for package in packages}) -def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]: +def get_missing_modules(files_to_test: Sequence[Path]) -> Iterable[str]: """Get names of modules that should be treated as missing. Some typeshed stubs depend on dependencies outside of typeshed. 
Since pytype @@ -191,53 +188,53 @@ def get_missing_modules(files_to_test: Sequence[str]) -> Iterable[str]: Similarly, pytype cannot parse files on its exclude list, so we also treat those as missing. """ - stub_distributions = set() + stub_distributions = set[str]() for fi in files_to_test: - parts = fi.split(os.sep) + parts = fi.parts try: idx = parts.index("stubs") except ValueError: continue stub_distributions.add(parts[idx + 1]) - missing_modules = set() - for distribution in stub_distributions: - for external_req in read_dependencies(distribution).external_pkgs: - associated_packages = _get_pkgs_associated_with_requirement(external_req.name) - missing_modules.update(associated_packages) + missing_modules = { + associated_package + for distribution in stub_distributions + for external_req in read_dependencies(distribution).external_pkgs + for associated_package in _get_pkgs_associated_with_requirement(external_req.name) + } - test_dir = os.path.dirname(__file__) - exclude_list = os.path.join(test_dir, "pytype_exclude_list.txt") - with open(exclude_list) as f: + with EXCLUDE_LIST.open() as f: excluded_files = f.readlines() - for fi in excluded_files: - if not fi.startswith("stubs/"): + for excluded_file in excluded_files: + parts = Path(excluded_file).parts + if parts[0] != "stubs": # Skips comments, empty lines, and stdlib files, which are in # the exclude list because pytype has its own version. 
continue - unused_stubs_prefix, unused_pkg, mod_path = fi.split("/", 2) # pyright: ignore[reportUnusedVariable] - missing_modules.add(os.path.splitext(mod_path)[0]) + mod_path = os.pathsep.join(parts[2:]) + missing_modules.add(mod_path.removesuffix(".pyi")) return missing_modules -def run_all_tests(*, files_to_test: Sequence[str], print_stderr: bool, dry_run: bool) -> None: - bad = [] +def run_all_tests(*, files_to_test: Sequence[Path], print_stderr: bool, dry_run: bool) -> None: + bad: list[tuple[StrPath, str, str]] = [] errors = 0 total_tests = len(files_to_test) missing_modules = get_missing_modules(files_to_test) python_version = f"{sys.version_info.major}.{sys.version_info.minor}" print("Testing files with pytype...") - for i, f in enumerate(files_to_test): + for i, file_to_test in enumerate(files_to_test): if dry_run: stderr = None else: - stderr = run_pytype(filename=f, python_version=python_version, missing_modules=missing_modules) + stderr = run_pytype(filename=file_to_test, python_version=python_version, missing_modules=missing_modules) if stderr: if print_stderr: print(f"\n{stderr}") errors += 1 stacktrace_final_line = stderr.rstrip().rsplit("\n", 1)[-1] - bad.append((_get_relative(f), python_version, stacktrace_final_line)) + bad.append((_get_relative(file_to_test), python_version, stacktrace_final_line)) runs = i + 1 if runs % 25 == 0: diff --git a/tests/runtests.py b/tests/runtests.py index 47be0830ba67..013f49eba059 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -3,7 +3,6 @@ import argparse import json -import os import re import subprocess import sys @@ -13,7 +12,7 @@ from ts_utils.paths import TEST_CASES_DIR, test_cases_path from ts_utils.utils import colored -_STRICTER_CONFIG_FILE = "pyrightconfig.stricter.json" +_STRICTER_CONFIG_FILE = Path("pyrightconfig.stricter.json") _TESTCASES_CONFIG_FILE = "pyrightconfig.testcases.json" _NPX_ERROR_PATTERN = r"error (runn|find)ing npx" _NPX_ERROR_MESSAGE = colored("\nSkipping Pyright tests: npx is 
not installed or can't be run!", "yellow") @@ -33,10 +32,10 @@ def _parse_jsonc(json_text: str) -> str: return valid_json -def _get_strict_params(stub_path: str) -> list[str]: - with open(_STRICTER_CONFIG_FILE, encoding="UTF-8") as file: +def _get_strict_params(stub_path: Path) -> list[str | Path]: + with _STRICTER_CONFIG_FILE.open(encoding="UTF-8") as file: data = json.loads(_parse_jsonc(file.read())) - lower_stub_path = stub_path.lower() + lower_stub_path = str(stub_path).lower().replace("\\", "/") if any(lower_stub_path == stub.lower() for stub in data["exclude"]): return [] return ["-p", _STRICTER_CONFIG_FILE] @@ -60,23 +59,22 @@ def main() -> None: ) parser.add_argument("path", help="Path of the stub to test in format /, from the root of the project.") args = parser.parse_args() - path: str = args.path + path = Path(args.path) run_stubtest: bool = args.run_stubtest python_version: str = args.python_version - path_tokens = Path(path).parts - if len(path_tokens) != 2: + if len(path.parts) != 2: parser.error("'path' argument should be in format /.") - folder, stub = path_tokens + folder, stub = path.parts if folder not in {"stdlib", "stubs"}: parser.error("Only the 'stdlib' and 'stubs' folders are supported.") - if not os.path.exists(path): + if not path.exists(): parser.error(f"{path=} does not exist.") stubtest_result: subprocess.CompletedProcess[bytes] | None = None pytype_result: subprocess.CompletedProcess[bytes] | None = None print("\nRunning pre-commit...") - pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *Path(path).rglob("*")]) + pre_commit_result = subprocess.run(["pre-commit", "run", "--files", *path.rglob("*")]) print("\nRunning check_typeshed_structure.py...") check_structure_result = subprocess.run([sys.executable, "tests/check_typeshed_structure.py"]) From afb28a5bc666204a8f86039b8f18096b286aaf7b Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 4 Apr 2025 15:24:13 -0400 Subject: [PATCH 2/7] Get name from Path for distribution 
--- lib/ts_utils/requirements.py | 4 ++-- tests/check_typeshed_structure.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ts_utils/requirements.py b/lib/ts_utils/requirements.py index 3563c78d46fa..109c113fe570 100644 --- a/lib/ts_utils/requirements.py +++ b/lib/ts_utils/requirements.py @@ -12,14 +12,14 @@ def get_external_stub_requirements(distributions: Iterable[str] = ()) -> set[Requirement]: if not distributions: - distributions = STUBS_PATH.iterdir() + distributions = [distribution.name for distribution in STUBS_PATH.iterdir()] return set(itertools.chain.from_iterable([read_dependencies(distribution).external_pkgs for distribution in distributions])) def get_stubtest_system_requirements(distributions: Iterable[str] = (), platform: str = sys.platform) -> list[str]: if not distributions: - distributions = STUBS_PATH.iterdir() + distributions = [distribution.name for distribution in STUBS_PATH.iterdir()] requirements: list[str] = [] for distribution in distributions: diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index 50959b69aed9..e5a8d2904b3b 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -152,7 +152,7 @@ def check_metadata() -> None: """Check that all METADATA.toml files are valid.""" for distribution in STUBS_PATH.iterdir(): # This function does various sanity checks for METADATA.toml files - read_metadata(distribution) + read_metadata(distribution.name) def check_requirement_pins() -> None: From 4cc9af40f184f6e0562c583c49867c7ba7ab496a Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 4 Apr 2025 15:47:13 -0400 Subject: [PATCH 3/7] More iterdir --- tests/mypy_test.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/mypy_test.py b/tests/mypy_test.py index abd2fb7fd0ab..1b5683808baa 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -170,6 +170,8 @@ def match(path: Path, args: TestConfig) -> bool: def 
add_files(files: list[Path], module: Path, args: TestConfig) -> None: """Add all files in package or module represented by 'name' located in 'root'.""" + if module.name.startswith("."): + return if module.is_file() and module.suffix == ".pyi": if match(module, args): files.append(module) @@ -307,10 +309,8 @@ def add_third_party_files( seen_dists.add(distribution) seen_dists.update(r.name for r in typeshed_reqs) root = distribution_path(distribution) - for name in os.listdir(root): - if name.startswith("."): - continue - add_files(files, (root / name), args) + for path in root.iterdir(): + add_files(files, path, args) add_configuration(configurations, distribution) @@ -359,7 +359,7 @@ def test_third_party_distribution( def test_stdlib(args: TestConfig) -> TestResult: files: list[Path] = [] for file in STDLIB_PATH.iterdir(): - if file.name in ("VERSIONS", TESTS_DIR) or file.name.startswith("."): + if file.name in ("VERSIONS", TESTS_DIR): continue add_files(files, file, args) @@ -536,7 +536,7 @@ def test_third_party_stubs(args: TestConfig, tempdir: Path) -> TestSummary: gitignore_spec = get_gitignore_spec() distributions_to_check: dict[str, PackageDependencies] = {} - for distribution in sorted(os.listdir("stubs")): + for distribution in sorted([distribution.name for distribution in Path("stubs").iterdir()]): dist_path = distribution_path(distribution) if spec_matches_path(gitignore_spec, dist_path): From 0e70eb7cb0880904c9458d8a84e851edf1b503e3 Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 4 Apr 2025 15:50:34 -0400 Subject: [PATCH 4/7] Revert Path.walk. 
That was added in 3.12 --- tests/check_typeshed_structure.py | 7 ++++--- tests/pytype_test.py | 4 ++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/check_typeshed_structure.py b/tests/check_typeshed_structure.py index e5a8d2904b3b..e3fb86739355 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -7,6 +7,7 @@ from __future__ import annotations +import os import re from pathlib import Path @@ -112,7 +113,7 @@ def check_test_cases() -> None: def check_no_symlinks() -> None: """Check that there are no symlinks in the typeshed repository.""" - files = [(root / file) for root, _, files in Path().walk() for file in files] + files = [Path(root) / file for root, _, files in os.walk(".") for file in files] no_symlink = "You cannot use symlinks in typeshed, please copy {} to its link." for file in files: if file.suffix == ".pyi" and file.is_symlink(): @@ -137,9 +138,9 @@ def check_versions_file() -> None: def _find_stdlib_modules() -> set[str]: modules = set[str]() - for path, _, files in STDLIB_PATH.walk(): + for path, _, files in os.walk(STDLIB_PATH): for filename in files: - base_module = ".".join(path.parts[1:]) + base_module = ".".join(Path(path).parts[1:]) if filename == "__init__.pyi": modules.add(base_module) elif filename.endswith(".pyi"): diff --git a/tests/pytype_test.py b/tests/pytype_test.py index ac4736b02203..49c530c4d885 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -143,8 +143,8 @@ def find_stubs_in_paths(paths: Sequence[Path]) -> list[Path]: filenames: list[Path] = [] for path in paths: if path.is_dir(): - for root, _, fns in path.walk(): - filenames.extend(root / fn for fn in fns if fn.endswith(".pyi")) + for root, _, fns in os.walk(path): + filenames.extend(Path(root) / fn for fn in fns if fn.endswith(".pyi")) else: filenames.append(path) return filenames From fc578a0b0ad79b731b03e67f254ed1cf37f4832c Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 4 Apr 2025 15:55:02 -0400 
Subject: [PATCH 5/7] Use as_posix for slash normalization --- scripts/create_baseline_stubs.py | 2 +- tests/runtests.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index ebe1e7d23d8e..5966a859536f 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -168,7 +168,7 @@ def add_pyright_exclusion(stub_dir: Path) -> None: third_party_excludes[-1] = last_line + "\n" # Must use forward slash in the .json file - line_to_add = f' "{stub_dir}",\n'.replace("\\", "/") + line_to_add = f' "{stub_dir.as_posix()}",\n' if line_to_add in third_party_excludes: print(f"{PYRIGHT_CONFIG} already up-to-date") diff --git a/tests/runtests.py b/tests/runtests.py index a7b2daa1412a..3b13643aa2f0 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -35,7 +35,7 @@ def _parse_jsonc(json_text: str) -> str: def _get_strict_params(stub_path: Path) -> list[str | Path]: with _STRICTER_CONFIG_FILE.open(encoding="UTF-8") as file: data = json.loads(_parse_jsonc(file.read())) - lower_stub_path = str(stub_path).lower().replace("\\", "/") + lower_stub_path = stub_path.as_posix().lower() if any(lower_stub_path == stub.lower() for stub in data["exclude"]): return [] return ["-p", _STRICTER_CONFIG_FILE] From 792b1fa7fd089ac8ef144b07b3d3316836372e8b Mon Sep 17 00:00:00 2001 From: Avasam Date: Fri, 4 Apr 2025 16:06:27 -0400 Subject: [PATCH 6/7] Replace .read and .write on Path --- scripts/create_baseline_stubs.py | 3 +-- scripts/sync_protobuf/_utils.py | 4 ++-- scripts/sync_protobuf/tensorflow.py | 6 ++---- tests/runtests.py | 3 +-- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/scripts/create_baseline_stubs.py b/scripts/create_baseline_stubs.py index 5966a859536f..db4fd9f3bf08 100755 --- a/scripts/create_baseline_stubs.py +++ b/scripts/create_baseline_stubs.py @@ -136,8 +136,7 @@ def create_metadata(project: str, stub_dir: Path, version: str) -> None: else:
metadata += f'upstream_repository = "{upstream_repo_url}"\n' print(f"Writing {filename}") - with filename.open("w", encoding="UTF-8") as file: - file.write(metadata) + filename.write_text(metadata, encoding="UTF-8") def add_pyright_exclusion(stub_dir: Path) -> None: diff --git a/scripts/sync_protobuf/_utils.py b/scripts/sync_protobuf/_utils.py index 05a393b4a8cc..f22bd10fb353 100644 --- a/scripts/sync_protobuf/_utils.py +++ b/scripts/sync_protobuf/_utils.py @@ -22,8 +22,8 @@ def download_file(url: str, destination: Path) -> None: print(f"Downloading '{url}' to '{destination}'") resp: HTTPResponse - with urlopen(url) as resp, destination.open("wb") as file: - file.write(resp.read()) + with urlopen(url) as resp: + destination.write_bytes(resp.read()) def extract_archive(archive_path: StrPath, destination: StrPath) -> None: diff --git a/scripts/sync_protobuf/tensorflow.py b/scripts/sync_protobuf/tensorflow.py index 78c53cc06172..fcb53226636e 100755 --- a/scripts/sync_protobuf/tensorflow.py +++ b/scripts/sync_protobuf/tensorflow.py @@ -71,16 +71,14 @@ def post_creation() -> None: for path in STUBS_FOLDER.rglob("*_pb2.pyi"): print(f"Fixing imports in '{path}'") - with path.open(encoding="utf-8") as file: - filedata = file.read() + filedata = path.read_text(encoding="utf-8") # Replace the target string filedata = re.sub(TSL_IMPORT_PATTERN, "\\1tensorflow.tsl.", filedata) filedata = re.sub(XLA_IMPORT_PATTERN, "\\1tensorflow.compiler.xla.", filedata) # Write the file out again - with path.open("w", encoding="utf-8") as file: - file.write(filedata) + path.write_text(filedata, encoding="utf-8") print() for to_remove in PROTOS_TO_REMOVE: diff --git a/tests/runtests.py b/tests/runtests.py index 3b13643aa2f0..2d90da2b8617 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -33,8 +33,7 @@ def _parse_jsonc(json_text: str) -> str: def _get_strict_params(stub_path: Path) -> list[str | Path]: - with _STRICTER_CONFIG_FILE.open(encoding="UTF-8") as file: - data =
json.loads(_parse_jsonc(file.read())) + data = json.loads(_parse_jsonc(_STRICTER_CONFIG_FILE.read_text(encoding="UTF-8"))) lower_stub_path = stub_path.as_posix().lower() if any(lower_stub_path == stub.lower() for stub in data["exclude"]): return [] From 6b8c9925a7c4fb2207a89a4ea1a1a9a602f65e22 Mon Sep 17 00:00:00 2001 From: Avasam Date: Thu, 10 Apr 2025 17:42:02 -0400 Subject: [PATCH 7/7] Update some Path instantiation --- lib/ts_utils/paths.py | 1 + lib/ts_utils/utils.py | 4 ++-- tests/check_typeshed_structure.py | 2 +- tests/mypy_test.py | 2 +- tests/pytype_test.py | 2 +- 5 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/ts_utils/paths.py b/lib/ts_utils/paths.py index 2894aa24b2d7..568a091a5ae6 100644 --- a/lib/ts_utils/paths.py +++ b/lib/ts_utils/paths.py @@ -11,6 +11,7 @@ PYPROJECT_PATH: Final = TS_BASE_PATH / "pyproject.toml" REQUIREMENTS_PATH: Final = TS_BASE_PATH / "requirements-tests.txt" +GITIGNORE_PATH: Final = TS_BASE_PATH / ".gitignore" TESTS_DIR: Final = "@tests" TEST_CASES_DIR: Final = "test_cases" diff --git a/lib/ts_utils/utils.py b/lib/ts_utils/utils.py index 20b9fe855c21..95e55620517b 100644 --- a/lib/ts_utils/utils.py +++ b/lib/ts_utils/utils.py @@ -21,7 +21,7 @@ def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: return text -from .paths import REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path +from .paths import GITIGNORE_PATH, REQUIREMENTS_PATH, STDLIB_PATH, STUBS_PATH, TEST_CASES_DIR, allowlists_path, test_cases_path PYTHON_VERSION: Final = f"{sys.version_info.major}.{sys.version_info.minor}" @@ -203,7 +203,7 @@ def allowlists(distribution_name: str) -> list[str]: @functools.cache def get_gitignore_spec() -> pathspec.PathSpec: - with Path(".gitignore").open(encoding="UTF-8") as f: + with GITIGNORE_PATH.open(encoding="UTF-8") as f: return pathspec.PathSpec.from_lines("gitwildmatch", f.readlines()) diff --git a/tests/check_typeshed_structure.py
b/tests/check_typeshed_structure.py index e3fb86739355..666e726875c4 100755 --- a/tests/check_typeshed_structure.py +++ b/tests/check_typeshed_structure.py @@ -113,7 +113,7 @@ def check_test_cases() -> None: def check_no_symlinks() -> None: """Check that there are no symlinks in the typeshed repository.""" - files = [Path(root) / file for root, _, files in os.walk(".") for file in files] + files = [Path(root, file) for root, _, files in os.walk(".") for file in files] no_symlink = "You cannot use symlinks in typeshed, please copy {} to its link." for file in files: if file.suffix == ".pyi" and file.is_symlink(): diff --git a/tests/mypy_test.py b/tests/mypy_test.py index 1b5683808baa..a1c8353f0dae 100755 --- a/tests/mypy_test.py +++ b/tests/mypy_test.py @@ -536,7 +536,7 @@ def test_third_party_stubs(args: TestConfig, tempdir: Path) -> TestSummary: gitignore_spec = get_gitignore_spec() distributions_to_check: dict[str, PackageDependencies] = {} - for distribution in sorted([distribution.name for distribution in Path("stubs").iterdir()]): + for distribution in sorted([distribution.name for distribution in STUBS_PATH.iterdir()]): dist_path = distribution_path(distribution) if spec_matches_path(gitignore_spec, dist_path): diff --git a/tests/pytype_test.py b/tests/pytype_test.py index 49c530c4d885..e395fc31dbf3 100755 --- a/tests/pytype_test.py +++ b/tests/pytype_test.py @@ -144,7 +144,7 @@ def find_stubs_in_paths(paths: Sequence[Path]) -> list[Path]: for path in paths: if path.is_dir(): for root, _, fns in os.walk(path): - filenames.extend(Path(root) / fn for fn in fns if fn.endswith(".pyi")) + filenames.extend(Path(root, fn) for fn in fns if fn.endswith(".pyi")) else: filenames.append(path) return filenames