diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index bd3d481..5153a22 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -21,7 +21,13 @@ jobs:
           - "3.11"
           - "3.12"
           - "3.13"
-    runs-on: ubuntu-latest
+        platform-version:
+          - "ubuntu-24.04"
+        include:
+          # Python 3.7 only runs on Ubuntu 22.04
+          - python-version: "3.7"
+            platform-version: "ubuntu-22.04"
+    runs-on: ${{ matrix.platform-version }}
     steps:
     - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
     - name: Set up Python ${{ matrix.python-version }}
@@ -36,3 +42,28 @@ jobs:
     - name: Test with pytest
       run: |
         pytest
+
+  ruff:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Install ruff based off pyproject.toml
+        uses: astral-sh/ruff-action@57714a7c8a2e59f32539362ba31877a1957dded1 # v3.5.1
+        with:
+          version-file: "pyproject.toml"
+
+  mypy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        with:
+          python-version: "3.x"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install .[lint,test]
+      - name: Run MyPy
+        run: |
+          mypy .
diff --git a/morgan/__init__.py b/morgan/__init__.py
index 8e46e6a..85f9338 100644
--- a/morgan/__init__.py
+++ b/morgan/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import argparse
 import configparser
 import hashlib
@@ -10,7 +12,7 @@
 import urllib.parse
 import urllib.request
 import zipfile
-from typing import Dict, Iterable, Optional, Tuple
+from typing import IO, Callable, Iterable
 
 import packaging.requirements
 import packaging.specifiers
@@ -52,7 +54,8 @@ def __init__(self, args: argparse.Namespace):
         self.mirror_all_versions: bool = args.mirror_all_versions
         self.package_type_regex: str = args.package_type_regex
         self.config = configparser.ConfigParser(
-            strict=False, dict_type=ListExtendingOrderedDict
+            strict=False,
+            dict_type=ListExtendingOrderedDict,
         )
         self.config.read(args.config)
         self.envs = {}
@@ -79,8 +82,8 @@ def __init__(self, args: argparse.Namespace):
                 r".*"
                 + env["sys_platform"]
                 + r".*"
-                + env["platform_machine"]
-            )
+                + env["platform_machine"],
+            ),
         )
 
         self._processed_pkgs = Cache()
@@ -129,16 +132,16 @@ def copy_server(self):
             with open(serverpath, "rb") as inp, open(outpath, "wb") as out:
                 out.write(inp.read())
         else:
-            import inspect
+            import inspect  # noqa: PLC0415
 
             with open(outpath, "w") as out:
                 out.write(inspect.getsource(server))
 
-    def _mirror(
+    def _mirror(  # noqa: C901, PLR0912
         self,
         requirement: packaging.requirements.Requirement,
-        required_by: packaging.requirements.Requirement = None,
-    ) -> Optional[dict]:
+        required_by: packaging.requirements.Requirement | None = None,
+    ) -> dict | None:
         if self._processed_pkgs.check(requirement):
             return None
 
@@ -153,11 +156,11 @@ def _mirror(
         else:
             print(f"{requirement}")
 
-        data: dict = None
+        data: dict | None = None
 
         # get information about this package from the Simple API in JSON
         # format as per PEP 691
-        request = urllib.request.Request(
+        request = urllib.request.Request(  # noqa: S310
             f"{self.index_url}{requirement.name}/",
             headers={
                 "Accept": "application/vnd.pypi.simple.v1+json",
@@ -165,9 +168,12 @@ def _mirror(
         )
 
         response_url = ""
-        with urllib.request.urlopen(request) as response:
+        with urllib.request.urlopen(request) as response:  # noqa: S310
             data = json.load(response)
             response_url = str(response.url)
+        if not data:
+            msg = f"Failed loading metadata: {response}"
+            raise RuntimeError(msg)
 
         # check metadata version ~1.0
         v_str = data["meta"]["api-version"]
@@ -175,23 +181,27 @@ def _mirror(
             v_str = "1.0"
         v_int = [int(i) for i in v_str.split(".")[:2]]
         if v_int[0] != 1:
-            raise Exception(f"Unsupported metadata version {v_str}, only support 1.x")
+            msg = f"Unsupported metadata version {v_str}, only support 1.x"
+            raise Exception(msg)  # noqa: TRY002
 
         files = data["files"]
         if files is None or not isinstance(files, list):
-            raise Exception("Expected response to contain a list of 'files'")
+            msg = "Expected response to contain a list of 'files'"
+            raise Exception(msg)  # noqa: TRY002
 
         # filter and enrich files
         files = self._filter_files(requirement, required_by, files)
         if files is None:
             if required_by is None:
-                raise Exception("No files match requirement")
+                msg = "No files match requirement"
+                raise Exception(msg)  # noqa: TRY002
             # this is a dependency, assume the dependency is not relevant
             # for any of our environments and don't return an error
             return None
 
         if len(files) == 0:
-            raise Exception(f"No files match requirement {requirement}")
+            msg = f"No files match requirement {requirement}"
+            raise Exception(msg)  # noqa: TRY002
 
         # download all files
         depdict = {}
@@ -201,9 +211,9 @@ def _mirror(
                 file_deps = self._process_file(requirement, file)
                 if file_deps:
                     depdict.update(file_deps)
-            except Exception:
+            except Exception:  # noqa: BLE001
                 print(
-                    "\tFailed processing file {}, skipping it".format(file["filename"])
+                    "\tFailed processing file {}, skipping it".format(file["filename"]),
                 )
                 traceback.print_exc()
                 continue
@@ -215,9 +225,9 @@ def _filter_files(
         self,
         requirement: packaging.requirements.Requirement,
-        required_by: packaging.requirements.Requirement,
+        required_by: packaging.requirements.Requirement | None,
         files: Iterable[dict],
-    ) -> Iterable[dict]:
+    ) -> list[dict] | None:
         # remove files with unsupported extensions
         pattern: str = rf"\.{self.package_type_regex}$"
         files = list(filter(lambda file: re.search(pattern, file["filename"]), files))
@@ -233,11 +243,11 @@ def _filter_files(
             elif re.search(r"\.(tar\.gz|zip)$", file["filename"]):
                 _, file["version"] = packaging.utils.parse_sdist_filename(
                     # fix: selenium-2.0-dev-9429.tar.gz -> 9429
-                    to_single_dash(file["filename"])
+                    to_single_dash(file["filename"]),
                 )
                 file["is_wheel"] = False
                 file["tags"] = None
-            except (
+            except (  # noqa: PERF203
                 packaging.version.InvalidVersion,
                 packaging.utils.InvalidSdistFilename,
                 packaging.utils.InvalidWheelFilename,
@@ -249,7 +259,7 @@ def _filter_files(
                 # packages with special versioning schemes, and we assume we
                 # can ignore such files
                 continue
-            except Exception:
+            except Exception:  # noqa: BLE001
                 print("\tSkipping file {}, exception caught".format(file["filename"]))
                 traceback.print_exc()
                 continue
@@ -257,8 +267,9 @@ def _filter_files(
         # sort all files by version in reverse order, and ignore yanked files
         files = list(
             filter(
-                lambda file: "version" in file and not file.get("yanked", False), files
-            )
+                lambda file: "version" in file and not file.get("yanked", False),
+                files,
+            ),
         )
         files.sort(key=lambda file: file["version"], reverse=True)
 
@@ -268,8 +279,9 @@ def _filter_files(
         if requirement.specifier is not None:
             files = list(
                 filter(
-                    lambda file: requirement.specifier.contains(file["version"]), files
-                )
+                    lambda file: requirement.specifier.contains(file["version"]),
+                    files,
+                ),
             )
 
         if len(files) == 0:
@@ -292,8 +304,9 @@ def _filter_files(
 
         return files
 
-    def _matches_environments(self, fileinfo: dict) -> bool:
-        if req := fileinfo.get("requires-python"):
+    def _matches_environments(self, fileinfo: dict) -> bool:  # noqa: C901, PLR0912
+        req = fileinfo.get("requires-python")
+        if req:
             # The Python versions in all of our environments must be supported
             # by this file in order to match.
             # Some packages specify their required Python versions with a simple
@@ -313,7 +326,7 @@ def _matches_environments(self, fileinfo: dict) -> bool:
                     # file does not support the Python version of one of our
                     # environments, reject it
                     return False
-            except Exception as e:
+            except packaging.specifiers.InvalidSpecifier as e:
                 print(f"\tIgnoring {fileinfo['filename']}: {e}")
                 return False
 
@@ -324,15 +337,19 @@ def _matches_environments(self, fileinfo: dict) -> bool:
             if intrp_name not in ("py", "cp"):
                 continue
 
-            intrp_set = packaging.specifiers.SpecifierSet(r">=" + intrp_ver)
+            if not intrp_ver:
+                msg = f"Unexpected interpreter tag {tag.interpreter} in file {fileinfo['filename']}"
+                raise ValueError(msg)
+
+            intrp_set = packaging.specifiers.SpecifierSet(">=" + intrp_ver)
 
             # As an example, cp38 seems to indicate CPython 3.8+, so we
             # check if the version matches any of the supported Pythons, and
             # only skip it if it does not match any.
             intrp_ver_matched = any(
-                map(
-                    lambda supported_python: intrp_set.contains(supported_python),
-                    self._supported_pyversions,
-                )
+                (
+                    intrp_set.contains(supported_python)
+                    for supported_python in self._supported_pyversions
+                ),
             )
 
             if intrp_ver and intrp_ver != "3" and not intrp_ver_matched:
@@ -354,7 +371,7 @@ def _process_file(
         self,
         requirement: packaging.requirements.Requirement,
         fileinfo: dict,
-    ) -> Dict[str, packaging.requirements.Requirement]:
+    ) -> dict[str, dict[str, packaging.requirements.Requirement]] | None:
         filepath = os.path.join(self.index_path, requirement.name, fileinfo["filename"])
         hashalg = (
             PREFERRED_HASH_ALG
@@ -364,7 +381,7 @@ def _process_file(
 
         self._download_file(fileinfo, filepath, hashalg)
 
-        md = self._extract_metadata(filepath, requirement.name, fileinfo["version"])
+        md = self._extract_metadata(filepath)
         deps = md.dependencies(requirement.extras, self.envs.values())
 
         if deps is None:
@@ -402,18 +419,18 @@ def _download_file(
             return True
 
         print("\t{}...".format(fileinfo["url"]), end=" ")
-        with urllib.request.urlopen(fileinfo["url"]) as inp, open(target, "wb") as out:
+        with urllib.request.urlopen(fileinfo["url"]) as inp, open(target, "wb") as out:  # noqa: S310
             out.write(inp.read())
         print("done")
 
         truehash = self._hash_file(target, hashalg)
         if truehash != exphash:
             os.remove(target)
-            raise ValueError(
-                "Digest mismatch for {}. Deleting file {}.".format(
-                    fileinfo["filename"], target
-                )
+            msg = "Digest mismatch for {}. Deleting file {}.".format(
+                fileinfo["filename"],
+                target,
             )
+            raise ValueError(msg)
 
         touch_file(target, fileinfo)
         return True
@@ -435,30 +452,29 @@ def _hash_file(self, filepath: str, hashalg: str) -> str:
     def _extract_metadata(
         self,
         filepath: str,
-        package: str,
-        version: packaging.version.Version,
     ) -> metadata.MetadataParser:
         md = metadata.MetadataParser(filepath)
 
-        archive = None
+        archive: tarfile.TarFile | zipfile.ZipFile | None = None
         members = None
-        opener = None
+        opener: Callable[[str], IO[bytes] | None]
         if re.search(r"\.(whl|zip)$", filepath):
             archive = zipfile.ZipFile(filepath)
             members = [member.filename for member in archive.infolist()]
             opener = archive.open
         elif re.search(r"\.tar.gz$", filepath):
-            archive = tarfile.open(filepath)
+            archive = tarfile.open(filepath)  # noqa: SIM115
             members = [member.name for member in archive.getmembers()]
             opener = archive.extractfile
         else:
-            raise Exception(f"Unexpected distribution file {filepath}")
+            msg = f"Unexpected distribution file {filepath}"
+            raise Exception(msg)  # noqa: TRY002
 
         for member in members:
             try:
                 md.parse(opener, member)
-            except Exception as e:
+            except Exception as e:  # noqa: BLE001, PERF203
                 print(f"Failed parsing member {member} of {filepath}: {e}")
 
         if md.seen_metadata_file():
@@ -469,7 +485,7 @@ def _extract_metadata(
         return md
 
 
-def parse_interpreter(inp: str) -> Tuple[str, str]:
+def parse_interpreter(inp: str) -> tuple[str, str | None]:
     """
     Parse interpreter tags in the name of a binary wheel file. Returns a tuple
     of interpreter name and optional version, which will either be or
@@ -482,9 +498,9 @@ def parse_interpreter(inp: str) -> Tuple[str, str]:
     intr = m.group(1)
 
     version = None
-    if m.lastindex > 1:
+    if m.lastindex and m.lastindex > 1:
         version = m.group(2)
-        if m.lastindex > 2:
+        if m.lastindex > 2:  # noqa: PLR2004
             version = f"{version}.{m.group(3)}"
 
     return (intr, version)
@@ -521,16 +537,15 @@ def mirror(args: argparse.Namespace):
     #     urllib3 =
     #         <1.27
    #         >=2
-    #         [brotli]
+    #         [brotli]  # noqa: ERA001
 
     for req in reqs:
-        req = req.strip()
-        m.mirror(f"{package}{req}")
+        m.mirror(f"{package}{req.strip()}")
 
     if not args.skip_server_copy:
         m.copy_server()
 
 
-def main():
+def main():  # noqa: C901
     """
     Executes the command line interface of Morgan. Use -h for a full list of
     flags, options and arguments.
     """
@@ -543,10 +558,11 @@ def my_url(arg):
         url = urllib.parse.urlparse(arg)
         if all((url.scheme, url.netloc)):
             return f"{url.scheme}://{url.netloc}{url.path}/"
-        raise argparse.ArgumentTypeError("Invalid URL")
+        msg = "Invalid URL"
+        raise argparse.ArgumentTypeError(msg)
 
     parser = argparse.ArgumentParser(
-        description="Morgan: PyPI Mirror for Restricted Environments"
+        description="Morgan: PyPI Mirror for Restricted Environments",
     )
 
     parser.add_argument(
@@ -577,18 +593,16 @@ def my_url(arg):
         action="store_true",
         help="Skip server copy in mirror command (default: False)",
     )
-    (
-        parser.add_argument(
-            "-a",
-            "--mirror-all-versions",
-            dest="mirror_all_versions",
-            action="store_true",
-            help=(
-                "For packages listed in the [requirements] section, mirror every release "
-                "that matches their version specifiers. "
-                "Transitive dependencies still mirror only the latest matching release. "
-                "(Default: only the latest matching release)"
-            ),
+    parser.add_argument(
+        "-a",
+        "--mirror-all-versions",
+        dest="mirror_all_versions",
+        action="store_true",
+        help=(
+            "For packages listed in the [requirements] section, mirror every release "
+            "that matches their version specifiers. "
+            "Transitive dependencies still mirror only the latest matching release. "
+            "(Default: only the latest matching release)"
         ),
     )
     parser.add_argument(
@@ -629,6 +643,7 @@ def my_url(arg):
         server.run(args.index_path, args.host, args.port, args.no_metadata)
         return
     if args.command == "version":
+        # ruff: noqa: T201
        print(f"Morgan v{__version__}")
         return
 
@@ -637,7 +652,8 @@ def my_url(arg):
     if not os.path.isfile(args.config):
         # If a file named in filenames cannot be opened, that file will be ignored
         # https://docs.python.org/3.12/library/configparser.html#configparser.ConfigParser.read
-        raise argparse.ArgumentTypeError(f"Invalid config: {args.config}")
+        msg = f"Invalid config: {args.config}"
+        raise argparse.ArgumentTypeError(msg)
 
     if args.command == "mirror":
         mirror(args)
diff --git a/morgan/configurator.py b/morgan/configurator.py
index 796139e..dbf0566 100644
--- a/morgan/configurator.py
+++ b/morgan/configurator.py
@@ -11,7 +11,7 @@
 if Version(platform.python_version()) < Version("3.8"):
     import importlib_metadata as metadata
 else:
-    from importlib import metadata
+    from importlib import metadata  # type: ignore[no-redef]
 
 
 def generate_env(name: str = "local"):
@@ -54,6 +54,7 @@ def generate_reqs(mode: str = ">="):
     requirements = {
         dist.metadata["Name"].lower(): f"{mode}{dist.version}"
         for dist in metadata.distributions()
+        if dist.metadata is not None
     }
     config = configparser.ConfigParser()
     config["requirements"] = OrderedDict(sorted(requirements.items()))
@@ -64,11 +65,12 @@ def add_arguments(parser: argparse.ArgumentParser):
     """
     Adds command line options specific to this script to an argument parser.
     """
-
     parser.add_argument(
-        "-e", "--env", dest="env", help="Name of environment to configure"
+        "-e",
+        "--env",
+        dest="env",
+        help="Name of environment to configure",
     )
-
     parser.add_argument(
         "-m",
         "--mode",
diff --git a/morgan/metadata.py b/morgan/metadata.py
index 31e7081..57bc18f 100644
--- a/morgan/metadata.py
+++ b/morgan/metadata.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
 import email.parser
 import re
-from typing import BinaryIO, Callable, Dict, Iterable, Set
+from typing import IO, Any, Callable, Iterable
 
 import tomli
 from packaging.markers import Marker
@@ -43,15 +45,15 @@ class MetadataParser:
         The version of the package
     python_requirement : packaging.specifiers.SpecifierSet
         A specification of the Python versions supported by the package.
-    extras_provided : Set[str]
+    extras_provided : set[str]
         Extras provided by the package.
-    core_dependencies: Set[packaging.requirements.Requirement]
+    core_dependencies: set[packaging.requirements.Requirement]
         Core dependencies of the package.
-    optional_dependencies: Dict[str, Set[packaging.requirements.Requirement]]
+    optional_dependencies: dict[str, set[packaging.requirements.Requirement]]
         Optional dependencies of the package. A dictionary whose keys are
         either names of extras (from extras_provided) or environment marker
         constraints (e.g. :python_version<2.7).
-    build_dependencies : Set[packaging.requirements.Requirement]
+    build_dependencies : set[packaging.requirements.Requirement]
         Dependencies required to build the package.
     """
@@ -68,26 +70,27 @@ def __init__(self, source_path: str):
         """
         self.source_path: str = source_path
 
-        self.name: str = None
-        self.version: Version = None
-        self.python_requirement: SpecifierSet = None
-        self.extras_provided: Set[str] = set()
-        self.core_dependencies: Set[Requirement] = set()
-        self.optional_dependencies: Dict[str, Set[Requirement]] = {}
-        self.build_dependencies: Set[Requirement] = set()
-
+        self.name: str | None = None
+        self.version: Version | None = None
+        self.python_requirement: SpecifierSet | None = None
+        self.extras_provided: set[str] = set()
+        self.core_dependencies: set[Requirement] = set()
+        self.optional_dependencies: dict[str, set[Requirement]] = {}
+        self.build_dependencies: set[Requirement] = set()
+
+    # ruff: noqa: C901
     def parse(
         self,
-        opener: Callable[[str], BinaryIO],
+        opener: Callable[[str], IO[bytes] | None],
         filename: str,
-    ):
+    ) -> None:
         """
         Parses a file, gathering whatever metadata can be gathered from it.
         Any file can be provided to the method, irrelevant files are simply
         ignored.
 
         Parameters
         ----------
-        opener : Callable[[str], BinaryIO]
+        opener : Callable[[str], IO[bytes] | None]
             A function that can be used to open the file. The function takes
             one parameter, which is the file name, and returns a file object
             opened in binary mode.
@@ -98,7 +101,7 @@ def parse(
             the kind of file it is.
         """
 
-        parse_func = None
+        parse_func: Callable[[IO[bytes]], Any] | None = None
         main_metadata_file = False
 
         if re.search(r"\.whl$", self.source_path):
@@ -114,18 +117,27 @@ def parse(
             parse_func = self._parse_metadata_file
             main_metadata_file = True
         elif re.fullmatch(
-            r"[^/]+(/[^/]+)?\.egg-info/(setup_)?requires.txt", filename
+            r"[^/]+(/[^/]+)?\.egg-info/(setup_)?requires.txt",
+            filename,
         ):
             parse_func = self._parse_requirestxt
         elif re.fullmatch(r"[^/]+/pyproject.toml", filename):
             parse_func = self._parse_pyproject
 
-        if parse_func:
-            with opener(filename) as fp:
-                if main_metadata_file:
-                    self._metadata_file = fp.read()
-                    fp.seek(0)
-                parse_func(fp)
+        if not parse_func:
+            return
+
+        # Our file_object can be either None or IO[bytes] because
+        # TarFile.extractfile can return None
+        file_object = opener(filename)
+        if file_object is None:
+            return
+
+        with file_object as fp:
+            if main_metadata_file:
+                self._metadata_file = fp.read()
+                fp.seek(0)
+            parse_func(fp)
 
     def seen_metadata_file(self) -> bool:
         """
@@ -142,12 +154,14 @@ def write_metadata_file(self, target: str):
         read yet, an exception will be raised.
         """
         if not hasattr(self, "_metadata_file"):
-            raise Exception("Main METADATA file has not been read yet")
+            # ruff: noqa: TRY002
+            msg = "Main METADATA file has not been read yet"
+            raise Exception(msg)
 
         with open(target, "wb") as out:
             out.write(self._metadata_file)
 
-    def dependencies(self, extras: Set[str], envs: Iterable[Dict]) -> Set[Requirement]:
+    def dependencies(self, extras: set[str], envs: Iterable[dict]) -> set[Requirement]:
         """
         Resolves the dependencies of the package, returning a set of
         requirements. Only requirements that are relevant to the provided extras
@@ -155,12 +169,12 @@ def dependencies(self, extras: Set[str], envs: Iterable[Dict]) -> Set[Requiremen
 
         Parameters
         ----------
-        extras : Set[str] = set()
+        extras : set[str] = set()
             A set of extras that the package was required with. For example,
             if the instance of this class is used to parse the metadata of the
             package "pymongo", and the requirement string for that package was
             "pymongo[snappy,zstd]", then the set of extras will be
             (snappy, zstd).
-        envs: Iterable[Dict] = []
+        envs: Iterable[dict] = []
             The list of environments for which Morgan is downloading package
             distributions. These are simple dictionaries whose keys match
             those defined by the "Environment Markers" section of PEP 508.
@@ -179,6 +193,8 @@ def dependencies(self, extras: Set[str], envs: Iterable[Dict]) -> Set[Requiremen
                 # this dependency includes a set of environment marker
                 # specifications
                 orig = extra
+                # ruff: noqa: PLW2901, FIX002, TD003
+                # TODO(grische): rewrite to avoid overwriting extra variable from the loop
                 (extra, spec) = extra.split(":")
                 if extra and extra not in extras:
                     continue
@@ -193,14 +209,14 @@ def dependencies(self, extras: Set[str], envs: Iterable[Dict]) -> Set[Requiremen
         return filter_relevant_requirements(deps, envs, extras)
 
     def _add_core_requirements(self, reqs):
-        self.core_dependencies |= set([Requirement(dep) for dep in reqs])
+        self.core_dependencies |= {Requirement(dep) for dep in reqs}
 
     def _add_optional_requirements(self, extra, reqs):
         if extra not in self.optional_dependencies:
             self.optional_dependencies[extra] = set()
-        self.optional_dependencies[extra] |= set([Requirement(dep) for dep in reqs])
+        self.optional_dependencies[extra] |= {Requirement(dep) for dep in reqs}
 
-    def _parse_pyproject(self, fp):
+    def _parse_pyproject(self, fp: IO[bytes]):
         data = tomli.load(fp)
 
         project = data.get("project")
@@ -222,27 +238,28 @@ def _parse_pyproject(self, fp):
         if "optional-dependencies" in project:
             for extra in project["optional-dependencies"]:
                 self._add_optional_requirements(
-                    extra, project["optional-dependencies"][extra]
+                    extra,
+                    project["optional-dependencies"][extra],
                 )
 
         build_system = data.get("build-system")
         if build_system is not None and "requires" in build_system:
-            self.build_dependencies |= set(
-                [Requirement(req) for req in build_system["requires"]]
-            )
+            self.build_dependencies |= {
+                Requirement(req) for req in build_system["requires"]
+            }
 
-    def _parse_metadata_file(self, fp):
-        data = email.parser.BytesParser().parse(fp, True)
+    def _parse_metadata_file(self, fp: IO[bytes]):
+        data = email.parser.BytesParser().parse(fp, headersonly=True)
 
-        (name, version, metadata_version) = (
+        (name, version, metadata_version_str) = (
             data.get("Name"),
             data.get("Version"),
             data.get("Metadata-Version"),
         )
-        if metadata_version is None:
+        if metadata_version_str is None:
             return
-        metadata_version = Version(metadata_version)
+        metadata_version = Version(metadata_version_str)
 
         if name is not None:
             self.name = canonicalize_name(name)
@@ -272,6 +289,7 @@ def _parse_metadata_12(self, data):
             req = Requirement(requirement_str)
             extra = None
             if req.marker is not None:
+                # ruff: noqa: SLF001
                 for marker in req.marker._markers:
                     if (
                         isinstance(marker[0], MarkerVariable)
@@ -293,11 +311,11 @@ def _parse_metadata_11(self, data):
         for requirement_str in requires:
             self.core_dependencies.add(Requirement(requirement_str))
 
-    def _parse_requirestxt(self, fp):
+    def _parse_requirestxt(self, fp: IO[bytes]):
         section = None
-        content = []
-        for line in fp.readlines():
-            line = line.strip().decode("UTF-8")
+        content: list[str] = []
+        for line_bytes in fp:
+            line = line_bytes.strip().decode("UTF-8")
             if line.startswith("["):
                 if line.endswith("]"):
                     if section or content:
@@ -310,7 +328,8 @@ def _parse_requirestxt(self, fp):
                     section = line[1:-1]
                     content = []
                 else:
-                    raise ValueError("Invalid section heading", line)
+                    msg = "Invalid section heading"
+                    raise ValueError(msg, line)
             elif line:
                 content.append(line)
 
@@ -320,3 +339,7 @@
                 self._add_build_requirements(content)
             else:
                 self._add_core_requirements(content)
+
+    def _add_build_requirements(self, reqs):
+        msg = "Setuptools build requirements not supported"
+        raise NotImplementedError(msg)
diff --git a/morgan/server.py b/morgan/server.py
index 989fc63..f47ca00 100644
--- a/morgan/server.py
+++ b/morgan/server.py
@@ -1,3 +1,12 @@
+# ruff: noqa: FBT001, FBT002, PERF401, PLR2004, PLW0602, PLW0603, S104
+# FBT: allow boolean as positional argument
+# PERF401: allow manual list comprehensions
+# PLR2004: allow magic values in comparisons
+# PLW0602: allow unassigned globals
+# PLW0603: allow usage of globals
+# S104: allow binding to all network interfaces even if it is insecure
+from __future__ import annotations
+
 import argparse
 import html
 import http.server
@@ -6,6 +15,7 @@
 import pathlib
 import re
 import urllib.parse
+from typing import Any
 
 PYPI_JSON_TYPE_V1 = "application/vnd.pypi.simple.v1+json"
 PYPI_JSON_TYPE_LT = "application/vnd.pypi.simple.latest+json"
@@ -37,7 +47,7 @@ def do_GET(self):
             self.end_headers()
             self.wfile.write(
                 b"The server cannot generate a response "
-                b"in any of the requested MIME types"
+                b"in any of the requested MIME types",
             )
             return
@@ -57,7 +67,7 @@ def do_GET(self):
 
         self._serve_notfound()
 
-    def _serve_notfound(self, msg: str = None):
+    def _serve_notfound(self, msg: str | None = None):
         self.send_response(404)
         self.send_header("Content-Type", "text/plain")
         self.end_headers()
@@ -89,7 +99,9 @@ def _serve_project_listing(self, ct):
                 newline = "\n" if i < len(projects) - 1 else ""
                 self.wfile.write(
                     ' {}{}'.format(
-                        html.escape(project["name"]), project["name"], newline
+                        html.escape(project["name"]),
+                        project["name"],
+                        newline,
                     ).encode("utf-8"),
                 )
             self.wfile.write(b"\n \n")
@@ -107,7 +119,7 @@ def _serve_project(self, ct, project):
         with os.scandir(path) as it:
             for entry in it:
                 if re.search(r"\.(whl|zip|tar\.gz)$", entry.name):
-                    file = {
+                    file: dict[str, Any] = {
                         "filename": entry.name,
                         "url": f"/{project}/{entry.name}",
                         "hashes": {},
@@ -135,7 +147,7 @@ def _serve_project(self, ct, project):
             self.send_header("Content-Type", PYPI_JSON_TYPE_V1)
             self.end_headers()
             body = json.dumps(
-                {"name": project, "meta": {"api-version": "1.0"}, "files": files}
+                {"name": project, "meta": {"api-version": "1.0"}, "files": files},
             )
             self.wfile.write(body.encode("utf-8"))
         else:
@@ -216,7 +228,7 @@ def run(
     ).serve_forever()
 
 
-def parse_accept_header(header_val: str) -> str:
+def parse_accept_header(header_val: str | None) -> str | None:
     """
     Parses an Accept HTTP header and returns a selected MIME type for the server
     to answer with, honoring priorities defined in the header value. If the
diff --git a/morgan/utils.py b/morgan/utils.py
index a690e00..a2e48cb 100644
--- a/morgan/utils.py
+++ b/morgan/utils.py
@@ -1,14 +1,18 @@
+from __future__ import annotations
+
 import os
 import re
 from collections import OrderedDict
-from typing import Dict, Iterable, Optional, Set
+from typing import TYPE_CHECKING, Iterable
 
 import dateutil  # type: ignore[import-untyped]
-from packaging.requirements import Requirement
+
+if TYPE_CHECKING:
+    from packaging.requirements import Requirement
 
 
 def to_single_dash(filename):
-    "https://packaging.python.org/en/latest/specifications/version-specifiers/#version-specifiers"
+    """https://packaging.python.org/en/latest/specifications/version-specifiers/#version-specifiers"""
 
     # selenium-2.0-dev-9429.tar.gz
     m = re.search(r"-[0-9].*-", filename)
@@ -23,7 +27,7 @@ def to_single_dash(filename):
     # selenium-2.0.dev9429.tar.gz
 
 
-class Cache:  # pylint: disable=protected-access
+class Cache:
     def __init__(self):
         self.cache: set[str] = set()
@@ -38,18 +42,21 @@ def add(self, req: Requirement):
         else:
             self.cache.add(str(req))
 
-    def is_simple_case(self, req):
+    def is_simple_case(self, req: Requirement) -> bool:
         if not req.marker and not req.extras:
             specifier = req.specifier
             if not specifier:
                 return True
+            # ruff: noqa: SLF001
             if all(spec.operator in (">", ">=") for spec in specifier._specs):
                 return True
         return False
 
 
 def is_requirement_relevant(
-    requirement: Requirement, envs: Iterable[Dict], extras: Optional[Set[str]] = None
+    requirement: Requirement,
+    envs: Iterable[dict],
+    extras: set[str] | None = None,
 ) -> bool:
     """Determines if a requirement is relevant for any of the provided environments.
@@ -84,9 +91,9 @@ def is_requirement_relevant(
 
 def filter_relevant_requirements(
     requirements: Iterable[Requirement],
-    envs: Iterable[Dict],
-    extras: Optional[Set[str]] = None,
-) -> Set[Requirement]:
+    envs: Iterable[dict],
+    extras: set[str] | None = None,
+) -> set[Requirement]:
     """Filters a collection of requirements to only those relevant for the provided environments.
 
     Args:
@@ -101,7 +108,7 @@ def filter_relevant_requirements(
 
 
 def touch_file(path: str, fileinfo: dict):
-    "upload-time: 2025-05-28T18:46:29.349478Z"
+    """upload-time: 2025-05-28T18:46:29.349478Z"""
     time_str = fileinfo.get("upload-time")
     if not path or not time_str:
         return
diff --git a/pyproject.toml b/pyproject.toml
index 7efcd27..d81c713 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,11 +14,8 @@ keywords = [ "pypi", "mirror", "packages", "pypi-mirror" ]
 readme = "README.md"
 requires-python = ">=3.7"
 dependencies = [
-    #"packaging~=21.3",
-    # hatchling 1.27.0 requires packaging>=24.2, but you have packaging 21.3 which is incompatible.
-    "packaging~=24.2",
-    # packaging.utils.InvalidSdistFilename: Invalid sdist filename (invalid version): 'expandvars-0.6.0-macosx-10.15-x86_64.tar.gz' (old versions)
-    # solved in morgan/__init__.py
+    "packaging~=24.0; python_version == '3.7'",
+    "packaging~=24.2; python_version >= '3.8'",
     "importlib-metadata~=4.12.0; python_version < '3.8'",
     "tomli~=2.0.1",
     "python-dateutil",
@@ -36,13 +33,20 @@ classifiers = [
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "Programming Language :: Python :: Implementation :: CPython",
     "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
 ]
 
 [project.optional-dependencies]
 test = ["pytest~=7.1.3"]
-lint = ["ruff~=0.14.3"]
+lint = [
+    "ruff~=0.14.3",
+    "mypy~=1.19.1",
+    "importlib-metadata", # required by mypy for all Python versions
+]
 
 [tool.hatch.version]
 path = "morgan/__about__.py"
@@ -64,3 +68,28 @@ docstring-code-format = true
 # Set the line length limit used when formatting code snippets in
 # docstrings.
 docstring-code-line-length = "dynamic"
+
+[tool.ruff.lint]
+select = ["ALL"]
+ignore = [
+    "ANN", # don't enforce annotations
+    "D10", # don't enforce docstrings
+    "D2", # don't enforce pydoc style
+    "D4", # don't enforce pydoc style
+    "E501", # ignore line-too-long as we ruff format is enforced anyway
+    "PTH", # ignore pathlib suggestions
+]
+
+[tool.ruff.lint.per-file-ignores]
+"tests/**" = [
+    "ANN", # exclude annotations from test files
+    "ARG", # Unused function args might be fixtures
+    "FBT", # Don't care about booleans as positional arguments in tests, e.g. via @pytest.mark.parametrize()
+    "INP001", # "implicit namespace package" is invalid for pytest
+    "PLR2004", # allow magic-value-comparison in tests
+    "S101", # asserts are allowed in tests
+]
+
+[tool.mypy]
+check_untyped_defs = true
+follow_untyped_imports = true
diff --git a/tests/test_init.py b/tests/test_init.py
index 4f1afd9..43d7cba 100644
--- a/tests/test_init.py
+++ b/tests/test_init.py
@@ -11,7 +11,7 @@ class TestParseInterpreter:
     @pytest.mark.parametrize(
-        "interpreter_string, expected_name, expected_version",
+        ("interpreter_string", "expected_name", "expected_version"),
         [
             ("cp38", "cp", "3.8"),
             ("cp3", "cp", "3"),
@@ -30,7 +30,10 @@ class TestParseInterpreter:
         ],
     )
     def test_parse_interpreter_components(
-        self, interpreter_string, expected_name, expected_version
+        self,
+        interpreter_string,
+        expected_name,
+        expected_version,
     ):
         name, version = parse_interpreter(interpreter_string)
         assert name == expected_name
@@ -77,7 +80,7 @@ def temp_index_path(self, tmpdir):
 
             [requirements]
             requests = >=2.0.0
-            """
+            """,
         )
 
         return tmpdir
@@ -118,7 +121,8 @@ def test_server_file_copying(self, temp_index_path):
         )
 
         with open(server.__file__, "rb") as original_server, open(
-            expected_serverpath, "rb"
+            expected_serverpath,
+            "rb",
         ) as copied_server:
             assert original_server.read() == copied_server.read(), (
                 "Copied file should match source"
             )
@@ -142,7 +146,7 @@ def test_file_hashing(self, temp_index_path):
         expected_hash = hashlib.sha256(test_data).hexdigest()
 
         # pylint: disable=W0212
-        digest = mirrorer._hash_file(test_file, "sha256")
+        digest = mirrorer._hash_file(test_file, "sha256")  # noqa: SLF001
         assert digest == expected_hash, "Returned hash should match sha256 digest"
 
         hash_file = test_file + ".hash"
@@ -166,7 +170,7 @@ def temp_index_path(self, tmp_path):
             sys_platform = linux
             platform_machine = x86_64
             platform_tag = manylinux
-            """
+            """,
         )
 
         return tmp_path
@@ -190,7 +194,7 @@ def make_file(filename, **overrides):
             fileinfo = {
                 "filename": filename,
                 "hashes": {
-                    "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+                    "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
                 },
                 "url": f"https://example.com/{filename}",
             }
@@ -214,7 +218,7 @@ def extract_versions(files):
         return [str(file["version"]) for file in files]
 
     @pytest.mark.parametrize(
-        "version_spec,expected_versions",
+        ("version_spec", "expected_versions"),
         [
             (">=1.5.0", ["1.6.0", "1.5.2", "1.5.1"]),
             (">=1.5.0,<1.6.0", ["1.5.2", "1.5.1"]),
@@ -224,25 +228,31 @@ def extract_versions(files):
         ids=["basic_range", "complex_range", "exact_match", "no_match"],
     )
     def test_filter_files_with_all_versions_mirrored(
-        self, make_mirrorer, sample_files, version_spec, expected_versions
+        self,
+        make_mirrorer,
+        sample_files,
+        version_spec,
+        expected_versions,
     ):
         """Test that file filtering correctly handles different version specifications."""
         mirrorer = make_mirrorer(
             mirror_all_versions=True,
         )
         requirement = packaging.requirements.Requirement(
-            f"sample_package{version_spec}"
+            f"sample_package{version_spec}",
         )
 
         # pylint: disable=W0212
-        filtered_files = mirrorer._filter_files(
-            requirement=requirement, required_by=None, files=sample_files
+        filtered_files = mirrorer._filter_files(  # noqa: SLF001
+            requirement=requirement,
+            required_by=None,
+            files=sample_files,
         )
 
         assert self.extract_versions(filtered_files) == expected_versions
 
     @pytest.mark.parametrize(
-        "version_spec,expected_versions",
+        ("version_spec", "expected_versions"),
         [
             (">=1.5.0", ["1.6.0"]),
             (">=1.5.0,<1.6.0", ["1.5.2"]),
@@ -252,17 +262,23 @@ def test_filter_files_with_all_versions_mirrored(
         ids=["basic_range", "complex_range", "exact_match", "no_match"],
     )
     def test_filter_files_with_latest_version_mirrored(
-        self, make_mirrorer, sample_files, version_spec, expected_versions
+        self,
+        make_mirrorer,
+        sample_files,
+        version_spec,
+        expected_versions,
     ):
         """Test that file filtering correctly handles different version specifications."""
         mirrorer = make_mirrorer(mirror_all_versions=False)
         requirement = packaging.requirements.Requirement(
-            f"sample_package{version_spec}"
+            f"sample_package{version_spec}",
         )
 
         # pylint: disable=W0212
-        filtered_files = mirrorer._filter_files(
-            requirement=requirement, required_by=None, files=sample_files
+        filtered_files = mirrorer._filter_files(  # noqa: SLF001
+            requirement=requirement,
+            required_by=None,
+            files=sample_files,
         )
 
         assert self.extract_versions(filtered_files) == expected_versions
diff --git a/tests/test_metadata_parser.py b/tests/test_metadata_parser.py
index a6c27ae..a4eeec7 100644
--- a/tests/test_metadata_parser.py
+++ b/tests/test_metadata_parser.py
@@ -79,7 +79,7 @@ def test_parse_metadata_file(self, parser, metadata_content):
         """Test parsing a metadata file with dependencies"""
         mock_fp = io.BytesIO(metadata_content)
         # pylint: disable=W0212
-        parser._parse_metadata_file(mock_fp)
+        parser._parse_metadata_file(mock_fp)  # noqa: SLF001
 
         assert parser.name == "example-package"
         assert parser.version == Version("1.0.0")
@@ -87,14 +87,14 @@ def test_parse_metadata_file(self, parser, metadata_content):
         assert parser.extras_provided == {"web", "test"}
 
         assert len(parser.core_dependencies) == 2  # requests and numpy
-        assert set(dep.name for dep in parser.core_dependencies) == {
+        assert {dep.name for dep in parser.core_dependencies} == {
             "requests",
             "numpy",
         }
 
         assert set(parser.optional_dependencies.keys()) == {"web", "test"}
         assert len(parser.optional_dependencies["web"]) == 1
-        assert list(parser.optional_dependencies["web"])[0].name == "flask"
+        assert next(iter(parser.optional_dependencies["web"])).name == "flask"
 
     def test_parse_pyproject(self, parser, pyproject_content):
         """Test parsing a pyproject.toml file"""
@@ -116,18 +116,18 @@ def test_parse_pyproject(self, parser, pyproject_content):
             },
         ):
             # pylint: disable=W0212
-            parser._parse_pyproject(mock_fp)
+            parser._parse_pyproject(mock_fp)  # noqa: SLF001
 
         assert parser.name == "example-package"
         assert parser.version == Version("1.0.0")
         assert str(parser.python_requirement) == ">=3.7"
 
         assert len(parser.core_dependencies) == 1
-        assert list(parser.core_dependencies)[0].name == "requests"
+        assert next(iter(parser.core_dependencies)).name == "requests"
 
         assert set(parser.optional_dependencies.keys()) == {"web", "test"}
 
         assert len(parser.build_dependencies) == 2
-        assert set(dep.name for dep in parser.build_dependencies) == {
+        assert {dep.name for dep in parser.build_dependencies} == {
             "setuptools",
             "wheel",
         }
@@ -138,10 +138,10 @@ def test_parse_requirestxt(self, parser, requires_txt_content):
         mock_fp.name = "package.egg-info/requires.txt"  # Not setup_requires.txt
 
         # pylint: disable=W0212
-        parser._parse_requirestxt(mock_fp)
+        parser._parse_requirestxt(mock_fp)  # noqa: SLF001
 
         assert len(parser.core_dependencies) == 2
-        assert set(dep.name for dep in parser.core_dependencies) == {
+        assert {dep.name for dep in parser.core_dependencies} == {
             "requests",
             "click",
         }
@@ -149,13 +149,13 @@ def test_parse_requirestxt(self, parser, requires_txt_content):
         assert set(parser.optional_dependencies.keys()) == {"web", "test"}
         assert len(parser.optional_dependencies["web"]) == 2
         assert len(parser.optional_dependencies["test"]) == 1
-        assert set(dep.name for dep in parser.optional_dependencies["web"]) == {
+        assert {dep.name for dep in parser.optional_dependencies["web"]} == {
             "flask",
             "jinja2",
         }
 
     @pytest.mark.parametrize(
-        "extras, python_version, expected_count",
+        ("extras", "python_version", "expected_count"),
         [
             (set(), "3.7", 3),  # numpy excluded due to marker
             (set(), "3.8", 4),  # numpy included due to marker
diff --git a/tests/test_server.py b/tests/test_server.py
index 897ae55..640a954 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -4,7 +4,7 @@
 
 @pytest.mark.parametrize(
-    "accept_option, exp_dict",
+    ("accept_option", "exp_dict"),
     [
         (server.GENL_HTML_TYPE, {"mime": server.GENL_HTML_TYPE, "priority": 0}),
         (
@@ -35,7 +35,7 @@ def test_parse_accept_option(accept_option, exp_dict):
 
 @pytest.mark.parametrize(
-    "accept_header, exp_mime",
+    ("accept_header", "exp_mime"),
     [
         (None, server.PYPI_HTML_TYPE_V1),
         (
diff --git a/tests/test_utils.py b/tests/test_utils.py
index bf296cd..87325db 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -64,7 +64,7 @@ def test_simple_requirement_with_empty_environments(self):
         assert result
 
     @pytest.mark.parametrize(
-        "requirement_str,expected",
+        ("requirement_str", "expected"),
         [
             ('package; python_version < "3.8"', True),
             ('package; python_version > "3.9"', False),
@@ -73,7 +73,10 @@
         ids=["py37_only", "py37_and_above", "py35_and_below"],
     )
     def test_requirement_with_python_version_marker(
-        self, requirement_str, expected, python_environments
+        self,
+        requirement_str,
+        expected,
+        python_environments,
     ):
         """Test requirements with Python version markers"""
         req = Requirement(requirement_str)
@@ -83,7 +86,7 @@ def test_requirement_with_python_version_marker(
         assert result == expected
 
     @pytest.mark.parametrize(
-        "requirement_str,expected",
+        ("requirement_str", "expected"),
         [
             ('package; sys_platform == "linux"', True),
             ('package; sys_platform == "linux" or sys_platform == "win32"', True),
@@ -92,7 +95,10 @@ def test_requirement_with_python_version_marker(
         ids=["linux_only", "linux_or_windows", "freebsd_only"],
     )
     def test_requirement_with_platform_marker(
-        self, requirement_str, expected, platform_environments
+        self,
+        requirement_str,
+        expected,
+        platform_environments,
     ):
         """Test requirements with platform markers"""
         req = Requirement(requirement_str)
@@ -102,7 +108,7 @@ def test_requirement_with_platform_marker(
         assert result == expected
 
     @pytest.mark.parametrize(
-        "extras,expected",
+        ("extras", "expected"),
         [
             ({"test"}, True),  # With matching extra
             ({"other"}, False),  # With non-matching extra
@@ -119,7 +125,7 @@ def test_requirement_with_extra_marker(self, extras, expected, python_environmen
         assert result == expected
 
     @pytest.mark.parametrize(
-        "requirement_str,extras,expected",
+        ("requirement_str", "extras", "expected"),
         [
             ('package; python_version >= "3.8" and extra == "test"', {"test"}, True),
             ('package; python_version >= "3.8" and extra == "test"', {"other"}, False),
@@ -136,7 +142,11 @@ def test_requirement_with_extra_marker(self, extras, expected, python_environmen
         ],
     )
     def test_complex_requirement_with_combined_markers(
-        self, requirement_str, extras, expected, python_environments
+        self,
+        requirement_str,
+        extras,
+        expected,
+        python_environments,
     ):
         """Test requirements with combined markers"""
         req = Requirement(requirement_str)
@@ -146,7 +156,7 @@ def test_complex_requirement_with_combined_markers(
         assert result == expected
 
     @pytest.mark.parametrize(
-        "extras,expected_count,expected_names",
+        ("extras", "expected_count", "expected_names"),
         [
             (None, 3, {"always-relevant", "py37-only", "py38-plus"}),
             ({"test"}, 4, {"always-relevant", "py37-only", "py38-plus", "test-extra"}),
@@ -154,7 +164,11 @@ def test_complex_requirement_with_combined_markers(
         ids=["no_extras", "with_test_extra"],
     )
     def test_filter_relevant_requirements(
-        self, extras, expected_count, expected_names, python_environments
+        self,
+        extras,
+        expected_count,
+        expected_names,
+        python_environments,
     ):
         """Test filtering a collection of requirements"""
         requirements = [
@@ -166,7 +180,9 @@ def test_filter_relevant_requirements(
         ]
 
         filtered = filter_relevant_requirements(
-            requirements, python_environments, extras=extras
+            requirements,
+            python_environments,
+            extras=extras,
         )
 
         assert len(filtered) == expected_count
@@ -174,7 +190,7 @@ def test_filter_relevant_requirements(
     def test_filter_with_empty_requirements(self):
         """Test filtering with empty requirements list"""
-        requirements = []
+        requirements: list[Requirement] = []
         environments = [{"python_version": "3.8"}]
 
         filtered = filter_relevant_requirements(requirements, environments)
@@ -187,7 +203,7 @@ def test_filter_with_empty_environments(self):
             Requirement("package1"),
             Requirement('package2; python_version >= "3.8"'),
         ]
-        environments = []
+        environments: list[dict] = []
 
         filtered = filter_relevant_requirements(requirements, environments)