diff --git a/aiven_mysql_migrate/dump_tools.py b/aiven_mysql_migrate/dump_tools.py
new file mode 100644
index 0000000..82812f3
--- /dev/null
+++ b/aiven_mysql_migrate/dump_tools.py
@@ -0,0 +1,119 @@
+# Copyright (c) 2025 Aiven, Helsinki, Finland. https://aiven.io/
+from abc import ABC, abstractmethod
+from aiven_mysql_migrate.migration_executor import ProcessExecutor
+from aiven_mysql_migrate.utils import MySQLConnectionInfo
+from enum import Enum
+from typing import List, Optional
+
+import logging
+import shlex
+
+LOGGER = logging.getLogger(__name__)
+
+
+class MySQLMigrateMethod(str, Enum):
+    dump = "dump"
+    replication = "replication"
+
+
+class MySQLMigrateTool(str, Enum):
+    mysqldump = "mysqldump"
+
+
+class MySQLMigrationToolBase(ABC):
+    """Abstract base class for MySQL database migration operations (dump and import)."""
+    def __init__(
+        self,
+        source: MySQLConnectionInfo,
+        target: MySQLConnectionInfo,
+        databases: List[str],
+        skip_column_stats: bool,
+    ):
+        self.source = source
+        self.target = target
+        self.databases = databases
+        self.skip_column_stats = skip_column_stats
+        self.process_executor = ProcessExecutor()
+        self._gtid: Optional[str] = None
+
+    @abstractmethod
+    def get_dump_command(self, migration_method: MySQLMigrateMethod) -> List[str]:
+        """Build dump command."""
+
+    @abstractmethod
+    def get_import_command(self) -> List[str]:
+        """Build import command."""
+
+    def execute_migration(self, migration_method: MySQLMigrateMethod) -> Optional[str]:
+        """
+        Execute the complete migration process (dump and import).
+
+        Args:
+            migration_method: The migration method (dump or replication)
+
+        Returns:
+            GTID string for replication setup, or None for dump method
+        """
+        dump_cmd = self.get_dump_command(migration_method)
+        import_cmd = self.get_import_command()
+
+        _, _, gtid = self.process_executor.execute_piped_commands(
+            dump_cmd=dump_cmd, import_cmd=import_cmd, target=self.target
+        )
+        self._gtid = gtid
+        return self._gtid
+
+    def cleanup(self) -> None:
+        self.process_executor.terminate_processes()
+
+    def get_gtid(self) -> Optional[str]:
+        return self._gtid
+
+
+class MySQLDumpTool(MySQLMigrationToolBase):
+    """MySQL dump tool using mysqldump/mysql."""
+
+    def get_dump_command(self, migration_method: MySQLMigrateMethod) -> List[str]:
+        """Build mysqldump command."""
+        # "--flush-logs" and "--master-data=2" would be good options to add, but they do not work for RDS admin
+        # user - require extra permissions for `FLUSH TABLES WITH READ LOCK`
+        cmd = [
+            "mysqldump",
+            "-h",
+            self.source.hostname,
+            "-P",
+            str(self.source.port),
+            "-u",
+            self.source.username,
+            f"-p{self.source.password}",
+            "--compress",
+            "--skip-lock-tables",
+            "--single-transaction",
+            "--hex-blob",
+            "--routines",
+            "--triggers",
+            "--events",
+        ]
+        if migration_method == MySQLMigrateMethod.replication:
+            cmd += ["--set-gtid-purged=ON"]
+        else:
+            cmd += ["--set-gtid-purged=OFF"]
+        if self.source.ssl:
+            cmd += ["--ssl-mode=REQUIRED"]
+        # MySQL servers < 8.0 do not support dumping column statistics (newer mysqldump versions dump them by default)
+        if self.skip_column_stats:
+            cmd += ["--skip-column-statistics"]
+        cmd += ["--databases", "--", *[shlex.quote(db) for db in self.databases]]
+
+        return cmd
+
+    def get_import_command(self) -> List[str]:
+        """Build mysql import command."""
+        cmd = [
+            "mysql", "-h", self.target.hostname, "-P",
+            str(self.target.port), "-u", self.target.username, f"-p{self.target.password}", "--compress"
+        ]
+        if self.target.ssl:
+            cmd += ["--ssl-mode=REQUIRED"]
+
+        return cmd
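Reviewer note: `dump_tools` splits command construction (`MySQLMigrationToolBase` subclasses such as `MySQLDumpTool`) from process handling (`ProcessExecutor`), so adding another dump tool only requires implementing the two command builders. A minimal usage sketch of the new API follows; the connection URIs, credentials and database name are placeholders, not taken from this change:

```python
# Hypothetical usage sketch of the new dump-tool API; URIs, credentials and DB names are made up.
from aiven_mysql_migrate.dump_tools import MySQLDumpTool, MySQLMigrateMethod
from aiven_mysql_migrate.utils import MySQLConnectionInfo

source = MySQLConnectionInfo.from_uri("mysql://admin:secret@source.example.com:3306", name="source")
target = MySQLConnectionInfo.from_uri("mysql://admin:secret@target.example.com:3306", name="target")

tool = MySQLDumpTool(source=source, target=target, databases=["shop"], skip_column_stats=True)
try:
    # Streams `mysqldump | mysql` through ProcessExecutor; for the replication method the
    # GTID recorded in the dump is returned, for the plain dump method it is None.
    gtid = tool.execute_migration(MySQLMigrateMethod.replication)
    print("GTID for replication setup:", gtid)
finally:
    # Kills any still-running mysqldump/mysql subprocesses.
    tool.cleanup()
```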
diff --git a/aiven_mysql_migrate/migration.py b/aiven_mysql_migrate/migration.py
index 6d844f5..f50da71 100644
--- a/aiven_mysql_migrate/migration.py
+++ b/aiven_mysql_migrate/migration.py
@@ -1,42 +1,28 @@
 # Copyright (c) 2020 Aiven, Helsinki, Finland. https://aiven.io/
 from aiven_mysql_migrate import config
+from aiven_mysql_migrate.dump_tools import MySQLMigrationToolBase, MySQLDumpTool, MySQLMigrateMethod, MySQLMigrateTool
 from aiven_mysql_migrate.exceptions import (
     DatabaseTooLargeException, EndpointConnectionException, GTIDModeDisabledException, MissingReplicationGrants,
-    MySQLDumpException, MySQLImportException, NothingToMigrateException, ReplicaSetupException,
-    ReplicationNotAvailableException, ServerIdsOverlappingException, SSLNotSupportedException, TooManyDatabasesException,
-    UnsupportedBinLogFormatException, UnsupportedMySQLEngineException, UnsupportedMySQLVersionException,
-    WrongMigrationConfigurationException
+    NothingToMigrateException, ReplicaSetupException, ReplicationNotAvailableException, ServerIdsOverlappingException,
+    SSLNotSupportedException, TooManyDatabasesException, UnsupportedBinLogFormatException, UnsupportedMySQLEngineException,
+    UnsupportedMySQLVersionException, WrongMigrationConfigurationException
 )
-from aiven_mysql_migrate.utils import MySQLConnectionInfo, MySQLDumpProcessor, PrivilegeCheckUser, select_global_var
-from concurrent import futures
+from aiven_mysql_migrate.utils import MySQLConnectionInfo, PrivilegeCheckUser, select_global_var
 from looseversion import LooseVersion
 from pathlib import Path
 from pymysql.constants.ER import HANDSHAKE_ERROR
-from subprocess import Popen
 from typing import List, Optional
 
-import concurrent
-import enum
 import json
 import logging
 import os
 import pymysql
-import resource
-import shlex
 import signal
-import subprocess
-import sys
 import time
 
 LOGGER = logging.getLogger(__name__)
 
 
-@enum.unique
-class MySQLMigrateMethod(str, enum.Enum):
-    dump = "dump"
-    replication = "replication"
-
-
 class MySQLMigration:
     source: MySQLConnectionInfo
     target: MySQLConnectionInfo
@@ -53,9 +39,10 @@ def __init__(
         filter_dbs: Optional[str] = None,
         privilege_check_user: Optional[str] = None,
         output_meta_file: Optional[Path] = None,
+        dump_tool: MySQLMigrateTool = MySQLMigrateTool.mysqldump,
     ):
-        self.mysqldump_proc: Optional[Popen] = None
-        self.mysql_proc: Optional[Popen] = None
+        self.dump_tool_name = dump_tool
+        self.dump_tool: Optional[MySQLMigrationToolBase] = None
 
         self.source = MySQLConnectionInfo.from_uri(source_uri, name="source")
         self.target = MySQLConnectionInfo.from_uri(target_uri, name="target")
@@ -81,10 +68,8 @@ def setup_signal_handlers(self):
 
     def _stop_migration(self, signum, frame):
         LOGGER.info("Received signal: %s", signum)
-        for subproc in (self.mysqldump_proc, self.mysql_proc):
-            if subproc:
-                LOGGER.warning("Terminating subprocess with pid: %s", subproc.pid)
-                subproc.kill()
+        if self.dump_tool:
+            self.dump_tool.cleanup()
 
     def list_databases(self) -> List[str]:
         with self.source.cur() as cur:
@@ -105,8 +90,8 @@ def _check_versions_replication_support(self):
         LOGGER.info("Checking MySQL versions for replication support")
 
         if (
-                LooseVersion("5.7.0") <= LooseVersion(self.source.version) < LooseVersion("8.1")
-                and LooseVersion("8.0.0") <= LooseVersion(self.target.version) < LooseVersion("8.1")
+            LooseVersion("5.7.0") <= LooseVersion(self.source.version) < LooseVersion("8.1")
+            and LooseVersion("8.0.0") <= LooseVersion(self.target.version) < LooseVersion("8.1")
         ):
             LOGGER.info("\tSource - %s, target - %s -- OK", self.source.version, self.target.version)
         else:
@@ -189,8 +174,7 @@ def _check_database_size(self, max_size: float):
         with self.source.cur() as cur:
             cur.execute(
                 "SELECT SUM(DATA_LENGTH + INDEX_LENGTH) AS size FROM INFORMATION_SCHEMA.TABLES "
-                f"WHERE TABLE_SCHEMA NOT IN ({', '.join(['%s'] * len(self.ignore_dbs))})",
-                tuple(self.ignore_dbs)
+                f"WHERE TABLE_SCHEMA NOT IN ({', '.join(['%s'] * len(self.ignore_dbs))})", tuple(self.ignore_dbs)
             )
             source_size = cur.fetchone()["size"] or 0
             if source_size > max_size:
@@ -266,114 +250,16 @@ def _stop_replication(self):
 
         self._stop_and_reset_slave()
 
-    def _get_dump_command(self, migration_method: MySQLMigrateMethod) -> List[str]:
-        # "--flush-logs" and "--master-data=2" would be good options to add, but they do not work for RDS admin
-        # user - require extra permissions for `FLUSH TABLES WITH READ LOCK`
-        cmd = [
-            "mysqldump",
-            "-h",
-            self.source.hostname,
-            "-P",
-            str(self.source.port),
-            "-u",
-            self.source.username,
-            f"-p{self.source.password}",
-            "--compress",
-            "--skip-lock-tables",
-            "--single-transaction",
-            "--hex-blob",
-            "--routines",
-            "--triggers",
-            "--events",
-        ]
-        if migration_method == MySQLMigrateMethod.replication:
-            cmd += ["--set-gtid-purged=ON"]
-        else:
-            cmd += ["--set-gtid-purged=OFF"]
-        if self.source.ssl:
-            cmd += ["--ssl-mode=REQUIRED"]
-        # Dumping column statistics is not supported by MySQL < 8.0 (which is default behaviour for newer versions)
-        if self.skip_column_stats:
-            cmd += ["--skip-column-statistics"]
-        cmd += ["--databases", "--", *[shlex.quote(db) for db in self.databases]]
-
-        return cmd
-
-    def _get_import_command(self) -> List[str]:
-        cmd = [
-            "mysql", "-h", self.target.hostname, "-P",
-            str(self.target.port), "-u", self.target.username, f"-p{self.target.password}", "--compress"
-        ]
-        if self.target.ssl:
-            cmd += ["--ssl-mode=REQUIRED"]
-
-        return cmd
-
     def _migrate_data(self, migration_method: MySQLMigrateMethod) -> Optional[str]:
-        """Migrate data using mysqldump/mysql cli into the target database, return GTID from the dump"""
-        LOGGER.info("Starting import MySQL dump file into target database")
-
-        dump_processor = MySQLDumpProcessor()
-        self.mysqldump_proc = Popen(  # pylint: disable=consider-using-with
-            self._get_dump_command(migration_method=migration_method),
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            universal_newlines=True
-        )
-        self.mysql_proc = Popen(  # pylint: disable=consider-using-with
-            self._get_import_command(),
-            stdin=subprocess.PIPE,
-            stderr=subprocess.PIPE, text=True
-        )
-
-        # Disallow creating child processes in migration target when this runs as non-root user.
-        resource.prlimit(self.mysql_proc.pid, resource.RLIMIT_NPROC, (0, 0))
-
-        # make mypy happy
-        assert self.mysqldump_proc.stdout
-        assert self.mysqldump_proc.stderr
-        assert self.mysql_proc.stdin
-
-        # If sql_require_primary_key is ON globally - it's not possible to import tables without a primary key
-        with self.target.cur() as cur:
-            if select_global_var(cur, "sql_require_primary_key") == 1:
-                self.mysql_proc.stdin.write("SET SESSION sql_require_primary_key = 0;")
-
-        def _reader_stdout():
-            for line in self.mysqldump_proc.stdout:
-                line = dump_processor.process_line(line.rstrip())
-
-                if not line:
-                    continue
-
-                LOGGER.debug("dump: %s", line)
-                self.mysql_proc.stdin.write(line + "\n")
-
-            self.mysql_proc.stdin.flush()
-            self.mysql_proc.stdin.close()
-
-        def _reader_stderr(proc):
-            for line in proc.stderr:
-                sys.stderr.write(line)
-
-        with futures.ThreadPoolExecutor(max_workers=3) as executor:
-            for future in concurrent.futures.as_completed([
-                executor.submit(_reader_stdout),
-                executor.submit(_reader_stderr, self.mysqldump_proc),
-                executor.submit(_reader_stderr, self.mysql_proc)
-            ]):
-                future.result()
-
-        export_code = self.mysqldump_proc.wait()
-        import_code = self.mysql_proc.wait()
-
-        if export_code != 0:
-            raise MySQLDumpException(f"Error while importing data from the source database, exit code: {export_code}")
-
-        if import_code != 0:
-            raise MySQLImportException(f"Error while importing data into the target database, exit code: {import_code}")
+        """Migrate data using the configured dump tool, return GTID from the dump"""
+        if self.dump_tool_name == MySQLMigrateTool.mysqldump:
+            self.dump_tool = MySQLDumpTool(
+                source=self.source, target=self.target, databases=self.databases, skip_column_stats=self.skip_column_stats
+            )
+        else:
+            raise ValueError(f"Unknown dump tool: {self.dump_tool_name}")
 
-        return dump_processor.get_gtid()
+        return self.dump_tool.execute_migration(migration_method)
 
     def _set_gtid(self, gtid: str):
         LOGGER.info("GTID from the dump is `%s`", gtid)
diff --git a/aiven_mysql_migrate/migration_executor.py b/aiven_mysql_migrate/migration_executor.py
new file mode 100644
index 0000000..982206a
--- /dev/null
+++ b/aiven_mysql_migrate/migration_executor.py
@@ -0,0 +1,117 @@
+# Copyright (c) 2025 Aiven, Helsinki, Finland. https://aiven.io/
+from aiven_mysql_migrate.exceptions import MySQLDumpException, MySQLImportException
+from aiven_mysql_migrate.utils import MySQLConnectionInfo, MySQLDumpProcessor, select_global_var
+from concurrent import futures
+from subprocess import Popen
+from typing import Callable, List, Optional, Tuple
+
+import concurrent
+import logging
+import resource
+import subprocess
+import sys
+
+LOGGER = logging.getLogger(__name__)
+
+
+class ProcessExecutor:
+    """Responsible for executing external processes with piping."""
+
+    def __init__(self) -> None:
+        self.import_proc: Optional[Popen] = None
+        self.dump_proc: Optional[Popen] = None
+
+    def execute_piped_commands(
+        self,
+        dump_cmd: List[str],
+        import_cmd: List[str],
+        target: MySQLConnectionInfo,
+        line_processor: Optional[Callable[[str], str]] = None
+    ) -> Tuple[int, int, Optional[str]]:
+        """
+        Execute dump and import commands with piping.
+
+        Args:
+            dump_cmd: The dump command and arguments
+            import_cmd: The import command and arguments
+            target: Target database connection info
+            line_processor: Optional function to process each line from dump output
+
+        Returns:
+            Tuple of (dump_exit_code, import_exit_code, extracted_gtid)
+        """
+        LOGGER.info("Starting import from source to target database")
+
+        dump_processor = MySQLDumpProcessor() if not line_processor else None
+        self.dump_proc = Popen(  # pylint: disable=consider-using-with
+            dump_cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            universal_newlines=True
+        )
+        self.import_proc = Popen(  # pylint: disable=consider-using-with
+            import_cmd,
+            stdin=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True
+        )
+
+        # Disallow creating child processes in migration target when this runs as non-root user
+        if hasattr(resource, "prlimit"):
+            resource.prlimit(self.import_proc.pid, resource.RLIMIT_NPROC, (0, 0))
+
+        # make mypy happy
+        assert self.dump_proc.stdout
+        assert self.dump_proc.stderr
+        assert self.import_proc.stdin
+
+        # If sql_require_primary_key is ON globally - it's not possible to import tables without a primary key
+        with target.cur() as cur:
+            if select_global_var(cur, "sql_require_primary_key") == 1:
+                self.import_proc.stdin.write("SET SESSION sql_require_primary_key = 0;")
+
+        def _reader_stdout():
+            for line in self.dump_proc.stdout:
+                if line_processor:
+                    processed_line = line_processor(line.rstrip())
+                else:
+                    processed_line = dump_processor.process_line(line.rstrip())
+
+                if not processed_line:
+                    continue
+
+                LOGGER.debug("dump: %s", processed_line)
+                self.import_proc.stdin.write(processed_line + "\n")
+
+            self.import_proc.stdin.flush()
+            self.import_proc.stdin.close()
+
+        def _reader_stderr(proc):
+            for line in proc.stderr:
+                sys.stderr.write(line)
+
+        with futures.ThreadPoolExecutor(max_workers=3) as executor:
+            for future in concurrent.futures.as_completed([
+                executor.submit(_reader_stdout),
+                executor.submit(_reader_stderr, self.dump_proc),
+                executor.submit(_reader_stderr, self.import_proc)
+            ]):
+                future.result()
+
+        export_code = self.dump_proc.wait()
+        import_code = self.import_proc.wait()
+
+        if export_code != 0:
+            raise MySQLDumpException(f"Error while exporting data from the source database, exit code: {export_code}")
+
+        if import_code != 0:
+            raise MySQLImportException(f"Error while importing data into the target database, exit code: {import_code}")
+
+        gtid = dump_processor.get_gtid() if dump_processor else None
+        return export_code, import_code, gtid
+
+    def terminate_processes(self) -> None:
+        for proc in (self.import_proc, self.dump_proc):
+            if proc:
+                LOGGER.warning("Terminating subprocess with pid: %s", proc.pid)
+                proc.kill()
diff --git a/docker-compose.test.yaml b/docker-compose.test.yaml
index 0c273e1..4395374 100644
--- a/docker-compose.test.yaml
+++ b/docker-compose.test.yaml
@@ -15,6 +15,7 @@ services:
 
   mysql57-src-1:
     image: mysql:5.7
+    platform: linux/amd64
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: test
diff --git a/test/sys/test_migration.py b/test/sys/test_migration.py
index 15e60cc..78b35be 100644
--- a/test/sys/test_migration.py
+++ b/test/sys/test_migration.py
@@ -170,8 +170,7 @@ def test_database_size_check(src, dst, db_name):
     with src.cur() as cur:
         cur.execute(
             "SELECT TABLE_SCHEMA FROM INFORMATION_SCHEMA.TABLES "
-            f"WHERE TABLE_SCHEMA NOT IN ({', '.join(['%s'] * len(ignore_dbs))})",
-            tuple(ignore_dbs)
+            f"WHERE TABLE_SCHEMA NOT IN ({', '.join(['%s'] * len(ignore_dbs))})", tuple(ignore_dbs)
         )
         other_test_dbs = {table_schema["TABLE_SCHEMA"] for table_schema in cur.fetchall()}
{table_schema["TABLE_SCHEMA"] for table_schema in cur.fetchall()} diff --git a/uv.lock b/uv.lock index 139a270..4d32c0e 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 revision = 2 -requires-python = ">=3.10, <3.13" +requires-python = ">=3.10, <3.14" resolution-markers = [ "python_full_version >= '3.12'", "python_full_version == '3.11.*'", @@ -98,6 +98,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, 
upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, ] [[package]] @@ -257,6 +268,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" }, { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" }, { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" }, + { url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" }, + { url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" }, + { url = "https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 
12744296, upload-time = "2025-07-31T07:53:03.896Z" }, + { url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" }, + { url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" }, { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, ] @@ -413,6 +430,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ]