diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml
index b12ca64..11031f2 100644
--- a/.github/workflows/main.yaml
+++ b/.github/workflows/main.yaml
@@ -16,6 +16,7 @@ jobs:
         with:
           persist-credentials: true # required for git lfs
       - uses: ./.github/actions/uv-setup/
+      - run: sudo apt update && sudo apt install -y --no-install-recommends python3-rpm
       - name: Test
         # use script to provide a tty (workaround of systematic "docker -t"?)
         shell: 'script -q -e -c "bash {0}"'
diff --git a/pyproject.toml b/pyproject.toml
index 2e39ce7..95b9f16 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,6 +8,7 @@ license-files = ["LICENSE"]
 dynamic = ["version"]
 dependencies = [
     "argcomplete",
+    "specfile",
 ]
 
 [dependency-groups]
diff --git a/requirements/base.txt b/requirements/base.txt
index 85341ba..961d54b 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,2 +1,3 @@
 # generated with update_requirements.py, do not edit manually
 argcomplete
+specfile
diff --git a/src/xcp_ng_dev/cli.py b/src/xcp_ng_dev/cli.py
index 320760e..9914b30 100755
--- a/src/xcp_ng_dev/cli.py
+++ b/src/xcp_ng_dev/cli.py
@@ -16,6 +16,8 @@
 
 import argcomplete
 
+from xcp_ng_dev.koji import koji_init_parser
+
 CONTAINER_PREFIX = "ghcr.io/xcp-ng/xcp-ng-build-env"
 DEFAULT_ULIMIT_NOFILE = 2048
 
@@ -82,6 +84,8 @@ def buildparser():
         required=True,
         title="Development environments",
         help="Available environments")
+    koji_init_parser(subparsers_env)
+    # container-based workflow
     parser_container = subparsers_env.add_parser('container',
                                                  help="Use a local container to build a package")
     parser_container.set_defaults(func=container)
diff --git a/src/xcp_ng_dev/koji/__init__.py b/src/xcp_ng_dev/koji/__init__.py
new file mode 100644
index 0000000..a224f51
--- /dev/null
+++ b/src/xcp_ng_dev/koji/__init__.py
@@ -0,0 +1,16 @@
+from xcp_ng_dev.koji.build import koji_build, koji_build_init_parser
+
+# from icecream import ic
+
+def koji_init_parser(subparsers_env):
+    parser_koji = subparsers_env.add_parser('koji', help="Koji related commands")
+    parser_koji.set_defaults(func=koji)
+    subparsers_koji = parser_koji.add_subparsers(
+        dest='command', required=True,
+        help="Koji sub-commands")
+    koji_build_init_parser(subparsers_koji)
+
+def koji(args):
+    match args.command:
+        case 'build':
+            koji_build(args)
diff --git a/src/xcp_ng_dev/koji/build.py b/src/xcp_ng_dev/koji/build.py
new file mode 100644
index 0000000..782788b
--- /dev/null
+++ b/src/xcp_ng_dev/koji/build.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env python3
+import logging
+import os
+import re
+import subprocess
+from contextlib import contextmanager
+from datetime import datetime, timedelta
+from pathlib import Path
+
+from specfile.specfile import Specfile
+
+TIME_FORMAT = '%Y-%m-%d-%H-%M-%S'
+
+# target -> required branch
+PROTECTED_TARGETS = {
+    "v8.2-ci": "8.2",
+    "v8.2-fasttrack": "8.2",
+    "v8.2-incoming": "8.2",
+    "v8.3-ci": "master",
+    "v8.3-fasttrack": "master",
+    "v8.3-incoming": "master",
+}
+
+@contextmanager
+def cd(dir):
+    """Change to a directory temporarily. To be used in a with statement."""
+    prevdir = os.getcwd()
+    os.chdir(dir)
+    try:
+        yield os.path.realpath(dir)
+    finally:
+        os.chdir(prevdir)
+
+def check_dir(dirpath):
+    if not os.path.isdir(dirpath):
+        raise Exception("Directory %s doesn't exist" % dirpath)
+    return dirpath
+
+def check_git_repo(dirpath):
+    """Check that the working copy is a git repository and is clean."""
+    with cd(dirpath):
+        return subprocess.run(['git', 'diff-index', '--quiet', 'HEAD', '--']).returncode == 0
+
+def check_commit_is_available_remotely(dirpath, hash, target, warn):
+    with cd(dirpath):
+        if not subprocess.check_output(['git', 'branch', '-r', '--contains', hash]):
+            raise Exception("The current commit is not available in the remote repository")
+        if target is not None and re.match(r'v\d+\.\d+-u-.+', target):
+            raise Exception("Building with a user target requires using --pre-build or --test-build.\n")
+        try:
+            expected_branch = PROTECTED_TARGETS.get(target)
+            if (
+                expected_branch is not None
+                and not is_remote_branch_commit(dirpath, hash, expected_branch)
+            ):
+                raise Exception(f"The current commit is not the last commit in the remote branch {expected_branch}.\n"
+                                f"This is required when using the protected target {target}.\n")
+        except Exception as e:
+            if warn:
+                print(f"warning: {e}", flush=True)
+            else:
+                raise e
+
+def get_repo_and_commit_info(dirpath):
+    with cd(dirpath):
+        remote = subprocess.check_output(['git', 'config', '--get', 'remote.origin.url']).decode().strip()
+        # We want the exact hash for accurate build history
+        hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
+    return remote, hash
+
+def koji_url(remote, hash):
+    if remote.startswith('git@'):
+        remote = re.sub(r'git@(.+):', r'git+https://\1/', remote)
+    elif remote.startswith('https://'):
+        remote = 'git+' + remote
+    else:
+        raise Exception("Unrecognized remote URL")
+    return remote + "?#" + hash
+
+@contextmanager
+def local_branch(branch):
+    prev_branch = subprocess.check_output(['git', 'branch', '--show-current']).decode().strip()
+    commit = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
+    subprocess.check_call(['git', 'checkout', '--quiet', commit])
+    try:
+        yield branch
+    finally:
+        # prev_branch is empty when the head was detached
+        subprocess.check_call(['git', 'checkout', prev_branch or commit])
+
+def is_old_branch(b):
+    branch_time = datetime.strptime(b.split('/')[-1], TIME_FORMAT)
+    return branch_time < datetime.now() - timedelta(hours=3)
+
+def clean_old_branches(git_repo):
+    with cd(git_repo):
+        remote_branches = [
+            line.split()[-1] for line in subprocess.check_output(['git', 'ls-remote']).decode().splitlines()
+        ]
+        remote_branches = [b for b in remote_branches if b.startswith('refs/heads/koji/test/')]
+        old_branches = [b for b in remote_branches if is_old_branch(b)]
+        if old_branches:
+            print("removing outdated remote branch(es)", flush=True)
+            subprocess.check_call(['git', 'push', '--delete', 'origin'] + old_branches)
+
+def xcpng_version(target):
+    xcpng_version_match = re.match(r'^v(\d+\.\d+)-u-\S+$', target)
+    if xcpng_version_match is None:
+        raise Exception(f"Can't find XCP-ng version in {target}")
+    return xcpng_version_match.group(1)
+
+def find_next_release(package, spec, target, test_build_id, pre_build_id):
+    assert test_build_id is not None or pre_build_id is not None
+    builds = subprocess.check_output(['koji', 'list-builds', '--quiet', '--package', package]).decode().splitlines()
+    if test_build_id:
+        base_nvr = f'{package}-{spec.version}-{spec.release}.0.{test_build_id}.'
+    else:
+        base_nvr = f'{package}-{spec.version}-{spec.release}~{pre_build_id}.'
+    # use a regex to match %{macro} without actually expanding the macros
+    base_nvr_re = (
+        re.escape(re.sub('%{.+}', "@@@", base_nvr)).replace('@@@', '.*')
+        + r'(\d+)'
+        + re.escape(f'.xcpng{xcpng_version(target)}')
+    )
+    build_matches = [re.match(base_nvr_re, b) for b in builds]
+    build_nbs = [int(m.group(1)) for m in build_matches if m]
+    build_nb = sorted(build_nbs)[-1] + 1 if build_nbs else 1
+    if test_build_id:
+        return f'{spec.release}.0.{test_build_id}.{build_nb}'
+    else:
+        return f'{spec.release}~{pre_build_id}.{build_nb}'
+
+def push_bumped_release(git_repo, target, test_build_id, pre_build_id):
+    t = datetime.now().strftime(TIME_FORMAT)
+    branch = f'koji/test/{test_build_id or pre_build_id}/{t}'
+    with cd(git_repo), local_branch(branch):
+        spec_paths = subprocess.check_output(['git', 'ls-files', 'SPECS/*.spec']).decode().splitlines()
+        assert len(spec_paths) == 1
+        spec_path = spec_paths[0]
+        with Specfile(spec_path) as spec:
+            # find the next build number
+            package = Path(spec_path).stem
+            spec.release = find_next_release(package, spec, target, test_build_id, pre_build_id)
+        subprocess.check_call(['git', 'commit', '--quiet', '-m', "bump release for test build", spec_path])
+        subprocess.check_call(['git', 'push', 'origin', f'HEAD:refs/heads/{branch}'])
+        commit = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
+    return commit
+
+def is_remote_branch_commit(git_repo, sha, branch):
+    with cd(git_repo):
+        remote_sha = (
+            subprocess.check_output(['git', 'ls-remote', 'origin', f'refs/heads/{branch}']).decode().strip().split()[0]
+        )
+        return sha == remote_sha
+
+def build_id_of(name, candidate):
+    if candidate is None:
+        return None
+
+    length = len(candidate)
+    if length > 16:
+        logging.error(f"The {name} build id must be at most 16 characters long, it's {length} characters long")
+        exit(1)
+
+    invalid_chars = any(re.match(r'[a-zA-Z0-9]', char) is None for char in candidate)
+
+    if invalid_chars:
+        pp_invalid = ''.join("^" if re.match(r'[a-zA-Z0-9]', char) is None else " " for char in candidate)
+        logging.error(f"The {name} build id must only contain letters and digits:")
+        logging.error(f" {candidate}")
+        logging.error(f" {pp_invalid}")
+        exit(1)
+
+    return candidate
+
+def koji_build_init_parser(subparsers_container):
+    parser = subparsers_container.add_parser(
+        'build',
+        help='Build a package or chain-build several from local git repos for RPM sources')
+    parser.add_argument('target', help='Koji target for the build')
+    parser.add_argument('git_repos', nargs='+',
+                        help='local path to one or more git repositories. If several are provided, '
+                             'a chained build will be started in the order of the arguments')
+    parser.add_argument('--scratch', action="store_true", help='Perform scratch build')
+    parser.add_argument('--nowait', action="store_true", help='Do not wait for the build to end')
+    parser.add_argument('--force', action="store_true", help='Bypass sanity checks')
+    parser.add_argument(
+        '--test-build',
+        metavar="ID",
+        help='Run a test build. The provided ID will be used to build a unique release tag.',
+    )
+    parser.add_argument(
+        '--pre-build',
+        metavar="ID",
+        help='Run a pre build. The provided ID will be used to build a unique release tag.',
+    )
+    # args = parser.parse_args()
+
+def koji_build(args):
+    target = args.target
+    git_repos = [os.path.abspath(check_dir(d)) for d in args.git_repos]
+    is_scratch = args.scratch
+    is_nowait = args.nowait
+
+    test_build = build_id_of("test", args.test_build)
+    pre_build = build_id_of("pre", args.pre_build)
+
+    if test_build and pre_build:
+        logging.error("--pre-build and --test-build can't be used together")
+        exit(1)
+
+    # if len(git_repos) > 1 and is_scratch:
+    #     parser.error("--scratch is not compatible with chained builds.")
+
+    # for d in git_repos:
+    #     if not check_git_repo(d):
+    #         parser.error("%s is not in a clean state (or is not a git repository)." % d)
+
+    if len(git_repos) == 1:
+        remote, hash = get_repo_and_commit_info(git_repos[0])
+        if test_build or pre_build:
+            clean_old_branches(git_repos[0])
+            hash = push_bumped_release(git_repos[0], target, test_build, pre_build)
+        else:
+            check_commit_is_available_remotely(git_repos[0], hash, None if is_scratch else target, args.force)
+        url = koji_url(remote, hash)
+        command = (
+            ['koji', 'build']
+            + (['--scratch'] if is_scratch else [])
+            + [target, url]
+            + (['--nowait'] if is_nowait else [])
+        )
+        print(' '.join(command), flush=True)
+        subprocess.check_call(command)
+    else:
+        urls = []
+        for d in git_repos:
+            remote, hash = get_repo_and_commit_info(d)
+            if test_build or pre_build:
+                clean_old_branches(d)
+                hash = push_bumped_release(d, target, test_build, pre_build)
+            else:
+                check_commit_is_available_remotely(d, hash, None if is_scratch else target, args.force)
+            urls.append(koji_url(remote, hash))
+        command = ['koji', 'chain-build', target] + (' : '.join(urls)).split(' ') + (['--nowait'] if is_nowait else [])
+        print(' '.join(command), flush=True)
+        subprocess.check_call(command)
diff --git a/uv.lock b/uv.lock
index 2ace1d9..38b79cf 100644
--- a/uv.lock
+++ b/uv.lock
@@ -192,6 +192,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" },
 ]
 
+[[package]]
+name = "rpm"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/ce/8db44d2b8fd6713a59e391d12b6816854b7bee8121ae7370c2d565de4265/rpm-0.4.0.tar.gz", hash = "sha256:79adbefa82318e2625d6e4fa16666cf88543498a1f2c10dc3879165d1dc3ecee", size = 11237, upload-time = "2025-04-08T08:57:26.261Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/17/96/0f4e9ba318c6e09b76357ee88d6c73101f8799f8ed707cfdc1df131b4234/rpm-0.4.0-py3-none-any.whl", hash = "sha256:0ef697cb5fb73bf9300a13d423529d7ec215239bf95c5ecb145e6610645f6067", size = 5151, upload-time = "2025-04-08T08:57:24.971Z" },
+]
+
 [[package]]
 name = "ruff"
 version = "0.12.12"
@@ -218,6 +227,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" },
 ]
 
+[[package]]
+name = "specfile"
+version = "0.37.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "rpm" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/74/70/747e24b20dd0ece7abe217b1531475bbcf72ad170e8882509301351720a5/specfile-0.37.0.tar.gz", hash = "sha256:cb0676c3e35776554e2577eeddc5b19e44bb98cafb90b099513def6ab9beb16b", size = 113318, upload-time = "2025-09-05T11:35:56.997Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/96/93/09ffda2182a4777b687a4f316b91b48f5be5ad43611cbfbc7ee03dc7a99c/specfile-0.37.0-py3-none-any.whl", hash = "sha256:86b7d95cef1e042f77134dfeb1ae6bdef07220887af5db92684c7589799cc456", size = 67471, upload-time = "2025-09-05T11:35:55.48Z" },
+]
+
 [[package]]
 name = "typing-extensions"
 version = "4.15.0"
@@ -232,6 +253,7 @@ name = "xcp-ng-dev"
 source = { editable = "." }
 dependencies = [
     { name = "argcomplete" },
+    { name = "specfile" },
 ]
 
 [package.dev-dependencies]
@@ -246,7 +268,10 @@ dev = [
 ]
 
 [package.metadata]
-requires-dist = [{ name = "argcomplete" }]
+requires-dist = [
+    { name = "argcomplete" },
+    { name = "specfile" },
+]
 
 [package.metadata.requires-dev]
 dev = [