# NOTE(review): the following lines were GitHub web-UI chrome captured in a copy-paste
# ("Skip to content" / run title). Commented out so the file parses as YAML.
# Build All Artifacts (cronjob) #4238
# template file: 050.single_header.yaml
name: "Build All Artifacts (cronjob)"

# `on` is a YAML 1.1 boolean-looking key; GitHub's own parser handles it correctly.
on:
  schedule:
    # every 8 hours
    - cron: '0 */8 * * *'
  workflow_call:
    inputs:
      ref: # commit id
        required: false
        type: string
      extraParamsAllBuilds: # additional build parameters
        required: false
        type: string
    secrets:
      ORG_MEMBERS:
        required: true
  workflow_dispatch:
    inputs:
      skipImages:
        description: 'Skip building images? no = build images, yes = skip images'
        required: true
        options: [ 'yes', 'no' ]
        type: choice
        default: 'yes'
      checkOci:
        description: 'Check OCI for existing artifacts? yes = check OCI, no = always build everything'
        required: true
        options: [ 'yes', 'no' ]
        type: choice
        default: 'yes'
      extraParamsAllBuilds:
        description: 'Extra params for all builds/jobs (prepare/artifact/image) (eg: DEBUG=yes)'
        required: false
        default: ''
        type: string
      branch:
        type: choice
        description: 'Framework build branch'
        options:
          # branches
          - main
          - update-maintainers
          - firethund
          - tabrisnet_x86
          - dependabot/pip/setuptools-82.0.0
          - update-kernel-configs
          - v26.02
          - force_uboot_recomp
          - update-version-lib/tools/shellcheck.sh-koalaman/shellcheck-null
          - singlemode
          - always
          - feature/new-parallel-repo-architecture
          - rkfix
          - v25.11
          - sunxi
          - coderabbitai/docstrings/2e13998
          - revert-8583-fix-atf-compile
          - test
          - v25.08
          - v25.05
        default: 'main'
      board:
        type: choice
        description: 'Board'
        options:
          # boards
          - all
        default: 'all'
      maintainer:
        type: choice
        description: 'Maintainer'
        options:
          # maintainers
          - "150balbes"
          - "1ubuntuuser"
          - "AGM1968"
          - "AaronNGray"
          - "CodeChenL"
          - "ColorfulRhino"
          - "DylanHP"
          - "Grippy98"
          - "Heisath"
          - "HeyMeco"
          - "IsMrX"
          - "JackHuang021"
          - "Janmcha"
          - "JohnTheCoolingFan"
          - "Kreyren"
          - "NicoD-SBC"
          - "PanderMusubi"
          - "Qvy-png"
          - "SeeleVolleri"
          - "StephenGraf"
          - "SuperKali"
          - "The-going"
          - "TheSnowfield"
          - "Tonymac32"
          - "ZazaBR"
          - "adeepn"
          - "ahoneybun"
          - "alexl83"
          - "amazingfate"
          - "andyshrk"
          - "brentr"
          - "catalinii"
          - "chainsx"
          - "chraac"
          - "devdotnetorg"
          - "efectn"
          - "eliasbakken"
          - "engineer-80"
          - "fridtjof"
          - "ginkage"
          - "glneo"
          - "hoochiwetech"
          - "hqnicolas"
          - "hzyitc"
          - "igorpecovnik"
          - "janprunk"
          - "jeanrhum"
          - "joekhoobyar"
          - "jomadeto"
          - "jonaswood01"
          - "juanlufont"
          - "kamilsaigol"
          - "krachlatte"
          - "lbmendes"
          - "leggewie"
          - "libiunc"
          - "linhz0hz"
          - "mahdichi"
          - "mattx433"
          - "mhawkins-consultant"
          - "mlegenovic"
          - "paolosabatino"
          - "prahal"
          - "pyavitz"
          - "rbqvq"
          - "redrathnure"
          - "retro98boy"
          - "rpardini"
          - "schmiedelm"
          - "schwar3kat"
          - "sgjava"
          - "sicXnull"
          - "spendist"
          - "sputnik2019"
          - "sven-ola"
          - "teknoid"
          - "torte71"
          - "utlark"
          - "vamzii"
          - "vidplace7"
          - "wei633"
          - all
        default: 'all'
      targetsFilterInclude:
        description: 'TARGETS_FILTER_INCLUDE, example: "BOARD:odroidhc4,BOARD:odroidn2"'
        required: false
        default: ''
        type: string
      nightlybuild:
        description: 'yes = nightly, no = stable'
        required: false
        options: [ 'yes', 'no' ]
        type: choice
        default: 'yes'
      bumpversion:
        type: boolean
        description: "Bump version"
        # boolean input: default must be a real boolean, not the string 'true'
        default: true
      versionOverride:
        description: 'Version override'
        required: false
        default: ''
        type: string
env:
  # For easier reuse across the multiple chunks ('armbian/build' repo)
  BUILD_REPOSITORY: "armbian/build"
  BUILD_REF: "${{ inputs.ref || inputs.branch || 'main' }}" # branch or tag or sha1
  # For easier reuse across the multiple chunks ('armbian/os' repo)
  USERPATCHES_REPOSITORY: "armbian/os"
  USERPATCHES_REF: "main" # branch or tag or sha1
  USERPATCHES_DIR: "userpatches" # folder inside USERPATCHES_REPOSITORY
  # GitHub repository for releases. Normally it is the one where the script executes.
  RELEASE_REPOSITORY: "os"
  # Armbian envs. Adjust to your needs.
  # This makes builds faster, but only if the Docker images are up-to-date with all dependencies, Python, tools, etc. Otherwise it makes it... slower.
  DOCKER_SKIP_UPDATE: "yes" # Do not apt update/install/requirements/etc during Dockerfile build, trust that Docker images are up-to-date.
  # Added to every build, even the prepare job.
  EXTRA_PARAMS_ALL_BUILDS: "${{ inputs.extraParamsAllBuilds || 'UPLOAD_TO_OCI_ONLY=yes' }}"
  # Storage deployment path
  TARGET_PATH: "cron/"
  # Version management
  VERSION_OVERRIDE: "${{ github.event.inputs.versionOverride }}"
  # NOTE(review): github.event.inputs values are strings, so 'false' is truthy here and
  # the `|| 'true'` fallback only kicks in when the input is absent (schedule/workflow_call runs).
  VERSION_BUMP: "${{ github.event.inputs.bumpversion || 'true' }}"
  # To use GitHub CLI in a GitHub Actions workflow
  GH_TOKEN: "${{ secrets.ACCESS_TOKEN }}"
  # Added to every image build arguments.
  EXTRA_PARAMS_IMAGE: "SHARE_LOG=yes "

# To ensure that only a single workflow using the same concurrency group will run at a time
concurrency:
  group: pipeline
  cancel-in-progress: false
jobs:
  # additional security check: caller must be in the "Board Maintainers" team
  team_check:
    permissions:
      actions: write
    name: "Team check"
    runs-on: [ "ubuntu-latest" ]
    steps:
      - name: "Check membership"
        uses: armbian/actions/team-check@main
        with:
          ORG_MEMBERS: ${{ secrets.ORG_MEMBERS }}
          GITHUB_TOKEN: "${{ env.GH_TOKEN }}"
          TEAM: "Board Maintainers"

  # Determines the release version (nightly or stable), optionally bumps it via
  # conventional-changelog, and publishes/updates the GH release for nightly builds.
  version_prep:
    needs: team_check
    name: "Bump version"
    runs-on: ubuntu-latest
    steps:
      # Clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: 0
          clean: false # true is default.
      - name: Determine version
        id: versionfile
        run: |
          # file = where version is getting stored, different for stable and nightly
          # skip_tag = we only upload nightly to GH releases
          echo "file=nightly" >> $GITHUB_OUTPUT
          echo "skip_tag=false" >> $GITHUB_OUTPUT
          echo "pre_release=true" >> $GITHUB_OUTPUT
          if [ "${{ github.event.inputs.nightlybuild || 'yes' }}" == "no" ]; then
            echo "file=stable" >> $GITHUB_OUTPUT
            echo "skip_tag=true" >> $GITHUB_OUTPUT
            echo "pre_release=false" >> $GITHUB_OUTPUT
          fi
      # Bump version automatically, unless a version override / explicit ref was given
      - name: Bump version
        if: ${{ ( ! github.event.inputs.versionOverride ) && ( inputs.ref == '' ) && ( env.VERSION_BUMP == 'true' ) }}
        id: changelog
        uses: TriPSs/conventional-changelog-action@v5.4.0
        with:
          github-token: ${{ secrets.ACCESS_TOKEN_ARMBIANWORKER }}
          git-message: 'Bump release to {version}'
          git-user-name: armbianworker
          git-user-email: info@armbian.com
          output-file: 'false'
          skip-version-file: 'false'
          skip-on-empty: 'false'
          skip-commit: 'false'
          skip-ci: 'false'
          skip-tag: "${{ steps.versionfile.outputs.skip_tag }}"
          version-file: "${{ steps.versionfile.outputs.file }}.json"
          pre-release: "${{ steps.versionfile.outputs.pre_release }}"
          git-branch: 'main'
          tag-prefix: ''
          pre-release-identifier: 'trunk'
      - name: Read version from file if not overridden
        if: ${{ ! github.event.inputs.versionOverride || env.VERSION_BUMP == 'false' }}
        run: |
          mkdir -p downloads
          # jq -r prints the raw string (no surrounding quotes)
          echo "VERSION_OVERRIDE=$(jq -r '.version' "${{ steps.versionfile.outputs.file }}.json")" >> $GITHUB_ENV
          jq -r '.version' "${{ steps.versionfile.outputs.file }}.json" > downloads/version
      - name: 'Upload Artifact'
        uses: actions/upload-artifact@v6
        with:
          name: assets-for-download-all
          path: downloads
          retention-days: 5
      - name: "Generate body file"
        if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' }}
        run: |
          # Make html document
          if [ -f release-headers/${{ env.RELEASE_REPOSITORY }}.sh ]; then
            bash release-headers/${{ env.RELEASE_REPOSITORY }}.sh > body.html
          fi
      - uses: ncipollo/release-action@v1
        if: ${{ (github.event.inputs.nightlybuild || 'yes') == 'yes' && (github.event.inputs.skipImages || 'yes') != 'yes' }}
        with:
          repo: "${{ env.RELEASE_REPOSITORY }}"
          tag: "${{ env.VERSION_OVERRIDE }}"
          name: "${{ env.VERSION_OVERRIDE }}"
          bodyFile: "body.html"
          prerelease: "true"
          allowUpdates: true
          removeArtifacts: true
          token: ${{ env.GH_TOKEN }}
      - name: Save
        id: releases
        run: |
          echo "version=${{ env.VERSION_OVERRIDE }}" >> $GITHUB_OUTPUT
    outputs:
      # not related to matrix
      version: ${{ steps.releases.outputs.version }}
matrix_prep:
name: "JSON matrix: 17/16 :: 17 artifact chunks, 16 image chunks"
if: ${{ github.repository_owner == 'armbian' }}
needs: [ version_prep ]
runs-on: [ "self-hosted", "Linux", 'super' ]
steps:
# Cleaning self hosted runners
- name: Runner clean
uses: armbian/actions/runner-clean@main
# clone the build system repo (`armbian/build`)
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ env.BUILD_REF }}
fetch-depth: 0
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
path: build
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: 0
clean: false # true is default.
path: userpatches
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
# clone the torrent lists
- name: "Checkout torrent lists"
uses: actions/checkout@v6
with:
repository: XIU2/TrackersListCollection
clean: false
ref: master # true is default
path: trackerslist
fetch-depth: 1
- name: "grab the sha1 of the latest commit of the build repo ${{ env.BUILD_REPOSITORY }}#${{ env.BUILD_REF }}"
id: latest-commit
run: |
cd build
echo "sha1=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
cd ..
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv build/userpatches
rsync -av userpatches/${{env.USERPATCHES_DIR}}/. build/userpatches/
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. build/userpatches/
- name: GitHub cache
id: cache-restore
uses: actions/cache@v5
with:
path: |
cache/memoize
cache/oci/positive
key: ${{ runner.os }}-matrix-cache-${{ github.sha }}-${{ steps.latest-commit.outputs.sha1 }}"
restore-keys: |
${{ runner.os }}-matrix-cache-
# Login to ghcr.io, we're gonna do a lot of OCI lookups.
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: ${{ secrets.GITHUB_TOKEN }} # GitHub actions builtin token. repo has to have pkg access.
- name: Prepare Info JSON and Matrices
id: prepare-matrix
run: |
FILTERS="${{ github.event.inputs.targetsFilterInclude }}"
if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.board }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
FILTERS='"BOARD:${{ github.event.inputs.board }}"'
fi
if [ -z "${FILTERS}" ] && [ "${{ github.event.inputs.maintainer }}" != "all" ] && [ -n "${{ github.event.inputs.board }}" ]; then
FILTERS='"BOARD_MAINTAINERS:${{ github.event.inputs.maintainer }}"'
fi
# this sets outputs "artifact-matrix" #and "image-matrix"
cd build
bash ./compile.sh gha-matrix armbian-images \
REVISION="${{ needs.version_prep.outputs.version }}" \
TARGETS_FILTER_INCLUDE="${FILTERS}" \
BETA=${{ github.event.inputs.nightlybuild || 'yes' }} \
CLEAN_INFO=yes \
CLEAN_MATRIX=yes \
MATRIX_ARTIFACT_CHUNKS=17 \
MATRIX_IMAGE_CHUNKS=16 \
CHECK_OCI=${{ github.event.inputs.checkOci || 'yes' }} \
TARGETS_FILENAME="targets-all-not-eos.yaml" \
SKIP_IMAGES=${{ github.event.inputs.skipImages || 'yes'}} \
${{env.EXTRA_PARAMS_ALL_BUILDS}} SHARE_LOG=yes # IMAGES_ONLY_OUTDATED_ARTIFACTS=yes
- name: "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.prepare-matrix.outputs.logs_url }}"
- name: Generate server lists from NetBox (JSON)
id: prepare-urls
run: |
set -euo pipefail
mkdir -p build/output/info
# copy trackers list
cp trackerslist/best.txt build/output/info/best-torrent-servers.txt
BASE_URL="${{ secrets.NETBOX_API }}/virtualization/virtual-machines/?limit=500&name__empty=false&status=active"
# One jq filter used for all kinds – same fields everywhere
JQ_FILTER='
.results
| map(
select(.name != null)
| {
host: .name,
upload_path: (.custom_fields["path"] // ""),
download_path_archive:
((.custom_fields["download_path_archive"] // "/archive")
| if startswith("/") then . else "/" + . end),
download_path_images:
((.custom_fields["download_path_images"] // "/dl")
| if startswith("/") then . else "/" + . end),
download_path_debs:
((.custom_fields["download_path_debs"] // "/apt")
| if startswith("/") then . else "/" + . end),
port: (.custom_fields["port"] // 22),
username: (.custom_fields["username"] // "mirror")
}
)
| sort_by(.host)
'
for kind in servers servers-download servers-cache servers-upload; do
case "$kind" in
servers)
# All mirrors serving images
url="$BASE_URL&device_role=Mirror&tag=images"
outfile="build/output/info/servers.jq"
;;
servers-download)
# Mirrors used as HTTP download sources (webseeds)
url="$BASE_URL&device_role=Mirror&tag=images"
outfile="build/output/info/servers-download.jq"
;;
servers-cache)
# Cache mirrors (can be used for HTTP + rsync)
url="$BASE_URL&device_role=Mirror&tag=cache"
outfile="build/output/info/servers-cache.jq"
;;
servers-upload)
# Upload targets (SSH)
url="$BASE_URL&tag=upload&tag=images"
outfile="build/output/info/servers-upload.jq"
;;
esac
echo "Generating $outfile from $url"
timeout 10 curl -s \
-H "Authorization: Token ${{ secrets.NETBOX_TOKEN }}" \
-H "Accept: application/json; indent=4" \
"$url" \
| jq "$JQ_FILTER" > "$outfile"
done
# Store output/info folder in a GitHub Actions artifact
- uses: actions/upload-artifact@v6
name: Upload output/info as GitHub Artifact
with:
name: build-info-json
path: build/output/info
- name: chown cache memoize/oci back to normal user
run: sudo chown -R $USER:$USER build/cache/memoize build/cache/oci/positive
outputs:
# not related to matrix
build-sha1: ${{ steps.latest-commit.outputs.sha1 }}
version: ${{ needs.version_prep.outputs.version }}
# template file: 150.per-chunk-artifacts_prep-outputs.yaml
# artifacts-1 of 17
artifacts-chunk-json-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-1 }}
artifacts-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-1 }}
artifacts-chunk-size-1: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-1 }}
# artifacts-2 of 17
artifacts-chunk-json-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-2 }}
artifacts-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-2 }}
artifacts-chunk-size-2: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-2 }}
# artifacts-3 of 17
artifacts-chunk-json-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-3 }}
artifacts-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-3 }}
artifacts-chunk-size-3: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-3 }}
# artifacts-4 of 17
artifacts-chunk-json-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-4 }}
artifacts-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-4 }}
artifacts-chunk-size-4: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-4 }}
# artifacts-5 of 17
artifacts-chunk-json-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-5 }}
artifacts-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-5 }}
artifacts-chunk-size-5: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-5 }}
# artifacts-6 of 17
artifacts-chunk-json-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-6 }}
artifacts-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-6 }}
artifacts-chunk-size-6: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-6 }}
# artifacts-7 of 17
artifacts-chunk-json-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-7 }}
artifacts-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-7 }}
artifacts-chunk-size-7: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-7 }}
# artifacts-8 of 17
artifacts-chunk-json-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-8 }}
artifacts-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-8 }}
artifacts-chunk-size-8: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-8 }}
# artifacts-9 of 17
artifacts-chunk-json-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-9 }}
artifacts-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-9 }}
artifacts-chunk-size-9: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-9 }}
# artifacts-10 of 17
artifacts-chunk-json-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-10 }}
artifacts-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-10 }}
artifacts-chunk-size-10: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-10 }}
# artifacts-11 of 17
artifacts-chunk-json-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-11 }}
artifacts-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-11 }}
artifacts-chunk-size-11: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-11 }}
# artifacts-12 of 17
artifacts-chunk-json-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-12 }}
artifacts-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-12 }}
artifacts-chunk-size-12: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-12 }}
# artifacts-13 of 17
artifacts-chunk-json-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-13 }}
artifacts-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-13 }}
artifacts-chunk-size-13: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-13 }}
# artifacts-14 of 17
artifacts-chunk-json-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-14 }}
artifacts-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-14 }}
artifacts-chunk-size-14: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-14 }}
# artifacts-15 of 17
artifacts-chunk-json-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-15 }}
artifacts-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-15 }}
artifacts-chunk-size-15: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-15 }}
# artifacts-16 of 17
artifacts-chunk-json-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-16 }}
artifacts-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-16 }}
artifacts-chunk-size-16: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-16 }}
# artifacts-17 of 17
artifacts-chunk-json-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-json-17 }}
artifacts-chunk-not-empty-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-not-empty-17 }}
artifacts-chunk-size-17: ${{ steps.prepare-matrix.outputs.artifacts-chunk-size-17 }}
# template file: 151.per-chunk-images_prep-outputs.yaml
# artifacts-1 of 16
images-chunk-json-1: ${{ steps.prepare-matrix.outputs.images-chunk-json-1 }}
images-chunk-not-empty-1: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-1 }}
images-chunk-size-1: ${{ steps.prepare-matrix.outputs.images-chunk-size-1 }}
# artifacts-2 of 16
images-chunk-json-2: ${{ steps.prepare-matrix.outputs.images-chunk-json-2 }}
images-chunk-not-empty-2: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-2 }}
images-chunk-size-2: ${{ steps.prepare-matrix.outputs.images-chunk-size-2 }}
# artifacts-3 of 16
images-chunk-json-3: ${{ steps.prepare-matrix.outputs.images-chunk-json-3 }}
images-chunk-not-empty-3: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-3 }}
images-chunk-size-3: ${{ steps.prepare-matrix.outputs.images-chunk-size-3 }}
# artifacts-4 of 16
images-chunk-json-4: ${{ steps.prepare-matrix.outputs.images-chunk-json-4 }}
images-chunk-not-empty-4: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-4 }}
images-chunk-size-4: ${{ steps.prepare-matrix.outputs.images-chunk-size-4 }}
# artifacts-5 of 16
images-chunk-json-5: ${{ steps.prepare-matrix.outputs.images-chunk-json-5 }}
images-chunk-not-empty-5: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-5 }}
images-chunk-size-5: ${{ steps.prepare-matrix.outputs.images-chunk-size-5 }}
# artifacts-6 of 16
images-chunk-json-6: ${{ steps.prepare-matrix.outputs.images-chunk-json-6 }}
images-chunk-not-empty-6: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-6 }}
images-chunk-size-6: ${{ steps.prepare-matrix.outputs.images-chunk-size-6 }}
# artifacts-7 of 16
images-chunk-json-7: ${{ steps.prepare-matrix.outputs.images-chunk-json-7 }}
images-chunk-not-empty-7: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-7 }}
images-chunk-size-7: ${{ steps.prepare-matrix.outputs.images-chunk-size-7 }}
# artifacts-8 of 16
images-chunk-json-8: ${{ steps.prepare-matrix.outputs.images-chunk-json-8 }}
images-chunk-not-empty-8: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-8 }}
images-chunk-size-8: ${{ steps.prepare-matrix.outputs.images-chunk-size-8 }}
# artifacts-9 of 16
images-chunk-json-9: ${{ steps.prepare-matrix.outputs.images-chunk-json-9 }}
images-chunk-not-empty-9: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-9 }}
images-chunk-size-9: ${{ steps.prepare-matrix.outputs.images-chunk-size-9 }}
# artifacts-10 of 16
images-chunk-json-10: ${{ steps.prepare-matrix.outputs.images-chunk-json-10 }}
images-chunk-not-empty-10: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-10 }}
images-chunk-size-10: ${{ steps.prepare-matrix.outputs.images-chunk-size-10 }}
# artifacts-11 of 16
images-chunk-json-11: ${{ steps.prepare-matrix.outputs.images-chunk-json-11 }}
images-chunk-not-empty-11: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-11 }}
images-chunk-size-11: ${{ steps.prepare-matrix.outputs.images-chunk-size-11 }}
# artifacts-12 of 16
images-chunk-json-12: ${{ steps.prepare-matrix.outputs.images-chunk-json-12 }}
images-chunk-not-empty-12: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-12 }}
images-chunk-size-12: ${{ steps.prepare-matrix.outputs.images-chunk-size-12 }}
# artifacts-13 of 16
images-chunk-json-13: ${{ steps.prepare-matrix.outputs.images-chunk-json-13 }}
images-chunk-not-empty-13: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-13 }}
images-chunk-size-13: ${{ steps.prepare-matrix.outputs.images-chunk-size-13 }}
# artifacts-14 of 16
images-chunk-json-14: ${{ steps.prepare-matrix.outputs.images-chunk-json-14 }}
images-chunk-not-empty-14: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-14 }}
images-chunk-size-14: ${{ steps.prepare-matrix.outputs.images-chunk-size-14 }}
# artifacts-15 of 16
images-chunk-json-15: ${{ steps.prepare-matrix.outputs.images-chunk-json-15 }}
images-chunk-not-empty-15: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-15 }}
images-chunk-size-15: ${{ steps.prepare-matrix.outputs.images-chunk-size-15 }}
# artifacts-16 of 16
images-chunk-json-16: ${{ steps.prepare-matrix.outputs.images-chunk-json-16 }}
images-chunk-not-empty-16: ${{ steps.prepare-matrix.outputs.images-chunk-not-empty-16 }}
images-chunk-size-16: ${{ steps.prepare-matrix.outputs.images-chunk-size-16 }}
# template file: 250.single_aggr-jobs.yaml
# ------ aggregate all artifact chunks into a single dependency -------
all-artifacts-ready:
name: "17 artifacts chunks ready"
runs-on: ubuntu-latest # not going to run, anyway, but is required.
if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
needs: [ "matrix_prep", "build-artifacts-chunk-1","build-artifacts-chunk-2","build-artifacts-chunk-3","build-artifacts-chunk-4","build-artifacts-chunk-5","build-artifacts-chunk-6","build-artifacts-chunk-7","build-artifacts-chunk-8","build-artifacts-chunk-9","build-artifacts-chunk-10","build-artifacts-chunk-11","build-artifacts-chunk-12","build-artifacts-chunk-13","build-artifacts-chunk-14","build-artifacts-chunk-15","build-artifacts-chunk-16","build-artifacts-chunk-17" ] # <-- HERE: all artifact chunk numbers.
steps:
- name: fake step
run: uptime
all-images-ready:
name: "16 image chunks ready"
runs-on: ubuntu-latest # not going to run, anyway, but is required.
if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
needs: [ "matrix_prep", "build-images-chunk-1","build-images-chunk-2","build-images-chunk-3","build-images-chunk-4","build-images-chunk-5","build-images-chunk-6","build-images-chunk-7","build-images-chunk-8","build-images-chunk-9","build-images-chunk-10","build-images-chunk-11","build-images-chunk-12","build-images-chunk-13","build-images-chunk-14","build-images-chunk-15","build-images-chunk-16" ] # <-- HERE: all image chunk numbers.
steps:
- name: fake step
run: uptime
all-artifacts-and-images-ready:
name: "17 artifacts and 16 image chunks ready"
runs-on: ubuntu-latest # not going to run, anyway, but is required.
if: ${{ !cancelled() && ( 1 == 2 ) }} # eg: never run.
needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready" ]
steps:
- name: fake step
run: uptime
# template file: 550.per-chunk-artifacts_job.yaml
"build-artifacts-chunk-1": # templated "build-artifacts-chunk-1"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-1 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-1) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A1' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-2": # templated "build-artifacts-chunk-2"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-2 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-2) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A2' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-3": # templated "build-artifacts-chunk-3"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-3 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-3) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A3' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-4": # templated "build-artifacts-chunk-4"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-4 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-4) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A4' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-5": # templated "build-artifacts-chunk-5"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-5 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-5) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A5' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-6": # templated "build-artifacts-chunk-6"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-6 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-6) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A6' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-7": # templated "build-artifacts-chunk-7"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-7 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-7) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A7' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-8": # templated "build-artifacts-chunk-8"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-8 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-8) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A8' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-9": # templated "build-artifacts-chunk-9"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-9 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-9) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A9' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-10": # templated "build-artifacts-chunk-10"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-10 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-10) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A10' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-11": # templated "build-artifacts-chunk-11"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-11 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-11) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A11' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-12": # templated "build-artifacts-chunk-12"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-12 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-12) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A12' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-13": # templated "build-artifacts-chunk-13"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-13 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-13) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A13' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
"build-artifacts-chunk-14": # templated "build-artifacts-chunk-14"
if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-14 == 'yes' }} # <-- HERE: Chunk number.
needs: [ "matrix_prep" ]
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-14) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty A14' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# Login to ghcr.io, for later uploading rootfs to ghcr.io
- name: Docker Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: "${{ github.repository_owner }}" # GitHub username or org
password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
- name: "Run only on GitHub-hosted"
if: ${{ runner.environment == 'github-hosted' }}
run: |
echo "GitHub-hosted runner"
sudo ./compile.sh requirements
sudo chown -R "$(id -u):$(id -g)" .
- name: Build ${{matrix.desc}}
timeout-minutes: 90
id: build
run: |
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build.outputs.logs_url }}"
  # Artifact-build job for matrix chunk 15. Generated from a per-chunk template
  # (050/550 series); keep in sync with the sibling "build-artifacts-chunk-*" jobs.
  "build-artifacts-chunk-15": # templated "build-artifacts-chunk-15"
    # Run only in the armbian org, and only when matrix_prep produced work for this chunk.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-15 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-15) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A15' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main
      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo
      - name: Checkout build repo
        uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo
      # Copy the userpatches subdirectory into place, then drop the clone.
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo
      # GH-hosted runners need build requirements installed; self-hosted ones are pre-provisioned.
      - name: "Run only on GitHub-hosted"
        if: ${{ runner.environment == 'github-hosted' }}
        run: |
          echo "GitHub-hosted runner"
          sudo ./compile.sh requirements
          sudo chown -R "$(id -u):$(id -g)" .
      # The actual artifact build; invocation comes from the prepared matrix entry.
      - name: Build ${{matrix.desc}}
        timeout-minutes: 90
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  # Artifact-build job for matrix chunk 16. Generated from a per-chunk template;
  # keep in sync with the sibling "build-artifacts-chunk-*" jobs.
  "build-artifacts-chunk-16": # templated "build-artifacts-chunk-16"
    # Run only in the armbian org, and only when matrix_prep produced work for this chunk.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-16 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-16) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A16' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main
      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo
      - name: Checkout build repo
        uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo
      # Copy the userpatches subdirectory into place, then drop the clone.
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo
      # GH-hosted runners need build requirements installed; self-hosted ones are pre-provisioned.
      - name: "Run only on GitHub-hosted"
        if: ${{ runner.environment == 'github-hosted' }}
        run: |
          echo "GitHub-hosted runner"
          sudo ./compile.sh requirements
          sudo chown -R "$(id -u):$(id -g)" .
      # The actual artifact build; invocation comes from the prepared matrix entry.
      - name: Build ${{matrix.desc}}
        timeout-minutes: 90
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  # Artifact-build job for matrix chunk 17. Generated from a per-chunk template;
  # keep in sync with the sibling "build-artifacts-chunk-*" jobs.
  "build-artifacts-chunk-17": # templated "build-artifacts-chunk-17"
    # Run only in the armbian org, and only when matrix_prep produced work for this chunk.
    if: ${{ github.repository_owner == 'armbian' && needs.matrix_prep.outputs.artifacts-chunk-not-empty-17 == 'yes' }} # <-- HERE: Chunk number.
    needs: [ "matrix_prep" ]
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.artifacts-chunk-json-17) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty A17' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main
      # Login to ghcr.io, for later uploading rootfs to ghcr.io
      - name: Docker Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: "${{ github.repository_owner }}" # GitHub username or org
          password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo
      - name: Checkout build repo
        uses: actions/checkout@v6 # We don't need to clone git, really. A wget would suffice for GH-hosted runners. But using clone is better for Igor-hosted runners.
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo
      # Copy the userpatches subdirectory into place, then drop the clone.
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo
      # GH-hosted runners need build requirements installed; self-hosted ones are pre-provisioned.
      - name: "Run only on GitHub-hosted"
        if: ${{ runner.environment == 'github-hosted' }}
        run: |
          echo "GitHub-hosted runner"
          sudo ./compile.sh requirements
          sudo chown -R "$(id -u):$(id -g)" .
      # The actual artifact build; invocation comes from the prepared matrix entry.
      - name: Build ${{matrix.desc}}
        timeout-minutes: 90
        id: build
        run: |
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" SHOW_DEBUG=yes SHARE_LOG=yes ${{env.EXTRA_PARAMS_ALL_BUILDS}}
      - name: "Logs: ${{ steps.build.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build.outputs.logs_url }}"
  # template file: 650.per-chunk-images_job.yaml
  # Image-build job for matrix chunk 1: assembles OS images after all artifact
  # jobs are done. Generated from a per-chunk template; keep in sync with the
  # sibling "build-images-chunk-*" jobs.
  "build-images-chunk-1": # templated "build-images-chunk-1"
    needs: [ "matrix_prep", "all-artifacts-ready" ]
    timeout-minutes: 240
    # Run even when some upstream chunks were skipped (!failure && !cancelled),
    # but only in the armbian org and only when this chunk has work.
    if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-1 == 'yes' ) }} # <-- HERE: Chunk number.
    strategy:
      fail-fast: false # let other jobs try to complete if one fails
      matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-1) }} # <-- HERE: Chunk number.
    name: ${{ matrix.desc || 'Empty I1' }} # <-- HERE: Chunk number.
    runs-on: ${{ matrix.runs_on }}
    steps:
      # mktorrent is required later by the "Generate torrent" step.
      - name: Install dependencies
        run: |
          if [ ! -e /usr/bin/mktorrent ]; then
            sudo apt-get update
            sudo apt-get install -y mktorrent
          fi
      # cleaning self hosted runners
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        uses: armbian/actions/runner-clean@main
      # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
      - name: Cleanup userpatches repo
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: rm -rf userpatches.repo
      - name: Checkout build repo
        uses: actions/checkout@v6
        with:
          repository: ${{ env.BUILD_REPOSITORY }}
          ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
      # clone the userpatches repo (`armbian/os`)
      - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
        uses: actions/checkout@v6
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        with:
          repository: ${{ env.USERPATCHES_REPOSITORY }}
          ref: ${{ env.USERPATCHES_REF }}
          fetch-depth: ${{ matrix.fdepth }}
          clean: false # true is default.
          path: userpatches.repo
      # The "data" branch of armbian/armbian.github.io provides generated release-target YAMLs.
      - name: Checkout JSON artifacts repository
        uses: actions/checkout@v6
        with:
          repository: armbian/armbian.github.io
          ref: data
          clean: false
          path: armbian.github.io
      - name: "Put userpatches in place, and remove userpatches repo"
        if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
        run: |
          mkdir -pv userpatches
          rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
          rm -rf userpatches.repo
          # Use new release YAML targets
          rsync -av armbian.github.io/data/release-targets/. userpatches/
      - name: "Cleanup leftover output"
        run: |
          # NOTE(review): presumably a stale userpatches/VERSION would override the
          # REVISION passed on the compile.sh command line — confirm in build framework.
          rm -f userpatches/VERSION
      # Build one image for this matrix entry.
      - name: ${{matrix.desc}}
        id: build-one-image
        timeout-minutes: 90
        run: |
          # calculate loop from runner name
          # NOTE(review): ImageOS is only set on GitHub-hosted runners, so the branch
          # below runs on self-hosted runners; it derives a fixed /dev/loopN device
          # from the runner name's trailing "-NN" suffix — confirm naming convention.
          if [ -z "${ImageOS}" ]; then
            USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
          fi
          bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
      - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
        if: always()
        run: |
          echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
      # SSH key used for the rsync uploads to Armbian mirrors later in this job.
      - name: Install SSH key
        uses: shimataro/ssh-key-action@v2
        with:
          key: ${{ secrets.KEY_UPLOAD }}
          known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
          if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
      # Import the release-signing GPG key; skipped when the secret is absent (e.g. forks).
      - name: Import GPG key
        env:
          GPG_KEY1: ${{ secrets.GPG_KEY1 }}
        if: env.GPG_KEY1 != null
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY1 }}
          passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
      # Detach-sign every compressed image (.zip/.xz/.qcow2) with the imported key.
      - name: Sign
        env:
          GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
        if: env.GPG_PASSPHRASE1 != null
        run: |
          for extension in zip xz qcow2; do
            if ls -l output/images/*/archive/*.$extension &>/dev/null; then
              echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
            fi
          done
      # Download the artifacts (output/info) produced by the prepare-matrix job.
      - name: Download artifacts
        uses: actions/download-artifact@v7
        with:
          name: build-info-json
          path: output/info
      # Create a .torrent for the built image: announce URLs come from the prepared
      # tracker list; webseed URLs differ between nightly and stable builds.
      - name: Generate torrent
        timeout-minutes: 3
        run: |
          set -euo pipefail
          # Build tracker list (ignore empty/whitespace-only lines)
          TRACKERS=$(
            grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
            | sort -R \
            | sed 's/^/ --announce=/'
          )
          # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
          BOARD=""
          FILE=""
          first_match=""
          for ext in zip xz qcow2; do
            if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
              first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
              # first_match = output/images/BOARD/archive/file.ext
              BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
              FILE=$(basename "$first_match")
              break
            fi
          done
          # Safety check
          if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
            echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
            exit 1
          fi
          # Nightly / stable logic (templated)
          RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
          nightlybuild="${{ github.event.inputs.nightlybuild }}"
          # NOTE(review): the template injects a quoted literal, so the default is the
          # five characters 'yes' including the quotes; it is only ever compared
          # against "no", so behavior is unaffected — confirm against the template.
          nightlybuild_default="'yes'"
          effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
          WEBSEEDS=""
          if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
            ##################################################################
            # STABLE RELEASES
            # Use download mirrors (servers-download.jq)
            # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
            # download_path_images is already normalized & defaults to /dl in JSON.
            ##################################################################
            WEBSEEDS=$(
              jq -r --arg board "$BOARD" --arg file "$FILE" '
                .[]
                | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
              ' output/info/servers-download.jq | paste -sd, -
            )
          else
            ##################################################################
            # NIGHTLY BUILDS
            # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
            # Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
            ##################################################################
            WEBSEEDS=$(
              jq -r \
                --arg repo "os" \
                --arg ver "${{ needs.matrix_prep.outputs.version }}" \
                --arg file "$FILE" '
                .[]
                | "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
              ' output/info/servers-cache.jq | paste -sd, -
            )
            # Append GitHub webseed
            if [ -n "$WEBSEEDS" ]; then
              WEBSEEDS+=","
            fi
            WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
          fi
          echo "WEBSEEDS: $WEBSEEDS"
          # Go to the archive directory that contains FILE
          cd "$(dirname "$first_match")" || exit 1
          mktorrent \
            --comment="Armbian torrent for ${FILE}" \
            --verbose \
            ${TRACKERS} \
            --web-seed="${WEBSEEDS}" \
            "${FILE}"
          # drop .txt helper files
          rm -f *.txt
      # Stage only shippable image files for the GitHub release; signatures,
      # checksums and torrents are distributed via the mirror upload instead.
      - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
        run: |
          # Start from a clean directory
          rm -rf output/release
          mkdir -p output/release
          # Copy wanted artifacts from output/images, preserving folder structure
          # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
          find output/images -type f -name 'Armbian_*.*' \
            ! -name '*.asc' \
            ! -name '*.sha' \
            ! -name '*.torrent' \
            -exec cp --parents {} output/release/ \;
          # debug
          tree output/images
          echo "# debug"
          tree output/release
      # Attach the staged images to the GitHub release tagged with this version.
      - name: "Upload artefacts except .asc, .sha and .torrent"
        timeout-minutes: 60
        if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
        #if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
        uses: ncipollo/release-action@v1
        with:
          repo: "${{ env.RELEASE_REPOSITORY }}"
          tag: "${{ needs.matrix_prep.outputs.version }}"
          artifacts: "output/release/output/images/*/*/Armbian_*.*"
          omitBody: true
          replacesArtifacts: true
          omitName: true
          makeLatest: false
          omitPrereleaseDuringUpdate: true
          allowUpdates: true
          artifactErrorsFailBuild: true
          token: "${{ env.GH_TOKEN }}"
      # Push artifacts to Armbian mirrors over rsync/ssh:
      #  - cache mirrors receive only .sha/.asc/.torrent helper files
      #  - upload servers receive everything (stable "os" releases only)
      - name: "Upload to servers"
        run: |
          # temp workspace for downloaded jsons (auto-cleanup)
          tmpdir="$(mktemp -d)"
          trap 'rm -rf "$tmpdir"' EXIT
          curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
            -o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
            -o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
          # debug
          echo "=== servers-cache.jq ==="
          jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
          echo "=== servers-upload.jq ==="
          jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
          max_retries=3
          # sync_from_json <json_file> <mode>: rsync output/images/ to each server
          # in the JSON array; mode ("cache"/"upload") selects filters + remote dir.
          sync_from_json() {
            local json_file=$1
            local mode=$2 # "cache" or "upload"
            echo "== Processing ${json_file} (mode: ${mode}) =="
            # Iterate over JSON array elements
            while IFS= read -r server; do
              # JSON structure (same for all files):
              # {
              #   "host": "...",
              #   "upload_path": "...",
              #   "download_path_archive": "...",
              #   "download_path_images": "...",
              #   "download_path_debs": "...",
              #   "port": 22,
              #   "username": "mirror"
              # }
              SERVER_URL=$(jq -r '.host // empty' <<<"$server")
              SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
              SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
              SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
              # skip empty host
              [ -z "$SERVER_URL" ] && continue
              echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
              # Clean known_hosts entry (host:port form)
              ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
              # Select rsync filters + remote subdir
              if [ "$mode" = "cache" ]; then
                # only.sha, .torrent .asc
                RSYNC_FILTER=(
                  --include='*/'
                  --include='*.sha'
                  --include='*.asc'
                  --include='*.torrent'
                  --exclude='*'
                )
                REMOTE_SUBDIR="cache/artifacts/"
              else
                # everything
                RSYNC_FILTER=(
                  --include='*/'
                  --include='*'
                )
                REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
              fi
              # Retry loop
              for attempt in $(seq 1 "$max_retries"); do
                echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
                if rsync --progress \
                  -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
                  -rvP \
                  "${RSYNC_FILTER[@]}" \
                  output/images/ \
                  "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
                then
                  echo "[$SERVER_URL] rsync successful."
                  break
                fi
                if [ "$attempt" -eq "$max_retries" ]; then
                  echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
                  exit 1
                fi
                echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
                sleep 10
              done
            done < <(jq -c '.[]' "$json_file")
          }
          nightlybuild="${{ github.event.inputs.nightlybuild }}"
          # NOTE(review): templated default includes literal quotes ('yes'); only
          # compared against "no" below, so behavior is unaffected — confirm in template.
          nightlybuild_default="'yes'"
          RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
          effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
          # Upload to cache servers: only .sha/.asc/.torrent
          sync_from_json "$tmpdir/cache.jq" cache
          if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
            # Upload to servers: everything
            sync_from_json "$tmpdir/upload.jq" upload
          fi
      # cleaning self hosted runners
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        if: always()
        uses: armbian/actions/runner-clean@main
"build-images-chunk-2": # templated "build-images-chunk-2"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-2 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-2) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I2' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
      # cleaning self hosted runners
      # always() makes cleanup run even when an earlier step failed or the job
      # was marked as failing.
      - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
        if: always()
        uses: armbian/actions/runner-clean@main
"build-images-chunk-3": # templated "build-images-chunk-3"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-3 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-3) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I3' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-4": # templated "build-images-chunk-4"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-4 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-4) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I4' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-5": # templated "build-images-chunk-5"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-5 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-5) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I5' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-6": # templated "build-images-chunk-6"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-6 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-6) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I6' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-7": # templated "build-images-chunk-7"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-7 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-7) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I7' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-8": # templated "build-images-chunk-8"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-8 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-8) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I8' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
  run: |
    # install dependencies
    if ! command -v "gh" > /dev/null 2>&1; then
      sudo apt-get -y -qq install gh
    fi
    # Block until at least 20% of the GitHub API rate limit is available.
    while true
    do
      # One API call per iteration (previously two identical calls).
      rate_json=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit)
      API_CALLS_TOTAL=$(jq -r '.rate.limit' <<<"$rate_json")
      API_CALLS_LEFT=$(jq -r '.rate.remaining' <<<"$rate_json")
      PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
      if (( PERCENT > 20 )); then
        echo "API rate in good shape $PERCENT % free"
        exit 0
      fi
      echo "API rate lower than 20%, sleeping 10m"
      sleep 10m
    done
    # NOTE: the trailing "show current api rate" curl that used to live here
    # was unreachable (the loop above only exits via 'exit 0') and has been removed.
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
  env:
    GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
  if: env.GPG_PASSPHRASE1 != null
  run: |
    for extension in zip xz qcow2; do
      if ls -l output/images/*/archive/*.$extension &>/dev/null; then
        # Read the passphrase from the step env instead of interpolating the
        # secret into the script text (avoids shell-injection and leaks).
        echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
      fi
    done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
  timeout-minutes: 3
  run: |
    set -euo pipefail
    # Build tracker list (ignore empty/whitespace-only lines; [[:space:]]
    # also filters tab-only lines, which would yield empty --announce=)
    TRACKERS=$(
      grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \
        | sort -R \
        | sed 's/^/ --announce=/'
    )
    # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
    BOARD=""
    FILE=""
    first_match=""
    for ext in zip xz qcow2; do
      if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
        first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
        # first_match = output/images/BOARD/archive/file.ext
        BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
        FILE=$(basename "$first_match")
        break
      fi
    done
    # Safety check
    if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
      echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
      exit 1
    fi
    # Nightly / stable logic (templated)
    RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
    nightlybuild="${{ github.event.inputs.nightlybuild }}"
    # FIX: bare word "yes" — the previous default "'yes'" embedded
    # literal single quotes into the variable.
    nightlybuild_default="yes"
    effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
    WEBSEEDS=""
    if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
      ##################################################################
      # STABLE RELEASES
      # Use download mirrors (servers-download.jq)
      # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
      # download_path_images is already normalized & defaults to /dl in JSON.
      ##################################################################
      WEBSEEDS=$(
        jq -r --arg board "$BOARD" --arg file "$FILE" '
          .[]
          | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
        ' output/info/servers-download.jq | paste -sd, -
      )
    else
      ##################################################################
      # NIGHTLY BUILDS
      # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
      # Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
      ##################################################################
      WEBSEEDS=$(
        jq -r \
          --arg repo "os" \
          --arg ver "${{ needs.matrix_prep.outputs.version }}" \
          --arg file "$FILE" '
          .[]
          | "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
        ' output/info/servers-cache.jq | paste -sd, -
      )
      # Append GitHub webseed
      if [ -n "$WEBSEEDS" ]; then
        WEBSEEDS+=","
      fi
      WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
    fi
    echo "WEBSEEDS: $WEBSEEDS"
    # Go to the archive directory that contains FILE
    cd "$(dirname "$first_match")" || exit 1
    mktorrent \
      --comment="Armbian torrent for ${FILE}" \
      --verbose \
      ${TRACKERS} \
      --web-seed="${WEBSEEDS}" \
      "${FILE}"
    # drop .txt helper files
    rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only .sha, .asc and .torrent sidecar files
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-9": # templated "build-images-chunk-9"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-9 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-9) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I9' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-10": # templated "build-images-chunk-10"
  # Builds one chunk of the image matrix: compiles each image, GPG-signs the
  # archives, generates a torrent with mirror webseeds, publishes artifacts to
  # a GitHub release and rsyncs them to the Armbian mirror network.
  needs: [ "matrix_prep", "all-artifacts-ready" ]
  timeout-minutes: 240
  if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-10 == 'yes' ) }} # <-- HERE: Chunk number.
  strategy:
    fail-fast: false # let other jobs try to complete if one fails
    matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-10) }} # <-- HERE: Chunk number.
  name: ${{ matrix.desc || 'Empty I10' }} # <-- HERE: Chunk number.
  runs-on: ${{ matrix.runs_on }}
  steps:
    # mktorrent is needed later by the "Generate torrent" step; install only if missing.
    - name: Install dependencies
      run: |
        if [ ! -e /usr/bin/mktorrent ]; then
          sudo apt-get update
          sudo apt-get install -y mktorrent
        fi
    # cleaning self hosted runners
    - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
      uses: armbian/actions/runner-clean@main
    # cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
    - name: Cleanup userpatches repo
      if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
      run: rm -rf userpatches.repo
    - name: Checkout build repo
      uses: actions/checkout@v6
      with:
        repository: ${{ env.BUILD_REPOSITORY }}
        ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
        fetch-depth: ${{ matrix.fdepth }}
        clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
    # clone the userpatches repo (`armbian/os`)
    - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
      uses: actions/checkout@v6
      if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
      with:
        repository: ${{ env.USERPATCHES_REPOSITORY }}
        ref: ${{ env.USERPATCHES_REF }}
        fetch-depth: ${{ matrix.fdepth }}
        clean: false # true is default.
        path: userpatches.repo
    - name: Checkout JSON artifacts repository
      uses: actions/checkout@v6
      with:
        repository: armbian/armbian.github.io
        ref: data
        clean: false
        path: armbian.github.io
    - name: "Put userpatches in place, and remove userpatches repo"
      if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
      run: |
        mkdir -pv userpatches
        rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
        rm -rf userpatches.repo
        # Use new release YAML targets
        rsync -av armbian.github.io/data/release-targets/. userpatches/
    - name: "Cleanup leftover output"
      run: |
        rm -f userpatches/VERSION
    - name: ${{matrix.desc}}
      id: build-one-image
      timeout-minutes: 90
      run: |
        # calculate loop from runner name
        # ImageOS is only set on GitHub-hosted runners; on self-hosted runners
        # derive a fixed loop device from the trailing "-NN" of the runner name.
        if [ -z "${ImageOS}" ]; then
          USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
        fi
        bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
    - name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
      if: always()
      run: |
        echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
    - name: Install SSH key
      uses: shimataro/ssh-key-action@v2
      with:
        key: ${{ secrets.KEY_UPLOAD }}
        known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
        if_key_exists: replace
    - name: Check API rate limits
      run: |
        # install dependencies
        if ! command -v "gh" > /dev/null 2>&1; then
          sudo apt-get -y -qq install gh
        fi
        # Block until at least 20% of the GitHub API rate limit is free.
        while true
        do
          # Fetch /rate_limit once per iteration so limit and remaining come
          # from the same snapshot (two separate calls could disagree).
          RATE_JSON=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit)
          API_CALLS_TOTAL=$(jq -r '.rate.limit' <<<"$RATE_JSON")
          API_CALLS_LEFT=$(jq -r '.rate.remaining' <<<"$RATE_JSON")
          PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
          if (( PERCENT > 20 )); then
            echo "API rate in good shape $PERCENT % free"
            exit 0
          fi
          echo "API rate lower than 20%, sleeping 10m"
          sleep 10m
        done
        # show current api rate (NOTE(review): unreachable — the loop only exits via 'exit 0' above)
        curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
    - name: Import GPG key
      env:
        GPG_KEY1: ${{ secrets.GPG_KEY1 }}
      if: env.GPG_KEY1 != null
      uses: crazy-max/ghaction-import-gpg@v6
      with:
        gpg_private_key: ${{ secrets.GPG_KEY1 }}
        passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
    - name: Sign
      env:
        GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
      if: env.GPG_PASSPHRASE1 != null
      run: |
        # Detach-sign every compressed image archive.
        for extension in zip xz qcow2; do
          if ls -l output/images/*/archive/*.$extension &>/dev/null; then
            # Read the passphrase from the step-level env var instead of templating
            # the secret directly into the script (avoids quoting/injection issues).
            echo "${GPG_PASSPHRASE1}" | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
          fi
        done
    # Download the artifacts (output/info) produced by the prepare-matrix job.
    - name: Download artifacts
      uses: actions/download-artifact@v7
      with:
        name: build-info-json
        path: output/info
    - name: Generate torrent
      timeout-minutes: 3
      run: |
        set -euo pipefail
        # Build tracker list (ignore empty/whitespace-only lines, tabs included)
        TRACKERS=$(
          grep -v '^[[:space:]]*$' output/info/best-torrent-servers.txt \
            | sort -R \
            | sed 's/^/ --announce=/'
        )
        # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
        BOARD=""
        FILE=""
        first_match=""
        for ext in zip xz qcow2; do
          if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
            first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
            # first_match = output/images/BOARD/archive/file.ext
            BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
            FILE=$(basename "$first_match")
            break
          fi
        done
        # Safety check
        if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
          echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
          exit 1
        fi
        # Nightly / stable logic (templated)
        RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
        nightlybuild="${{ github.event.inputs.nightlybuild }}"
        # Default to "yes" (nightly) when the input is absent. Plain yes, no
        # embedded quotes — a literal "'yes'" value would never match anything.
        nightlybuild_default="yes"
        effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
        WEBSEEDS=""
        if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
          ##################################################################
          # STABLE RELEASES
          # Use download mirrors (servers-download.jq)
          # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
          # download_path_images is already normalized & defaults to /dl in JSON.
          ##################################################################
          WEBSEEDS=$(
            jq -r --arg board "$BOARD" --arg file "$FILE" '
              .[]
              | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
            ' output/info/servers-download.jq | paste -sd, -
          )
        else
          ##################################################################
          # NIGHTLY BUILDS
          # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
          # Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
          ##################################################################
          WEBSEEDS=$(
            jq -r \
              --arg repo "os" \
              --arg ver "${{ needs.matrix_prep.outputs.version }}" \
              --arg file "$FILE" '
              .[]
              | "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
            ' output/info/servers-cache.jq | paste -sd, -
          )
          # Append GitHub webseed
          if [ -n "$WEBSEEDS" ]; then
            WEBSEEDS+=","
          fi
          WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
        fi
        echo "WEBSEEDS: $WEBSEEDS"
        # Go to the archive directory that contains FILE
        cd "$(dirname "$first_match")" || exit 1
        mktorrent \
          --comment="Armbian torrent for ${FILE}" \
          --verbose \
          ${TRACKERS} \
          --web-seed="${WEBSEEDS}" \
          "${FILE}"
        # drop .txt helper files
        rm -f *.txt
    - name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
      run: |
        # Start from a clean directory
        rm -rf output/release
        mkdir -p output/release
        # Copy wanted artifacts from output/images, preserving folder structure
        # e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
        find output/images -type f -name 'Armbian_*.*' \
          ! -name '*.asc' \
          ! -name '*.sha' \
          ! -name '*.torrent' \
          -exec cp --parents {} output/release/ \;
        # debug
        tree output/images
        echo "# debug"
        tree output/release
    - name: "Upload artefacts except .asc, .sha and .torrent"
      timeout-minutes: 60
      if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
      #if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
      uses: ncipollo/release-action@v1
      with:
        repo: "${{ env.RELEASE_REPOSITORY }}"
        tag: "${{ needs.matrix_prep.outputs.version }}"
        artifacts: "output/release/output/images/*/*/Armbian_*.*"
        omitBody: true
        replacesArtifacts: true
        omitName: true
        makeLatest: false
        omitPrereleaseDuringUpdate: true
        allowUpdates: true
        artifactErrorsFailBuild: true
        token: "${{ env.GH_TOKEN }}"
    - name: "Upload to servers"
      run: |
        # temp workspace for downloaded jsons (auto-cleanup)
        tmpdir="$(mktemp -d)"
        trap 'rm -rf "$tmpdir"' EXIT
        curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
          -o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
          -o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
        # debug
        echo "=== servers-cache.jq ==="
        jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
        echo "=== servers-upload.jq ==="
        jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
        max_retries=3
        # sync_from_json <json_file> <mode>: rsync output/images/ to every
        # server listed in the JSON array; mode "cache" ships only sidecar
        # files, mode "upload" ships everything.
        sync_from_json() {
          local json_file=$1
          local mode=$2 # "cache" or "upload"
          echo "== Processing ${json_file} (mode: ${mode}) =="
          # Iterate over JSON array elements
          while IFS= read -r server; do
            # JSON structure (same for all files):
            # {
            #   "host": "...",
            #   "upload_path": "...",
            #   "download_path_archive": "...",
            #   "download_path_images": "...",
            #   "download_path_debs": "...",
            #   "port": 22,
            #   "username": "mirror"
            # }
            SERVER_URL=$(jq -r '.host // empty' <<<"$server")
            SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
            SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
            SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
            # skip empty host
            [ -z "$SERVER_URL" ] && continue
            echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
            # Clean known_hosts entry (host:port form)
            ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
            # Select rsync filters + remote subdir
            if [ "$mode" = "cache" ]; then
              # only .sha, .asc and .torrent sidecar files
              RSYNC_FILTER=(
                --include='*/'
                --include='*.sha'
                --include='*.asc'
                --include='*.torrent'
                --exclude='*'
              )
              REMOTE_SUBDIR="cache/artifacts/"
            else
              # everything
              RSYNC_FILTER=(
                --include='*/'
                --include='*'
              )
              REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
            fi
            # Retry loop
            for attempt in $(seq 1 "$max_retries"); do
              echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
              if rsync --progress \
                -e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
                -rvP \
                "${RSYNC_FILTER[@]}" \
                output/images/ \
                "${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
              then
                echo "[$SERVER_URL] rsync successful."
                break
              fi
              if [ "$attempt" -eq "$max_retries" ]; then
                echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
                exit 1
              fi
              echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
              sleep 10
            done
          done < <(jq -c '.[]' "$json_file")
        }
        nightlybuild="${{ github.event.inputs.nightlybuild }}"
        # Default to "yes" (nightly) when the input is absent; plain yes, no embedded quotes.
        nightlybuild_default="yes"
        RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
        effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
        # Upload to cache servers: only .sha/.asc/.torrent
        sync_from_json "$tmpdir/cache.jq" cache
        if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
          # Upload to servers: everything
          sync_from_json "$tmpdir/upload.jq" upload
        fi
    # cleaning self hosted runners
    - name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
      if: always()
      uses: armbian/actions/runner-clean@main
"build-images-chunk-11": # templated "build-images-chunk-11"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-11 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-11) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I11' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-12": # templated "build-images-chunk-12"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-12 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-12) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I12' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
      # Import the release signing key; skipped when the secret is absent (e.g. forks).
      - name: Import GPG key
        env:
          GPG_KEY1: ${{ secrets.GPG_KEY1 }}
        if: env.GPG_KEY1 != null
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY1 }}
          passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
      # Detach-sign every produced image archive (.zip/.xz/.qcow2) with the imported key.
      - name: Sign
        env:
          GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
        if: env.GPG_PASSPHRASE1 != null
        run: |
          for extension in zip xz qcow2; do
            if ls -l output/images/*/archive/*.$extension &>/dev/null; then
              # NOTE(review): the passphrase secret is expanded inline into the script;
              # consider piping "$GPG_PASSPHRASE1" from the env instead — verify.
              echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
            fi
          done
      # Download the artifacts (output/info) produced by the prepare-matrix job.
      - name: Download artifacts
        uses: actions/download-artifact@v7
        with:
          name: build-info-json
          path: output/info
      # Create a .torrent for the first image archive found, web-seeded by the
      # download mirrors (stable) or the cache mirrors + GitHub release (nightly).
      - name: Generate torrent
        timeout-minutes: 3
        run: |
          set -euo pipefail
          # Build tracker list (ignore empty/whitespace-only lines)
          TRACKERS=$(
            grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
            | sort -R \
            | sed 's/^/ --announce=/'
          )
          # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
          BOARD=""
          FILE=""
          first_match=""
          for ext in zip xz qcow2; do
            if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
              first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
              # first_match = output/images/BOARD/archive/file.ext
              BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
              FILE=$(basename "$first_match")
              break
            fi
          done
          # Safety check
          if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
            echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
            exit 1
          fi
          # Nightly / stable logic (templated)
          RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
          nightlybuild="${{ github.event.inputs.nightlybuild }}"
          # NOTE(review): the templated default carries literal quotes ('yes');
          # harmless here since only the value "no" is compared against below.
          nightlybuild_default="'yes'"
          effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
          WEBSEEDS=""
          if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
            ##################################################################
            # STABLE RELEASES
            # Use download mirrors (servers-download.jq)
            # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
            # download_path_images is already normalized & defaults to /dl in JSON.
            ##################################################################
            WEBSEEDS=$(
              jq -r --arg board "$BOARD" --arg file "$FILE" '
                .[]
                | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
              ' output/info/servers-download.jq | paste -sd, -
            )
          else
            ##################################################################
            # NIGHTLY BUILDS
            # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
            # Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
            ##################################################################
            WEBSEEDS=$(
              jq -r \
                --arg repo "os" \
                --arg ver "${{ needs.matrix_prep.outputs.version }}" \
                --arg file "$FILE" '
                .[]
                | "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
              ' output/info/servers-cache.jq | paste -sd, -
            )
            # Append GitHub webseed
            if [ -n "$WEBSEEDS" ]; then
              WEBSEEDS+=","
            fi
            WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
          fi
          echo "WEBSEEDS: $WEBSEEDS"
          # Go to the archive directory that contains FILE
          cd "$(dirname "$first_match")" || exit 1
          # ${TRACKERS} intentionally unquoted: each " --announce=URL" must split
          # into its own argument.
          mktorrent \
            --comment="Armbian torrent for ${FILE}" \
            --verbose \
            ${TRACKERS} \
            --web-seed="${WEBSEEDS}" \
            "${FILE}"
          # drop .txt helper files
          rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-13": # templated "build-images-chunk-13"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-13 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-13) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I13' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
      # Import the release signing key; skipped when the secret is absent (e.g. forks).
      - name: Import GPG key
        env:
          GPG_KEY1: ${{ secrets.GPG_KEY1 }}
        if: env.GPG_KEY1 != null
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_KEY1 }}
          passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
      # Detach-sign every produced image archive (.zip/.xz/.qcow2) with the imported key.
      - name: Sign
        env:
          GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
        if: env.GPG_PASSPHRASE1 != null
        run: |
          for extension in zip xz qcow2; do
            if ls -l output/images/*/archive/*.$extension &>/dev/null; then
              # NOTE(review): the passphrase secret is expanded inline into the script;
              # consider piping "$GPG_PASSPHRASE1" from the env instead — verify.
              echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
            fi
          done
      # Download the artifacts (output/info) produced by the prepare-matrix job.
      - name: Download artifacts
        uses: actions/download-artifact@v7
        with:
          name: build-info-json
          path: output/info
      # Create a .torrent for the first image archive found, web-seeded by the
      # download mirrors (stable) or the cache mirrors + GitHub release (nightly).
      - name: Generate torrent
        timeout-minutes: 3
        run: |
          set -euo pipefail
          # Build tracker list (ignore empty/whitespace-only lines)
          TRACKERS=$(
            grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
            | sort -R \
            | sed 's/^/ --announce=/'
          )
          # Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
          BOARD=""
          FILE=""
          first_match=""
          for ext in zip xz qcow2; do
            if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
              first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
              # first_match = output/images/BOARD/archive/file.ext
              BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
              FILE=$(basename "$first_match")
              break
            fi
          done
          # Safety check
          if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
            echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
            exit 1
          fi
          # Nightly / stable logic (templated)
          RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
          nightlybuild="${{ github.event.inputs.nightlybuild }}"
          # NOTE(review): the templated default carries literal quotes ('yes');
          # harmless here since only the value "no" is compared against below.
          nightlybuild_default="'yes'"
          effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
          WEBSEEDS=""
          if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
            ##################################################################
            # STABLE RELEASES
            # Use download mirrors (servers-download.jq)
            # URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
            # download_path_images is already normalized & defaults to /dl in JSON.
            ##################################################################
            WEBSEEDS=$(
              jq -r --arg board "$BOARD" --arg file "$FILE" '
                .[]
                | "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
              ' output/info/servers-download.jq | paste -sd, -
            )
          else
            ##################################################################
            # NIGHTLY BUILDS
            # Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
            # Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
            ##################################################################
            WEBSEEDS=$(
              jq -r \
                --arg repo "os" \
                --arg ver "${{ needs.matrix_prep.outputs.version }}" \
                --arg file "$FILE" '
                .[]
                | "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
              ' output/info/servers-cache.jq | paste -sd, -
            )
            # Append GitHub webseed
            if [ -n "$WEBSEEDS" ]; then
              WEBSEEDS+=","
            fi
            WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
          fi
          echo "WEBSEEDS: $WEBSEEDS"
          # Go to the archive directory that contains FILE
          cd "$(dirname "$first_match")" || exit 1
          # ${TRACKERS} intentionally unquoted: each " --announce=URL" must split
          # into its own argument.
          mktorrent \
            --comment="Armbian torrent for ${FILE}" \
            --verbose \
            ${TRACKERS} \
            --web-seed="${WEBSEEDS}" \
            "${FILE}"
          # drop .txt helper files
          rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-14": # templated "build-images-chunk-14"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-14 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-14) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I14' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-15": # templated "build-images-chunk-15"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-15 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-15) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I15' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
"build-images-chunk-16": # templated "build-images-chunk-16"
needs: [ "matrix_prep", "all-artifacts-ready" ]
timeout-minutes: 240
if: ${{ !failure() && !cancelled() && ( github.repository_owner == 'armbian' ) && ( needs.matrix_prep.outputs.images-chunk-not-empty-16 == 'yes' ) }} # <-- HERE: Chunk number.
strategy:
fail-fast: false # let other jobs try to complete if one fails
matrix: ${{ fromJSON(needs.matrix_prep.outputs.images-chunk-json-16) }} # <-- HERE: Chunk number.
name: ${{ matrix.desc || 'Empty I16' }} # <-- HERE: Chunk number.
runs-on: ${{ matrix.runs_on }}
steps:
- name: Install dependencies
run: |
if [ ! -e /usr/bin/mktorrent ]; then
sudo apt-get update
sudo apt-get install -y mktorrent
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
uses: armbian/actions/runner-clean@main
# cleanup the place where we will clone the userpatches repo, to avoid git going insane and cleaning everything later
- name: Cleanup userpatches repo
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: rm -rf userpatches.repo
- name: Checkout build repo
uses: actions/checkout@v6
with:
repository: ${{ env.BUILD_REPOSITORY }}
ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default. it *will* delete the hosts /dev if mounted inside.
# clone the userpatches repo (`armbian/os`)
- name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
uses: actions/checkout@v6
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
with:
repository: ${{ env.USERPATCHES_REPOSITORY }}
ref: ${{ env.USERPATCHES_REF }}
fetch-depth: ${{ matrix.fdepth }}
clean: false # true is default.
path: userpatches.repo
- name: Checkout JSON artifacts repository
uses: actions/checkout@v6
with:
repository: armbian/armbian.github.io
ref: data
clean: false
path: armbian.github.io
- name: "Put userpatches in place, and remove userpatches repo"
if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
run: |
mkdir -pv userpatches
rsync -av userpatches.repo/${{env.USERPATCHES_DIR}}/. userpatches/
rm -rf userpatches.repo
# Use new release YAML targets
rsync -av armbian.github.io/data/release-targets/. userpatches/
- name: "Cleanup leftover output"
run: |
rm -f userpatches/VERSION
- name: ${{matrix.desc}}
id: build-one-image
timeout-minutes: 90
run: |
# calculate loop from runner name
if [ -z "${ImageOS}" ]; then
USE_FIXED_LOOP_DEVICE=$(echo ${RUNNER_NAME} | rev | cut -d"-" -f1 | rev | sed 's/^0*//' | sed -e 's/^/\/dev\/loop/')
fi
bash ./compile.sh ${{ matrix.invocation }} REVISION="${{ needs.matrix_prep.outputs.version }}" USE_FIXED_LOOP_DEVICE="$USE_FIXED_LOOP_DEVICE" SHARE_LOG=yes MAKE_FOLDERS="archive" IMAGE_VERSION=${{ needs.matrix_prep.outputs.version }} ${{env.EXTRA_PARAMS_IMAGE}} ${{env.EXTRA_PARAMS_ALL_BUILDS}}
- name: "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
if: always()
run: |
echo "Logs: ${{ steps.build-one-image.outputs.logs_url }}"
- name: Install SSH key
uses: shimataro/ssh-key-action@v2
with:
key: ${{ secrets.KEY_UPLOAD }}
known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
if_key_exists: replace
- name: Check API rate limits
run: |
# install dependencies
if ! command -v "gh" > /dev/null 2>&1; then
sudo apt-get -y -qq install gh
fi
while true
do
API_CALLS_TOTAL=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.limit')
API_CALLS_LEFT=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /rate_limit | jq -r '.rate.remaining')
PERCENT=$(( API_CALLS_LEFT * 100 / API_CALLS_TOTAL ))
if (( $PERCENT > 20 )); then
echo "API rate in good shape $PERCENT % free"
exit 0
fi
echo "API rate lower then 20%, sleping 10m"
sleep 10m
done
# show current api rate
curl -s -H "Accept: application/vnd.github.v3+json" -H "Authorization: token ${{ secrets.ACCESS_TOKEN }}" https://api.github.com/rate_limit
- name: Import GPG key
env:
GPG_KEY1: ${{ secrets.GPG_KEY1 }}
if: env.GPG_KEY1 != null
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_KEY1 }}
passphrase: ${{ secrets.GPG_PASSPHRASE1 }}
- name: Sign
env:
GPG_PASSPHRASE1: ${{ secrets.GPG_PASSPHRASE1 }}
if: env.GPG_PASSPHRASE1 != null
run: |
for extension in zip xz qcow2; do
if ls -l output/images/*/archive/*.$extension &>/dev/null; then
echo ${{ secrets.GPG_PASSPHRASE1 }} | gpg --passphrase-fd 0 --armor --detach-sign --pinentry-mode loopback --batch --yes output/images/*/archive/*.$extension
fi
done
# Download the artifacts (output/info) produced by the prepare-matrix job.
- name: Download artifacts
uses: actions/download-artifact@v7
with:
name: build-info-json
path: output/info
- name: Generate torrent
timeout-minutes: 3
run: |
set -euo pipefail
# Build tracker list (ignore empty/whitespace-only lines)
TRACKERS=$(
grep -v '^[ ]*$' output/info/best-torrent-servers.txt \
| sort -R \
| sed 's/^/ --announce=/'
)
# Find BOARD and FILE (first zip/xz/qcow2 in output/images/*/archive/)
BOARD=""
FILE=""
first_match=""
for ext in zip xz qcow2; do
if ls output/images/*/archive/*."$ext" >/dev/null 2>&1; then
first_match=$(ls -1 output/images/*/archive/*."$ext" | head -n1)
# first_match = output/images/BOARD/archive/file.ext
BOARD=$(basename "$(dirname "$(dirname "$first_match")")") # -> BOARD
FILE=$(basename "$first_match")
break
fi
done
# Safety check
if [ -z "$BOARD" ] || [ -z "$FILE" ]; then
echo "No torrent source file found (zip/xz/qcow2) in output/images/*/archive" >&2
exit 1
fi
# Nightly / stable logic (templated)
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
WEBSEEDS=""
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" != "distribution" ]; then
##################################################################
# STABLE RELEASES
# Use download mirrors (servers-download.jq)
# URL: https://<host><download_path_images>/<BOARD>/archive/<FILE>
# download_path_images is already normalized & defaults to /dl in JSON.
##################################################################
WEBSEEDS=$(
jq -r --arg board "$BOARD" --arg file "$FILE" '
.[]
| "https://\(.host)\(.download_path_images)/\($board)/archive/\($file)"
' output/info/servers-download.jq | paste -sd, -
)
else
##################################################################
# NIGHTLY BUILDS
# Use cache mirrors (servers-cache.jq) + GitHub as extra webseed
# Path is FIXED: https://SERVER/cache/os/<version>/<FILE>
##################################################################
WEBSEEDS=$(
jq -r \
--arg repo "os" \
--arg ver "${{ needs.matrix_prep.outputs.version }}" \
--arg file "$FILE" '
.[]
| "https://\(.host)/cache/\($repo)/\($ver)/\($file)"
' output/info/servers-cache.jq | paste -sd, -
)
# Append GitHub webseed
if [ -n "$WEBSEEDS" ]; then
WEBSEEDS+=","
fi
WEBSEEDS+="https://github.com/armbian/os/releases/download/${{ needs.matrix_prep.outputs.version }}/${FILE}"
fi
echo "WEBSEEDS: $WEBSEEDS"
# Go to the archive directory that contains FILE
cd "$(dirname "$first_match")" || exit 1
mktorrent \
--comment="Armbian torrent for ${FILE}" \
--verbose \
${TRACKERS} \
--web-seed="${WEBSEEDS}" \
"${FILE}"
# drop .txt helper files
rm -f *.txt
- name: "Prepare release artifacts (exclude .asc, .sha, .torrent)"
run: |
# Start from a clean directory
rm -rf output/release
mkdir -p output/release
# Copy wanted artifacts from output/images, preserving folder structure
# e.g. output/images/BOARDNAME/archive/Armbian_*.img -> output/release/output/images/BOARDNAME/archive/...
find output/images -type f -name 'Armbian_*.*' \
! -name '*.asc' \
! -name '*.sha' \
! -name '*.torrent' \
-exec cp --parents {} output/release/ \;
# debug
tree output/images
echo "# debug"
tree output/release
- name: "Upload artefacts except .asc, .sha and .torrent"
timeout-minutes: 60
if: ${{ ( github.event.inputs.nightlybuild || 'yes' ) == 'yes' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
#if: ${{ env.RELEASE_REPOSITORY == 'os' || env.RELEASE_REPOSITORY == 'community' || env.RELEASE_REPOSITORY == 'distribution' }}
uses: ncipollo/release-action@v1
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ needs.matrix_prep.outputs.version }}"
artifacts: "output/release/output/images/*/*/Armbian_*.*"
omitBody: true
replacesArtifacts: true
omitName: true
makeLatest: false
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifactErrorsFailBuild: true
token: "${{ env.GH_TOKEN }}"
- name: "Upload to servers"
run: |
# temp workspace for downloaded jsons (auto-cleanup)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
curl -fL --retry 10 --retry-delay 5 --retry-all-errors \
-o "$tmpdir/cache.jq" https://github.armbian.com/servers/cache.jq \
-o "$tmpdir/upload.jq" https://github.armbian.com/servers/upload.jq
# debug
echo "=== servers-cache.jq ==="
jq . output/info/servers-cache.jq || cat output/info/servers-cache.jq
echo "=== servers-upload.jq ==="
jq . output/info/servers-upload.jq || cat output/info/servers-upload.jq
max_retries=3
sync_from_json() {
local json_file=$1
local mode=$2 # "cache" or "upload"
echo "== Processing ${json_file} (mode: ${mode}) =="
# Iterate over JSON array elements
while IFS= read -r server; do
# JSON structure (same for all files):
# {
# "host": "...",
# "upload_path": "...",
# "download_path_archive": "...",
# "download_path_images": "...",
# "download_path_debs": "...",
# "port": 22,
# "username": "mirror"
# }
SERVER_URL=$(jq -r '.host // empty' <<<"$server")
SERVER_PATH=$(jq -r '.upload_path // ""' <<<"$server")
SERVER_PORT=$(jq -r '.port // 22' <<<"$server")
SERVER_USERNAME=$(jq -r '.username // "mirror"' <<<"$server")
# skip empty host
[ -z "$SERVER_URL" ] && continue
echo "Processing: $SERVER_URL:$SERVER_PORT (upload_path: $SERVER_PATH)"
# Clean known_hosts entry (host:port form)
ssh-keygen -f "${HOME}/.ssh/known_hosts" -R "[${SERVER_URL}]:${SERVER_PORT}" 2>/dev/null || true
# Select rsync filters + remote subdir
if [ "$mode" = "cache" ]; then
# only.sha, .torrent .asc
RSYNC_FILTER=(
--include='*/'
--include='*.sha'
--include='*.asc'
--include='*.torrent'
--exclude='*'
)
REMOTE_SUBDIR="cache/artifacts/"
else
# everything
RSYNC_FILTER=(
--include='*/'
--include='*'
)
REMOTE_SUBDIR="incoming/${GITHUB_ACTOR}/"
fi
# Retry loop
for attempt in $(seq 1 "$max_retries"); do
echo "[$SERVER_URL] rsync attempt ${attempt}/${max_retries}..."
if rsync --progress \
-e "ssh -p ${SERVER_PORT} -o StrictHostKeyChecking=accept-new" \
-rvP \
"${RSYNC_FILTER[@]}" \
output/images/ \
"${SERVER_USERNAME}@${SERVER_URL}:${SERVER_PATH}/${REMOTE_SUBDIR}"
then
echo "[$SERVER_URL] rsync successful."
break
fi
if [ "$attempt" -eq "$max_retries" ]; then
echo "[$SERVER_URL] rsync FAILED after ${max_retries} attempts."
exit 1
fi
echo "[$SERVER_URL] rsync failed. Retrying in 10 seconds..."
sleep 10
done
done < <(jq -c '.[]' "$json_file")
}
nightlybuild="${{ github.event.inputs.nightlybuild }}"
nightlybuild_default="'yes'"
RELEASE_REPOSITORY="${{ env.RELEASE_REPOSITORY }}"
effective_nightlybuild="${nightlybuild:-$nightlybuild_default}"
# Upload to cache servers: only .sha/.asc/.torrent
sync_from_json "$tmpdir/cache.jq" cache
if [ "$effective_nightlybuild" = "no" ] && [ "$RELEASE_REPOSITORY" = "os" ]; then
# Upload to servers: everything
sync_from_json "$tmpdir/upload.jq" upload
fi
# cleaning self hosted runners
- name: "Runner clean ${{ needs.matrix_prep.outputs.version }}"
if: always()
uses: armbian/actions/runner-clean@main
# template file: 750.single_repo.yaml
# ------ publish packages to repository -------
publish-debs-to-repo:
  # NOTE(review): display name describes only the download half; the job also
  # uploads debs to the repository server — consider renaming.
  name: "Download artifacts from ORAS cache"
  #runs-on: ubuntu-latest
  runs-on: repository
  if: ${{ !failure() && !cancelled() && github.event.inputs.targetsFilterInclude == '' && inputs.ref == '' }} # eg: run if dependencies worked. See https://github.com/orgs/community/discussions/45058#discussioncomment-4817378
  needs: [ "matrix_prep", "all-artifacts-ready" ]
  steps:
    # Login to ghcr.io, for later uploading rootfs to ghcr.io
    - name: Docker Login to GitHub Container Registry
      uses: docker/login-action@v3
      with:
        registry: ghcr.io
        username: "${{ github.repository_owner }}" # GitHub username or org
        password: "${{ secrets.GITHUB_TOKEN }}" # GitHub actions builtin token. repo has to have pkg access.
    - name: Checkout build repo
      uses: actions/checkout@v6
      with:
        repository: ${{ env.BUILD_REPOSITORY }}
        ref: ${{ needs.matrix_prep.outputs.build-sha1 }}
        fetch-depth: 0
        path: build
        clean: false
    - name: "Checkout userpatches repo: ${{env.USERPATCHES_REPOSITORY}}#${{env.USERPATCHES_REF}}"
      uses: actions/checkout@v6
      if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
      with:
        repository: ${{ env.USERPATCHES_REPOSITORY }}
        ref: ${{ env.USERPATCHES_REF }}
        fetch-depth: 0
        clean: false
        path: os
    - name: "Put userpatches in place, and remove userpatches repo"
      if: ${{ ( env.USERPATCHES_REPOSITORY != '' ) && ( env.USERPATCHES_REF != '' ) }}
      run: |
        rsync -av os/${{env.USERPATCHES_DIR}}/. build/userpatches/
    # Download the artifacts (output/info) produced by the prepare-matrix job.
    - name: Download artifacts
      uses: actions/download-artifact@v7
      with:
        name: build-info-json
        path: build/output/info
    # List the artifacts we downloaded
    - name: List artifacts
      run: |
        ls -laht build/output/info
    - name: Run debs-to-repo download
      # FIX: id is required — the "Logs" step below reads
      # steps.download-debs.outputs.logs_url; without an id that expression
      # silently evaluated to an empty string.
      id: download-debs
      env:
        # BETA=yes routes packages to the beta repository for nightly builds.
        # FIX: the nightlybuild input uses 'yes'/'no' values (see the shell
        # defaults and the gates in the footer job of this workflow); the old
        # comparison against 'true' could never match, so BETA was always 'no'.
        # Empty input (cron trigger) defaults to nightly, matching the
        # "(... || 'yes') == 'yes'" pattern used elsewhere in this workflow.
        BETA: ${{ (github.event.inputs.nightlybuild || 'yes') == 'yes' && 'yes' || 'no' }}
      run: |
        set -euo pipefail
        cd build
        ./compile.sh debs-to-repo-download \
          REVISION="${{ needs.matrix_prep.outputs.version }}" \
          BETA="$BETA" \
          SHARE_LOG=yes \
          ${{ env.EXTRA_PARAMS_ALL_BUILDS }}
    - name: Install SSH key
      uses: shimataro/ssh-key-action@v2
      with:
        key: ${{ secrets.KEY_UPLOAD }}
        known_hosts: ${{ secrets.KNOWN_HOSTS_ARMBIAN_UPLOAD }}
        if_key_exists: replace
    # Push only .deb files under debs/ and debs-beta/ to the incoming area;
    # --remove-source-files empties the local tree as files land remotely.
    - name: "Upload artifacts"
      run: |
        set -euo pipefail
        echo "Sync debs and debs-beta"
        rsync -e "ssh -p ${{ secrets.HOST_UPLOAD_PORT }} -o StrictHostKeyChecking=accept-new" \
          -rvc \
          --delete \
          --remove-source-files \
          --prune-empty-dirs \
          --include='debs/' \
          --include='debs-beta/' \
          --include='debs/***/' \
          --include='debs-beta/***/' \
          --include='debs/*.deb' \
          --include='debs-beta/*.deb' \
          --include='debs/**/*.deb' \
          --include='debs-beta/**/*.deb' \
          --exclude='*' \
          --omit-dir-times \
          --no-perms \
          --no-owner \
          --no-group \
          build/output/ \
          "${{ secrets.HOST_UPLOAD_USER }}@${{ secrets.HOST_UPLOAD }}:storage/incoming/${{ env.TARGET_PATH }}"
    - name: "Run repository update action"
      # NOTE(review): fires when images are skipped (packages-only run) —
      # sibling dispatch steps in this workflow gate on == 'no'; confirm intent.
      if: ${{ (github.event.inputs.skipImages || 'yes') == 'yes' }}
      uses: peter-evans/repository-dispatch@v4
      with:
        token: ${{ secrets.DISPATCH }}
        repository: armbian/armbian.github.io
        event-type: "Repository update"
        client-payload: >
          {
            "target": "${{ env.TARGET_PATH }}",
            "download_external": true
          }
    # Surfaces the shared-log URL in the step name; ':' is a shell no-op.
    - name: "Logs debs-to-repo-download: ${{ steps.download-debs.outputs.logs_url }}"
      run: ':'
  outputs:
    # not related to matrix
    version: ${{ needs.matrix_prep.outputs.version }}
# template file: 950.single_footer.yaml
# ------ aggregate all artifact chunks into a single dependency -------
closing:
name: "Footer"
runs-on: ubuntu-latest
# Run only for nightly builds (empty input defaults to 'yes' on cron runs)
# and never when invoked via workflow_call with an explicit ref.
if: ${{ !failure() && !cancelled() && inputs.ref == '' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }}
needs: [ "matrix_prep", "all-artifacts-ready", "all-images-ready"]
steps:
# Download workflow artifacts — only when images were built (skipImages == 'no').
- name: "Download all workflow run artifacts"
if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' }}
uses: actions/download-artifact@v7
with:
name: assets-for-download-all
path: downloads
# Read version produced by the image jobs; '|| true' keeps the value empty
# (instead of failing) when the download step above was skipped.
- name: "Read version"
run: |
echo "version=$(cat downloads/version 2>/dev/null || true)" >> $GITHUB_ENV
# Delete the aggregated artifact; failOnError=false tolerates it not existing.
- uses: geekyeggo/delete-artifact@v5
with:
name: assets-for-download-all
failOnError: false
# Cleaning logs
- name: "Keep only 30 days of workflow logs"
uses: igorjs/gh-actions-clean-workflow@v7
with:
token: "${{ env.GH_TOKEN }}"
runs_older_than: 30 # optional
runs_to_keep: 0 # optional
# Switch pre-release to release and mark it latest — only when images were
# built; tag comes from env.version set by the "Read version" step above.
- uses: ncipollo/release-action@v1
if: ${{ (github.event.inputs.skipImages || 'yes') != 'yes' && (github.event.inputs.nightlybuild || 'yes') == 'yes' }}
with:
repo: "${{ env.RELEASE_REPOSITORY }}"
tag: "${{ env.version }}"
omitBody: true
omitName: true
allowUpdates: true
makeLatest: true
token: "${{ env.GH_TOKEN }}"
# Run repository mirroring to CDN (only when images were explicitly built)
- name: "Run repository mirroring to CDN"
if: ${{ (github.event.inputs.skipImages || 'yes') == 'no' }}
uses: peter-evans/repository-dispatch@v4
with:
token: ${{ secrets.DISPATCH }}
repository: armbian/armbian.github.io
event-type: "Infrastructure: Mirror artifacts"
client-payload: '{"pull_repository": "armbian/${{ env.RELEASE_REPOSITORY }}", "cdn_tag": "${{ env.RELEASE_REPOSITORY }}"}'
# Run webindex update action (only when images were explicitly built)
- name: "Run webindex update action"
if: ${{ (github.event.inputs.skipImages || 'yes') == 'no' }}
uses: peter-evans/repository-dispatch@v4
with:
token: ${{ secrets.DISPATCH }}
repository: armbian/armbian.github.io
event-type: "Data: Update download index"