name: Packages

on:
  push:
    branches:
      - master
    paths:
      - 'packages/**'
      - 'root-packages/**'
      - 'x11-packages/**'
  pull_request:
    paths:
      - 'packages/**'
      - 'root-packages/**'
      - 'x11-packages/**'
  workflow_dispatch:
    inputs:
      packages:
        description: "Space-separated names of packages selected for rebuilding"
        required: true
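        # Example for a manual run (hypothetical names): packages = "bash coreutils".
        # Each name must match a package directory under one of the repos listed in repo.json.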

permissions: {} # none

jobs:
  build:
    permissions:
      contents: read # actions/upload-artifact doesn't need contents: write
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        target_arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1000
      - name: Set process id limit for 32-bit builds depending on aosp-libs
        run: echo 65535 | sudo tee /proc/sys/kernel/pid_max
      - name: Gather build summary
        id: build-info
        env:
          MANUAL_INPUT_PACKAGES: ${{ github.event.inputs.packages }}
        run: |
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
            # We intentionally do not use .commits[0].id and .commits[-1].id, since GitHub seems to
            # include only 20 commits in the payload for GitHub Actions runs instead of the documented
            # limit of 2048. Perhaps 2048 applies only to webhooks, and the 20-commit limit for
            # GitHub Actions is simply not documented:
            #
            # https://docs.github.com/en/webhooks/webhook-events-and-payloads#push
            OLD_COMMIT=$(jq --raw-output .before "$GITHUB_EVENT_PATH")
            HEAD_COMMIT=$(jq --raw-output .after "$GITHUB_EVENT_PATH")
            if [ "$BASE_COMMIT" = "null" ]; then
              if ! git log "$OLD_COMMIT" 2> /dev/null; then
                if [ "$(git branch --show-current)" = "master" ]; then
                  echo "Force push detected on master branch. Unable to proceed."
                  exit 1
                else
                  echo "::warning:: Force push detected on non-master branch. Trying to proceed at any cost, but this may not work as expected."
                  OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
                  echo "::warning:: OLD_COMMIT is set to $OLD_COMMIT, which may not be correct if you have pushed more than 20 commits."
                fi
              fi
              echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
              CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
            else
              # Pull requests.
              echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
              CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
            fi
          fi
          mkdir -p ./artifacts ./pkgs
          touch ./pkgs/.placeholder

          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            # GitHub sometimes adds merge commits at the end.
            # To prevent user confusion, filter them out with --no-merges.
            # Process the tag '%ci:no-build' that may be added as a line in the commit message.
            # It forces CI to cancel the current build with status 'passed'.
            if grep -qiP '^\s*%ci:no-build\s*$' <(git log --format="%B" -n 1 --no-merges "HEAD"); then
              tar cf artifacts/pkgs-${{ matrix.target_arch }}-${{ github.sha }}.tar pkgs
              echo "docker-build=true" >> $GITHUB_OUTPUT
              echo "[!] Force exiting as tag '%ci:no-build' was applied to HEAD commit message."
              exit 0
            fi

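            # repo.json is assumed to map each repo directory (packages, root-packages, x11-packages)
            # to an object whose .name field gives the repo name; the top-level pkg_format key is
            # dropped before iterating over the repo paths.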
            for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
              repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
              # Parse the changed files and identify new and deleted packages.
              # Create lists of those packages that will be passed to the upload job for
              # further processing.
              while read -r file; do
                if ! [[ $file == ${repo_path}/* ]]; then
                  # This file does not belong to a package, so ignore it.
                  continue
                fi
                if [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/([.a-z0-9+-]*).subpackage.sh$ ]]; then
                  # A subpackage was modified; check whether it was deleted or just updated.
                  pkg=${BASH_REMATCH[1]}
                  subpkg=${BASH_REMATCH[2]}
                  if [ ! -f "${repo_path}/${pkg}/${subpkg}.subpackage.sh" ]; then
                    echo "$subpkg" >> ./deleted_${repo}_packages.txt
                  fi
                elif [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/.*$ ]]; then
                  # A package was modified; check whether it was deleted or just updated.
                  pkg=${BASH_REMATCH[1]}
                  if [ -d "${repo_path}/${pkg}" ]; then
                    echo "$pkg" >> ./built_${repo}_packages.txt
                    # If there are subpackages, we want to create a list of those as well.
                    for file in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                      echo "$(basename "${file%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                    done
                  else
                    echo "$pkg" >> ./deleted_${repo}_packages.txt
                  fi
                fi
              done <<< "${CHANGED_FILES}"
            done
          else
            # Ensure MANUAL_INPUT_PACKAGES is newline-free, and put it
            # into an array.
            read -a PACKAGES <<< "${MANUAL_INPUT_PACKAGES//$'\n'/ }"
            for pkg in "${PACKAGES[@]}"; do
              repo_paths=$(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json)
              found=false
              for repo_path in $repo_paths; do
                repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
                if [ -d "${repo_path}/${pkg}" ]; then
                  found=true
                  echo "$pkg" >> ./built_${repo}_packages.txt
                  for subpkg in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                    echo "$(basename "${subpkg%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                  done
                fi
              done
              if [ "$found" != true ]; then
                echo "Package '${pkg}' was not found in any of the repos."
                exit 1
              fi
            done
          fi

          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            # Fix so that lists do not contain duplicates
            if [ -f ./built_${repo}_packages.txt ]; then
              sort ./built_${repo}_packages.txt | uniq > ./built_${repo}_packages.txt.tmp
              mv ./built_${repo}_packages.txt.tmp ./built_${repo}_packages.txt
            fi
            if [ -f ./built_${repo}_subpackages.txt ]; then
              sort ./built_${repo}_subpackages.txt | uniq > ./built_${repo}_subpackages.txt.tmp
              mv ./built_${repo}_subpackages.txt.tmp ./built_${repo}_subpackages.txt
            fi
            if [ -f ./deleted_${repo}_packages.txt ]; then
              sort ./deleted_${repo}_packages.txt | uniq > ./deleted_${repo}_packages.txt.tmp
              mv ./deleted_${repo}_packages.txt.tmp ./deleted_${repo}_packages.txt
            fi
          done

          declare -a packages=()
          for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f "./built_${repo}_packages.txt" ]; then
              packages+=($(cat "./built_${repo}_packages.txt"))
            fi
          done

          echo "packages: ${packages[*]}"

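          # Packages listed in scripts/big-pkgs.list are built directly on the runner instead of
          # inside Docker (presumably because they need more disk space than the containerized
          # build environment provides); the decision is exported as the 'docker-build' step output.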
          docker='true'
          if [[ "${#packages[@]}" -gt 0 ]]; then
            for pkg in "${packages[@]}"; do
              if grep -qFx "$pkg" ./scripts/big-pkgs.list; then
                docker='false'
                break
              fi
            done
          fi

          echo "docker-build=$docker" >> $GITHUB_OUTPUT
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            # Build a local Docker image if the setup scripts were changed.
            # Useful for pull requests submitting changes for both the build environment and packages.
            if grep -qP '^scripts/(Dockerfile|properties\.sh|setup-android-sdk\.sh|setup-ubuntu\.sh)$' <<< "$CHANGED_FILES"; then
              echo "Detected changes for environment setup scripts. Building custom Docker image now."
              if [ "$docker" == 'false' ]; then
                echo "Skipping due to building large packages."
                exit 0
              fi
              cd ./scripts
              docker build -t ghcr.io/termux/package-builder:latest .
              cd ..
            fi
          fi

      - name: Lint packages
        run: |
          declare -a package_recipes=()
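          # Turn every queued package name into the path of its build.sh recipe so that
          # lint-packages.sh can be run once over all of them.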
          for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f "./built_${repo}_packages.txt" ]; then
              package_recipes+=($(cat "./built_${repo}_packages.txt" | repo_path="${repo_path}" awk '{print ENVIRON["repo_path"]"/"$1"/build.sh"}'))
            fi
          done

          if [[ "${#package_recipes[@]}" -gt 0 ]]; then
            ./scripts/lint-packages.sh "${package_recipes[@]}"
          fi

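      # Host (non-Docker) builds of large packages need the build environment set up on the
      # runner itself and extra disk space reclaimed before building.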
      - name: Free additional disk space (if needed)
        env:
          DOCKER_BUILD: ${{ steps.build-info.outputs.docker-build }}
        run: |
          if [ "$DOCKER_BUILD" == 'false' ]; then
            ./scripts/setup-ubuntu.sh
            sudo apt install -y ninja-build
            ./scripts/free-space.sh
          fi

      - name: Build packages
        env:
          DOCKER_BUILD: ${{ steps.build-info.outputs.docker-build }}
        run: |
          declare -a packages=()
          for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f "./built_${repo}_packages.txt" ]; then
              packages+=($(cat "./built_${repo}_packages.txt"))
            fi
          done

          echo "packages: ${packages[*]}"

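          # Build on the host when a big package is queued (DOCKER_BUILD=false); otherwise build
          # inside the package-builder Docker image via run-docker.sh. Packages are produced in
          # pacman format for the selected target architecture.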
if [[ "${#packages[@]}" -gt 0 ]]; then
|
|
if [ "$DOCKER_BUILD" == 'false' ]; then
|
|
NDK="$ANDROID_NDK" ANDROID_HOME="$ANDROID_SDK_ROOT" ./build-package.sh --format pacman -I -C -a "${{ matrix.target_arch }}" "${packages[@]}"
|
|
else
|
|
./scripts/run-docker.sh ./build-package.sh --format pacman -I -C -a "${{ matrix.target_arch }}" "${packages[@]}"
|
|
fi
|
|
fi
|
|
|
|
      - name: Generate build artifacts
        if: always()
        run: |
          test -d termux-packages/output && mv termux-packages/output/* ./output/

          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            # Put the package lists into the directory with the *.pkg.* files so that they get
            # transferred to the upload job.
            test -f ./built_${repo}_packages.txt && mv ./built_${repo}_packages.txt ./pkgs/
            test -f ./built_${repo}_subpackages.txt && cat ./built_${repo}_subpackages.txt >> ./pkgs/built_${repo}_packages.txt \
              && rm ./built_${repo}_subpackages.txt
            test -f ./deleted_${repo}_packages.txt && mv ./deleted_${repo}_packages.txt ./pkgs/

            # Move only the packages listed in built_packages into the pkgs/ folder before
            # creating an archive.
            if [ -f "./pkgs/built_${repo}_packages.txt" ] && [ -d "output" ]; then
              while read -r pkg; do
                # Match both ${pkg}_*.pkg.* and ${pkg}-static_*.pkg.*
                find output \( -name "${pkg}_*.pkg.*" -o -name "${pkg}-static_*.pkg.*" \) -type f -print0 | xargs -0r mv -t pkgs/
              done < <(cat "./pkgs/built_${repo}_packages.txt")
            fi
          done

          # Files whose names contain certain symbols (e.g. ":") cause failures in actions/upload-artifact,
          # so the *.pkg.* files are archived in a tarball before uploading.
          tar cf artifacts/pkgs-${{ matrix.target_arch }}-${{ github.sha }}.tar pkgs
      - name: Checksums for built *.pkg.* files
        if: always()
        run: |
          find pkgs -type f -name "*.pkg.*" -exec sha256sum "{}" \; | sort -k2
      - name: Store *.pkg.* files
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: pkgs-${{ matrix.target_arch }}-${{ github.sha }}
          path: ./artifacts

  upload:
    permissions:
      contents: read
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v4
      - name: Get *.pkg.* files
        uses: actions/download-artifact@v4.1.7
        with:
          path: ./
      - name: Install aws-cli
        run: |
          git clone https://github.com/termux-pacman/aws-cli-action.git
          ./aws-cli-action/setup.sh '${{ secrets.AWS_ACCESS_KEY_ID }}' '${{ secrets.AWS_ACCESS_KEY }}' '${{ secrets.AWS_REGION }}'
      - name: Import GPG key
        run: |
          echo '${{ secrets.SF_GPG_BOT }}' > key.gpg
          gpg --pinentry-mode=loopback --passphrase '${{ secrets.PW_GPG_BOT }}' --import key.gpg > /dev/null
          rm key.gpg
      - name: Uploading packages to aws
        run: |
          source ./aws-cli-action/func.sh
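          # sfuf: sign the given file with the bot's GPG key, upload the file and its detached
          # .sig to the repo's path in the S3 bucket for this architecture, then remove the
          # local signature.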
          sfuf() {
            gpg --batch --pinentry-mode=loopback --passphrase '${{ secrets.PW_GPG_BOT }}' --detach-sign --use-agent -u '${{ secrets.KEY_GPG_BOT }}' --no-armor "$1"
            for format_file in "" ".sig"; do
              aws s3 cp "${1}${format_file}" s3://'${{ secrets.SFPU }}'/${repo#*-}/${{ matrix.arch }}/"${1##*/}${format_file}"
            done
            rm "$1.sig"
          }
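          # Unpack every per-arch artifact; splitting the tarball name on '-' yields the target
          # architecture as the second element (pkgs-<arch>-<sha>.tar).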
          for archive in pkgs-*-${{ github.sha }}/pkgs-*-${{ github.sha }}.tar; do
            archive_sp=(${archive//-/ })
            tar xf "$archive"
            mv pkgs pkgs-${archive_sp[1]}
          done
          pkgsarch='pkgs-${{ matrix.arch }}'
          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            dp_file="deleted_${repo}_packages.txt"
            if [[ -f ${pkgsarch}/${dp_file} ]]; then
              path_dp_file=${pkgsarch}/${{ github.sha }}_${dp_file}
              mv ${pkgsarch}/${dp_file} ${path_dp_file}
              sfuf "${path_dp_file}"
            fi
            if [ ! -f ${pkgsarch}/built_${repo}_packages.txt ]; then
              continue
            fi
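            # pkgslist holds the expected package names plus their -static counterparts; '+' is
            # replaced with '0' (presumably mirroring how '+' is encoded in the built file names
            # that get_name parses).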
            pkgslist=$(cat ${pkgsarch}/built_${repo}_packages.txt | sed 's/+/0/g' | awk '{printf $1 "\n" $1 "-static\n"}')
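            # Upload this arch's package files, plus any architecture-independent (*-any.pkg.*)
            # files produced by the other arch builds, skipping ones that already exist in this
            # arch's directory. get_name (from aws-cli-action's func.sh) is assumed to print the
            # package name encoded in a *.pkg.* file name.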
            for pkgfile in ${pkgsarch}/*.pkg.* $(ls $(echo -e "aarch64\narm\nx86_64\ni686" | sed '/${{ matrix.arch }}/d' | awk '{printf "pkgs-" $1 "/*-any.pkg.* "}') 2> /dev/null); do
              dir_pkg_sp=(${pkgfile//// })
              if [ "${dir_pkg_sp[0]}" != "${pkgsarch}" ] && [ -f "${pkgsarch}/${dir_pkg_sp[-1]}" ]; then
                continue
              fi
              if grep -q "^$(get_name ${dir_pkg_sp[-1]})$" <<< "${pkgslist}"; then
                sfuf "$pkgfile"
              fi
            done
          done