updating and improving packages (#296) #1170
name: Build gpkg

on:
  push:
    branches:
      - main
    paths:
      - 'gpkg/**'
  pull_request:
    paths:
      - 'gpkg/**'
  workflow_dispatch:
    inputs:
      packages:
        description: "Space-separated names of the packages to rebuild"
        required: true
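        # Example manual run via GitHub CLI (workflow file name assumed; adjust to this file's actual path):
        #   gh workflow run build-gpkg.yml -f packages="bash coreutils"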
permissions: {} # none

jobs:
  build:
    permissions:
      contents: read # actions/upload-artifact doesn't need contents: write
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        target_arch: [aarch64, arm, i686, x86_64]
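      # Let the remaining architectures keep building if one of them fails.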
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 1000
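          # Fetch enough history for the build-summary step to diff pushed commit ranges.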
      - name: Set up QEMU
        run: |
          sudo apt update
          yes | sudo apt install qemu-user qemu-user-static binfmt-support
          sudo update-binfmts --enable qemu-aarch64
          sudo update-binfmts --enable qemu-arm
          docker run --rm --privileged tonistiigi/binfmt --install all
      - name: Get files from termux-packages repository
        run: ./get-build-package.sh
      - name: Gather build summary
        run: |
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            BASE_COMMIT=$(jq --raw-output .pull_request.base.sha "$GITHUB_EVENT_PATH")
            OLD_COMMIT=$(jq --raw-output .commits[0].id "$GITHUB_EVENT_PATH")
            HEAD_COMMIT=$(jq --raw-output .commits[-1].id "$GITHUB_EVENT_PATH")
            if [ "$BASE_COMMIT" = "null" ]; then
              if [ "$OLD_COMMIT" = "$HEAD_COMMIT" ]; then
                # Single-commit push.
                echo "Processing commit: ${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${HEAD_COMMIT}")
              else
                # Multi-commit push.
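                # Step back to the parent of the first pushed commit so that commit's own changes are included in the diff.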
                OLD_COMMIT="${OLD_COMMIT}~1"
                echo "Processing commit range: ${OLD_COMMIT}..${HEAD_COMMIT}"
                CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${OLD_COMMIT}" "${HEAD_COMMIT}")
              fi
            else
              # Pull requests.
              echo "Processing pull request #$(jq --raw-output .pull_request.number "$GITHUB_EVENT_PATH"): ${BASE_COMMIT}..HEAD"
              CHANGED_FILES=$(git diff-tree --no-commit-id --name-only -r "${BASE_COMMIT}" "HEAD")
            fi
          fi
          mkdir -p ./artifacts ./pkgs
          touch ./pkgs/.placeholder
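          # The placeholder keeps ./pkgs non-empty so the tarball created later always has content.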
if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then | |
# Process tag '%ci:no-build' that may be added as line to commit message. | |
# Forces CI to cancel current build with status 'passed' | |
if grep -qiP '^\s*%ci:no-build\s*$' <(git log --format="%B" -n 1 "HEAD"); then | |
tar cf artifacts/pkgs-${{ matrix.target_arch }}.tar pkgs | |
echo "[!] Force exiting as tag '%ci:no-build' was applied to HEAD commit message." | |
exit 0 | |
fi | |
# Build local Docker image if setup scripts were changed. | |
# Useful for pull requests submitting changes for both build environment and packages. | |
if grep -qP '^scripts/(Dockerfile|properties\.sh|setup-android-sdk\.sh|setup-cgct\.sh|setup-ubuntu\.sh)$' <<< "$CHANGED_FILES"; then | |
echo "Detected changes for environment setup scripts. Building custom Docker image now." | |
docker build -t ghcr.io/termux/package-builder-cgct:latest . | |
fi | |
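            # repo.json maps repository paths to metadata; every top-level key except
            # "pkg_format" is treated as a repository path. Illustrative shape:
            #   { "gpkg": { "name": "gpkg" }, "pkg_format": "pacman" }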
            for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
              repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
              # Parse the changed files and identify new and deleted packages.
              # Create lists of those packages that will be passed to the upload
              # job for further processing.
              while read -r file; do
                if ! [[ $file == ${repo_path}/* ]]; then
                  # This file does not belong to a package, so ignore it.
                  continue
                fi
                if [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/([.a-z0-9+-]*).subpackage.sh$ ]]; then
                  # A subpackage was modified; check whether it was deleted or just updated.
                  pkg=${BASH_REMATCH[1]}
                  subpkg=${BASH_REMATCH[2]}
                  if [ ! -f "${repo_path}/${pkg}/${subpkg}.subpackage.sh" ]; then
                    echo "$subpkg" >> ./deleted_${repo}_packages.txt
                  fi
                elif [[ $file =~ ^${repo_path}/([.a-z0-9+-]*)/.*$ ]]; then
                  # A package was modified; check whether it was deleted or updated.
                  pkg=${BASH_REMATCH[1]}
                  if [ -d "${repo_path}/${pkg}" ]; then
                    echo "$pkg" >> ./built_${repo}_packages.txt
                    # If there are subpackages, create a list of those as well.
                    for subpkg_file in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                      echo "$(basename "${subpkg_file%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                    done
                  else
                    echo "$pkg" >> ./deleted_${repo}_packages.txt
                  fi
                fi
              done <<< "${CHANGED_FILES}"
            done
          else
            for pkg in ${{ github.event.inputs.packages }}; do
              repo_paths=$(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json)
              found=false
              for repo_path in $repo_paths; do
                repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
                if [ -d "${repo_path}/${pkg}" ]; then
                  found=true
                  echo "$pkg" >> ./built_${repo}_packages.txt
                  for subpkg in $(find "${repo_path}/${pkg}/" -maxdepth 1 -type f -name \*.subpackage.sh | sort); do
                    echo "$(basename "${subpkg%%.subpackage.sh}")" >> ./built_${repo}_subpackages.txt
                  done
                fi
              done
              if [ "$found" != true ]; then
                echo "Package '${pkg}' was not found in any of the repos."
                exit 1
              fi
            done
          fi
          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            # Deduplicate the package lists.
            if [ -f ./built_${repo}_packages.txt ]; then
              sort ./built_${repo}_packages.txt | uniq > ./built_${repo}_packages.txt.tmp
              mv ./built_${repo}_packages.txt.tmp ./built_${repo}_packages.txt
            fi
            if [ -f ./built_${repo}_subpackages.txt ]; then
              sort ./built_${repo}_subpackages.txt | uniq > ./built_${repo}_subpackages.txt.tmp
              mv ./built_${repo}_subpackages.txt.tmp ./built_${repo}_subpackages.txt
            fi
            if [ -f ./deleted_${repo}_packages.txt ]; then
              sort ./deleted_${repo}_packages.txt | uniq > ./deleted_${repo}_packages.txt.tmp
              mv ./deleted_${repo}_packages.txt.tmp ./deleted_${repo}_packages.txt
            fi
          done
      - name: Lint packages
        run: |
          package_recipes=""
          for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f ./built_${repo}_packages.txt ]; then
              package_recipes="$package_recipes $(cat ./built_${repo}_packages.txt | repo_path=${repo_path} awk '{print ENVIRON["repo_path"]"/"$1"/build.sh"}')"
            fi
          done
          if [ -n "$package_recipes" ]; then
            ./scripts/lint-packages.sh $package_recipes
          fi
      - name: Build packages
        run: |
          packages=""
          for repo_path in $(jq --raw-output 'del(.pkg_format) | keys | .[]' repo.json); do
            repo=$(jq --raw-output '.["'${repo_path}'"].name' repo.json)
            if [ -f ./built_${repo}_packages.txt ]; then
              packages="$packages $(cat ./built_${repo}_packages.txt)"
            fi
          done
          if [ -n "$packages" ]; then
            if grep -qP "(^|\s)${packages// /($|\s)|(^|\s)}($|\s)" ./big-pkgs.list; then
              ./scripts/setup-ubuntu.sh
              ./scripts/setup-cgct.sh
              sudo apt install -yq ninja-build
              sudo apt purge -yq $(dpkg -l | grep '^ii' | awk '{ print $2 }' | grep -P '(aspnetcore|cabal-|dotnet-|ghc-|libmono|php)') \
                firefox google-chrome-stable microsoft-edge-stable mono-runtime-common monodoc-manual ruby
              sudo apt autoremove -yq
              sudo rm -fr /opt/hostedtoolcache /usr/share/dotnet /usr/share/swift
              NDK=$ANDROID_NDK_LATEST_HOME ANDROID_HOME=$ANDROID_SDK_ROOT ./build-package.sh -I -a ${{ matrix.target_arch }} --format pacman --library glibc $packages
            else
              TERMUX_BUILDER_IMAGE_NAME="ghcr.io/termux/package-builder-cgct" ./scripts/run-docker.sh ./build-package.sh -I -a ${{ matrix.target_arch }} --format pacman --library glibc $packages
            fi
          fi
      - name: Generate build artifacts
        if: always()
        run: |
          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            # Put the package lists into the directory with the *.pkg.* files so
            # they get transferred to the upload job.
            test -f ./built_${repo}_packages.txt && mv ./built_${repo}_packages.txt ./pkgs/
            test -f ./built_${repo}_subpackages.txt && cat ./built_${repo}_subpackages.txt >> ./pkgs/built_${repo}_packages.txt \
              && rm ./built_${repo}_subpackages.txt
            test -f ./deleted_${repo}_packages.txt && mv ./deleted_${repo}_packages.txt ./pkgs/
            # Move only the packages listed in built_packages into the pkgs/ folder
            # before creating an archive.
            while read -r pkg; do
              # Match both ${pkg}_*.pkg.* and ${pkg}-static_*.pkg.*.
              find output \( -name "${pkg}_*.pkg.*" -o -name "${pkg}-static_*.pkg.*" \) -type f -print0 | xargs -0r mv -t pkgs/
            done < <(cat ./pkgs/built_${repo}_packages.txt)
          done
          # File names containing certain symbols (e.g. ":") cause failures in actions/upload-artifact,
          # so archive the *.pkg.* files in a tarball to avoid issues with uploading.
          tar cf artifacts/pkgs-${{ matrix.target_arch }}-${{ github.sha }}.tar pkgs
      - name: Checksums for built *.pkg.* files
        if: always()
        run: |
          find pkgs -type f -name "*.pkg.*" -exec sha256sum "{}" \; | sort -k2
      - name: Store *.pkg.* files
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: pkgs-${{ matrix.target_arch }}-${{ github.sha }}
          path: ./artifacts
  upload:
    permissions:
      contents: read
    if: github.event_name != 'pull_request'
    needs: build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        arch: [aarch64, arm, i686, x86_64]
      fail-fast: false
    steps:
      - name: Clone repository
        uses: actions/checkout@v4
      - name: Get *.pkg.* files
        uses: actions/download-artifact@v4
        with:
          path: ./
      - name: Install aws-cli
        run: |
          git clone https://github.com/termux-pacman/aws-cli-action.git
          ./aws-cli-action/setup.sh '${{ secrets.AWS_ACCESS_KEY_ID }}' '${{ secrets.AWS_ACCESS_KEY }}' '${{ secrets.AWS_REGION }}'
      - name: Import GPG key
        run: |
          echo '${{ secrets.SF_GPG_BOT }}' > key.gpg
          gpg --pinentry-mode=loopback --passphrase '${{ secrets.PW_GPG_BOT }}' --import key.gpg > /dev/null
          rm key.gpg
      - name: Upload packages to AWS
        run: |
          source ./aws-cli-action/func.sh
          package__is_package_name_have_glibc_prefix() {
            for __pkgname_part in ${1//-/ }; do
              if [ "${__pkgname_part}" = "glibc" ]; then
                return 0
              fi
            done
            return 1
          }
          package__add_prefix_glibc_to_package_name() {
            if [[ "${1}" = *"-static" ]]; then
              echo "${1/-static/-glibc-static}"
            else
              echo "${1}-glibc"
            fi
          }
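          # Examples: "bash" -> "bash-glibc", "bash-static" -> "bash-glibc-static";
          # names that already contain a "glibc" component are left unchanged.
          # sfuf signs a file with a detached GPG signature and uploads both the file
          # and its .sig to the S3 bucket.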
          sfuf() {
            gpg --batch --pinentry-mode=loopback --passphrase '${{ secrets.PW_GPG_BOT }}' --detach-sign --use-agent -u '${{ secrets.KEY_GPG_BOT }}' --no-armor "$1"
            for format_file in "" ".sig"; do
              aws s3 cp "${1}${format_file}" s3://'${{ secrets.SFPU }}'/${repo}/${{ matrix.arch }}/"${1##*/}${format_file}"
            done
            rm "$1.sig"
          }
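          # actions/download-artifact@v4 puts each artifact into a directory named after
          # the artifact, hence the "<name>/<name>.tar" path below.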
          archive="pkgs-${{ matrix.arch }}-${{ github.sha }}/pkgs-${{ matrix.arch }}-${{ github.sha }}.tar"
          tar xf "$archive"
          for repo in $(jq --raw-output 'del(.pkg_format) | .[].name' repo.json); do
            dp_file="deleted_${repo}_packages.txt"
            if [[ -f pkgs/$dp_file ]]; then
              path_dp_file=pkgs/${{ github.sha }}_${dp_file}
              {
                for d_pkg in $(cat pkgs/${dp_file}); do
                  if ! package__is_package_name_have_glibc_prefix "${d_pkg}"; then
                    package__add_prefix_glibc_to_package_name ${d_pkg}
                  else
                    echo ${d_pkg}
                  fi
                done
              } > ${path_dp_file}
              sfuf "${path_dp_file}"
            fi
            for name_pkg in $(cat ./pkgs/built_${repo}_packages.txt); do
              if ! package__is_package_name_have_glibc_prefix "${name_pkg}"; then
                name_pkg=$(package__add_prefix_glibc_to_package_name $name_pkg)
              fi
              for list_pkg in pkgs/*.pkg.*; do
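                # Split the path on "/" so that ${dir_pkg_sp[-1]} is the bare file name.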
                dir_pkg_sp=(${list_pkg//// })
                for static in "" "-static"; do
                  if [[ $(echo "${name_pkg}${static}" | sed 's/+/0/g') = $(get_name ${dir_pkg_sp[-1]}) ]]; then
                    sfuf "$list_pkg"
                  fi
                done
              done
            done
          done