falco/scripts/publish-rpm
Luca Guerra d69f329b54 fix(ci): update sync in deb and rpm scripts with acl
Signed-off-by: Luca Guerra <luca@guerra.sh>
2024-02-09 10:33:04 +01:00

#!/usr/bin/env bash
set -e
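
# Publish RPM packages to the Falco "rpm" or "rpm-dev" package repository on S3,
# refreshing the repository metadata, the GPG signatures and the CloudFront cache.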

usage() {
    echo "usage: $0 -f <package_x86_64.rpm> -f <package_aarch64.rpm> -r <rpm|rpm-dev> [-s]"
    exit 1
}
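
# Check that a program required by this script is installed
#
# $1: name of the command to look for.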
check_program() {
    if ! command -v $1 &> /dev/null
    then
        echo "$1 is required and could not be found"
        exit 1
    fi
}

# Updates the signature of an RPM package in the local repository
#
# $1: path of the repository.
# $2: path of the RPM file.
sign_rpm() {
    pushd $1 > /dev/null
    rm -f $(basename -- $2).asc
    gpg --detach-sign --digest-algo SHA256 --armor $(basename -- $2)
    popd > /dev/null
}

# Add a package to the local RPM repository
#
# $1: path of the repository.
# $2: path of the RPM file.
add_rpm() {
    cp -f $2 $1
    sign_rpm $1 $2
}

# Sign the local RPM repository
#
# $1: path of the repository.
sign_repo() {
    pushd $1 > /dev/null
    rm -f repodata/repomd.xml.asc
    gpg --detach-sign --digest-algo SHA256 --armor repodata/repomd.xml
    popd > /dev/null
}

# Update the local RPM repository
#
# $1: path of the repository.
update_repo() {
    pushd $1 > /dev/null
    createrepo --update --no-database .
    popd > /dev/null
}

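# Remove the oldest files with the given extension from a directory until its
# total size fits within the given limit; each removed file's .asc signature,
# if present, is removed as well
#
# $1: path of the directory.
# $2: maximum allowed size in GB.
# $3: extension of the files that may be removed.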
reduce_dir_size() {
    local DIR=$1
    local MAX_SIZE_GB=$2
    local EXTENSION=$3
    local MAX_SIZE=$((MAX_SIZE_GB*1024*1024)) # Convert GB to KB for du command

    # Check if directory exists
    if [[ ! -d "$DIR" ]]; then
        echo "The directory $DIR does not exist."
        return 1
    fi

    # Calculate current directory size in KB
    local CUR_SIZE=$(du -sk "$DIR" | cut -f1)

    # Check if we need to delete any files
    if ((CUR_SIZE <= MAX_SIZE)); then
        return 0
    fi

    # Calculate size to delete in bytes
    local DEL_SIZE=$(( (CUR_SIZE - MAX_SIZE) * 1024 ))
    local ACC_SIZE=0

    find "$DIR" -maxdepth 1 -type f -name "*.$EXTENSION" -printf "%T+ %s %p\n" | sort | while read -r date size file; do
        if ((ACC_SIZE + size < DEL_SIZE)); then
            rm "$file"
            ACC_SIZE=$((ACC_SIZE + size))

            local asc_file="$file.asc"
            if [[ -e "$asc_file" ]]; then
                local asc_size=$(stat --format="%s" "$asc_file")
                rm "$asc_file"
                ACC_SIZE=$((ACC_SIZE + asc_size))
            fi
        else
            break
        fi
    done
}

# parse options
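#   -f <file.rpm>  package to add to the repository (repeatable)
#   -r <repo>      target repository, either "rpm" or "rpm-dev"
#   -s             re-sign every package already present in the repository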
while getopts ":f::r::s" opt; do
    case "${opt}" in
        f )
            files+=("${OPTARG}")
            ;;
        r )
            repo="${OPTARG}"
            [[ "${repo}" == "rpm" || "${repo}" == "rpm-dev" ]] || usage
            ;;
        s )
            sign_all="true"
            ;;
        : )
            echo "invalid option: ${OPTARG} requires an argument" 1>&2
            exit 1
            ;;
        \?)
            echo "invalid option: ${OPTARG}" 1>&2
            exit 1
            ;;
    esac
done
shift $((OPTIND-1))

if ([ ${#files[@]} -eq 0 ] && [ -z "${sign_all}" ]) || [ -z "${repo}" ]; then
    usage
fi

# check prerequisites
check_program createrepo
check_program gpg
check_program aws
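
# AWS credentials and the AWS_CLOUDFRONT_DIST_ID environment variable are
# expected to be provided by the environment (e.g. the CI job running this script)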
# settings
s3_bucket_repo="s3://falco-distribution/packages/${repo}"
cloudfront_path="/packages/${repo}"
tmp_repo_path=/tmp/falco-$repo
# prepare repository local copy
echo "Fetching ${s3_bucket_repo}..."
mkdir -p ${tmp_repo_path}
aws s3 cp ${s3_bucket_repo} ${tmp_repo_path} --recursive
# update signatures for all existing packages
if [ "${sign_all}" ]; then
for file in ${tmp_repo_path}/*; do
if [ -f "$file" ]; then # exclude directories, symlinks, etc...
if [[ ! $file == *.asc ]]; then # exclude signature files
package=$(basename -- ${file})
echo "Signing ${package}..."
sign_rpm ${tmp_repo_path} ${file}
echo "Syncing ${package}.asc to ${s3_bucket_repo}..."
aws s3 cp ${tmp_repo_path}/${package}.asc ${s3_bucket_repo}/${package}.asc --acl public-read
fi
fi
done
aws cloudfront create-invalidation --distribution-id ${AWS_CLOUDFRONT_DIST_ID} --paths ${cloudfront_path}/*.asc
sign_repo ${tmp_repo_path}
fi
# remove old dev packages if necessary
if [[ ${repo} == "rpm-dev" ]]; then
    reduce_dir_size ${tmp_repo_path} 10 rpm
fi
# update the repo by adding new packages
if ! [ ${#files[@]} -eq 0 ]; then
    for file in "${files[@]}"; do
        echo "Adding ${file}..."
        add_rpm ${tmp_repo_path} ${file}
    done

    update_repo ${tmp_repo_path}
    sign_repo ${tmp_repo_path}

    # publish
    for file in "${files[@]}"; do
        package=$(basename -- ${file})
        echo "Publishing ${package} to ${s3_bucket_repo}..."
        aws s3 cp ${tmp_repo_path}/${package} ${s3_bucket_repo}/${package} --acl public-read
        aws s3 cp ${tmp_repo_path}/${package}.asc ${s3_bucket_repo}/${package}.asc --acl public-read
        aws cloudfront create-invalidation --distribution-id ${AWS_CLOUDFRONT_DIST_ID} --paths ${cloudfront_path}/${package}
        aws cloudfront create-invalidation --distribution-id ${AWS_CLOUDFRONT_DIST_ID} --paths ${cloudfront_path}/${package}.asc
    done
fi

# sync repodata
aws s3 sync ${tmp_repo_path}/repodata ${s3_bucket_repo}/repodata --delete --acl public-read
aws cloudfront create-invalidation --distribution-id ${AWS_CLOUDFRONT_DIST_ID} --paths ${cloudfront_path}/repodata/*
# delete packages that have been pruned
# the --dryrun option is there so we can check that we're doing the right thing; it can be removed after testing
if [[ ${repo} == "rpm-dev" ]]; then
    aws s3 sync ${tmp_repo_path} ${s3_bucket_repo} --dryrun --delete --acl public-read
fi