#!/bin/sh # contains functions used by more than one script # shellcheck disable=SC2039,SC2119,SC2120 if [ -z "${base_dir}" ]; then # just to make shellcheck happy . '../lib/load-configuration' fi # find_pkgbuilds package repository git_repository git_revision mod_git_revision # find the PKGBUILD and modification of $package from $repository # sets $PKGBUILD and $PKGBUILD_mod find_pkgbuilds() { local package="$1" local repository="$2" local git_repository="$3" local git_revision="$4" local mod_git_revision="$5" local repo_path eval 'repo_path="${repo_paths__'"${git_repository}"'}"' if [ ! "$(git -C "${repo_path}" cat-file -t "${git_revision}" 2> /dev/null)" = "commit" ]; then >&2 printf 'Repository %s does not contain commit %s, but it should.\n' \ "${git_repository}" \ "${git_revision}" return 2 fi if [ ! "$(git -C "${repo_paths__archlinux32}" cat-file -t "${mod_git_revision}" 2> /dev/null)" = "commit" ]; then >&2 printf 'Repository archlinux32 does not contain commit %s, but it should.\n' \ "${mod_git_revision}" return 2 fi PKGBUILD=$( git -C "${repo_path}" archive "${git_revision}" -- "${package}/repos/${repository}-*/PKGBUILD" 2> /dev/null | \ tar -t 2> /dev/null | \ grep -- '/PKGBUILD$' | \ grep -v -- '-i686/PKGBUILD$' | \ grep -v -- '[-/]\(staging\|testing\|unstable\)-[^/]\+/PKGBUILD$' | \ sort | \ tail -n1 ) PKGBUILD_mod=$( git -C "${repo_paths__archlinux32}" archive "${mod_git_revision}" -- "${repository}/${package}/PKGBUILD" 2> /dev/null | \ tar -t "${repository}/${package}/PKGBUILD" 2> /dev/null ) || true if [ -z "${PKGBUILD}" ] && \ [ -z "${PKGBUILD_mod}" ]; then >&2 printf 'Neither PKGBUILD nor modification of PKGBUILD found for package "%s" from %s (%s), revisions %s and %s.\n' \ "${package}" \ "${repository}" \ "${git_repository}" \ "${git_revision}" \ "${mod_git_revision}" return 1 fi } # find_repository_with_commit commit # find the repository which has $commit find_repository_with_commit() { local repository for repository in ${repo_names}; do # shellcheck disable=SC2016 if [ "$(eval git -C "$(printf '"${repo_paths__%s}"' "${repository}")" cat-file -t '"$1"' 2> /dev/null)" = "commit" ]; then echo "${repository}" return 0 fi done >&2 printf 'find_repository_with_commit: Cannot find repository with commit "%s"\n' "$1" return 2 } # find_git_repository_to_package_repository repository # find the git repository which tracks the package repository $repository find_git_repository_to_package_repository() { local repository repository=$( # shellcheck disable=SC2016 { printf 'SELECT `git_repositories`.`name`' printf ' FROM `git_repositories`' mysql_join_git_repositories_upstream_repositories printf ' WHERE `upstream_repositories`.`name`=from_base64("%s");\n' \ "$(printf '%s' "$1" | base64 -w0)" } | \ mysql_run_query ) if [ -z "${repository}" ]; then >&2 echo "can't find git repository with package repository '$1'" exit 1 else echo "${repository}" return 0 fi } # ls_master_mirror $path # list content of $path on the master mirror (via rsync) ls_master_mirror() { local path="$1" ${master_mirror_rsync_command} \ "${master_mirror_rsync_directory}/${path}/" | \ grep -v '\s\.$' | \ awk '{print $5}' } # remove_old_package_versions # removes all older versions of the packages given at stdin (by bpir.id) # from all repositories less[1] stable than the current repository, as # well as any different version of the same package from equally[2] # stable repositories # 1] determined by `repository_stability_relations` # 2] identical `repositories`.`stability` remove_old_package_versions() { ( # the new shell is intentional tmp_dir=$(mktemp -d 'tmp.common-functions.remove_old_package_versions.XXXXXXXXXX' --tmpdir) trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT while read -r bpir_id; do # shellcheck disable=SC2016 { printf 'SELECT ' printf '`d_bpir`.`id`,' printf 'IF(`d_r`.`stability`=`o_r`.`stability` AND `d_bpir`.`id`!=`o_bpir`.`id`,1,0),' printf 'CONCAT(' printf 'IF(`d_bp`.`epoch`=0,"",CONCAT(`d_bp`.`epoch`,":")),' printf '`d_bp`.`pkgver`,"-",' printf '`d_bp`.`pkgrel`,' printf 'IF(`d_bp`.`sub_pkgrel_omitted`,"",CONCAT(".",`d_bp`.`sub_pkgrel`))' printf '),' printf 'IF(`d_r`.`id`=`o_r`.`id`,1,0),' printf '`d_ra`.`name`,' printf '`d_r`.`name`,' printf '`d_bp`.`pkgname`,' printf 'CONCAT(`d_ra`.`name`,"/",' printf '`d_r`.`name`,"/",' mysql_package_name_query 'd_bp' 'd_bpa' 'd_bpc' printf ')' printf ' FROM `binary_packages_in_repositories` AS `d_bpir`' mysql_join_binary_packages_in_repositories_binary_packages 'd_bpir' 'd_bp' mysql_join_binary_packages_in_repositories_repositories 'd_bpir' 'd_r' printf ' AND `d_r`.`is_on_master_mirror`' mysql_join_repositories_architectures 'd_r' 'd_ra' mysql_join_binary_packages_architectures 'd_bp' 'd_bpa' printf ' LEFT' mysql_join_binary_packages_compressions 'd_bp' 'd_bpc' printf ' JOIN `binary_packages` AS `o_bp`' printf ' ON `d_bp`.`pkgname`=`o_bp`.`pkgname`' mysql_join_binary_packages_binary_packages_in_repositories 'o_bp' 'o_bpir' mysql_join_binary_packages_in_repositories_repositories 'o_bpir' 'o_r' printf ' AND `o_r`.`is_on_master_mirror`' printf ' AND `o_r`.`architecture`=`d_r`.`architecture`' printf ' JOIN `repository_stability_relations`' printf ' ON `repository_stability_relations`.`less_stable`=`d_r`.`stability`' printf ' AND `repository_stability_relations`.`more_stable`=`o_r`.`stability`' printf ' WHERE `o_bpir`.`id`=from_base64("%s")' \ "$( printf '%s' "${bpir_id}" | \ base64 -w0 )" printf ';\n' } | \ mysql_run_query | \ tr '\t' ' ' | \ expand_version 3 | \ sort -k3V,3 -k2r,2 | \ shrink_version 3 | \ sed ' s/^/'"${bpir_id}"' / $ a ' done | \ sed -n ' /^\([0-9]\+\) \1 /,/^$/ d /^$/ d s/^[0-9]\+ \([0-9]\+ \)\(\S\+ \)\{2\}/\1/ h /^[0-9]\+ 0 / { s/^\(\S\+ \)\{2\}// s/ \S\+$// w'"${tmp_dir}"'/repo-removes g } s/^\(\S\+ \)\{5\}// w'"${tmp_dir}"'/sftp-removes g s/ .*$// w'"${tmp_dir}"'/db-removes ' for file in 'repo-removes' 'sftp-remove' 'db-removes'; do if [ -s "${tmp_dir}/${file}" ]; then sort -u "${tmp_dir}/${file}" | \ sponge "${tmp_dir}/${file}" fi done # repo-remove packages while read -r arch repo pkgname; do mkdir "${tmp_dir}/transit" failsafe_rsync \ "${master_mirror_rsync_directory}/${arch}/${repo}/${repo}.db."* \ "${master_mirror_rsync_directory}/${arch}/${repo}/${repo}.files."* \ "${tmp_dir}/transit/" repo-remove "${tmp_dir}/transit/${repo}.db.tar.gz" "${pkgname}" failsafe_rsync \ "${tmp_dir}/transit/${repo}.db."* \ "${tmp_dir}/transit/${repo}.files."* \ "${master_mirror_rsync_directory}/${arch}/${repo}/" rm -rf --one-file-system "${tmp_dir}/transit" done < \ "${tmp_dir}/repo-removes" # db-remove packages if [ -s "${tmp_dir}/db-removes" ]; then # shellcheck disable=SC2016 { printf 'CREATE TEMPORARY TABLE `del` (`id` BIGINT NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`));\n' printf 'LOAD DATA LOCAL INFILE "%s" INTO TABLE `del` (`id`);\n' \ "${tmp_dir}/db-removes" printf 'DELETE `binary_packages_in_repositories`' printf ' FROM `binary_packages_in_repositories`' printf ' JOIN `del`' printf ' ON `binary_packages_in_repositories`.`id`=`del`.`id`;\n' mysql_query_and_delete_unneeded_binary_packages } | \ mysql_run_query | \ sort -u >> \ "${tmp_dir}/sftp-removes" fi # sftp-remove packages if [ -s "${tmp_dir}/sftp-removes" ]; then sed ' p s/$/.sig/ ' "${tmp_dir}/sftp-removes" | \ sed ' s|^|rm "| s|$|"| ' | \ failsafe_sftp fi ) } # wait_some_time $minimum $diff # wait between minimum and minimum+diff seconds (diff defaults to 30) wait_some_time() { local minimum=$1 local diff=$2 local random if [ -z "${diff}" ]; then diff=30 fi random=$( dd if='/dev/urandom' count=1 2> /dev/null | \ cksum | \ cut -d' ' -f1 ) sleep $((minimum + random % diff)) } # str_to_regex $string # escape dots for use in regex str_to_regex() { echo "$1" | \ sed ' s|[.[]|\\\0|g ' } # make_source_info $package $repository $git_revision $mod_git_revision $output # create .SRCINFO from PKGBUILD within git repositories, output to $output make_source_info() { local package="$1" local repository="$2" local git_revision="$3" local mod_git_revision="$4" local output="$5" local git_repo local PKGBUILD local PKGBUILD_mod if ! git_repo=$(find_repository_with_commit "${git_revision}") || \ [ -z "${git_repo}" ]; then return 1 fi if ! find_pkgbuilds "${package}" "${repository}" "${git_repo}" "${git_revision}" "${mod_git_revision}"; then return 1 fi ( # the new shell is intentional local epoch local pkgver tmp_dir=$(mktemp -d "${work_dir}/tmp.make_source_info.XXXXXX") trap 'rm -rf --one-file-system "${tmp_dir}"' EXIT extract_source_directory "${git_repo}" "${git_revision}" "${mod_git_revision}" "${tmp_dir}" '0' { cd "${tmp_dir}" # some additional info printf 'upstream_git_repository = %s\n' "${git_repo}" printf 'PKGBUILD = %s\n' "${PKGBUILD}" printf 'PKGBUILD_mod = %s\n' "${PKGBUILD_mod}" makepkg --printsrcinfo cd .. } > \ "${tmp_dir}/SRCINFO" unset epoch unset pkgver eval "$( sed -n ' s/^\t\(epoch\|pkgver\) = /\1=/ T p ' "${tmp_dir}/SRCINFO" )" sed -i ' /^pkgname = /! b /= gtk-doc$/ b /= linux-api-headers$/ b s/= \(openjdk[0-9]\+\)-doc$/\0\n\tdepends = \1-src/ t append_version s/= \(qt5\)-doc$/\0\n\tdepends = \1-base/ t append_version s/= \(\S\+\)-i18n-\S\+$/\0\n\tdepends = \1/ t append_version s/= \(linux\(-\S\+\)\?\)-\(docs\|headers\)$/\0\n\tdepends = \1/ t append_version b :append_version s/$/='"${epoch}${epoch+:}${pkgver}"'/ ' "${tmp_dir}/SRCINFO" cat "${tmp_dir}/SRCINFO" > \ "${output}" ) } # recursively_umount_and_rm $dir # umount all mountpoints in $dir which are also in $dir's # filesystem, possibly also $dir itself and then # rm -rf --one-file-system $dir recursively_umount_and_rm() { local dir="$1" if [ -z "${dir}" ]; then >&2 echo 'ERROR: recursively_umount_and_rm requires an argument' exit 42 fi if [ ! -d "${dir}" ]; then >&2 printf 'ERROR: recursively_umount_and_rm requires a directory as argument - "%s" is not\n' \ "${dir}" exit 42 fi # the sh -c '...' construct branch below is borrowed from # archlinux/devtools lib/archroot.sh: is_subvolume(), is_same_fs() and # subvolume_delete_recursive() # shellcheck disable=SC1004,SC2016 find "${dir}" \ -depth \ -xdev \ -type d \ -exec 'mountpoint' '-q' '{}' ';' \ -exec 'sudo' 'umount' '-l' '{}' ';' \ -prune \ , \ -inum 256 \ -exec sh -c ' [ "$(stat -f -c %T "$1")" = btrfs ] && \ [ "$(stat -c %d "$1")" = "$2" ] ' {} "$(stat -c %d "${dir}")" \; \ -exec btrfs subvolume delete {} \; \ -prune || true rm -rf --one-file-system "${dir}" || true } # mangle_pkgbuild $PKGBUILD [$sub_pkgrel] # mangle $arch in PKBUILDs to contain i486, i686, pentium4 # append $sub_pkgrel to the pkgrel # remove "lib32-" and "gcc-multilib" from {make,check,opt,}depends # remove $pkgrel from {make,check,opt,}depends mangle_pkgbuild() { local PKGBUILD="$1" local sub_pkgrel="$2" if [ -n "${sub_pkgrel}" ]; then sub_pkgrel=".${sub_pkgrel}" fi if grep -q '^\s*pkgname=["'"'"']\?lib32-' "${PKGBUILD}"; then sed -i ' s/^\(\s*pkgrel=\)['"'"'"]\?\([0-9]\+\)\.[0-9]*['"'"'"]\?\s*\(#.*\)\?$/\1"\2"/ ' "${PKGBUILD}" fi sed -i ' /^arch=[^#]*any/!{ /^arch=(/s/(/(i486 i686 pentium4 / } s/^\(\s*pkgrel=\)['"'"'"]\?\([0-9.]\+\)['"'"'"]\?\s*\(#.*\)\?$/\1"\2'"${sub_pkgrel}"'"/ ' "${PKGBUILD}" # shellcheck disable=SC2016 sed -i ' /^\s*\(make\|check\|opt\|\)depends\(_[^=[:space:]]\+\)\?=(/ { :a /^\s*\(\S[^=]*\)=(\(\([^()"'"'"']\|"[^"]*"\|['"'"'][^'"'"']*['"'"']\s*\)*\(#[^\n]*\n\)\?\)*)/! { $b N ba } :b s/\(=.*["'"'"'([:space:]]\)lib32-/\1/g s/\(=.*["'"'"'([:space:]]\)gcc-multilib\(["'"'"')[:space:]]\)/\1gcc\2/g s/\(=.*["'"'"'([:space:]][^[:space:]$]\+[<=>]\S\+\)-[^:"'"'"')[:space:]]\+\([:"'"'"')[:space:]]\)/\1\2/g tb } ' "${PKGBUILD}" } # find_package_repository_to_package $package $git_repository $git_commit # find the package repository a package from a given git repository # belongs to find_package_repository_to_package() { local package="$1" local git_repository="$2" local git_commit="$3" local repo_path local repo eval 'repo_path="${repo_paths__'"${git_repository}"'}"' if [ "${git_repository}" = 'archlinux32' ]; then repo=$( git -C "${repo_path}" archive "${git_commit}" -- | \ tar -t --wildcards "*/${package}/" | \ cut -d/ -f1 | \ grep -vxF blacklist | \ sort -u ) else repo=$( git -C "${repo_path}" archive "${git_commit}" -- "${package}/repos" 2> /dev/null | \ tar -t | \ cut -d/ -f3 | \ grep -vxF '' | \ grep -v 'staging\|testing\|-unstable' | \ grep -v -- '-i686$' | \ sed 's|-[^-]\+$||' | \ sort -u ) fi if [ -z "${repo}" ]; then >&2 printf 'find_package_repository_to_package %s %s %s: no repo found\n' \ "${package}" \ "${git_repository}" \ "${git_commit}" return 1 fi if [ "$( echo "${repo}" | \ wc -l )" -ne 1 ]; then >&2 printf 'find_package_repository_to_package %s %s %s: multiple repos found:\n' \ "${package}" \ "${git_repository}" \ "${git_commit}" >&2 printf '%s\n' "${repo}" return 1 fi echo "${repo}" } # extract_source_directory $git_repo $rev $mod_rev $output $sub_pkgrel # extract files found in the svn/git source directories # $PKGBUILD and $PKGBUILD_mod are expected to be set correctly extract_source_directory() { local git_repo="$1" # shellcheck disable=SC2034 local rev="$2" local mod_rev="$3" local output="$4" local sub_pkgrel="$5" if [ -n "${PKGBUILD}" ]; then eval 'git -C "${repo_paths__'"${git_repo}"'}" archive "${rev}" -- "${PKGBUILD%/*}"' | \ tar -x --strip-components=3 -C "${output}" printf '\n' >> \ "${output}/PKGBUILD" fi if [ -n "${PKGBUILD_mod}" ]; then git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod%/*}" | \ tar -x --overwrite --exclude 'PKGBUILD' --strip-components=2 -C "${output}" 2> /dev/null || \ true git -C "${repo_paths__archlinux32}" archive "${mod_rev}" -- "${PKGBUILD_mod}" | \ tar -Ox "${PKGBUILD_mod}" >> \ "${output}/PKGBUILD" printf '\n' >> \ "${output}/PKGBUILD" fi # we do not want to update pkgver, so we just undefine it printf 'unset -f pkgver\n' >> \ "${output}/PKGBUILD" mangle_pkgbuild "${output}/PKGBUILD" "${sub_pkgrel}" # shellcheck disable=SC2016 sed -i '/^\$Id\$$/d' "${output}/PKGBUILD" # we don't want write permissions on the PKGBUILD - otherwise pkgver() # will change the version! (**HACK**) chmod oga-w "${output}/PKGBUILD" } # download_sources_by_hash # try to download all sources by their hash into the current directory # returns 0 if any source was downloaded and 1 otherwise download_sources_by_hash() { local return_value=1 local tmp_dir local sum_type local arch_suffix if [ ! -f 'PKGBUILD' ]; then >&2 echo 'No PKGBUILD found - download_sources_by_hash() must be run from the source directory.' return 1 fi tmp_dir=$(mktemp -d 'tmp.common-functions.download_sources_by_hash.XXXXXXXXXX' --tmpdir="$(pwd)") makepkg --printsrcinfo > "${tmp_dir}/.SRCINFO" for arch_suffix in '' '_i486' '_i686' '_pentium4'; do for sum_type in 'md5sum' 'sha1sum' 'sha256sum' 'sha512sum'; do grep '^\s*'"${sum_type}s${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \ sed 's|^.* = ||' | \ cat -n > \ "${tmp_dir}/sums" grep '^\s*source'"${arch_suffix}"' = ' "${tmp_dir}/.SRCINFO" | \ sed ' s|^.* = || s|::.*$|| s|.*/|| ' | \ cat -n > \ "${tmp_dir}/urls" if [ "$(wc -l < "${tmp_dir}/sums")" -eq "$(wc -l < "${tmp_dir}/urls")" ]; then join -1 1 -2 1 -o 1.2,2.2 "${tmp_dir}/sums" "${tmp_dir}/urls" > \ "${tmp_dir}/joined" while read -r sum file; do if [ "${sum}" = 'SKIP' ]; then continue fi if echo "${sum} ${file}" | \ ${sum_type} -c > /dev/null 2>&1; then # the correct source is already there continue fi if wget -O "${tmp_dir}/transfer" "${source_by_hash_mirror}${sum_type}/${sum}"; then mv "${tmp_dir}/transfer" "${file}" return_value=0 fi done < \ "${tmp_dir}/joined" fi done done rm -rf --one-file-system "${tmp_dir}" return ${return_value} } # expand_version $column_num # add "0:" to version in $colum_num-th column if no ":" is there (epoch) # add "+0" to version in $colum_num-th column if no "+" is there (git count/hash) expand_version() { local column_num column_num="$1" sed ' /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*+/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)-/\1+0-/ /^\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*:/! s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)/\10:/ ' } # shrink_version $column_num # remove "0:" from version in $colum_num-th column (epoch) # remove "+0" from version in $colum_num-th column (git count/hash) shrink_version() { local column_num column_num="$1" sed ' s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\S*\)+0-/\1-/ s/^\(\(\S\+\s\+\)\{'"$((column_num-1))"'\}\)0:/\1/ ' } # sort_square_bracket_content $file # sort the content of [] in $file, print to stdout sort_square_bracket_content() { local file local line local token local token_list local rest file="$1" while read -r line; do printf '%s ' "${line}" | \ tr ' ' '\n' | \ while read -r token; do if echo "${token}" | \ grep -qF '['; then printf '%s[' "${token%[*}" token="${token##*[}" token_list="${token%,}" while ! echo "${token_list}" | \ grep -qF ']'; do read -r token token_list=$( printf '%s\n' \ "${token_list}" \ "${token%,}" ) done rest="]${token_list#*]}" token_list="${token_list%%]*}" token=$( printf '%s' "${token_list}" | \ sort | \ sed 's|$|,|' printf '%s' "${rest}" ) fi printf '%s\n' "${token}" done | \ tr '\n' ' ' | \ sed ' s|, ]|]|g s| $|| ' printf '\n' done < \ "${file}" } # smoothen_namcap_log $file # remove unneccesary differences from namcap-logs: # - remove architecture specific information # - sort lines # - sort content of square brackets smoothen_namcap_log() { local file file="$1" # shellcheck disable=SC2016 sort_square_bracket_content "${file}" | \ sed ' # normalize architecture specific information s|i[34567]86|$ARCH|g s|x86\([-_]64\)\?|$ARCH|g # remove haskell hashes s|\('"'"'[^'"'"']*-[0-9.]\+\)-[a-zA-Z0-9]\{1,22\}\(-ghc[^'"'"']*'"'"'\)|\1\2|g ' | \ sort | \ sponge "${file}" } # trigger_mirror_refreshs # trigger a refresh of capable tier 1 mirrors (as backup for master mirror) trigger_mirror_refreshs() { local tmp_file tmp_file=$(mktemp 'tmp.common-functions.trigger_mirror_refreshs.XXXXXXXXXX' --tmpdir) date '+%s' > \ "${tmp_file}" failsafe_rsync \ "${tmp_file}" \ "${master_mirror_rsync_directory}/lastupdate" rm "${tmp_file}" screen -wipe || true for trigger_url in ${mirror_refresh_trigger_urls}; do screen -S trigger-mirror-update -d -m curl -L --connect-timeout 10 "${trigger_url}" done } # extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name extract_pkgname_epoch_pkgver_pkgrel_sub_pkgrel_arch_from_package_name() { pkgname="$1" pkgname="${pkgname%.pkg.tar*}" arch="${pkgname##*-}" pkgname="${pkgname%-*}" sub_pkgrel="${pkgname##*-}" pkgname="${pkgname%-*}" pkgrel="${sub_pkgrel%.*}" if [ "${pkgrel}" = "${sub_pkgrel}" ]; then sub_pkgrel='0' else sub_pkgrel="${sub_pkgrel##*.}" fi epoch="${pkgname##*-}" pkgname="${pkgname%-*}" pkgver="${epoch#*:}" if [ "${pkgver}" = "${epoch}" ]; then epoch='0' else epoch="${epoch%%:*}" fi } # irc_say $channel [copy] # say content of stdin in irc channel $channel (default: #archlinux32) # and print copy to stdout if 'copy' is given # shellcheck disable=SC2120 irc_say() { local channel local content if [ -z "$1" ]; then channel='#archlinux32' else channel="$1" fi if [ -s "${work_dir}/irc-shut-up" ] && \ [ "$(date '+%s')" -gt "$(cat "${work_dir}/irc-shut-up")" ]; then rm "${work_dir}/irc-shut-up" fi content=$(cat) if [ "$2" = 'copy' ]; then printf '%s\n' "${content}" fi if [ -s "${work_dir}/irc-shut-up" ] && \ [ -z "${channel%%#*}" ]; then return fi if [ -p "${irc_dir}/${channel}/in" ]; then printf '%s\n' "${content}" \ | sponge "${irc_dir}/${channel}/in" else printf '%s\n' "${content}" \ | sed 's@^@/j '"${channel}"' @' \ | sponge "${irc_dir}/in" fi } # calculate_script_checksum # calculate and print a checksum of the main script and all scripts in lib/ calculate_script_checksum() { { sha512sum "$0" find "${base_dir}/lib" -type f \ -exec sha512sum '{}' \; } | \ sort | \ awk '{print $1}' | \ sha512sum | \ awk '{print $1}' } # verbose_flock # flock wrapper with some informational output on error verbose_flock() { local err=0 flock "$@" || { err=$? lsof +c0 "/proc/$$/fd/$( printf '%s\n' "$@" | \ grep -vm1 '^-' )" >&2 2>/dev/null || true >&2 printf 'FYI: I am %s.\n' "$$" return ${err} } } # recompress_gz $tmp_dir $file1.gz $file2.gz ... # recompress the given file(s) to make them rsync friendly recompress_gz() { tmp_file=$(mktemp "$1/recompress_gz.XXXXXXXX") shift local file for file in "$@"; do if [ ! -f "${file}" ]; then continue fi mv "${file}" "${tmp_file}" zcat "${tmp_file}" | \ gzip --best --rsyncable > \ "${file}" done rm "${tmp_file}" } # failsafe_sftp # execute the commands from stdin on the master mirror, retrying if # unsuccessful # caveats: # - only checks for success of "rm", "ln", "rename" # - commands must be executable in arbitrary order failsafe_sftp() { ( # new shell is intentional temp_dir=$(mktemp -d 'tmp.common-functions.sftp_failsafe.XXXXXXXXXX' --tmpdir) trial_counter=20 trap 'rm -rf --one-file-system "${temp_dir}"' EXIT cat > "${temp_dir}/input" if [ -n "${master_mirror_sftp_root}" ]; then sed -i ' /^\(rm\|rename\) / s@"\([^"]\+\)"@"'"${master_mirror_sftp_root}"'/\1"@g /^ln / s@"\([^"]\+\)"$@"'"${master_mirror_sftp_root}"'/\1"@ ' "${temp_dir}/input" fi sed -n ' s/^rm "\([^"]\+\)"$/- \1/ s/^ln\( [^"]\S*\)* "[^"]\+" "\([^"]\+\)"$/+ \2/ s/^rename "\([^"]\+\)" "\([^"]\+\)"$/- \1\n+ \2/ T p ' "${temp_dir}/input" > \ "${temp_dir}/expectations" if ${master_mirror_sftp_command} < "${temp_dir}/input"; then # we're done exit 0 fi # make failing sftp commands non-fatal - maybe some succeeded above? sed -i ' s/^[^-]/-\0/ ' "${temp_dir}/input" # we stop on exhausted $trial_counter while [ ${trial_counter} -gt 0 ]; do wait_some_time 30 # success requires 3 things: # - succeeding sftp executions (commands are non-critical already) # - succeeding sftp command to create listing (e.g. all expected # files are found) # - no unexpected files were found if ${master_mirror_sftp_command} < \ "${temp_dir}/input" && \ sed ' s/^+ \(\S\+\)$/ls -1 "\1"/ s,^- \(\S\+/\)[^/]\+$,ls -1 "\1", ' "${temp_dir}/expectations" | \ sort -u | \ ${master_mirror_sftp_command} > "${temp_dir}/check" && \ ! grep -qxF "$( sed -n ' s/^- // T p ' "${temp_dir}/expectations" )" "${temp_dir}/check"; then exit 0 fi trial_counter=$((trial_counter-1)) done exit 1 ) || \ return $? } # failsafe_rsync # execute rsync with the given parameters on the master mirror, retrying # if unsuccessful # caveats: # - output might be garbled # - error code will be projected into {0,1} failsafe_rsync() { local trial_counter trial_counter=20 if ${master_mirror_rsync_command} "$@"; then return 0 fi while [ ${trial_counter} -gt 0 ]; do wait_some_time 30 if ${master_mirror_rsync_command} "$@"; then return 0 fi trial_counter=$((trial_counter-1)) done return 1 } # apply_trunk_patch source_dir diff_source_dir # apply a patch between diff_source_dir/PKGBUILD and # diff_source_dir/../../trunk/PKGBUILD onto source_dir/PKGBUILD apply_trunk_patch() { local source_dir="$1" local diff_source_dir="$2" if [ -z "${diff_source_dir}" ]; then # no diff_source_dir => no action return fi chmod +w "${source_dir}/PKGBUILD" diff -u3 "${diff_source_dir}/PKGBUILD" "${diff_source_dir}/../../trunk/PKGBUILD" \ | sed ' 1,2 s#^\(\(+++\|---\)\s\+\)\S\+/\(PKGBUILD\s.*\)$#\1\3# ' \ | patch -p0 "${source_dir}/PKGBUILD" chmod -w "${source_dir}/PKGBUILD" find "${diff_source_dir}/../../trunk/" \ -mindepth 1 \ -maxdepth 1 \ -not -name 'PKGBUILD' \ -type f \ -exec cp '{}' "${source_dir}" \; } # update_blocked_packages_count # update the count how many packages are blocked by a build assignment update_blocked_packages_count() { ( # new shell is intentional temp_dir=$(mktemp -d 'tmp.sanity-check.XXXXXXXXXX' --tmpdir) trap 'rm -rf --one-file-system "${temp_dir}"' EXIT # shellcheck disable=SC2016 { printf 'SELECT ' printf '`a_bp`.`build_assignment`,' printf '`b_bp`.`build_assignment`' printf ' FROM `binary_packages` AS `a_bp`' mysql_join_binary_packages_binary_packages_in_repositories 'a_bp' 'a_bpir' printf ' AND `a_bpir`.`repository`=%s' \ "${repository_ids__any_build_list}" mysql_join_binary_packages_dependencies 'a_bp' mysql_join_dependencies_dependency_types printf ' AND `dependency_types`.`relevant_for_binary_packages`' mysql_join_dependencies_install_target_providers_with_versions mysql_join_install_target_providers_binary_packages '' 'b_bp' mysql_join_binary_packages_binary_packages_in_repositories 'b_bp' 'b_bpir' printf ' AND `a_bpir`.`repository`=%s' \ "${repository_ids__any_build_list}" printf ' WHERE `a_bp`.`architecture`=`b_bp`.`architecture`' printf ' OR `a_bp`.`architecture`=%s' \ "${architecture_ids__any}" printf ' OR `b_bp`.`architecture`=%s' \ "${architecture_ids__any}" } | \ mysql_run_query 'unimportant' | \ tr '\t' ' ' | \ sort -u | \ sort -k1,1 > \ "${temp_dir}/links.new" touch "${temp_dir}/links.1" "${temp_dir}/links.2" # new links (sorted by 1st column); old links (sorted by 1st / 2nd column) while [ -s "${temp_dir}/links.new" ]; do cat "${temp_dir}/links.1" "${temp_dir}/links.new" | \ sort -k2,2 > \ "${temp_dir}/links.2" sort -k1,1 "${temp_dir}/links.2" > \ "${temp_dir}/links.1" duplicates=$( uniq -d "${temp_dir}/links.1" ) if [ -n "${duplicates}" ]; then >&2 echo 'internal error: there are %s duplicate links' \ "$( printf '%s\n' "${duplicates}" | \ wc -l )" exit 42 fi { { join -1 2 -2 1 -o 1.1,2.2 "${temp_dir}/links.2" "${temp_dir}/links.new" sort -k2,2 "${temp_dir}/links.new" | \ join -1 2 -2 1 -o 1.1,2.2 - "${temp_dir}/links.1" sort -k2,2 "${temp_dir}/links.new" | \ join -1 2 -2 1 -o 1.1,2.2 - "${temp_dir}/links.new" } | \ sort -u sed 'p' "${temp_dir}/links.1" } | \ sort | \ uniq -u | \ sort -k1,1 | \ sponge "${temp_dir}/links.new" done cut -d' ' -f2 "${temp_dir}/links.2" | \ uniq -c | \ sed 's/^\s\+//' | \ tr ' ' '\t' > \ "${temp_dir}/link-counts" # shellcheck disable=SC2016 { printf 'CREATE TEMPORARY TABLE `lc` (' printf '`count` MEDIUMINT,' printf '`ba_id` BIGINT,' printf 'PRIMARY KEY (`ba_id`)' printf ');\n' printf 'LOAD DATA LOCAL INFILE "%s" INTO TABLE `lc`(`count`,`ba_id`);\n' \ "${temp_dir}/link-counts" printf 'UPDATE `build_assignments`' printf ' LEFT JOIN `lc`' printf ' ON `build_assignments`.`id`=`lc`.`ba_id`' printf ' SET `build_assignments`.`currently_blocking`=`lc`.`count`;\n' } | \ mysql_run_query 'unimportant' ) } # extract_dependencies_from_package $pkgfile # extract make-, run- and check-depends from $pkgfile # format: deptype install_target version_relation epoch version extract_dependencies_from_package() { { bsdtar -Oxf "$1" '.PKGINFO' \ | sed ' s/^checkdepend = /check / t s/^depend = /run / t s/^makedepend = /make / t d ' \ | sed ' s/\(=\|[<>]=\?\)\([^-]\+\)\(-[^-]\+\)\?$/ \1 \2/ t s/$/ >= '"${min_version}"'/ ' \ | sed ' s/:\(\S\+\)$/ \1/ t s/ \(\S\+\)$/ 0 \1/ ' # inject base & base-devel printf ' >= %s\n' \ "${min_version}" \ | tr ':' ' ' \ | sed ' s/^.*$/run base\0\nmake base-devel\0/ ' } \ | sort -u } # expand_blacklist_architectures $tmp_file # expand the architecture of the blacklisted packages given on stdin as # lines # $arch$pkgbase/$pkgname/$whatever expand_blacklist_architectures() { # shellcheck disable=SC2016 { printf 'SELECT `superior`.`name`,`inferior`.`name`' printf ' FROM `architectures` AS `inferior`' printf ' JOIN `architecture_compatibilities`' printf ' ON `architecture_compatibilities`.`built_for`=`inferior`.`id`' printf ' JOIN `architectures` AS `superior`' printf ' ON `architecture_compatibilities`.`runs_on`=`superior`.`id`' # "any" has a special role - it should be regarded as *inferior* to # all architectures printf ' WHERE `superior`.`name`!="any"' printf ' AND `inferior`.`name`!="any";\n' printf 'SELECT "any",`architectures`.`name`' printf ' FROM `architectures`;\n' } \ | mysql_run_query \ | sort -k1,1 \ > "$1" sort -k1,1 \ | join -1 1 -2 1 -o 1.2,2.2 "$1" - \ | sort -u }