From 172ffa5e2b3d4731680e463a0351c3e7cc92df89 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 9 Feb 2024 13:48:55 -0500 Subject: [PATCH 001/143] initial migration of container infra from bioconda-containers --- images/base-glibc-busybox-bash/Dockerfile | 116 ++++ .../Dockerfile.busybox | 23 + .../base-glibc-busybox-bash/Dockerfile.test | 27 + images/base-glibc-busybox-bash/build-busybox | 140 ++++ images/base-glibc-busybox-bash/install-pkgs | 361 ++++++++++ images/base-glibc-debian-bash/Dockerfile | 131 ++++ images/base-glibc-debian-bash/Dockerfile.test | 39 ++ .../Dockerfile | 40 ++ .../Dockerfile.test | 7 + .../issue-responder | 615 ++++++++++++++++++ .../bioconda-utils-build-env-cos7/Dockerfile | 65 ++ .../Dockerfile.test | 6 + images/bot/Dockerfile | 78 +++ images/bot/Dockerfile.test | 9 + images/bot/pyproject.toml | 3 + images/bot/setup.cfg | 20 + images/bot/src/bioconda_bot/__init__.py | 0 images/bot/src/bioconda_bot/automerge.py | 138 ++++ .../bot/src/bioconda_bot/changeVisibility.py | 63 ++ images/bot/src/bioconda_bot/cli.py | 81 +++ images/bot/src/bioconda_bot/comment.py | 197 ++++++ images/bot/src/bioconda_bot/common.py | 249 +++++++ images/bot/src/bioconda_bot/merge.py | 371 +++++++++++ images/bot/src/bioconda_bot/update.py | 78 +++ images/create-env/CHANGELOG.md | 152 +++++ images/create-env/Dockerfile | 44 ++ images/create-env/Dockerfile.test | 81 +++ images/create-env/README.md | 99 +++ images/create-env/create-env | 242 +++++++ images/create-env/install-conda | 124 ++++ images/create-env/print-env-activate | 95 +++ 31 files changed, 3694 insertions(+) create mode 100644 images/base-glibc-busybox-bash/Dockerfile create mode 100644 images/base-glibc-busybox-bash/Dockerfile.busybox create mode 100644 images/base-glibc-busybox-bash/Dockerfile.test create mode 100755 images/base-glibc-busybox-bash/build-busybox create mode 100755 images/base-glibc-busybox-bash/install-pkgs create mode 100644 images/base-glibc-debian-bash/Dockerfile create mode 100644 images/base-glibc-debian-bash/Dockerfile.test create mode 100644 images/bioconda-recipes-issue-responder/Dockerfile create mode 100644 images/bioconda-recipes-issue-responder/Dockerfile.test create mode 100755 images/bioconda-recipes-issue-responder/issue-responder create mode 100644 images/bioconda-utils-build-env-cos7/Dockerfile create mode 100644 images/bioconda-utils-build-env-cos7/Dockerfile.test create mode 100644 images/bot/Dockerfile create mode 100644 images/bot/Dockerfile.test create mode 100644 images/bot/pyproject.toml create mode 100644 images/bot/setup.cfg create mode 100644 images/bot/src/bioconda_bot/__init__.py create mode 100644 images/bot/src/bioconda_bot/automerge.py create mode 100644 images/bot/src/bioconda_bot/changeVisibility.py create mode 100644 images/bot/src/bioconda_bot/cli.py create mode 100644 images/bot/src/bioconda_bot/comment.py create mode 100644 images/bot/src/bioconda_bot/common.py create mode 100644 images/bot/src/bioconda_bot/merge.py create mode 100644 images/bot/src/bioconda_bot/update.py create mode 100644 images/create-env/CHANGELOG.md create mode 100644 images/create-env/Dockerfile create mode 100644 images/create-env/Dockerfile.test create mode 100644 images/create-env/README.md create mode 100755 images/create-env/create-env create mode 100755 images/create-env/install-conda create mode 100755 images/create-env/print-env-activate diff --git a/images/base-glibc-busybox-bash/Dockerfile b/images/base-glibc-busybox-bash/Dockerfile new file mode 100644 index 00000000000..e875a2d41ac --- 
/dev/null +++ b/images/base-glibc-busybox-bash/Dockerfile @@ -0,0 +1,116 @@ +# Don't use Debian's busybox package since it only provides a smaller subset of +# BusyBox's functions (e.g., no administrative tools like adduser etc.). +# Since we create a glibc image anyway, we can also use a the slightly smaller +# dynamically linked binary. + +ARG debian_version +FROM "debian:${debian_version}-slim" AS build_base +RUN [ ! -f /etc/apt/sources.list ] || sed --in-place= --regexp-extended \ + '/ stretch/ { s,-updates,-backports, ; s,/(deb|security)\.,/archive., }' \ + /etc/apt/sources.list + + +FROM build_base AS rootfs_builder + +ARG busybox_image +COPY --from="${busybox_image}" /build /build +WORKDIR /busybox-rootfs +RUN arch="$( uname -m )" \ + && \ + mkdir -p ./bin ./sbin ./usr/bin ./usr/sbin \ + && \ + cp -al "/build/busybox.${arch}" ./bin/busybox \ + && \ + ldd ./bin/busybox \ + | grep --only-matching --extended-regexp '/lib\S+' \ + | xargs -n1 sh -xc 'mkdir -p ".${1%/*}" && cp -aL "${1}" ".${1%/*}"' -- \ + && \ + chroot . /bin/busybox --install \ + && \ + rm -rf ./lib* + +WORKDIR /rootfs + +RUN mkdir -p ./etc ./home ./opt ./root ./run /tmp ./usr ./var/log \ + && \ + for dir in bin lib sbin ; do \ + mkdir "./usr/${dir}" \ + && \ + if [ -L "/bin" ] ; then \ + ln -s "usr/${dir}" "./${dir}" ; \ + else \ + mkdir "./${dir}" ; \ + fi ; \ + done + +RUN find /busybox-rootfs -type f \ + -exec sh -c 'cp -al -- "${1}" "./${1#/busybox-rootfs/}"' -- '{}' ';' + +# Install helper tools used by install-pkgs. +RUN apt-get update -qq \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get install --yes --no-install-recommends \ + patchelf + +COPY install-pkgs /usr/local/bin +RUN install-pkgs "$( pwd )" /tmp/work \ + bash \ + base-passwd \ + libc-bin \ + login \ + ncurses-base \ + && \ + # Remove contents of /usr/local as downstream images overwrite those. + find ./usr/local/ \ + -mindepth 1 -depth \ + -delete + +RUN while IFS=: read _ _ uid gid _ home _ ; do \ + [ -n "${home##/var/run/*}" ] || home="${home#/var}" \ + && \ + [ -d "./${home#/}" ] || [ "${home}" = "/nonexistent" ] && continue ; \ + mkdir -p "./${home#/}" \ + && \ + chown "${uid}:${gid}" "./${home#/}" \ + && \ + chmod 775 "./${home#/}" \ + ; done < ./etc/passwd \ + && \ + pwck --read-only --root "$( pwd )" \ + | { ! grep -v -e 'no changes' -e '/nonexistent' ; } \ + && \ + grpck --read-only --root "$( pwd )" \ + && \ + find \ + -xdev -type f \! -path ./var/\* \! -path ./usr/share/\* \! -name \*.pl \ + | xargs -P0 -n100 sh -c \ + 'chroot . ldd -- "${@}" 2> /dev/null | sed -n "/:/h; /not found/{x;p;x;p}"' -- \ + | { ! grep . ; } + +# env-activate.sh (+ optionally env-execute) should be overwritten downstream. +# - env-activate.sh: +# Is sourced (via symlink in /etc/profile.d/) to activate the /usr/local env. +# - env-execute: +# Is set as the ENTRYPOINT to activate /usr/local before exec'ing CMD. +RUN touch ./usr/local/env-activate.sh \ + && \ + touch ./usr/local/env-execute \ + && \ + chmod +x ./usr/local/env-execute \ + && \ + ln -s \ + /usr/local/env-activate.sh \ + ./etc/profile.d/env-activate.sh \ + && \ + printf '%s\n' \ + '#! /bin/bash' \ + ". 
'/usr/local/env-activate.sh'" \ + 'exec "${@}"' \ + > ./usr/local/env-execute + +FROM scratch +COPY --from=rootfs_builder /rootfs / +ENV LANG=C.UTF-8 +ENTRYPOINT [ "/usr/local/env-execute" ] +CMD [ "bash" ] diff --git a/images/base-glibc-busybox-bash/Dockerfile.busybox b/images/base-glibc-busybox-bash/Dockerfile.busybox new file mode 100644 index 00000000000..fcbd60bd350 --- /dev/null +++ b/images/base-glibc-busybox-bash/Dockerfile.busybox @@ -0,0 +1,23 @@ +# Build busybox ourselves to have more fine-grained control over what we want +# (or not want) to include. +# Use old Debian version to ensure compatible (low glibc requirement) binaries. +FROM debian:9-slim AS busybox_builder +RUN [ ! -f /etc/apt/sources.list ] || sed --in-place= --regexp-extended \ + '/ stretch/ { s,-updates,-backports, ; s,/(deb|security)\.,/archive., }' \ + /etc/apt/sources.list \ + && \ + apt-get update && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get install --yes --no-install-recommends \ + bzip2 curl ca-certificates tar \ + gcc libc6-dev \ + gcc-aarch64-linux-gnu libc6-dev-arm64-cross \ + make patch + +WORKDIR /build +COPY build-busybox ./ +ARG busybox_version +RUN ./build-busybox \ + "${busybox_version}" \ + x86_64 aarch64 + diff --git a/images/base-glibc-busybox-bash/Dockerfile.test b/images/base-glibc-busybox-bash/Dockerfile.test new file mode 100644 index 00000000000..feba4402b8a --- /dev/null +++ b/images/base-glibc-busybox-bash/Dockerfile.test @@ -0,0 +1,27 @@ +ARG base +FROM "${base}" + +# Check if env-activate.sh gets sourced for login shell and in env-execute. +RUN [ "$( sh -lc 'printf world' )" = 'world' ] \ + && \ + [ "$( /usr/local/env-execute sh -c 'printf world' )" = 'world' ] \ + && \ + printf '%s\n' \ + 'printf "hello "' \ + > /usr/local/env-activate.sh \ + && \ + [ "$( sh -lc 'printf world' )" = 'hello world' ] \ + && \ + [ "$( /usr/local/env-execute sh -c 'printf world' )" = 'hello world' ] \ + && \ + printf '' \ + > /usr/local/env-activate.sh + +RUN arch=$(uname -m) \ + && \ + wget --quiet \ + "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" \ + && \ + sh ./Miniforge3-Linux-${arch}.sh -bp /opt/conda \ + && \ + /opt/conda/bin/conda info --all diff --git a/images/base-glibc-busybox-bash/build-busybox b/images/base-glibc-busybox-bash/build-busybox new file mode 100755 index 00000000000..902b33753d8 --- /dev/null +++ b/images/base-glibc-busybox-bash/build-busybox @@ -0,0 +1,140 @@ +#! /bin/sh +set -xeu + +download() { + curl --location --silent \ + "https://busybox.net/downloads/busybox-${version}.tar.bz2" \ + | tar -xjf- --strip-components=1 +} + +patch() { + case "${version}" in 1.36.* ) + # Small fix to let it build with older glibc versions. + curl --location --silent \ + 'https://git.busybox.net/busybox/patch/miscutils/seedrng.c?id=200a9669fbf6f06894e4243cccc9fc11a1a6073a' \ + 'https://git.busybox.net/busybox/patch/miscutils/seedrng.c?id=cb57abb46f06f4ede8d9ccbdaac67377fdf416cf' \ + | command patch --strip=1 + esac + + # Add support for running busybox wget without OpenSSL under QEMU. + # (NB: If we run into other QEMU+BusyBox problems that needs debugging: That + # vfork issue might affect other BusyBox parts, so check for it first.) 
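  # Background for the patch applied below, as summarized from its commit
  # message: wget's openssl fallback assumes vfork() blocks the parent until the
  # child exec()s, so a failed exec of `openssl s_client` can be signalled
  # through the shared `child_failed` flag. QEMU user-mode emulation does not
  # provide that blocking behaviour, the flag is never observed, and the
  # fallback to the built-in ssl_client never happens. The patch replaces the
  # flag with a close-on-exec status pipe: the child writes errno into it only
  # if exec fails, and the parent reads the pipe to decide whether to fall back.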
+ command patch --strip=1 <<'EOP' +From e7b57533ffcd5842fa93f5aa96949b3eaed54b67 Mon Sep 17 00:00:00 2001 +From: Marcel Bargull +Date: Sat, 14 Oct 2023 22:58:42 +0200 +Subject: [PATCH] wget: don't assume vfork blocking for openssl exec + +Under QEMU, busybox wget fails to fallback to busybox ssl_client in case +openssl s_client can't be executed because QEMU's vfork does not block. +Ref.: https://man7.org/linux/man-pages/man2/vfork.2.html#VERSIONS + +Signed-off-by: Marcel Bargull +--- + networking/wget.c | 24 +++++++++++++++++++++--- + 1 file changed, 21 insertions(+), 3 deletions(-) + +diff --git a/networking/wget.c b/networking/wget.c +index 9ec0e67b9..4bcc26e86 100644 +--- a/networking/wget.c ++++ b/networking/wget.c +@@ -683,3 +683,9 @@ static int spawn_https_helper_openssl(const char *host, unsigned port) + int pid; +- IF_FEATURE_WGET_HTTPS(volatile int child_failed = 0;) ++ ++# if ENABLE_FEATURE_WGET_HTTPS ++ struct fd_pair status; ++ int exec_errno = 0; ++ ++ xpiped_pair(status); ++# endif + +@@ -701,2 +707,7 @@ static int spawn_https_helper_openssl(const char *host, unsigned port) + ++# if ENABLE_FEATURE_WGET_HTTPS ++ close(status.rd); ++ if (fcntl(status.wr, F_SETFD, FD_CLOEXEC) != 0) ++ bb_simple_perror_msg_and_die("fcntl"); ++# endif + close(sp[0]); +@@ -743,5 +754,8 @@ static int spawn_https_helper_openssl(const char *host, unsigned port) + BB_EXECVP(argv[0], argv); ++ exec_errno = errno; + xmove_fd(3, 2); + # if ENABLE_FEATURE_WGET_HTTPS +- child_failed = 1; ++ if (write(status.wr, &exec_errno, sizeof(exec_errno)) != sizeof(exec_errno)) ++ bb_simple_perror_msg_and_die("write"); ++ close(status.wr); + xfunc_die(); +@@ -758,3 +772,7 @@ static int spawn_https_helper_openssl(const char *host, unsigned port) + # if ENABLE_FEATURE_WGET_HTTPS +- if (child_failed) { ++ close(status.wr); ++ if (read(status.rd, &exec_errno, sizeof(exec_errno)) == -1) ++ bb_simple_perror_msg_and_die("read"); ++ close(status.rd); ++ if (exec_errno) { + close(sp[0]); +EOP +} + +config() { + make defconfig + mv .config .defconfig + # Set CONFIG_SUBST_WCHAR=0 for better Unicode support and remove big components. + printf %s\\n \ + CONFIG_AR=y \ + CONFIG_FEATURE_AR_CREATE=y \ + CONFIG_FEATURE_AR_LONG_FILENAMES=y \ + CONFIG_SUBST_WCHAR=0 \ + CONFIG_RPM=n \ + CONFIG_RPM2CPIO=n \ + CONFIG_FSCK_MINIX=n \ + CONFIG_MKFS_MINIX=n \ + CONFIG_BC=n \ + CONFIG_DC=n \ + CONFIG_HDPARM=n \ + CONFIG_HEXEDIT=n \ + CONFIG_I2CGET=n \ + CONFIG_I2CSET=n \ + CONFIG_I2CDUMP=n \ + CONFIG_I2CDETECT=n \ + CONFIG_I2CTRANSFER=n \ + CONFIG_DNSD=n \ + CONFIG_FTPD=n \ + CONFIG_HTTPD=n \ + CONFIG_TCPSVD=n \ + CONFIG_UDPSVD=n \ + CONFIG_UDHCPD=n \ + CONFIG_SH_IS_ASH=n \ + CONFIG_SH_IS_NONE=y \ + CONFIG_SHELL_ASH=n \ + CONFIG_ASH=n \ + CONFIG_HUSH=n \ + CONFIG_SHELL_HUSH=n \ + | cat - .defconfig \ + > .config + # make still asks which shell to use for sh although CONFIG_SH_IS_NONE=y is set!? + printf \\n | make oldconfig +} + +build() { + make -j "$( nproc )" busybox +} + +main() { + version="${1}" + shift + download + patch + for target ; do + export MAKEFLAGS="ARCH=${target} CROSS_COMPILE=${target}-linux-gnu-" + make clean + config + build + cp -al ./busybox "./busybox.${target}" + done +} + +main "${@}" diff --git a/images/base-glibc-busybox-bash/install-pkgs b/images/base-glibc-busybox-bash/install-pkgs new file mode 100755 index 00000000000..fdb483dd268 --- /dev/null +++ b/images/base-glibc-busybox-bash/install-pkgs @@ -0,0 +1,361 @@ +#! 
/bin/sh +set -xeu + +arch=$(uname -m) + +prepare_remove_docs() { + # remove lintian and docs (apart from copyright) + rm -rf \ + ./usr/share/lintian \ + ./usr/share/man + find ./usr/share/doc/ -type f ! -name copyright -delete + find ./usr/share/doc/ -type d -empty -delete +} + + +prepare_usrmerge() { + # If we are on Debian >=12, /bin et al. are symlinks to /usr/ counterparts. + # Since we don't do full apt installs, we accomodate for it here. + if [ -L "${root_fs}/bin" ] ; then + for dir in bin lib* sbin ; do + [ -d "./${dir}" ] || continue + [ -L "./${dir}" ] && continue + mkdir -p ./usr + cp -ral "./${dir}" ./usr/ + rm -rf "./${dir}" + ln -s "usr/${dir}" "${dir}" + done + fi +} + + +add_rpath() { + local binary="${1}" + shift + local new_rpath="${1}" + shift + local rpath + rpath="$( + patchelf \ + --print-rpath \ + "${binary}" + )" + patchelf \ + --set-rpath \ + "${rpath:+${rpath}:}${new_rpath}" \ + "${binary}" +} + + +prepare() { + local pkg="${1}" + shift + local destdir="${1}" + shift + + case "${pkg}" in + libc6 ) + # To reduce image size, remove all charset conversion modules apart + # from smaller ones for some common encodings. + # Update gconv-modules accordingly. + # NOTE: When adding/removing any, check required dyn. linked libs! + + local gconv_path="./usr/lib/${arch}-linux-gnu/gconv" + local gconv_modules_regex + if [ -e "${gconv_path}/gconv-modules.d/gconv-modules-extra.conf" ] ; then + gconv_modules_regex="$( + sed -nE 's/^module\s+\S+\s+\S+\s+(\S+)\s+.*/\1/p' \ + < "${gconv_path}/gconv-modules" \ + | sort -u \ + | tr '\n' '|' \ + | sed 's/|$//' + )" + : > "${gconv_path}/gconv-modules.d/gconv-modules-extra.conf" + else + gconv_modules_regex='UTF-\w+|UNICODE|ISO8859-(1|15)|CP1252|ANSI_X3\.110' + local gconv_modules_file_tmp='./.tmp.gconv-modules' + + mv "${gconv_path}"/gconv-modules "${gconv_modules_file_tmp}" + + grep -E \ + '^\s*$|^#|^(alias\s+.*|module\s+[^\s]+\s+[^\s]+)\s+\<('"${gconv_modules_regex}"')(//|\s)' \ + "${gconv_modules_file_tmp}" \ + | sed -nEe '1N;N;/^(#.*)\n.*\1/{D;D};P;D' | cat -s \ + > "${gconv_path}"/gconv-modules + rm "${gconv_modules_file_tmp}" + fi + + find "${gconv_path}" \ + -mindepth 1 -maxdepth 1 \ + -name '*.so' \ + -type f \ + -regextype posix-extended \ + ! -regex '.*/('"${gconv_modules_regex}"').so' \ + -print -delete + + iconvconfig --prefix ./ + + ;; + bash ) + rm -rf ./usr/share/locale + # Add custom rpath for libtinfo (see below) to bash binaries. + local new_rpath="/lib/${arch}-linux-gnu/terminfo:/usr/lib/${arch}-linux-gnu/terminfo" + add_rpath ./bin/bash "${new_rpath}" + add_rpath ./usr/bin/clear_console "${new_rpath}" + ;; + libtinfo* ) + # Move libtinfo libraries to a custom path to ensure it is not + # unintentionally used in downstream images. + find ./usr/lib/${arch}-linux-gnu -type f \ + | { + while read binary ; do + add_rpath "${binary}" "/lib/${arch}-linux-gnu/terminfo" + done + } + + mv ./lib/${arch}-linux-gnu ./temp + mkdir ./lib/${arch}-linux-gnu + mv ./temp ./lib/${arch}-linux-gnu/terminfo + + mv ./usr/lib/${arch}-linux-gnu ./temp + mkdir ./usr/lib/${arch}-linux-gnu + mv ./temp ./usr/lib/${arch}-linux-gnu/terminfo + ;; + base-passwd ) + # The dependencies libdebconfclient0 (and libselinux1 for Debian>=12) + # are needed for update-passwd, but we ignore them => remove the binary. + rm ./usr/sbin/update-passwd + ;; + login ) + rm -rf ./usr/share/locale + # The following binaries provided by BusyBox or pull in more dependencies + # (PAM, libselinux1, and their dependencies) => remove them. 
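      # (The removed commands are expected to be covered by the BusyBox applets
      # set up via `busybox --install` in the Dockerfile; as an illustrative
      # check, with <image> standing in for the built tag:
      #   docker run --rm <image> su --help
      # should be answered by the BusyBox multi-call binary.)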
+ rm -f \ + ./bin/login \ + ./bin/su \ + ./usr/bin/lastlog \ + ./usr/bin/newgrp \ + ./usr/bin/sg + ;; + libc-bin | \ + libgcc1 | \ + base-files | \ + gcc-*-base | \ + libcrypt1 | \ + libgcc-s1 | \ + libdebconfclient0 | \ + libpcre* | \ + libselinux1 | \ + ncurses-base | \ + zlib1g ) + : + ;; + * ) + # Abort if we get an unexpected package. + printf %s\\n "\`prepare\` not defined for ${pkg}" >&2 + return 1 + ;; + esac + prepare_remove_docs + prepare_usrmerge +} + + +postinst_ldconfig_trigger() { + ldconfig --verbose -r ./ +} + + +postinst() { + local pkg="${1}" + shift + local destdir="${1}" + shift + + case "${pkg}" in + libc-bin ) + cp -p --remove-destination \ + ./usr/share/libc-bin/nsswitch.conf \ + ./etc/nsswitch.conf + postinst_ldconfig_trigger + ;; + base-files ) + cp "${destdir}/DEBIAN/postinst" ./base-files-postinst + chroot ./ sh /base-files-postinst configure + rm ./base-files-postinst + ;; + base-passwd ) + mkdir -p "${destdir}/etc" + cp -p --remove-destination \ + "${destdir}/usr/share/base-passwd/group.master" \ + ./etc/group + cp -p --remove-destination \ + "${destdir}/usr/share/base-passwd/passwd.master" \ + ./etc/passwd + DPKG_ROOT="$( pwd )" \ + shadowconfig on + ;; + login ) + for file in /var/log/faillog /etc/subuid /etc/subgid ; do + [ -f "./${file}" ] || continue + touch "${file}" + chown 0:0 "${file}" + chmod 644 "${file}" + done + ;; + bash ) + # Replace BusyBox's sh by Bash + rm -f ./bin/sh + ln -s /bin/bash ./bin/sh + chroot ./ add-shell /bin/sh + chroot ./ add-shell /bin/bash + chroot ./ add-shell /bin/rbash + # Bash 4.* did not have default key bindings for control-arrow-key key + # combinations. Add some for convenience: + cat >> ./etc/inputrc <<'EOF' + +"\e[5C": forward-word +"\e[5D": backward-word +"\e\e[C": forward-word +"\e\e[D": backward-word +"\e[1;5C": forward-word +"\e[1;5D": backward-word +EOF + ;; + libc6 | \ + libdebconfclient0 | \ + libgcc1 | \ + libcrypt1 | \ + libgcc-s1 | \ + libpcre* | \ + libselinux1 | \ + libtinfo* | \ + zlib1g ) + postinst_ldconfig_trigger + ;; + gcc-*-base | \ + ncurses-base ) + : + ;; + * ) + # Abort if we get an unexpected package. + printf %s\\n "\`postinst\` not defined for ${pkg}" >&2 + return 1 + ;; + esac +} + + +install_pkg() { + local pkg="${1}" + shift + + local work_dir="${work_base}/${pkg}" + mkdir "${work_dir}" + cd "${work_dir}" + + # Download package + apt-get download "${pkg}" + local deb_file + deb_file="$( find "$( pwd )" -maxdepth 1 -name '*.deb' )" + + # Prepare package + local destdir="${work_dir}/destdir" + mkdir "${destdir}" + cd "${destdir}" + dpkg-deb --raw-extract "${deb_file}" ./ + prepare "${pkg}" "${destdir}" + dpkg-deb --build ./ "${deb_file}" + cd "${work_dir}" + + # Extract package + dpkg-deb --vextract "${deb_file}" "${root_fs}" + rm "${deb_file}" + printf %s\\n "$( basename "${deb_file}" )" >> "${root_fs}/.deb.lst" + + # Finalize package installation + cd "${root_fs}" + postinst "${pkg}" "${destdir}" + + cd "${work_base}" + rm -rf "${work_dir}" + printf %s\\n "${pkg}" >> "${root_fs}/.pkg.lst" +} + + +get_deps() { + [ -z "${*}" ] && return 0 + + # Instead of using `apt-cache depends --recurse` or `debfoster -d`, recurse + # manually so that we can exclude some packages that are either already + # installed or would pull in files/packages we don't need. 
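  # For reference, `apt-cache depends --no-recommends --no-suggests ... bash`
  # prints blocks along these lines (exact dependencies differ per release):
  #   bash
  #     PreDepends: libc6
  #     PreDepends: libtinfo6
  #     Depends: base-files
  #     Depends: debianutils
  # The loop below keeps only the "Depends:"/"PreDepends:" targets, merges them
  # with the packages found so far, drops the ignore set, and repeats until the
  # list stops growing.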
+ + local ignore_pkgs + ignore_pkgs="$( + printf %s\\n \ + base-files '' debianutils dash \ + libdebconfclient0 libselinux1 \ + libaudit1 libpam-modules libpam-runtime libpam0g \ + | grep -vFx "$( printf %s\\n "${@}" )" + )" + [ -f "${root_fs}/.pkg.lst" ] && \ + ignore_pkgs=$( printf %s\\n ${ignore_pkgs} $( cat -s "${root_fs}/.pkg.lst" ) ) + + local new_pkgs="${*}" + local old_pkgs='' + while ! [ "${new_pkgs}" = "${old_pkgs}" ] ; do + old_pkgs="${new_pkgs}" + new_pkgs="$( + apt-cache depends \ + --no-recommends --no-suggests --no-conflicts \ + --no-breaks --no-replaces --no-enhances \ + ${old_pkgs} \ + | sed -n 's/.*Depends: //p' | cat -s + )" + new_pkgs="$( + printf %s\\n ${old_pkgs} ${new_pkgs} \ + | sort -u \ + | grep -vFx "$( printf %s\\n ${ignore_pkgs} )" + )" + done + printf %s\\n ${new_pkgs} +} + + +install_with_deps() { + get_deps "${@}" | while read -r pkg ; do + install_pkg "${pkg}" + done +} + + +main() { + root_fs="${1}" + shift + work_base="${1}" + shift + + mkdir -p "${work_base}" + cd "${work_base}" + + apt-get update + + # Unconditionally install glibc (package libc6). + # Also install dependencies acc. to `apt-cache depends`: + # - libgcc1 only consists of libgcc_s.so.1 (+ docs, which we remove). + # - gcc-*-base only has empty directories (+ docs, which we remove). + install_with_deps libc6 + + # libc-bin must be in ${@} for Unicode support (C.UTF-8 locale). + install_with_deps "${@}" + + # base-files contains /usr/share/common-licenses/, /etc/profile, etc. + # Install base-files afterwards so we have a working sh for the postinst. + install_with_deps base-files + + cd "${root_fs}" + rm -rf "${work_base}" +} + + +main "${@}" diff --git a/images/base-glibc-debian-bash/Dockerfile b/images/base-glibc-debian-bash/Dockerfile new file mode 100644 index 00000000000..c0adc29222d --- /dev/null +++ b/images/base-glibc-debian-bash/Dockerfile @@ -0,0 +1,131 @@ +ARG debian_version + +FROM "debian:${debian_version}-slim" +RUN [ ! -f /etc/apt/sources.list ] || sed --in-place= --regexp-extended \ + '/ stretch/ { s,-updates,-backports, ; s,/(deb|security)\.,/archive., }' \ + /etc/apt/sources.list \ + && \ + apt-get update -qq \ + && \ + # Add en_US.UTF-8 locale. + printf '%s\n' 'en_US.UTF-8 UTF-8' \ + >> /etc/locale.gen \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get install --yes --no-install-recommends \ + $( \ + . /etc/os-release \ + && \ + [ "${VERSION_ID-10}" -lt 10 ] \ + && \ + printf '%s\n' \ + libegl1-mesa \ + libgl1-mesa-glx \ + || \ + printf '%s\n' \ + libegl1 \ + libgl1 \ + libglx-mesa0 \ + ) \ + libglvnd0 \ + libopengl0 \ + locales \ + openssh-client \ + procps \ + && \ + # Remove "locales" package, but keep the generated locale. + sed -i \ + 's/\s*rm .*locale-archive$/: &/' \ + /var/lib/dpkg/info/locales.prerm \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get remove --yes \ + locales \ + && \ + # On Debian 10 (and 11) libgl1-mesa-glx pulls in libgl1-mesa-dri (which in + # turn has more heavy-weight dependencies). We leave these out of the image + # (by manually removing it from "Depends:" list) like we do with Debian 9. + sed -i \ + '/^Depends:/ s/, libgl1-mesa-dri\>//g' \ + /var/lib/dpkg/status \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get autoremove --yes \ + && \ + # Remove apt package lists. + rm -rf /var/lib/apt/lists/* \ + && \ + # Remove contents of /usr/local as downstream images overwrite those. 
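  # (`-mindepth 1` keeps /usr/local itself while `-depth` removes its contents
  # bottom-up, leaving an empty directory for downstream images to populate.)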
+ find ./usr/local/ \ + -mindepth 1 -depth \ + -delete + +RUN dpkg-query --show --showformat \ + '${db:Status-Status} ${Package}\n' \ + | sed -n 's/:/%3a/g ; s/^installed //p' \ + > /.pkg.lst \ + && \ + dpkg-query --show --showformat \ + '${db:Status-Status} ${Package}_${Version}_${Architecture}\n' \ + | sed -n 's/:/%3a/g ; s/$/.deb/ ; s/^installed //p' \ + > /.deb.lst + +RUN while IFS=: read _ _ uid gid _ home _ ; do \ + [ -n "${home##/var/run/*}" ] || home="${home#/var}" \ + && \ + [ -d "./${home#/}" ] || [ "${home}" = "/nonexistent" ] && continue ; \ + mkdir -p "./${home#/}" \ + && \ + chown "${uid}:${gid}" "./${home#/}" \ + && \ + chmod 775 "./${home#/}" \ + ; done < ./etc/passwd \ + && \ + pwck --read-only --root "$( pwd )" \ + | { ! grep -v -e 'no changes' -e '/nonexistent' ; } \ + && \ + grpck --read-only --root "$( pwd )" \ + && \ + find \ + -xdev -type f \! -path ./var/\* \! -path ./usr/share/\* \! -name \*.pl \ + | xargs -P0 -n100 sh -c \ + 'chroot . ldd -- "${@}" 2> /dev/null | sed -n "/:/h; /not found/{x;p;x;p}"' -- \ + | { ! grep . ; } + +# Bash 4.* did not have default key bindings for control-arrow-key key +# combinations. Add some for convenience: +RUN >> /etc/inputrc \ + printf '%s\n' \ + '' \ + '"\e[5C": forward-word' \ + '"\e[5D": backward-word' \ + '"\e\e[C": forward-word' \ + '"\e\e[D": backward-word' \ + '"\e[1;5C": forward-word' \ + '"\e[1;5D": backward-word' \ + ; + +# env-activate.sh (+ optionally env-execute) should be overwritten downstream. +# - env-activate.sh: +# Is sourced (via symlink in /etc/profile.d/) to activate the /usr/local env. +# - env-execute: +# Is set as the ENTRYPOINT to activate /usr/local before exec'ing CMD. +RUN touch /usr/local/env-activate.sh \ + && \ + touch /usr/local/env-execute \ + && \ + chmod +x /usr/local/env-execute \ + && \ + ln -s \ + /usr/local/env-activate.sh \ + /etc/profile.d/env-activate.sh \ + && \ + printf '%s\n' \ + '#! /bin/bash' \ + ". '/usr/local/env-activate.sh'" \ + 'exec "${@}"' \ + > /usr/local/env-execute + +ENV LANG=C.UTF-8 +ENTRYPOINT [ "/usr/local/env-execute" ] +CMD [ "bash" ] diff --git a/images/base-glibc-debian-bash/Dockerfile.test b/images/base-glibc-debian-bash/Dockerfile.test new file mode 100644 index 00000000000..f2f0bace3a8 --- /dev/null +++ b/images/base-glibc-debian-bash/Dockerfile.test @@ -0,0 +1,39 @@ +ARG base +FROM "${base}" + +# Check if env-activate.sh gets sourced for login shell and in env-execute. +RUN [ "$( sh -lc 'printf world' )" = 'world' ] \ + && \ + [ "$( /usr/local/env-execute sh -c 'printf world' )" = 'world' ] \ + && \ + printf '%s\n' \ + 'printf "hello "' \ + > /usr/local/env-activate.sh \ + && \ + [ "$( sh -lc 'printf world' )" = 'hello world' ] \ + && \ + [ "$( /usr/local/env-execute sh -c 'printf world' )" = 'hello world' ] \ + && \ + printf '' \ + > /usr/local/env-activate.sh + +# Check if all desired locales are there. 
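# For reference, `locale -a` in this image is expected to include entries like
#   C.utf8
#   en_US.utf8
# but the exact spelling varies between glibc versions ("C.UTF-8" vs "C.utf8"),
# hence the case-insensitive, dash-optional patterns below.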
+RUN locale -a | grep -i 'c\.utf-\?8' \ + && \ + locale -a | grep -i 'en_us\.utf-\?8' + +RUN apt-get update -qq \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get install --yes --no-install-recommends \ + ca-certificates \ + wget \ + && \ + arch=$(uname -m) \ + && \ + wget --quiet \ + "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" \ + && \ + sh ./Miniforge3-Linux-${arch}.sh -bp /opt/conda \ + && \ + /opt/conda/bin/conda info --all diff --git a/images/bioconda-recipes-issue-responder/Dockerfile b/images/bioconda-recipes-issue-responder/Dockerfile new file mode 100644 index 00000000000..9b94896414c --- /dev/null +++ b/images/bioconda-recipes-issue-responder/Dockerfile @@ -0,0 +1,40 @@ +ARG base=quay.io/bioconda/base-glibc-busybox-bash:2.0.0 + +FROM quay.io/bioconda/create-env:2.0.0 as build +RUN /opt/create-env/env-execute \ + create-env \ + --conda=mamba \ + --strip-files=\* \ + --remove-paths=\*.a \ + --remove-paths=\*.pyc \ + /usr/local \ + aiohttp \ + anaconda-client \ + ca-certificates \ + git \ + openssh \ + python=3.8 \ + pyyaml \ + skopeo \ + && \ + # Workaround for https://github.com/conda/conda/issues/10490 + export CONDA_REPODATA_THREADS=1 && \ + # We don't need Perl (used by Git for some functionalities). + # => Remove perl package to reduce image size. + /opt/create-env/env-execute \ + conda remove --yes \ + --prefix=/usr/local \ + --force-remove \ + perl + +FROM "${base}" +COPY --from=build /usr/local /usr/local +COPY ./issue-responder /usr/local/bin/ + +# Used environment variables: +# - JOB_CONTEXT +# - BOT_TOKEN +# - GITTER_TOKEN +# - ANACONDA_TOKEN +# - QUAY_OAUTH_TOKEN +# - QUAY_LOGIN diff --git a/images/bioconda-recipes-issue-responder/Dockerfile.test b/images/bioconda-recipes-issue-responder/Dockerfile.test new file mode 100644 index 00000000000..665dc72ed0a --- /dev/null +++ b/images/bioconda-recipes-issue-responder/Dockerfile.test @@ -0,0 +1,7 @@ +ARG base + + +FROM "${base}" +RUN JOB_CONTEXT='{"event": {"issue": {}}}' \ + /usr/local/env-execute \ + issue-responder diff --git a/images/bioconda-recipes-issue-responder/issue-responder b/images/bioconda-recipes-issue-responder/issue-responder new file mode 100755 index 00000000000..9d915f2f528 --- /dev/null +++ b/images/bioconda-recipes-issue-responder/issue-responder @@ -0,0 +1,615 @@ +#! 
/usr/bin/env python + +import logging +import os +import re +import sys +from asyncio import gather, run, sleep +from asyncio.subprocess import create_subprocess_exec +from pathlib import Path +from shutil import which +from subprocess import check_call +from typing import Any, Dict, List, Optional, Set, Tuple +from zipfile import ZipFile + +from aiohttp import ClientSession +from yaml import safe_load + +logger = logging.getLogger(__name__) +log = logger.info + + +async def async_exec( + command: str, *arguments: str, env: Optional[Dict[str, str]] = None +) -> None: + process = await create_subprocess_exec(command, *arguments, env=env) + return_code = await process.wait() + if return_code != 0: + raise RuntimeError( + f"Failed to execute {command} {arguments} (return code: {return_code})" + ) + + +# Post a comment on a given issue/PR with text in message +async def send_comment(session: ClientSession, issue_number: int, message: str) -> None: + token = os.environ["BOT_TOKEN"] + url = ( + f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{issue_number}/comments" + ) + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"body": message} + log("Sending comment: url=%s", url) + log("Sending comment: payload=%s", payload) + async with session.post(url, headers=headers, json=payload) as response: + status_code = response.status + log("the response code was %d", status_code) + if status_code < 200 or status_code > 202: + sys.exit(1) + + +def list_zip_contents(fname: str) -> [str]: + f = ZipFile(fname) + return [e.filename for e in f.infolist() if e.filename.endswith('.tar.gz') or e.filename.endswith('.tar.bz2')] + + +# Download a zip file from url to zipName.zip and return that path +# Timeout is 30 minutes to compensate for any network issues +async def download_file(session: ClientSession, zipName: str, url: str) -> str: + async with session.get(url, timeout=60*30) as response: + if response.status == 200: + ofile = f"{zipName}.zip" + with open(ofile, 'wb') as fd: + while True: + chunk = await response.content.read(1024*1024*1024) + if not chunk: + break + fd.write(chunk) + return ofile + return None + + +# Find artifact zip files, download them and return their URLs and contents +async def fetch_azure_zip_files(session: ClientSession, buildId: str) -> [(str, str)]: + artifacts = [] + + url = f"https://dev.azure.com/bioconda/bioconda-recipes/_apis/build/builds/{buildId}/artifacts?api-version=4.1" + log("contacting azure %s", url) + async with session.get(url) as response: + # Sometimes we get a 301 error, so there are no longer artifacts available + if response.status == 301: + return artifacts + res = await response.text() + + res_object = safe_load(res) + if res_object['count'] == 0: + return artifacts + + for artifact in res_object['value']: + zipName = artifact['name'] # LinuxArtifacts or OSXArtifacts + zipUrl = artifact['resource']['downloadUrl'] + log(f"zip name is {zipName} url {zipUrl}") + fname = await download_file(session, zipName, zipUrl) + if not fname: + continue + pkgsImages = list_zip_contents(fname) + for pkg in pkgsImages: + artifacts.append((zipUrl, pkg)) + + return artifacts + + +def parse_azure_build_id(url: str) -> str: + return re.search("buildId=(\d+)", url).group(1) + + +# Given a PR and commit sha, fetch a list of the artifact zip files URLs and their contents +async def fetch_pr_sha_artifacts(session: ClientSession, pr: int, sha: str) -> List[Tuple[str, str]]: + url = 
f"https://api.github.com/repos/bioconda/bioconda-recipes/commits/{sha}/check-runs" + + headers = { + "User-Agent": "BiocondaCommentResponder", + "Accept": "application/vnd.github.antiope-preview+json", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + check_runs = safe_load(res) + log(f"DEBUG url was {url} returned {check_runs}") + + for check_run in check_runs["check_runs"]: + # The names are "bioconda.bioconda-recipes (test_osx test_osx)" or similar + if check_run["name"].startswith("bioconda.bioconda-recipes (test_"): + # The azure build ID is in the details_url as buildId=\d+ + buildID = parse_azure_build_id(check_run["details_url"]) + log(f"DEBUG buildID is {buildID}") + zipFiles = await fetch_azure_zip_files(session, buildID) + log(f"DEBUG zipFiles are {zipFiles}") + return zipFiles # We've already fetched all possible artifacts + + return [] + + +# Given a PR and commit sha, post a comment with any artifacts +async def make_artifact_comment(session: ClientSession, pr: int, sha: str) -> None: + artifacts = await fetch_pr_sha_artifacts(session, pr, sha) + nPackages = len(artifacts) + log(f"DEBUG the artifacts are {artifacts}") + + if nPackages > 0: + comment = "Package(s) built on Azure are ready for inspection:\n\n" + comment += "Arch | Package | Zip File\n-----|---------|---------\n" + install_noarch = "" + install_linux = "" + install_osx = "" + + # Table of packages and repodata.json + for URL, artifact in artifacts: + if not (package_match := re.match(r"^((.+)\/(.+)\/(.+)\/(.+\.tar\.bz2))$", artifact)): + continue + url, archdir, basedir, subdir, packageName = package_match.groups() + urlBase = URL[:-3] # trim off zip from format= + urlBase += "file&subPath=%2F{}".format("%2F".join([basedir, subdir])) + conda_install_url = urlBase + # N.B., the zip file URL is nearly identical to the URL for the individual member files. It's unclear if there's an API for getting the correct URL to the files themselves + #pkgUrl = "%2F".join([urlBase, packageName]) + #repoUrl = "%2F".join([urlBase, "current_repodata.json"]) + #resp = await session.get(repoUrl) + + if subdir == "noarch": + comment += "noarch |" + elif subdir == "linux-64": + comment += "linux-64 |" + else: + comment += "osx-64 |" + comment += f" {packageName} | [{archdir}]({URL})\n" + + # Conda install examples + comment += "***\n\nYou may also use `conda` to install these after downloading and extracting the appropriate zip file. From the LinuxArtifacts or OSXArtifacts directories:\n\n" + comment += "```conda install -c ./packages \n```\n" + + # Table of containers + comment += "***\n\nDocker image(s) built (images are in the LinuxArtifacts zip file above):\n\n" + comment += "Package | Tag | Install with `docker`\n" + comment += "--------|-----|----------------------\n" + + for URL, artifact in artifacts: + if artifact.endswith(".tar.gz"): + image_name = artifact.split("/").pop()[: -len(".tar.gz")] + if ':' in image_name: + package_name, tag = image_name.split(':', 1) + #image_url = URL[:-3] # trim off zip from format= + #image_url += "file&subPath=%2F{}.tar.gz".format("%2F".join(["images", '%3A'.join([package_name, tag])])) + comment += f"[{package_name}] | {tag} | " + comment += f'
show`gzip -dc LinuxArtifacts/images/{image_name}.tar.gz \\| docker load`\n' + comment += "\n\n" + else: + comment = ( + "No artifacts found on the most recent Azure build. " + "Either the build failed, the artifacts have were removed due to age, or the recipe was blacklisted/skipped." + ) + await send_comment(session, pr, comment) + + +# Post a comment on a given PR with its CircleCI artifacts +async def artifact_checker(session: ClientSession, issue_number: int) -> None: + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}" + headers = { + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + + await make_artifact_comment(session, issue_number, pr_info["head"]["sha"]) + + +# Return true if a user is a member of bioconda +async def is_bioconda_member(session: ClientSession, user: str) -> bool: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/orgs/bioconda/members/{user}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + rc = 404 + async with session.get(url, headers=headers) as response: + try: + response.raise_for_status() + rc = response.status + except: + # Do nothing, this just prevents things from crashing on 404 + pass + + return rc == 204 + + +# Reposts a quoted message in a given issue/PR if the user isn't a bioconda member +async def comment_reposter(session: ClientSession, user: str, pr: int, message: str) -> None: + if await is_bioconda_member(session, user): + log("Not reposting for %s", user) + return + log("Reposting for %s", user) + await send_comment( + session, + pr, + f"Reposting for @{user} to enable pings (courtesy of the BiocondaBot):\n\n> {message}", + ) + + +# Fetch and return the JSON of a PR +# This can be run to trigger a test merge +async def get_pr_info(session: ClientSession, pr: int) -> Any: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{pr}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + return pr_info + + +# Update a branch from upstream master, this should be run in a try/catch +async def update_from_master_runner(session: ClientSession, pr: int) -> None: + async def git(*args: str) -> None: + return await async_exec("git", *args) + + # Setup git, otherwise we can't push + await git("config", "--global", "user.email", "biocondabot@gmail.com") + await git("config", "--global", "user.name", "BiocondaBot") + + pr_info = await get_pr_info(session, pr) + remote_branch = pr_info["head"]["ref"] + remote_repo = pr_info["head"]["repo"]["full_name"] + + max_depth = 2000 + # Clone + await git( + "clone", + f"--depth={max_depth}", + f"--branch={remote_branch}", + f"git@github.com:{remote_repo}.git", + "bioconda-recipes", + ) + + async def git_c(*args: str) -> None: + return await git("-C", "bioconda-recipes", *args) + + # Add/pull upstream + await git_c("remote", "add", "upstream", "https://github.com/bioconda/bioconda-recipes") + await git_c("fetch", f"--depth={max_depth}", "upstream", "master") + + # Merge + await git_c("merge", "upstream/master") + + await git_c("push") + + +# Merge the upstream master branch into a PR branch, leave a message on error +async def 
update_from_master(session: ClientSession, pr: int) -> None: + try: + await update_from_master_runner(session, pr) + except Exception as e: + await send_comment( + session, + pr, + "I encountered an error updating your PR branch. You can report this to bioconda/core if you'd like.\n-The Bot", + ) + sys.exit(1) + + +# Ensure there's at least one approval by a member +async def approval_review(session: ClientSession, issue_number: int) -> bool: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}/reviews" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + reviews = safe_load(res) + + approved_reviews = [review for review in reviews if review["state"] == "APPROVED"] + if not approved_reviews: + return False + + # Ensure the review author is a member + return any( + gather( + *( + is_bioconda_member(session, review["user"]["login"]) + for review in approved_reviews + ) + ) + ) + + +# Check the mergeable state of a PR +async def check_is_mergeable( + session: ClientSession, issue_number: int, second_try: bool = False +) -> bool: + token = os.environ["BOT_TOKEN"] + # Sleep a couple of seconds to allow the background process to finish + if second_try: + await sleep(3) + + # PR info + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + + # We need mergeable == true and mergeable_state == clean, an approval by a member and + if pr_info.get("mergeable") is None and not second_try: + return await check_is_mergeable(session, issue_number, True) + elif ( + pr_info.get("mergeable") is None + or not pr_info["mergeable"] + or pr_info["mergeable_state"] != "clean" + ): + return False + + return await approval_review(session, issue_number) + + +# Ensure uploaded containers are in repos that have public visibility +async def toggle_visibility(session: ClientSession, container_repo: str) -> None: + url = f"https://quay.io/api/v1/repository/biocontainers/{container_repo}/changevisibility" + QUAY_OAUTH_TOKEN = os.environ["QUAY_OAUTH_TOKEN"] + headers = { + "Authorization": f"Bearer {QUAY_OAUTH_TOKEN}", + "Content-Type": "application/json", + } + body = {"visibility": "public"} + rc = 0 + try: + async with session.post(url, headers=headers, json=body) as response: + rc = response.status + except: + # Do nothing + pass + log("Trying to toggle visibility (%s) returned %d", url, rc) + + +# Download an artifact from CircleCI, rename and upload it +async def download_and_upload(session: ClientSession, x: str) -> None: + basename = x.split("/").pop() + # the tarball needs a regular name without :, the container needs pkg:tag + image_name = basename.replace("%3A", ":").replace("\n", "").replace(".tar.gz", "") + file_name = basename.replace("%3A", "_").replace("\n", "") + + async with session.get(x) as response: + with open(file_name, "wb") as file: + logged = 0 + loaded = 0 + while chunk := await response.content.read(256 * 1024): + file.write(chunk) + loaded += len(chunk) + if loaded - logged >= 50 * 1024 ** 2: + log("Downloaded %.0f MiB: %s", max(1, loaded / 1024 ** 2), x) + logged = loaded + log("Downloaded 
%.0f MiB: %s", max(1, loaded / 1024 ** 2), x) + + if x.endswith(".gz"): + # Container + log("uploading with skopeo: %s", file_name) + # This can fail, retry with 5 second delays + count = 0 + maxTries = 5 + success = False + QUAY_LOGIN = os.environ["QUAY_LOGIN"] + env = os.environ.copy() + # TODO: Fix skopeo package to find certificates on its own. + skopeo_path = which("skopeo") + if not skopeo_path: + raise RuntimeError("skopeo not found") + env["SSL_CERT_DIR"] = str(Path(skopeo_path).parents[1].joinpath("ssl")) + while count < maxTries: + try: + await async_exec( + "skopeo", + "--command-timeout", + "600s", + "copy", + f"docker-archive:{file_name}", + f"docker://quay.io/biocontainers/{image_name}", + "--dest-creds", + QUAY_LOGIN, + env=env, + ) + success = True + break + except: + count += 1 + if count == maxTries: + raise + await sleep(5) + if success: + await toggle_visibility(session, basename.split("%3A")[0]) + elif x.endswith(".bz2"): + # Package + log("uploading package") + ANACONDA_TOKEN = os.environ["ANACONDA_TOKEN"] + await async_exec("anaconda", "-t", ANACONDA_TOKEN, "upload", file_name, "--force") + + log("cleaning up") + os.remove(file_name) + + +# Upload artifacts to quay.io and anaconda, return the commit sha +# Only call this for mergeable PRs! +async def upload_artifacts(session: ClientSession, pr: int) -> str: + # Get last sha + pr_info = await get_pr_info(session, pr) + sha: str = pr_info["head"]["sha"] + + # Fetch the artifacts + artifacts = await fetch_pr_sha_artifacts(session, pr, sha) + artifacts = [artifact for artifact in artifacts if artifact.endswith((".gz", ".bz2"))] + assert artifacts + + # Download/upload Artifacts + for artifact in artifacts: + await download_and_upload(session, artifact) + + return sha + + +# Assume we have no more than 250 commits in a PR, which is probably reasonable in most cases +async def get_pr_commit_message(session: ClientSession, issue_number: int) -> str: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}/commits" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + commits = safe_load(res) + message = "".join(f" * {commit['commit']['message']}\n" for commit in reversed(commits)) + return message + + +# Merge a PR +async def merge_pr(session: ClientSession, pr: int) -> None: + token = os.environ["BOT_TOKEN"] + await send_comment( + session, + pr, + "I will attempt to upload artifacts and merge this PR. 
This may take some time, please have patience.", + ) + + try: + mergeable = await check_is_mergeable(session, pr) + log("mergeable state of %s is %s", pr, mergeable) + if not mergeable: + await send_comment(session, pr, "Sorry, this PR cannot be merged at this time.") + else: + log("uploading artifacts") + sha = await upload_artifacts(session, pr) + log("artifacts uploaded") + + # Carry over last 250 commit messages + msg = await get_pr_commit_message(session, pr) + + # Hit merge + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{pr}/merge" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = { + "sha": sha, + "commit_title": f"[ci skip] Merge PR {pr}", + "commit_message": f"Merge PR #{pr}, commits were: \n{msg}", + "merge_method": "squash", + } + log("Putting merge commit") + async with session.put(url, headers=headers, json=payload) as response: + rc = response.status + log("body %s", payload) + log("merge_pr the response code was %s", rc) + except: + await send_comment( + session, + pr, + "I received an error uploading the build artifacts or merging the PR!", + ) + logger.exception("Upload failed", exc_info=True) + + +# Add the "Please review and merge" label to a PR +async def add_pr_label(session: ClientSession, pr: int) -> None: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{pr}/labels" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"labels": ["please review & merge"]} + async with session.post(url, headers=headers, json=payload) as response: + response.raise_for_status() + + +async def gitter_message(session: ClientSession, msg: str) -> None: + token = os.environ["GITTER_TOKEN"] + room_id = "57f3b80cd73408ce4f2bba26" + url = f"https://api.gitter.im/v1/rooms/{room_id}/chatMessages" + headers = { + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + "Accept": "application/json", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"text": msg} + log("Sending request to %s", url) + async with session.post(url, headers=headers, json=payload) as response: + response.raise_for_status() + + +async def notify_ready(session: ClientSession, pr: int) -> None: + try: + await gitter_message( + session, + f"PR ready for review: https://github.com/bioconda/bioconda-recipes/pull/{pr}", + ) + except Exception: + logger.exception("Posting to Gitter failed", exc_info=True) + # Do not die if we can't post to gitter! 
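# Illustrative only: the rough shape of the JOB_CONTEXT payload that main() below
# expects for an issue_comment event, as produced by `toJson(github)` in the
# workflow. Values are made-up examples; only the keys accessed in main() matter.
_EXAMPLE_JOB_CONTEXT = {
    "actor": "some-user",
    "event": {
        "issue": {
            "number": 12345,
            # Present only when the commented issue is a pull request.
            "pull_request": {},
        },
        "comment": {"body": "@BiocondaBot please fetch artifacts"},
    },
}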
+ + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = safe_load(os.environ["JOB_CONTEXT"]) + log("%s", job_context) + if job_context["event"]["issue"].get("pull_request") is None: + return + issue_number = job_context["event"]["issue"]["number"] + + original_comment = job_context["event"]["comment"]["body"] + log("the comment is: %s", original_comment) + + comment = original_comment.lower() + async with ClientSession() as session: + if comment.startswith(("@bioconda-bot", "@biocondabot")): + if "please update" in comment: + await update_from_master(session, issue_number) + elif " hello" in comment: + await send_comment(session, issue_number, "Yes?") + elif " please fetch artifacts" in comment or " please fetch artefacts" in comment: + await artifact_checker(session, issue_number) + elif " please merge" in comment: + await send_comment(session, issue_number, "Sorry, I'm currently disabled") + #await merge_pr(session, issue_number) + elif " please add label" in comment: + await add_pr_label(session, issue_number) + await notify_ready(session, issue_number) + # else: + # # Methods in development can go below, flanked by checking who is running them + # if job_context["actor"] != "dpryan79": + # console.log("skipping") + # sys.exit(0) + elif "@bioconda/" in comment: + await comment_reposter( + session, job_context["actor"], issue_number, original_comment + ) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + run(main()) diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile new file mode 100644 index 00000000000..f90b0a696c4 --- /dev/null +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -0,0 +1,65 @@ +ARG base_image + +FROM ${base_image} as base + +# Copy over C.UTF-8 locale from our base image to make it consistently available during build. +COPY --from=quay.io/bioconda/base-glibc-busybox-bash /usr/lib/locale/C.utf8 /usr/lib/locale/C.utf8 + +# Provide system deps unconditionally until we are able to offer per-recipe installs. +# (Addresses, e.g., "ImportError: libGL.so.1" in tests directly invoked by conda-build.) +# Also install packages that have been installed historically (openssh-client). +RUN yum install -y mesa-libGL-devel \ + && \ + yum install -y openssh-clients \ + && \ + yum clean all && \ + rm -rf /var/cache/yum/* + +# This changes root's .condarc which ENTRYPOINT copies to /home/conda/.condarc later. +RUN . /opt/conda/etc/profile.d/conda.sh && \ + conda config \ + --add channels defaults \ + --add channels bioconda \ + --add channels conda-forge \ + && \ + { conda config --remove repodata_fns current_repodata.json 2> /dev/null || true ; } && \ + conda config --prepend repodata_fns repodata.json && \ + conda config --set channel_priority strict && \ + conda config --set auto_update_conda False + +FROM base as build +WORKDIR /tmp/repo +ARG BIOCONDA_UTILS_FOLDER=./bioconda-utils/ +COPY ${BIOCONDA_UTILS_FOLDER} ./ +RUN . /opt/conda/etc/profile.d/conda.sh && conda list +RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ + pip wheel . && \ + mkdir - /opt/bioconda-utils && \ + cp ./bioconda_utils-*.whl \ + ./bioconda_utils/bioconda_utils-requirements.txt \ + /opt/bioconda-utils/ \ + && \ + chgrp -R lucky /opt/bioconda-utils && \ + chmod -R g=u /opt/bioconda-utils + +FROM base +COPY --from=build /opt/bioconda-utils /opt/bioconda-utils +RUN . 
/opt/conda/etc/profile.d/conda.sh && conda activate base && \ + # Make sure we get the (working) conda we want before installing the rest. + sed -nE \ + '/^conda([>/d' recipe/meta.yaml \ +# && \ +# conda-build -m .ci_support/linux_64_.yaml recipe/ +ARG packages= +ARG python=3.8 +ARG prefix=/usr/local +RUN . /opt/create-env/env-activate.sh && \ + export CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=0 \ + && \ + create-env \ + --conda=mamba \ + --strip-files=\* \ + --remove-paths=\*.a \ + --remove-paths=\*.c \ + --remove-paths=\*.pyc \ + --remove-paths=\*.pyi \ + --remove-paths=\*.pyx \ + --remove-paths=\*.pyx \ + --remove-paths=include/\* \ + --remove-paths=share/doc/\* \ + --remove-paths=share/man/\* \ + --remove-paths='share/terminfo/[!x]/*' \ + --remove-paths=share/locale/\* \ + --remove-paths=lib/python*/ensurepip/\* \ + "${prefix}" \ + --channel=local \ + --channel=conda-forge \ + --override-channels \ + pip wheel setuptools \ + python="${python}" \ + aiohttp \ + ca-certificates \ + idna\<3 \ + pyyaml \ + ${packages} \ + && \ + # Remove tk since no tkinter & co. are needed. + conda remove \ + --yes \ + --force-remove \ + --prefix="${prefix}" \ + tk \ + && \ + # Get rid of Perl pulled in by Git. + # (Bot only uses non-Perl Git functionality => remove baggage.) + if conda list --prefix="${prefix}" | grep -q '^perl\s' ; then \ + conda remove \ + --yes \ + --force-remove \ + --prefix="${prefix}" \ + perl \ + ; fi +# Install bioconda_bot. +WORKDIR /tmp/bot +COPY . ./ +RUN . "${prefix}/env-activate.sh" && \ + pip wheel --no-deps . \ + && \ + pip install --no-deps --find-links . bioconda_bot + +FROM "${base}" +COPY --from=build /usr/local /usr/local diff --git a/images/bot/Dockerfile.test b/images/bot/Dockerfile.test new file mode 100644 index 00000000000..5a6fdcbbd5b --- /dev/null +++ b/images/bot/Dockerfile.test @@ -0,0 +1,9 @@ +ARG base +FROM "${base}" +RUN . 
/usr/local/env-activate.sh && \ + ls -lA /usr/local/conda-meta/*.json && \ + bioconda-bot --help && \ + bioconda-bot comment --help && \ + bioconda-bot merge --help && \ + bioconda-bot update --help && \ + bioconda-bot change --help diff --git a/images/bot/pyproject.toml b/images/bot/pyproject.toml new file mode 100644 index 00000000000..9787c3bdf00 --- /dev/null +++ b/images/bot/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/images/bot/setup.cfg b/images/bot/setup.cfg new file mode 100644 index 00000000000..749dfc7ed74 --- /dev/null +++ b/images/bot/setup.cfg @@ -0,0 +1,20 @@ +[metadata] +name = bioconda-bot +version = 0.0.1 + +[options] +python_requires = >=3.8 +install_requires = + aiohttp + PyYaml + +packages = find: +package_dir = + = src + +[options.packages.find] +where = src + +[options.entry_points] +console_scripts = + bioconda-bot = bioconda_bot.cli:main diff --git a/images/bot/src/bioconda_bot/__init__.py b/images/bot/src/bioconda_bot/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/images/bot/src/bioconda_bot/automerge.py b/images/bot/src/bioconda_bot/automerge.py new file mode 100644 index 00000000000..a09ee4148d0 --- /dev/null +++ b/images/bot/src/bioconda_bot/automerge.py @@ -0,0 +1,138 @@ +import logging +import os + +from typing import Any, Dict, List, Optional, Set, Tuple + +from aiohttp import ClientSession +from yaml import safe_load + +from .common import ( + get_job_context, + get_prs_for_sha, + get_sha_for_status_check, + get_sha_for_workflow_run, +) +from .merge import MergeState, request_merge + +logger = logging.getLogger(__name__) +log = logger.info + + +async def get_pr_labels(session: ClientSession, pr: int) -> Set[str]: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{pr}/labels" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + labels = safe_load(res) + return {label["name"] for label in labels} + + +async def is_automerge_labeled(session: ClientSession, pr: int) -> bool: + labels = await get_pr_labels(session, pr) + return "automerge" in labels + + +async def merge_if_labeled(session: ClientSession, pr: int) -> MergeState: + if not await is_automerge_labeled(session, pr): + return MergeState.UNKNOWN + return await request_merge(session, pr) + + +async def get_check_runs(session: ClientSession, sha: str) -> Any: + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/commits/{sha}/check-runs" + + headers = { + "User-Agent": "BiocondaCommentResponder", + "Accept": "application/vnd.github.antiope-preview+json", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + check_runs = [ + check_run + for check_run in safe_load(res)["check_runs"] or [] + if check_run["name"] != "bioconda-bot automerge" + ] + log("Got %d check_runs for SHA %s", len(check_runs or []), sha) + return check_runs + + +async def all_checks_completed(session: ClientSession, sha: str) -> bool: + check_runs = await get_check_runs(session, sha) + + is_all_completed = all(check_run["status"] == "completed" for check_run in check_runs) + if not is_all_completed: + log("Some check_runs are not completed yet.") + for i, check_run in enumerate(check_runs, 1): + log("check_run %d / 
%d: %s", i, len(check_runs), check_run) + return is_all_completed + + +async def all_checks_passed(session: ClientSession, sha: str) -> bool: + check_runs = await get_check_runs(session, sha) + + # TODO: "neutral" might be a valid conclusion to consider in the future. + valid_conclusions = {"success", "skipped"} + if any(check_run["conclusion"] not in valid_conclusions for check_run in check_runs): + log(f"Some check_runs are not marked as {'/'.join(valid_conclusions)} yet.") + for i, check_run in enumerate(check_runs, 1): + log("check_run %d / %d: %s", i, len(check_runs), check_run) + return False + return True + + +async def merge_automerge_passed(sha: str) -> None: + async with ClientSession() as session: + if not await all_checks_passed(session, sha): + return + prs = await get_prs_for_sha(session, sha) + if not prs: + log("No PRs found for SHA %s", sha) + for pr in prs: + merge_state = await merge_if_labeled(session, pr) + log("PR %d has merge state %s", pr, merge_state) + if merge_state is MergeState.MERGED: + break + + +async def get_sha_for_review(job_context: Dict[str, Any]) -> Optional[str]: + if job_context["event_name"] != "pull_request_review": + return None + log("Got %s event", "pull_request_review") + event = job_context["event"] + if event["review"]["state"] != "approved": + return None + sha: Optional[str] = event["pull_request"]["head"]["sha"] + log("Use %s event SHA %s", "pull_request_review", sha) + return sha + + +async def get_sha_for_labeled_pr(job_context: Dict[str, Any]) -> Optional[str]: + if job_context["event_name"] != "pull_request": + return None + log("Got %s event", "pull_request") + event = job_context["event"] + if event["action"] != "labeled" or event["label"]["name"] != "automerge": + return None + sha: Optional[str] = event["pull_request"]["head"]["sha"] + log("Use %s event SHA %s", "pull_request", sha) + return sha + + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = await get_job_context() + + sha = ( + await get_sha_for_status_check(job_context) + or await get_sha_for_workflow_run(job_context) + or await get_sha_for_review(job_context) + or await get_sha_for_labeled_pr(job_context) + ) + if sha: + await merge_automerge_passed(sha) diff --git a/images/bot/src/bioconda_bot/changeVisibility.py b/images/bot/src/bioconda_bot/changeVisibility.py new file mode 100644 index 00000000000..ba036f83479 --- /dev/null +++ b/images/bot/src/bioconda_bot/changeVisibility.py @@ -0,0 +1,63 @@ +import logging +import os +import re +import sys +from asyncio import gather, sleep +from asyncio.subprocess import create_subprocess_exec +from enum import Enum, auto +from pathlib import Path +from shutil import which +from typing import Any, Dict, List, Optional, Set, Tuple +from zipfile import ZipFile, ZipInfo + +from aiohttp import ClientSession +from yaml import safe_load + +from .common import ( + async_exec, + fetch_pr_sha_artifacts, + get_job_context, + get_pr_comment, + get_pr_info, + is_bioconda_member, + send_comment, +) + +logger = logging.getLogger(__name__) +log = logger.info + + +# Ensure uploaded containers are in repos that have public visibility +# TODO: This should ping @bioconda/core if it fails +async def toggle_visibility(session: ClientSession, container_repo: str) -> None: + url = f"https://quay.io/api/v1/repository/biocontainers/{container_repo}/changevisibility" + QUAY_OAUTH_TOKEN = os.environ["QUAY_OAUTH_TOKEN"] + headers = { + "Authorization": f"Bearer 
{QUAY_OAUTH_TOKEN}", + "Content-Type": "application/json", + } + body = {"visibility": "public"} + rc = 0 + try: + async with session.post(url, headers=headers, json=body) as response: + rc = response.status + except: + # Do nothing + pass + log("Trying to toggle visibility (%s) returned %d", url, rc) + + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = await get_job_context() + issue_number, original_comment = await get_pr_comment(job_context) + if issue_number is None or original_comment is None: + return + + comment = original_comment.lower() + if comment.startswith(("@bioconda-bot", "@biocondabot")): + if " please toggle visibility" in comment: + pkg = comment.split("please change visibility")[1].strip().split()[0] + async with ClientSession() as session: + await toggle_visibility(session, pkg) + await send_comment(session, issue_number, "Visibility changed.") diff --git a/images/bot/src/bioconda_bot/cli.py b/images/bot/src/bioconda_bot/cli.py new file mode 100644 index 00000000000..a88601d5370 --- /dev/null +++ b/images/bot/src/bioconda_bot/cli.py @@ -0,0 +1,81 @@ +from logging import INFO, basicConfig + +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser +from asyncio import run +from typing import List, Optional + + +def build_parser_comment(parser: ArgumentParser) -> None: + def run_command() -> None: + from .comment import main as main_ + + run(main_()) + + parser.set_defaults(run_command=run_command) + + +def build_parser_merge(parser: ArgumentParser) -> None: + def run_command() -> None: + from .merge import main as main_ + + run(main_()) + + parser.set_defaults(run_command=run_command) + + +def build_parser_update(parser: ArgumentParser) -> None: + def run_command() -> None: + from .update import main as main_ + + run(main_()) + + parser.set_defaults(run_command=run_command) + + +def build_parser_automerge(parser: ArgumentParser) -> None: + def run_command() -> None: + from .automerge import main as main_ + + run(main_()) + + parser.set_defaults(run_command=run_command) + + +def build_parser_changeVisibility(parser: ArgumentParser) -> None: + def run_command() -> None: + from .changeVisibility import main as main_ + + run(main_()) + + parser.set_defaults(run_command=run_command) + + +def get_argument_parser() -> ArgumentParser: + parser = ArgumentParser( + prog="bioconda-bot", + formatter_class=ArgumentDefaultsHelpFormatter, + ) + sub_parsers = parser.add_subparsers( + dest="command", + required=True, + ) + for command_name, build_parser in ( + ("comment", build_parser_comment), + ("merge", build_parser_merge), + ("update", build_parser_update), + ("automerge", build_parser_automerge), + ("change", build_parser_changeVisibility), + ): + sub_parser = sub_parsers.add_parser( + command_name, + formatter_class=ArgumentDefaultsHelpFormatter, + ) + build_parser(sub_parser) + return parser + + +def main(args: Optional[List[str]] = None) -> None: + basicConfig(level=INFO) + parser = get_argument_parser() + parsed_args = parser.parse_args(args) + parsed_args.run_command() diff --git a/images/bot/src/bioconda_bot/comment.py b/images/bot/src/bioconda_bot/comment.py new file mode 100644 index 00000000000..eb9e13fb7b0 --- /dev/null +++ b/images/bot/src/bioconda_bot/comment.py @@ -0,0 +1,197 @@ +import logging +import os +import re + +from aiohttp import ClientSession +from yaml import safe_load + +from .common import ( + async_exec, + fetch_pr_sha_artifacts, + get_job_context, + 
get_pr_comment, + get_pr_info, + get_prs_for_sha, + get_sha_for_status_check, + is_bioconda_member, + send_comment, +) + +logger = logging.getLogger(__name__) +log = logger.info + + +# Given a PR and commit sha, post a comment with any artifacts +async def make_artifact_comment(session: ClientSession, pr: int, sha: str) -> None: + artifacts = await fetch_pr_sha_artifacts(session, pr, sha) + nPackages = len(artifacts) + + if nPackages > 0: + comment = "Package(s) built on Azure are ready for inspection:\n\n" + comment += "Arch | Package | Zip File\n-----|---------|---------\n" + install_noarch = "" + install_linux = "" + install_osx = "" + + # Table of packages and repodata.json + for URL, artifact in artifacts: + if not (package_match := re.match(r"^((.+)\/(.+)\/(.+)\/(.+\.tar\.bz2))$", artifact)): + continue + url, archdir, basedir, subdir, packageName = package_match.groups() + urlBase = URL[:-3] # trim off zip from format= + urlBase += "file&subPath=%2F{}".format("%2F".join([basedir, subdir])) + conda_install_url = urlBase + # N.B., the zip file URL is nearly identical to the URL for the individual member files. It's unclear if there's an API for getting the correct URL to the files themselves + #pkgUrl = "%2F".join([urlBase, packageName]) + #repoUrl = "%2F".join([urlBase, "current_repodata.json"]) + #resp = await session.get(repoUrl) + + if subdir == "noarch": + comment += "noarch |" + elif subdir == "linux-64": + comment += "linux-64 |" + elif subdir == "linux-aarch64": + comment += "linux-aarch64 |" + else: + comment += "osx-64 |" + comment += f" {packageName} | [{archdir}]({URL})\n" + + # Conda install examples + comment += "***\n\nYou may also use `conda` to install these after downloading and extracting the appropriate zip file. From the LinuxArtifacts or OSXArtifacts directories:\n\n" + comment += "```\nconda install -c ./packages \n```\n" + + # Table of containers + comment += "***\n\nDocker image(s) built (images are in the LinuxArtifacts zip file above):\n\n" + comment += "Package | Tag | Install with `docker`\n" + comment += "--------|-----|----------------------\n" + + for URL, artifact in artifacts: + if artifact.endswith(".tar.gz"): + image_name = artifact.split("/").pop()[: -len(".tar.gz")] + if ':' in image_name: + package_name, tag = image_name.split(':', 1) + #image_url = URL[:-3] # trim off zip from format= + #image_url += "file&subPath=%2F{}.tar.gz".format("%2F".join(["images", '%3A'.join([package_name, tag])])) + comment += f"{package_name} | {tag} | " + comment += f'
show`gzip -dc LinuxArtifacts/images/{image_name}.tar.gz \\| docker load`\n' + comment += "\n\n" + else: + comment = ( + "No artifacts found on the most recent Azure build. " + "Either the build failed, the artifacts have were removed due to age, or the recipe was blacklisted/skipped." + ) + await send_comment(session, pr, comment) + + +# Post a comment on a given PR with its CircleCI artifacts +async def artifact_checker(session: ClientSession, issue_number: int) -> None: + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}" + headers = { + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + + await make_artifact_comment(session, issue_number, pr_info["head"]["sha"]) + + +# Reposts a quoted message in a given issue/PR if the user isn't a bioconda member +async def comment_reposter(session: ClientSession, user: str, pr: int, message: str) -> None: + if await is_bioconda_member(session, user): + log("Not reposting for %s", user) + return + log("Reposting for %s", user) + await send_comment( + session, + pr, + f"Reposting for @{user} to enable pings (courtesy of the BiocondaBot):\n\n> {message}", + ) + + +# Add the "Please review and merge" label to a PR +async def add_pr_label(session: ClientSession, pr: int) -> None: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{pr}/labels" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"labels": ["please review & merge"]} + async with session.post(url, headers=headers, json=payload) as response: + response.raise_for_status() + + +async def gitter_message(session: ClientSession, msg: str) -> None: + token = os.environ["GITTER_TOKEN"] + room_id = "57f3b80cd73408ce4f2bba26" + url = f"https://api.gitter.im/v1/rooms/{room_id}/chatMessages" + headers = { + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + "Accept": "application/json", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"text": msg} + log("Sending request to %s", url) + async with session.post(url, headers=headers, json=payload) as response: + response.raise_for_status() + + +async def notify_ready(session: ClientSession, pr: int) -> None: + try: + await gitter_message( + session, + f"PR ready for review: https://github.com/bioconda/bioconda-recipes/pull/{pr}", + ) + except Exception: + logger.exception("Posting to Gitter failed", exc_info=True) + # Do not die if we can't post to gitter! + + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = await get_job_context() + + sha = await get_sha_for_status_check(job_context) + if sha: + # This is a successful status or check_suite event => post artifact lists. 
+ async with ClientSession() as session: + for pr in await get_prs_for_sha(session, sha): + await artifact_checker(session, pr) + return + + issue_number, original_comment = await get_pr_comment(job_context) + if issue_number is None or original_comment is None: + return + + comment = original_comment.lower() + async with ClientSession() as session: + if comment.startswith(("@bioconda-bot", "@biocondabot")): + if "please update" in comment: + log("This should have been directly invoked via bioconda-bot-update") + from .update import update_from_master + + await update_from_master(session, issue_number) + elif " hello" in comment: + await send_comment(session, issue_number, "Yes?") + elif " please fetch artifacts" in comment or " please fetch artefacts" in comment: + await artifact_checker(session, issue_number) + #elif " please merge" in comment: + # await send_comment(session, issue_number, "Sorry, I'm currently disabled") + # #log("This should have been directly invoked via bioconda-bot-merge") + # #from .merge import request_merge + # #await request_merge(session, issue_number) + elif " please add label" in comment: + await add_pr_label(session, issue_number) + await notify_ready(session, issue_number) + # else: + # # Methods in development can go below, flanked by checking who is running them + # if job_context["actor"] != "dpryan79": + # console.log("skipping") + # sys.exit(0) + elif "@bioconda/" in comment: + await comment_reposter( + session, job_context["actor"], issue_number, original_comment + ) diff --git a/images/bot/src/bioconda_bot/common.py b/images/bot/src/bioconda_bot/common.py new file mode 100644 index 00000000000..565674fdd00 --- /dev/null +++ b/images/bot/src/bioconda_bot/common.py @@ -0,0 +1,249 @@ +import logging +import os +import re +import sys +from asyncio import gather, sleep +from asyncio.subprocess import create_subprocess_exec +from pathlib import Path +from shutil import which +from typing import Any, Dict, List, Optional, Set, Tuple +from zipfile import ZipFile + +from aiohttp import ClientSession +from yaml import safe_load + +logger = logging.getLogger(__name__) +log = logger.info + + +async def async_exec( + command: str, *arguments: str, env: Optional[Dict[str, str]] = None +) -> None: + process = await create_subprocess_exec(command, *arguments, env=env) + return_code = await process.wait() + if return_code != 0: + raise RuntimeError( + f"Failed to execute {command} {arguments} (return code: {return_code})" + ) + + +# Post a comment on a given issue/PR with text in message +async def send_comment(session: ClientSession, issue_number: int, message: str) -> None: + token = os.environ["BOT_TOKEN"] + url = ( + f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{issue_number}/comments" + ) + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = {"body": message} + log("Sending comment: url=%s", url) + log("Sending comment: payload=%s", payload) + async with session.post(url, headers=headers, json=payload) as response: + status_code = response.status + log("the response code was %d", status_code) + if status_code < 200 or status_code > 202: + sys.exit(1) + + +# Return true if a user is a member of bioconda +async def is_bioconda_member(session: ClientSession, user: str) -> bool: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/orgs/bioconda/members/{user}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + rc = 404 + async 
with session.get(url, headers=headers) as response: + try: + response.raise_for_status() + rc = response.status + except: + # Do nothing, this just prevents things from crashing on 404 + pass + + return rc == 204 + + +# Fetch and return the JSON of a PR +# This can be run to trigger a test merge +async def get_pr_info(session: ClientSession, pr: int) -> Any: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{pr}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + return pr_info + + +def list_zip_contents(fname: str) -> [str]: + f = ZipFile(fname) + return [e.filename for e in f.infolist() if e.filename.endswith('.tar.gz') or e.filename.endswith('.tar.bz2')] + + +# Download a zip file from url to zipName.zip and return that path +# Timeout is 30 minutes to compensate for any network issues +async def download_file(session: ClientSession, zipName: str, url: str) -> str: + async with session.get(url, timeout=60*30) as response: + if response.status == 200: + ofile = f"{zipName}.zip" + with open(ofile, 'wb') as fd: + while True: + chunk = await response.content.read(1024*1024*1024) + if not chunk: + break + fd.write(chunk) + return ofile + return None + + +# Find artifact zip files, download them and return their URLs and contents +async def fetch_azure_zip_files(session: ClientSession, buildId: str) -> [(str, str)]: + artifacts = [] + + url = f"https://dev.azure.com/bioconda/bioconda-recipes/_apis/build/builds/{buildId}/artifacts?api-version=4.1" + log("contacting azure %s", url) + async with session.get(url) as response: + # Sometimes we get a 301 error, so there are no longer artifacts available + if response.status == 301: + return artifacts + res = await response.text() + + res_object = safe_load(res) + if res_object['count'] == 0: + return artifacts + + for artifact in res_object['value']: + zipName = artifact['name'] # LinuxArtifacts or OSXArtifacts + zipUrl = artifact['resource']['downloadUrl'] + log(f"zip name is {zipName} url {zipUrl}") + fname = await download_file(session, zipName, zipUrl) + if not fname: + continue + pkgsImages = list_zip_contents(fname) + for pkg in pkgsImages: + artifacts.append((zipUrl, pkg)) + + return artifacts + + +def parse_azure_build_id(url: str) -> str: + return re.search("buildId=(\d+)", url).group(1) + + +# Given a PR and commit sha, fetch a list of the artifact zip files URLs and their contents +async def fetch_pr_sha_artifacts(session: ClientSession, pr: int, sha: str) -> List[Tuple[str, str]]: + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/commits/{sha}/check-runs" + + headers = { + "User-Agent": "BiocondaCommentResponder", + "Accept": "application/vnd.github.antiope-preview+json", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + check_runs = safe_load(res) + + for check_run in check_runs["check_runs"]: + # The names are "bioconda.bioconda-recipes (test_osx test_osx)" or similar + if check_run["name"].startswith("bioconda.bioconda-recipes (test_"): + # The azure build ID is in the details_url as buildId=\d+ + buildID = parse_azure_build_id(check_run["details_url"]) + zipFiles = await fetch_azure_zip_files(session, buildID) + return zipFiles # We've already fetched all possible artifacts + + return [] + + 
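As an aside for readers following the artifact plumbing, the helpers above compose roughly as sketched below: `get_pr_info` resolves a PR's head commit, and `fetch_pr_sha_artifacts` maps that commit to Azure artifact zip URLs plus their `.tar.gz`/`.tar.bz2` members. This is an illustrative sketch only, not part of the patch; it assumes the `bioconda-bot` package is installed and `BOT_TOKEN` is exported, and the PR number is a placeholder.

```python
# Illustrative sketch only (not part of the patch).
import asyncio

from aiohttp import ClientSession

from bioconda_bot.common import fetch_pr_sha_artifacts, get_pr_info


async def list_artifacts(pr: int) -> None:
    async with ClientSession() as session:
        pr_info = await get_pr_info(session, pr)  # GitHub API call; needs BOT_TOKEN
        sha = pr_info["head"]["sha"]              # head commit of the PR
        # Each entry pairs an Azure zip download URL with one .tar.gz/.tar.bz2 member.
        for zip_url, member in await fetch_pr_sha_artifacts(session, pr, sha):
            print(zip_url, member)


if __name__ == "__main__":
    asyncio.run(list_artifacts(12345))  # placeholder PR number
```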
+async def get_sha_for_status(job_context: Dict[str, Any]) -> Optional[str]: + if job_context["event_name"] != "status": + return None + log("Got %s event", "status") + event = job_context["event"] + if event["state"] != "success": + return None + branches = event.get("branches") + if not branches: + return None + sha: Optional[str] = branches[0]["commit"]["sha"] + log("Use %s event SHA %s", "status", sha) + return sha + + +async def get_sha_for_check_suite_or_workflow( + job_context: Dict[str, Any], event_name: str +) -> Optional[str]: + if job_context["event_name"] != event_name: + return None + log("Got %s event", event_name) + event_source = job_context["event"][event_name] + if event_source["conclusion"] != "success": + return None + sha: Optional[str] = event_source.get("head_sha") + if not sha: + pull_requests = event_source.get("pull_requests") + if pull_requests: + sha = pull_requests[0]["head"]["sha"] + if not sha: + return None + log("Use %s event SHA %s", event_name, sha) + return sha + + +async def get_sha_for_check_suite(job_context: Dict[str, Any]) -> Optional[str]: + return await get_sha_for_check_suite_or_workflow(job_context, "check_suite") + + +async def get_sha_for_workflow_run(job_context: Dict[str, Any]) -> Optional[str]: + return await get_sha_for_check_suite_or_workflow(job_context, "workflow_run") + + +async def get_prs_for_sha(session: ClientSession, sha: str) -> List[int]: + headers = { + "User-Agent": "BiocondaCommentResponder", + "Accept": "application/vnd.github.v3+json", + } + pr_numbers: List[int] = [] + per_page = 100 + for page in range(1, 20): + url = ( + "https://api.github.com/repos/bioconda/bioconda-recipes/pulls" + f"?per_page={per_page}" + f"&page={page}" + ) + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + prs = safe_load(res) + pr_numbers.extend(pr["number"] for pr in prs if pr["head"]["sha"] == sha) + if len(prs) < per_page: + break + return pr_numbers + + +async def get_sha_for_status_check(job_context: Dict[str, Any]) -> Optional[str]: + return await get_sha_for_status(job_context) or await get_sha_for_check_suite(job_context) + + +async def get_job_context() -> Any: + job_context = safe_load(os.environ["JOB_CONTEXT"]) + log("%s", job_context) + return job_context + + +async def get_pr_comment(job_context: Dict[str, Any]) -> Tuple[Optional[int], Optional[str]]: + event = job_context["event"] + if event["issue"].get("pull_request") is None: + return None, None + issue_number = event["issue"]["number"] + + original_comment = event["comment"]["body"] + log("the comment is: %s", original_comment) + return issue_number, original_comment diff --git a/images/bot/src/bioconda_bot/merge.py b/images/bot/src/bioconda_bot/merge.py new file mode 100644 index 00000000000..455c7f31d39 --- /dev/null +++ b/images/bot/src/bioconda_bot/merge.py @@ -0,0 +1,371 @@ +import logging +import os +import re +import sys +from asyncio import gather, sleep +from asyncio.subprocess import create_subprocess_exec +from enum import Enum, auto +from pathlib import Path +from shutil import which +from typing import Any, Dict, List, Optional, Set, Tuple +from zipfile import ZipFile, ZipInfo + +from aiohttp import ClientSession +from yaml import safe_load + +from .common import ( + async_exec, + fetch_pr_sha_artifacts, + get_job_context, + get_pr_comment, + get_pr_info, + is_bioconda_member, + send_comment, +) + +logger = logging.getLogger(__name__) +log = logger.info + + +class MergeState(Enum): + UNKNOWN = 
auto() + MERGEABLE = auto() + NOT_MERGEABLE = auto() + NEEDS_REVIEW = auto() + MERGED = auto() + + +# Ensure there's at least one approval by a member +async def approval_review(session: ClientSession, issue_number: int) -> bool: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}/reviews" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + reviews = safe_load(res) + + approved_reviews = [review for review in reviews if review["state"] == "APPROVED"] + if not approved_reviews: + return False + + # Ensure the review author is a member + return any( + gather( + *( + is_bioconda_member(session, review["user"]["login"]) + for review in approved_reviews + ) + ) + ) + + +# Check the mergeable state of a PR +async def check_is_mergeable( + session: ClientSession, issue_number: int, second_try: bool = False +) -> MergeState: + token = os.environ["BOT_TOKEN"] + # Sleep a couple of seconds to allow the background process to finish + if second_try: + await sleep(3) + + # PR info + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + pr_info = safe_load(res) + + if pr_info.get("merged"): + return MergeState.MERGED + + # We need mergeable == true and mergeable_state == clean, an approval by a member and + if pr_info.get("mergeable") is None and not second_try: + return await check_is_mergeable(session, issue_number, True) + + # Check approved reviews beforehand because we (somehow?) get NOT_MERGEABLE otherwise. 
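+    # (Likely because GitHub reports mergeable_state == "blocked" while a required
+    # review is missing, which the check below would otherwise classify as
+    # NOT_MERGEABLE instead of NEEDS_REVIEW.)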
+ if not await approval_review(session, issue_number): + return MergeState.NEEDS_REVIEW + + if ( + pr_info.get("mergeable") is None + or not pr_info["mergeable"] + or pr_info["mergeable_state"] != "clean" + ): + return MergeState.NOT_MERGEABLE + + return MergeState.MERGEABLE + + +# Ensure uploaded containers are in repos that have public visibility +# TODO: This should ping @bioconda/core if it fails +async def toggle_visibility(session: ClientSession, container_repo: str) -> None: + url = f"https://quay.io/api/v1/repository/biocontainers/{container_repo}/changevisibility" + QUAY_OAUTH_TOKEN = os.environ["QUAY_OAUTH_TOKEN"] + headers = { + "Authorization": f"Bearer {QUAY_OAUTH_TOKEN}", + "Content-Type": "application/json", + } + body = {"visibility": "public"} + rc = 0 + try: + async with session.post(url, headers=headers, json=body) as response: + rc = response.status + except: + # Do nothing + pass + log("Trying to toggle visibility (%s) returned %d", url, rc) + + +## Download an artifact from CircleCI, rename and upload it +#async def download_and_upload(session: ClientSession, x: str) -> None: +# basename = x.split("/").pop() +# # the tarball needs a regular name without :, the container needs pkg:tag +# image_name = basename.replace("%3A", ":").replace("\n", "").replace(".tar.gz", "") +# file_name = basename.replace("%3A", "_").replace("\n", "") +# +# async with session.get(x) as response: +# with open(file_name, "wb") as file: +# logged = 0 +# loaded = 0 +# while chunk := await response.content.read(256 * 1024): +# file.write(chunk) +# loaded += len(chunk) +# if loaded - logged >= 50 * 1024 ** 2: +# log("Downloaded %.0f MiB: %s", max(1, loaded / 1024 ** 2), x) +# logged = loaded +# log("Downloaded %.0f MiB: %s", max(1, loaded / 1024 ** 2), x) +# +# if x.endswith(".gz"): +# # Container +# log("uploading with skopeo: %s", file_name) +# # This can fail, retry with 5 second delays +# count = 0 +# maxTries = 5 +# success = False +# QUAY_LOGIN = os.environ["QUAY_LOGIN"] +# env = os.environ.copy() +# # TODO: Fix skopeo package to find certificates on its own. 
+# skopeo_path = which("skopeo") +# if not skopeo_path: +# raise RuntimeError("skopeo not found") +# env["SSL_CERT_DIR"] = str(Path(skopeo_path).parents[1].joinpath("ssl")) +# while count < maxTries: +# try: +# await async_exec( +# "skopeo", +# "--command-timeout", +# "600s", +# "copy", +# f"docker-archive:{file_name}", +# f"docker://quay.io/biocontainers/{image_name}", +# "--dest-creds", +# QUAY_LOGIN, +# env=env, +# ) +# success = True +# break +# except: +# count += 1 +# if count == maxTries: +# raise +# await sleep(5) +# if success: +# await toggle_visibility(session, basename.split("%3A")[0]) +# elif x.endswith(".bz2"): +# # Package +# log("uploading package") +# ANACONDA_TOKEN = os.environ["ANACONDA_TOKEN"] +# await async_exec("anaconda", "-t", ANACONDA_TOKEN, "upload", file_name, "--force") +# +# log("cleaning up") +# os.remove(file_name) + + +async def upload_package(session: ClientSession, zf: ZipFile, e: ZipInfo): + log(f"extracting {e.filename}") + fName = zf.extract(e) + + log(f"uploading {fName}") + ANACONDA_TOKEN = os.environ["ANACONDA_TOKEN"] + await async_exec("anaconda", "-t", ANACONDA_TOKEN, "upload", fName, "--force") + + log("cleaning up") + os.remove(fName) + + +async def upload_image(session: ClientSession, zf: ZipFile, e: ZipInfo): + basename = e.filename.split("/").pop() + image_name = basename.replace("\n", "").replace(".tar.gz", "") + + log(f"extracting {e.filename}") + fName = zf.extract(e) + # Skopeo can't handle a : in the file name, so we need to remove it + newFName = fName.replace(":", "") + os.rename(fName, newFName) + + log(f"uploading with skopeo: {newFName} {image_name}") + # This can fail, retry with 5 second delays + count = 0 + maxTries = 5 + success = False + QUAY_LOGIN = os.environ["QUAY_LOGIN"] + env = os.environ.copy() + # TODO: Fix skopeo package to find certificates on its own. + skopeo_path = which("skopeo") + if not skopeo_path: + raise RuntimeError("skopeo not found") + env["SSL_CERT_DIR"] = str(Path(skopeo_path).parents[1].joinpath("ssl")) + while count < maxTries: + try: + await async_exec( + "skopeo", + "--command-timeout", + "600s", + "copy", + f"docker-archive:{newFName}", + f"docker://quay.io/biocontainers/{image_name}", + "--dest-creds", + QUAY_LOGIN, + env=env, + ) + success = True + break + except: + count += 1 + if count == maxTries: + raise + await sleep(5) + if success: + await toggle_visibility(session, basename.split(":")[0] if ":" in basename else basename.split("%3A")[0]) + + log("cleaning up") + os.remove(newFName) + + +# Given an already downloaded zip file name in the current working directory, upload the contents +async def extract_and_upload(session: ClientSession, fName: str) -> int: + if os.path.exists(fName): + zf = ZipFile(fName) + for e in zf.infolist(): + if e.filename.endswith('.tar.bz2'): + await upload_package(session, zf, e) + elif e.filename.endswith('.tar.gz'): + await upload_image(session, zf, e) + return 0 + return 1 + + +# Upload artifacts to quay.io and anaconda, return the commit sha +# Only call this for mergeable PRs! 
+async def upload_artifacts(session: ClientSession, pr: int) -> str: + # Get last sha + pr_info = await get_pr_info(session, pr) + sha: str = pr_info["head"]["sha"] + + # Fetch the artifacts (a list of (URL, artifact) tuples actually) + artifacts = await fetch_pr_sha_artifacts(session, pr, sha) + artifacts = [artifact for (URL, artifact) in artifacts if artifact.endswith((".gz", ".bz2"))] + assert artifacts + + # Download/upload Artifacts + for zipFileName in ["LinuxArtifacts.zip", "OSXArtifacts.zip"]: + await extract_and_upload(session, zipFileName) + + return sha + + +# Assume we have no more than 250 commits in a PR, which is probably reasonable in most cases +async def get_pr_commit_message(session: ClientSession, issue_number: int) -> str: + token = os.environ["BOT_TOKEN"] + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{issue_number}/commits" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + async with session.get(url, headers=headers) as response: + response.raise_for_status() + res = await response.text() + commits = safe_load(res) + message = "".join(f" * {commit['commit']['message']}\n" for commit in reversed(commits)) + return message + + +# Merge a PR +async def merge_pr(session: ClientSession, pr: int, init_message: str) -> MergeState: + token = os.environ["BOT_TOKEN"] + mergeable = await check_is_mergeable(session, pr) + log("mergeable state of %s is %s", pr, mergeable) + if mergeable is not MergeState.MERGEABLE: + return mergeable + + if init_message: + await send_comment(session, pr, init_message) + try: + log("uploading artifacts") + sha = await upload_artifacts(session, pr) + log("artifacts uploaded") + + # Carry over last 250 commit messages + msg = await get_pr_commit_message(session, pr) + + # Hit merge + url = f"https://api.github.com/repos/bioconda/bioconda-recipes/pulls/{pr}/merge" + headers = { + "Authorization": f"token {token}", + "User-Agent": "BiocondaCommentResponder", + } + payload = { + "sha": sha, + "commit_title": f"[ci skip] Merge PR {pr}", + "commit_message": f"Merge PR #{pr}, commits were: \n{msg}", + "merge_method": "squash", + } + log("Putting merge commit") + async with session.put(url, headers=headers, json=payload) as response: + rc = response.status + log("body %s", payload) + log("merge_pr the response code was %s", rc) + except: + await send_comment( + session, + pr, + "I received an error uploading the build artifacts or merging the PR!", + ) + logger.exception("Upload failed", exc_info=True) + return MergeState.MERGED + + +async def request_merge(session: ClientSession, pr: int) -> MergeState: + init_message = "I will attempt to upload artifacts and merge this PR. This may take some time, please have patience." 
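+    # merge_pr() returns early with NEEDS_REVIEW / NOT_MERGEABLE (or MERGED if the
+    # PR was already merged); otherwise it posts init_message, uploads the build
+    # artifacts, and squash-merges the PR.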
+ merged = await merge_pr(session, pr, init_message) + if merged is MergeState.NEEDS_REVIEW: + await send_comment( + session, + pr, + "Sorry, this PR cannot be merged until it's approved by a Bioconda member.", + ) + elif merged is MergeState.NOT_MERGEABLE: + await send_comment(session, pr, "Sorry, this PR cannot be merged at this time.") + return merged + + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = await get_job_context() + issue_number, original_comment = await get_pr_comment(job_context) + if issue_number is None or original_comment is None: + return + + comment = original_comment.lower() + if comment.startswith(("@bioconda-bot", "@biocondabot")): + if " please merge" in comment: + async with ClientSession() as session: + await request_merge(session, issue_number) diff --git a/images/bot/src/bioconda_bot/update.py b/images/bot/src/bioconda_bot/update.py new file mode 100644 index 00000000000..0af1f8db09e --- /dev/null +++ b/images/bot/src/bioconda_bot/update.py @@ -0,0 +1,78 @@ +import logging +import sys + +from aiohttp import ClientSession + +from .common import ( + async_exec, + get_job_context, + get_pr_comment, + get_pr_info, + send_comment, +) + +logger = logging.getLogger(__name__) +log = logger.info + + +# Update a branch from upstream master, this should be run in a try/catch +async def update_from_master_runner(session: ClientSession, pr: int) -> None: + async def git(*args: str) -> None: + return await async_exec("git", *args) + + # Setup git, otherwise we can't push + await git("config", "--global", "user.email", "biocondabot@gmail.com") + await git("config", "--global", "user.name", "BiocondaBot") + + pr_info = await get_pr_info(session, pr) + remote_branch = pr_info["head"]["ref"] + remote_repo = pr_info["head"]["repo"]["full_name"] + + max_depth = 2000 + # Clone + await git( + "clone", + f"--depth={max_depth}", + f"--branch={remote_branch}", + f"git@github.com:{remote_repo}.git", + "bioconda-recipes", + ) + + async def git_c(*args: str) -> None: + return await git("-C", "bioconda-recipes", *args) + + # Add/pull upstream + await git_c("remote", "add", "upstream", "https://github.com/bioconda/bioconda-recipes") + await git_c("fetch", f"--depth={max_depth}", "upstream", "master") + + # Merge + await git_c("merge", "upstream/master") + + await git_c("push") + + +# Merge the upstream master branch into a PR branch, leave a message on error +async def update_from_master(session: ClientSession, pr: int) -> None: + try: + await update_from_master_runner(session, pr) + except Exception as e: + await send_comment( + session, + pr, + "I encountered an error updating your PR branch. 
You can report this to bioconda/core if you'd like.\n-The Bot", + ) + sys.exit(1) + + +# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` +async def main() -> None: + job_context = await get_job_context() + issue_number, original_comment = await get_pr_comment(job_context) + if issue_number is None or original_comment is None: + return + + comment = original_comment.lower() + if comment.startswith(("@bioconda-bot", "@biocondabot")): + if "please update" in comment: + async with ClientSession() as session: + await update_from_master(session, issue_number) diff --git a/images/create-env/CHANGELOG.md b/images/create-env/CHANGELOG.md new file mode 100644 index 00000000000..cd5a8d32db5 --- /dev/null +++ b/images/create-env/CHANGELOG.md @@ -0,0 +1,152 @@ +# Changelog + + +## bioconda/create-env 3.0 (2023-10-17) + +### Changed + +- Add linux-aarch64 image; bioconda/create-env is now a multiplatform manifest. + +- Change to a simple "major.minor" version scheme and offer mutable "major" tag. + +- Drop defaults channel from included config. + +- Use Miniforge installer to build this image. + +- Rebuilt on the latest base image with Debian 12.2 / BusyBox 1.36.1. + +- Do not install findutils, sed if provided by the base image (as is currently). + + +## bioconda/create-env 2.2.1 (2022-10-14) + +### Changed + +- Limit open fd (ulimit -n) for strip (small number chosen arbitrarily). + + The container image itself had unstripped binaries in 2.2.0. + + +## bioconda/create-env 2.2.0 (2022-10-14) + +### Changed + +- Use the exact conda, mamba versions as used in bioconda-recipes' builds. + + +## bioconda/create-env 2.1.0 (2021-04-14) + +### Changed + +- Copy instead of hardlink licenses, exit on error + + Hardlink fails if copying spans cross devices (e.g., via bound volumes). + + +## bioconda/create-env 2.0.0 (2021-04-13) + +### Changed + +- Rename `--remove-files` to `--remove-paths` + +- Replace `--strip` by `--strip-files=GLOB` + +- Replace `CONDA_ALWAYS_COPY=1` usage by config option + +- Use `/bin/bash` for entrypoints + + `/bin/sh` fails on some Conda packages' activations scripts' Bashisms. + + +## bioconda/create-env 1.2.1 (2021-04-09) + +### Fixed + +- Fail `--strip` if `strip` is not available + +### Changed + +- Delete links/dirs for `--remove-files` + + +## bioconda/create-env 1.2.0 (2021-03-30) + +### Added + +- Add license copying + +- Add status messages + +- Add help texts + +### Changed + +- Suppress `bash -i` ioctl warning + + +## bioconda/create-env 1.1.1 (2021-03-27) + +### Changed + +- Use `CONDA_ALWAYS_COPY=1` + + +## bioconda/create-env 1.1.0 (2021-03-27) + +### Added + +- Add option to change `create --copy` + +### Changed + +- Rebuild with `python` pinned to `3.8` + + To avoid hitting + - https://github.com/conda/conda/issues/10490 + - https://bugs.python.org/issue43517 + + +## bioconda/create-env 1.0.2 (2021-03-22) + +### Changed + +- Rebuild on new Debian 10 base images + + +## bioconda/create-env 1.0.1 (2021-03-22) + +### Fixed + +- Use entrypoint from `/opt/create-env/` + + `/usr/local` gets "overwritten" (=bind-mounted) when building via mulled. + + +## bioconda/create-env 1.0.0 (2021-03-21) + +### Added + +- Initial release + + + diff --git a/images/create-env/Dockerfile b/images/create-env/Dockerfile new file mode 100644 index 00000000000..93b839481b5 --- /dev/null +++ b/images/create-env/Dockerfile @@ -0,0 +1,44 @@ +# Use the exact conda, mamba versions as used in bioconda-recipes' builds. 
+ARG bioconda_utils_version +FROM quay.io/bioconda/bioconda-utils-build-env-cos7:${bioconda_utils_version} as bioconda-build-env +RUN /opt/conda/bin/conda list \ + --export '^(conda|mamba)$' \ + | sed -n 's/=[^=]*$//p' \ + > /tmp/requirements.txt + + +FROM quay.io/bioconda/base-glibc-busybox-bash as build + +WORKDIR /tmp/work +COPY --from=bioconda-build-env /tmp/requirements.txt ./ +COPY install-conda print-env-activate create-env ./ +RUN arch="$( uname -m )" \ + && \ + wget --quiet -O ./miniconda.sh \ + "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" + +RUN ./install-conda ./requirements.txt /opt/create-env + + +FROM quay.io/bioconda/base-glibc-busybox-bash + +COPY --from=build /opt/create-env /opt/create-env +# Copy (Bioconda-specific) Conda configuration created by the install-conda script. +COPY --from=build /root/.condarc /root/ + +RUN \ + # Use a per-user config (instead of conda config --sys) for more flexibility. + cp /root/.condarc /etc/skel/ \ + && \ + # Enable conda shell function for login shells. + ln -s /opt/create-env/etc/profile.d/conda.sh /etc/profile.d/ \ + && \ + # Enable conda function in interactive Bash (via .bashrc) and POSIX shells (via ENV). + printf '%s\n' \ + '\. /etc/profile.d/conda.sh' \ + | tee -a /root/.bashrc \ + >> /etc/skel/.bashrc +ENV ENV=/etc/profile.d/conda.sh + +ENTRYPOINT [ "/opt/create-env/bin/tini", "--", "/opt/create-env/env-execute" ] +CMD [ "bash" ] diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test new file mode 100644 index 00000000000..9c2566aefc3 --- /dev/null +++ b/images/create-env/Dockerfile.test @@ -0,0 +1,81 @@ +ARG base + +FROM "${base}" +RUN set -x && \ + CONDA_PKGS_DIRS="/tmp/pkgs" \ + /opt/create-env/env-execute \ + create-env \ + --conda=mamba \ + /usr/local \ + file findutils grep +RUN set -x && \ + . /usr/local/env-activate.sh && \ + if find /opt/create-env \ + -xdev \ + -type f \ + -exec file {} \+ \ + | grep 'not stripped' \ + ; then \ + >&2 printf 'found unstripped binaries\n' ; exit 1 \ + ; fi +RUN set -x && \ + . /usr/local/env-activate.sh && \ + if find /opt/create-env \ + -xdev \ + -type f \ + -name \*.a \ + | grep . \ + ; then \ + >&2 printf 'found static libraries\n' ; exit 1 \ + ; fi + + +FROM "${base}" as build_bioconda_package +RUN set -x && \ + /opt/create-env/env-execute \ + create-env \ + --conda=mamba \ + --strip-files=\* \ + /usr/local \ + catfasta2phyml +FROM quay.io/bioconda/base-glibc-busybox-bash +COPY --from=build_bioconda_package /usr/local /usr/local +RUN set -x && \ + /usr/local/env-execute \ + catfasta2phyml --version \ + && \ + [ ! "${CONDA_PREFIX}" = /usr/local ] \ + && \ + { set -x && . /usr/local/env-activate.sh && set +x ; } \ + && \ + [ "${CONDA_PREFIX}" = /usr/local ] \ + && \ + catfasta2phyml --version + + +FROM "${base}" as build_conda +RUN set -x && \ + /opt/create-env/env-execute \ + create-env \ + --conda=mamba \ + --env-activate-args='--prefix-is-base' \ + --strip-files=\* \ + --remove-paths=\*.a \ + --remove-paths=\*.pyc \ + /opt/conda \ + conda +FROM quay.io/bioconda/base-glibc-busybox-bash +COPY --from=build_conda /opt/conda /opt/conda +COPY --from=build_conda /opt/conda/env-activate.sh /usr/local/ +RUN set -x && \ + /usr/local/env-execute \ + conda info --all \ + && \ + { set -x && . /usr/local/env-activate.sh && set +x ; } \ + && \ + . 
"${CONDA_PREFIX}/etc/profile.d/conda.sh" \ + && \ + conda activate \ + && \ + conda info \ + | grep 'base environment.*/opt/conda' diff --git a/images/create-env/README.md b/images/create-env/README.md new file mode 100644 index 00000000000..ca9a7ed9a47 --- /dev/null +++ b/images/create-env/README.md @@ -0,0 +1,99 @@ +# bioconda/create-env + +The `create-env` container image, available as [`quay.io/bioconda/create-env`](https://quay.io/repository/bioconda/create-env?tab=tags), provides [`conda`](https://github.com/conda/conda/) (and [`mamba`](https://github.com/mamba-org/mamba)) alongside a convenience wrapper `create-env` to create small container images based on Conda packages. + + +## Options + +`create-env` runs `conda create` for a given `PREFIX` plus a set of packages and (optionally) runs post-processing steps on the created environment. + +Post-processing steps are triggered by arguments to `create-env`: + +- `--env-activate-script=FILE`: + + Create a shell activation script `FILE` (defaults to `PREFIX/env-activate.sh`) which contains the environment activation instructions as executed per `conda activate PREFIX`. + + Example usage: `sh -c '. PREFIX/env-activate.sh && command-to-run-from-PREFIX'`. + +- `--env-execute-script=FILE`: + + Create an executable `FILE` (defaults to `PREFIX/env-execute`) which runs a given program in the activated `PREFIX` environment. + + Example usage: `PREFIX/env-execute command-to-run-from-PREFIX`. + +- `--remove-paths=GLOB`: + + Remove some paths from `PREFIX` to reduce the target container image size. + +- `--strip-files=GLOB`: + + Run [`strip`](https://sourceware.org/binutils/docs/binutils/strip.html) on files in `PREFIX` whose paths match `GLOB` to reduce the target container image size. + +- `licenses-path=PATH`: + + Directory in which to copy license files for the installed packages (defaults to `PREFIX/conda-meta`). + + +## Usage example: +```Dockerfile +FROM quay.io/bioconda/create-env:2.1.0 as build +# Create an environment containing python=3.9 at /usr/local using mamba, strip +# files and remove some less important files: +RUN export CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=0 \ + && \ + /opt/create-env/env-execute \ + create-env \ + --conda=mamba \ + --strip-files='bin/*' \ + --strip-files='lib/*' \ + --remove-paths='*.a' \ + --remove-paths='share/terminfo/[!x]*' \ + /usr/local \ + python=3.9 + +# The base image below (quay.io/bioconda/base-glibc-busybox-bash:2.1.0) defines +# /usr/local/env-execute as the ENTRYPOINT so that created containers always +# start in an activated environment. +FROM quay.io/bioconda/base-glibc-busybox-bash:2.1.0 as target +COPY --from=build /usr/local /usr/local + +FROM target as test +RUN /usr/local/env-execute python -c 'import sys; print(sys.version)' +RUN /usr/local/env-activate.sh && python -c 'import sys; print(sys.version)' + +# Build and test with, e.g.: +# buildah bud --target=target --tag=localhost/python:3.9 . +# podman run --rm localhost/python:3.9 python -c 'import sys; print(sys.version)' +``` + +## Miscellaneous information: + +- Run `podman run --rm quay.io/bioconda/create-env create-env --help` for usage information. + +- Run `podman run --rm quay.io/bioconda/create-env conda config --show-sources` to see predefined configuration options. + +- The environment in which `create-env` runs has been itself created by `create-env`. + As such, `/opt/create-env/env-activate.sh` and `/opt/create-env/env-execute` scripts can be used to activate/execute in `create-env`'s environment in a `Dockerfile` context. 
+ In other contexts when a container is run via the image's entrypoint, the environments is activated automatically. + + The separate `/opt/create-env` path is used to avoid collisions with environments created at, e.g., `/usr/local` or `/opt/conda`. + +- By default, package files are copied rather than hard-linked to avoid altering Conda package cachge files when running `strip`. + + If the target image should contain multiple environments, it is advisable to set `CONDA_ALWAYS_COPY=0` to allow hardlinks between the environments (to reduce the overall image size) and run `strip` after the environments have been created. + This can be done by invoking `create-env` twice whilst omitting the environment creation during the second invocation (using `--conda=:`). + + E.g.: + ```sh + . /opt/create-env/env-activate.sh + export CONDA_ALWAYS_COPY=0 + create-env --conda=mamba /opt/python-3.8 python=3.8 + create-env --conda=mamba /opt/python-3.9 python=3.9 + create-env --conda=: --strip-files=\* /opt/python-3.8 + create-env --conda=: --strip-files=\* /opt/python-3.9 + ``` + +- Container images created as in the example above are meant to be lightweight and as such do **not** contain `conda`. + Hence, there is no `conda activate PREFIX` available but only the source-able `PREFIX/env-activate.sh` scripts and the `PREFIX/env-execute` launchers. + These scripts are generated at build time and assume no previously activated Conda environment. + Likewise, the environments are not expected to be deactivated, which is why no corresponding deactivate scripts are provided. diff --git a/images/create-env/create-env b/images/create-env/create-env new file mode 100755 index 00000000000..fde5bffc334 --- /dev/null +++ b/images/create-env/create-env @@ -0,0 +1,242 @@ +#! /bin/sh -eu + +for arg do + case "${arg}" in + --help ) + cat <<'end-of-help' +Usage: create-env [OPTIONS]... [--] PREFIX [CONDA_CREATE_ARGS]... +Use conda (or mamba via --conda=mamba) to create a Conda environment at PREFIX +according to specifications given by CONDA_CREATE_ARGS. + + --conda=CONDA Conda implementation to run CONDA CREATE for. + E.g.: "conda", "mamba", "conda env", "mamba env". + Use ":" to skip env creation. (default: conda) + --create-command=CREATE Conda command to run. E.g.: "create", "install". + (default: create) + --env-activate-args=ARGS Single string of arguments to pass on to + print-env-activate. (default: --prefix=PREFIX) + --env-activate-script=FILE Destination path of environment activation + script. (default: PREFIX/env-activate.sh) + --env-execute-script=FILE Destination path of environment execution script. + (default: PREFIX/env-execute) + --remove-paths=GLOB Glob of paths to remove from PREFIX after its + creation. Can be passed on multiple times. Will + be passed on to `find -path PREFIX/GLOB`. + (no default) + --strip-files=GLOB Glob of paths in PREFIX to run `strip` on. Will + be passed on to `find -type f -path PREFIX/GLOB`. + Error messages from `strip` are suppressed, i.e., + --strip-files=* may be used to run `strip` on all + files. Can be passed on multiple times. + (no default) + --licenses-path=PATH Destination path to copy package license files + to (relative to PREFIX or absolute). Pass on + empty path (--licenses-path=) to skip copying. 
+ (default: conda-meta) +end-of-help + exit 0 ;; + --conda=* ) + conda_impl="${arg#--conda=}" + shift ;; + --create-command=* ) + create_command="${arg#--create-command=}" + shift ;; + --env-activate-args=* ) + env_activate_args="${arg#--env-activate-args=}" + shift ;; + --env-activate-script=* ) + env_activate_file="${arg#--env-activate-script=}" + shift ;; + --env-execute-script=* ) + env_execute_file="${arg#--env-execute-script=}" + shift ;; + --remove-paths=* ) + remove_paths_globs="$( + printf '%s\n' \ + ${remove_paths_globs+"${remove_paths_globs}"} \ + "${arg#--remove-paths=}" + )" + shift ;; + --strip-files=* ) + strip_files_globs="$( + printf '%s\n' \ + ${strip_files_globs+"${strip_files_globs}"} \ + "${arg#--strip-files=}" + )" + shift ;; + --licenses-path=* ) + licenses_path="${arg#--licenses-path=}" + shift ;; + -- ) + break ;; + -* ) + printf 'unknown option: %s\n' "${arg}" + exit 1 ;; + * ) + break + esac +done + +if [ $# -eq 0 ] ; then + printf 'missing argument: environment path\n' + exit 1 +fi + +prefix="${1%%/}" +shift + +conda_impl="${conda_impl:-conda}" +create_command="${create_command-create}" +env_activate_args="--prefix='${prefix}' ${env_activate_args-}" +env_activate_file="${env_activate_file-"${prefix}/env-activate.sh"}" +env_execute_file="${env_execute_file-"${prefix}/env-execute"}" +remove_paths_globs="$( printf '%s\n' "${remove_paths_globs-}" | sort -u )" +strip_files_globs="$( printf '%s\n' "${strip_files_globs-}" | sort -u )" +licenses_path="${licenses_path-conda-meta}" + + +set +u +eval "$( conda shell.posix activate base )" +set -u + +printf 'creating environment at %s ...\n' "${prefix}" 1>&2 +CONDA_YES=1 \ + ${conda_impl} \ + ${create_command} \ + --prefix="${prefix}" \ + "${@}" + +if [ -n "${env_activate_file}${env_execute_file}" ] ; then + printf 'generating activation script...\n' 1>&2 + activate_script="$( + eval "set -- ${env_activate_args}" + print-env-activate "${@}" + )" + if [ -n "${env_activate_file-}" ] ; then + printf 'writing activation script to %s ...\n' "${env_activate_file}" 1>&2 + printf '%s\n' \ + "${activate_script}" \ + > "${env_activate_file}" + activate_script=". '${env_activate_file}'" + fi + if [ -n "${env_execute_file-}" ] ; then + printf 'writing execution script to %s ...\n' "${env_execute_file}" 1>&2 + printf '%s\n' \ + '#! /bin/bash' \ + "${activate_script}" \ + 'exec "${@}"' \ + > "${env_execute_file}" + chmod +x "${env_execute_file}" + fi +fi + + +if [ -n "${remove_paths_globs}" ] ; then + printf 'removing paths from %s ...\n' "${prefix}" 1>&2 + ( + eval "set -- $( + printf %s "${remove_paths_globs}" \ + | sed -e "s|.*|-path '${prefix}/&'|" -e '1!s/^/-o /' \ + | tr '\n' ' ' + )" + find "${prefix}" \ + \( "${@}" \) \ + -delete + ) +fi + +if [ -n "${strip_files_globs}" ] ; then + # Ensure "strip" is available beforehand because errors are ignored later on. + strip --version > /dev/null + printf 'stripping binaries in %s ...\n' "${prefix}" 1>&2 + ( + eval "set -- $( + printf %s "${strip_files_globs}" \ + | sed -e "s|.*|-path '${prefix}/&'|" -e '1!s/^/-o /' \ + | tr '\n' ' ' + )" + # Strip binaries. (Run strip on all files; ignore errors for non-ELF files.) + # Limit open fds (ulimit -n) for strip (small number chosen arbitrarily). + # (To avoid "could not create temporary file to hold stripped copy: Too many open files") + + # Filter out the binaries currently in use by the pipeline via sed below. 
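+    # (The stat/sed combination below skips the device/inode pairs of the find,
+    # xargs, sed and strip binaries themselves, so the tools running this pipeline
+    # are not modified while in use.)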
+ skip_inode_expressions="$( + command -v -- find xargs sed strip \ + | xargs -- stat -L -c '-e /^%d,%i:/d' -- + )" + find "${prefix}" \ + -type f \ + \( "${@}" \) \ + -print0 \ + | xargs \ + -0 \ + -n 64 \ + -- \ + stat -L -c '%d,%i:%n' -- \ + | sed \ + ${skip_inode_expressions} \ + -e 's/^[^:]*://' \ + | tr \\n \\0 \ + | + xargs \ + -0 \ + -n 64 \ + -- \ + strip -- \ + 2>&1 \ + | sed '/: file format not recognized/d' \ + || true + ) +fi + + +if [ -n "${licenses_path}" ] ; then + abs_licenses_path="$( + cd "${prefix}" + mkdir -p "${licenses_path}" + cd "${licenses_path}" + pwd + )" + printf 'copying license files to %s ...\n' "${abs_licenses_path}" 1>&2 + pkgs_dirs="$( + conda config --show pkgs_dirs \ + | sed -n 's|[^/]*\(/.*\)|"\1"|p' \ + | tr '\n' ' ' + )" + ( + eval "set -- $( + find "${prefix}/conda-meta" \ + -maxdepth 1 \ + -name \*.json \ + | sed 's|.*/\(.*\)\.json|"\1"|' \ + | tr '\n' ' ' + )" + for pkg do + pkg_info="$( + eval "set -- ${pkgs_dirs}" + for pkgs_dir ; do + if [ -d "${pkgs_dir}/${pkg}/info" ] ; then + printf %s "${pkgs_dir}/${pkg}/info" + exit + fi + done + printf 'missing metadata for %s\n' "${pkg}" 1>&2 + exit 1 + )" + find "${pkg_info}" \ + -maxdepth 1 \ + \( -name LICENSE.txt -o -name licenses \) \ + -exec sh -ec ' + dest_dir="${1}" ; shift + mkdir -p "${dest_dir}" + cp -fR "${@}" "${dest_dir}/" + ' -- "${abs_licenses_path}/${pkg}" {} \+ \ + || { + printf 'failed to copy licenses for %s\n' "${pkg}" 1>&2 + exit 1 + } + done + ) +fi + +printf 'finished create-env for %s\n' "${prefix}" 1>&2 diff --git a/images/create-env/install-conda b/images/create-env/install-conda new file mode 100755 index 00000000000..a3b9b33272e --- /dev/null +++ b/images/create-env/install-conda @@ -0,0 +1,124 @@ +#! /bin/bash -eux + +requirements_file="${1}" +conda_install_prefix="${2}" + +# Install a bootstrap Miniconda installation. +miniconda_boostrap_prefix="$( pwd )/miniconda" +# Run the following in a subshell to avoid environment changes from bootstrap. +( + + # Use the base image-provided tools if they work for us: + tools='' + find -print0 -maxdepth 0 && xargs -0 true < /dev/null \ + || tools="${tools} findutils" + sed -e '' < /dev/null \ + || tools="${tools} sed" + + sh ./miniconda.sh \ + -b \ + -p "${miniconda_boostrap_prefix}" + + # Install the base Conda installation. + . "${miniconda_boostrap_prefix}/etc/profile.d/conda.sh" + + # Install conda, mamba and some additional tools: + # - tini: init program, + # - binutils, findutils: tools to strip down image/environment size, + + # Only need `strip` executable from binutils. Other binaries from the package + # and especially the "sysroot" dependency is only bloat for this container + # image. (NOTE: The binary needs libgcc-ng which is explicitly added later.) + mamba create --yes \ + --prefix="${conda_install_prefix}" \ + --channel=conda-forge \ + binutils + cp -aL "${conda_install_prefix}/bin/strip" ./strip + conda run --prefix="${conda_install_prefix}" strip -- ./strip + mamba remove --yes --all \ + --prefix="${conda_install_prefix}" + + mamba create --yes \ + --prefix="${conda_install_prefix}" \ + --channel=conda-forge \ + \ + --file="${requirements_file}" \ + \ + tini \ + \ + libgcc-ng \ + ${tools} \ + ; + + mv \ + ./print-env-activate \ + ./create-env \ + ./strip \ + "${conda_install_prefix}/bin/" +) + +# Activate the new base environment. 
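+# ("conda shell.posix activate" prints POSIX shell code; eval-ing it puts the newly
+# installed prefix's conda, mamba and create-env on PATH for the rest of this
+# script. The set +u/-u toggle guards against unset variables referenced by that
+# generated code under "set -u".)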
+activate_script="$( + "${conda_install_prefix}/bin/conda" shell.posix activate base +)" +set +u +eval "${activate_script}" +set -u +unset activate_script + +# Strip find/xargs/sed beforehand as they are excluded in the strip pipeline. +for prog in find xargs sed ; do + case "$( command -v "${prog}" )" in + "${conda_install_prefix%%/}"/* ) + strip -- "$( command -v "${prog}" )" + esac +done + +# Use --conda=: to turn the `conda create` into a no-op, but do continue to +# run strip, remove files and output the activate/execute scripts. +CONDA_PKGS_DIRS="${miniconda_boostrap_prefix}/pkgs" \ + create-env \ + --conda=: \ + --strip-files=\* \ + --remove-paths=\*.a \ + --remove-paths=\*.pyc \ + --env-activate-args=--prefix-is-base \ + "${conda_install_prefix}" + +# Remove bootstrap Miniconda files. +rm -rf "${miniconda_boostrap_prefix}" + +# Add standard Bioconda config to root's Conda config. +conda config \ + --append channels conda-forge \ + --append channels bioconda \ + ; +conda config \ + --remove channels defaults \ + 2> /dev/null \ + || true +conda config \ + --remove repodata_fns current_repodata.json \ + 2> /dev/null \ + || true +conda config \ + --prepend repodata_fns repodata.json + +# Use `always_copy` to cut links to package cache. +# (Which is esp. important if files are manipulated via --strip-files !) +conda config \ + --set always_copy true \ + --set allow_softlinks false + + +# Log information of the newly created Conda installation. +# NB: Running conda after the .pyc removal will recreate some .pyc files. +# This is intentional as it speeds up conda startup time. +conda list --name=base +conda info --all +mamba --version +# Make sure we have the requested conda, mamba versions installed. +conda list \ + --export '^(conda|mamba)$' \ + | sed -n 's/=[^=]*$//p' \ + | diff "${requirements_file}" - diff --git a/images/create-env/print-env-activate b/images/create-env/print-env-activate new file mode 100755 index 00000000000..fbaa4a405b2 --- /dev/null +++ b/images/create-env/print-env-activate @@ -0,0 +1,95 @@ +#! /bin/bash -eu + +for arg do + case "${arg}" in + --help ) + cat <<'end-of-help' +Usage: print-env-activate [OPTIONS]... [--] [PREFIX] +Print shell activation script contents conda creates for environment at PREFIX. + + --prefix=PREFIX Optionally pass on PREFIX path as option-argument + instead of operand. + --prefix-is-base[=yes|=no] Specify if PREFIX is a base environment and use + `PREFIX/bin/conda` to create a full base + environment activation script. (default: no) +end-of-help + exit 0 ;; + --prefix=* ) + prefix="${arg#--prefix=}" + shift ;; + --prefix-is-base=yes | --prefix-is-base ) + prefix_is_base=1 + shift ;; + --prefix-is-base=no ) + prefix_is_base=0 + shift ;; + -- ) + break ;; + -* ) + printf 'unknown option: %s\n' "${arg}" + exit 1 ;; + * ) + break + esac +done + +if [ -z "${prefix:-}" ] ; then + prefix="${1}" + shift +fi + +if [ $# -ne 0 ] ; then + printf 'excess argument: %s\n' "${@}" + exit +fi + +if [ "${prefix_is_base-}" = 1 ] ; then + conda_exe="${prefix}/bin/conda" +else + conda_exe="$( command -v conda )" +fi + +# Deactivate current active env for full `conda shell.posix activate` changes. 
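+# (If an environment were still active, "conda shell.posix activate" would only
+# emit the incremental changes relative to it, and the generated activation script
+# would not work standalone in a fresh shell.)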
+deactivate_script="$( + conda shell.posix deactivate +)" +if [ "${prefix_is_base-}" = 1 ] ; then + deactivate_script="$( + printf %s "${deactivate_script}" \ + | sed "s|/[^\"'=:]*/condabin:||g" + )" +fi +set +u +eval "${deactivate_script}" +set -u +unset deactivate_script + +# NOTE: The following gets a proper PS1 value from an interactive Bash which +# `conda shell posix.activate` can reuse. +# NB: Ideally, conda activate should not use the current PS1 but rather write +# out something like PS1="${CONDA_PROMPT_MODIFIER}${PS1}". +# (Also, running this in the build instead of final container might not +# reflect the actual PS1 the target container image would provide.) +PS1="$( + bash -ic 'printf %s "${PS1}"' 2>/dev/null + printf . +)" +PS1="${PS1%.}" + +activate_script="$( + export PS1 + if [ ! "${prefix_is_base-}" = 1 ] ; then + export CONDA_ENV_PROMPT= + fi + "${conda_exe}" shell.posix activate "${prefix}" +)" + +printf '%s\n' "${activate_script}" \ + | { + if [ "${prefix_is_base-}" = 1 ] ; then + cat + else + grep -vE '^export (_CE_M|_CE_CONDA|CONDA_EXE|CONDA_PYTHON_EXE)=' \ + | sed "s|/[^\"'=:]*/condabin:||g" + fi + } From 40cee01338817c524a018aa453bbcc717b7e4941 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 9 Feb 2024 13:51:11 -0500 Subject: [PATCH 002/143] first draft of build script --- .github/workflows/scripts/generic_build.bash | 181 +++++++++++++++++++ 1 file changed, 181 insertions(+) create mode 100644 .github/workflows/scripts/generic_build.bash diff --git a/.github/workflows/scripts/generic_build.bash b/.github/workflows/scripts/generic_build.bash new file mode 100644 index 00000000000..fa423bc7ce2 --- /dev/null +++ b/.github/workflows/scripts/generic_build.bash @@ -0,0 +1,181 @@ +#!/bin/bash + +set -xeu + +[ -z $IMAGE_NAME ] && echo "Please set IMAGE_NAME" && exit 1 +[ -z $IMAGE_DIR ] && echo "Please set IMAGE_DIR" && exit 1 +[ -z $TAGS ] && echo "Please set TAGS" && exit 1 +[ -z $ARCHS ] && echo "Please set ARCHS" && exit 1 +[ -z $TYPE ] && echo "Please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" + +# Dockerfile lives here +cd $IMAGE_DIR + +for tag in ${TAGS} ; do + buildah manifest create "${IMAGE_NAME}:${tag}" +done + +# Read space-separated archs input string into an array +read -r -a archs_and_images <<<"$ARCHS" + +# ---------------------------------------------------------------------- +# Incrementally compose build args, depending on which inputs were +# provided. +BUILD_ARGS=() +if [ "$TYPE" == "base-debian" || "$TYPE" == "base-busybox" ]; then + [ -z "${DEBIAN_VERSION}" ] && echo "Please set DEBIAN VERSION" && exit 1 + BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") +fi + +if [ "$TYPE" == "build-env" || "$TYPE" == "create-env" ]; then + + [ -z "${BIOCONDA_UTILS_VERSION}" ] && echo "Please set BIOCONDA_UTILS_VERSION" && exit 1 + + # Due to different nomenclature used by conda-forge and buildah, we + # need to map archs to base images, so overwrite archs_and_images. + archs_and_images=( + "amd64=quay.io/condaforge/linux-anvil-cos7-x86_64" + "arm64=quay.io/condaforge/linux-anvil-aarch64" + ) + + # FIXME: build-env should export its own conda version immediately after + # running (or maybe as a label on the image?) so we can just use that as + # a build arg for create-env. + # + # build-env uses bioconda-utils that's local; create-env uses the build-env + # tagged after this version. 
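+  # A possible shape for that label-based handoff (sketch only; the label name
+  # "conda-version" and the ${build_env_image} variable are hypothetical):
+  #
+  #   conda_version="$( podman image inspect \
+  #       --format '{{ index .Labels "conda-version" }}' "${build_env_image}" )"
+  #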
+ if [ "$TYPE" == "create-env" ]; then + BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") + fi +fi + +if [ "$TYPE" == "base-busybox" ]; then + [ -z "$BUSYBOX_VERSION" ] && echo "Please set BUSYBOX_VERSION" && exit 1 + BUILD_ARGS+=("--build-arg=busybox_version=$BUSYBOX_VERSION") + + # Make a busybox image that we'll use further below. As shown in the + # Dockerfile.busybox, this uses the build-busybox script which in turn + # cross-compiles for x86_64 and aarch64, and these execuables are later + # copied into an arch-specific container. + # + # Note that --iidfile (used here and in later commands) prints the built + # image ID to the specified file so we can refer to the image later. + iidfile="$( mktemp )" + buildah bud \ + --iidfile="${iidfile}" \ + --file=Dockerfile.busybox \ + ${BUILD_ARGS[@]} + busybox_image="$( cat "${iidfile}" )" + rm "${iidfile}" + + # And then extend the build args with this image. + BUILD_ARGS+=("--build-arg=busybox_image=${busybox_image}") +fi + +# ---------------------------------------------------------------------- + +# Build each arch's image using the array of archs. +# +for arch_and_image in "${archs_and_images[@]}" ; do + arch=$(echo $arch_and_image | cut -f1 -d "=") + base_image=$(echo $arch_and_image | cut -f2 -d "=") + + # build-env is the only one that needs an arch-specific base image from + # conda-forge; this needs to be set within this loop rather than adding to + # BUILD_ARGS array. + BASE_IMAGE_BUILD_ARG="" + if [ "$TYPE" == "build-env" ]; then + BASE_IMAGE_BUILD_ARG="--build-arg=base_image="${base_image}"" + fi + + # Actual building happens here. + iidfile="$( mktemp )" + buildah bud \ + --arch="${arch}" \ + --iidfile="${iidfile}" \ + ${BUILD_ARGS[@]} \ + $BASE_IMAGE_BUILD_ARG + image_id="$( cat "${iidfile}" )" + rm "${iidfile}" + + # Extract various package info and version info, then store that info + # as labels. Container is removed at the end to avoid e.g. having these + # commands in the history of the container. + container="$( buildah from "${image_id}" )" + run() { buildah run "${container}" "${@}" ; } + LABELS=() + LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") + LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") + LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") + LABELS+=("--label=debian=$( run cat /etc/debian_version | sed '1!d' )") + LABELS+=("--label=bash=$( run bash --version | sed '1!d' )") + if [ "$TYPE" == "build-env" ]; then + bioconda_utils="$( + run sh -c '. /opt/conda/etc/profile.d/conda.sh && conda activate base && bioconda-utils --version' \ + | rev | cut -f1 -d " " | rev + )" + LABELS+=("--label=bioconda-utils=${bioconda_utils}") + + # save conda/mamba versions to install in create-env + conda_version=$( + run sh -c '/opt/conda/bin/conda/list --export "^(conda|mamba)$"' \ + | sed -n 's/=[^=]*$//p' + ) + fi + + if [ ! -z "${BUSYBOX_VERSION}" ]; then + LABELS+=("--label=busybox-version=${BUSYBOX_VERSION}") + fi + buildah rm "${container}" + + # Add labels to a new container... + container="$( buildah from "${image_id}" )" + buildah config ${LABELS[@]} "${container}" + + # ...then store the container (now with labels) as a new image. This + # is what we'll use to eventually upload. + image_id="$( buildah commit "${container}" )" + buildah rm "${container}" + + # Add images to manifest. Individual image tags include arch; manifest does not. 
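+  # For example, with IMAGE_NAME=foo and TAGS="1.0 latest", an amd64 build adds
+  # foo:1.0-amd64 and foo:latest-amd64, and the same image ID is appended to
+  # the multi-arch manifests foo:1.0 and foo:latest.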
+ for tag in ${TAGS} ; do + buildah tag \ + "${image_id}" \ + "${IMAGE_NAME}:${tag}-${arch}" + buildah manifest add \ + "${IMAGE_NAME}:${tag}" \ + "${image_id}" + + buildah inspect -t image ${image_name}:${tag}-${arch} + done # tags +done # archs_and_images +buildah inspect -t manifest ${image_name} + +# Extract image IDs from the manifest built in the last step +ids="$( + for tag in ${{ inputs.tags }} ; do + buildah manifest inspect "${image_name}:${tag}" \ + | jq -r '.manifests[]|.digest' \ + | while read id ; do + buildah images --format '{{.ID}}{{.Digest}}' \ + | sed -n "s/${id}//p" + done + done + )" + +# Run the tests; see Dockerfile.test in the relevant image dir for the +# actual tests run +ids="$( printf %s "${ids}" | sort -u )" +for id in ${ids} ; do + podman history "${id}" + buildah bud \ + --build-arg=base="${id}" \ + --file=Dockerfile.test \ + "${IMAGE_DIR}" +done + +# Clean up +buildah rmi --prune || true + +# TODO: what should be exported here? Image IDs? Manifest? How do we access +# this stuff outside the job? From d68171b95ac8b3567c89fb48f496b4b92c3da6d1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:14:24 -0500 Subject: [PATCH 003/143] rename/reorganize --- .../{build-image.yml => build-images.yml} | 0 .github/workflows/generic_build.bash | 270 ++++++++++++++++++ .github/workflows/scripts/generic_build.bash | 181 ------------ 3 files changed, 270 insertions(+), 181 deletions(-) rename .github/workflows/{build-image.yml => build-images.yml} (100%) create mode 100755 .github/workflows/generic_build.bash delete mode 100644 .github/workflows/scripts/generic_build.bash diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-images.yml similarity index 100% rename from .github/workflows/build-image.yml rename to .github/workflows/build-images.yml diff --git a/.github/workflows/generic_build.bash b/.github/workflows/generic_build.bash new file mode 100755 index 00000000000..ea82f08bd59 --- /dev/null +++ b/.github/workflows/generic_build.bash @@ -0,0 +1,270 @@ +#!/bin/bash + +# This single script builds the following containers depending on the value of +# the env var TYPE: +# +# - build-env: contains conda + conda-build + bioconda-utils, used for building +# package +# - create-env: contains the exact version of conda from build-env (which is +# expected to have been built beforehand). Used for creating env from +# package + depdendencies +# - base-busybox: the minimal container into which created conda envs are +# copied. This is the container uploaded to quay.io +# - base-debian: an extended version of the busybox container for special cases +# +# Built containers are added to a manifest. If multiple architectures are +# provided, they will all be added to a manifest which can be subsequently +# uploaded to a registry. + +USAGE=' +Builds various containers. + +Set env vars immediately before running. + +REQUIRED ARGS FOR ALL TYPES +=========================== +TYPE: base-busybox | base-debian | build-env | create-env +IMAGE_DIR: Location of Dockerfile. +IMAGE_NAME: Image name to upload. +ARCHS: Space-separated architectures e.g. "amd64 arm64" + +REQUIRED for base-busybox +------------------------- + TAGS: Space-separated tags. + DEBIAN_VERSION + BUSYBOX_VERSION + +REQUIRED for base-debian +------------------------ + TAGS: Space-separated tags. 
+  DEBIAN_VERSION
+
+REQUIRED for build-env
+----------------------
+  BIOCONDA_UTILS_VERSION
+  BIOCONDA_UTILS_FOLDER: relative to the Dockerfile
+
+REQUIRED for create-env
+-----------------------
+  BIOCONDA_UTILS_VERSION
+  BIOCONDA_UTILS_FOLDER: relative to the Dockerfile
+  CONDA_VERSION: conda version to install, typically of the form "conda=x.y.z" extracted from build-env
+  MAMBA_VERSION: mamba version to install, typically of the form "mamba=x.y.z" extracted from build-env
+  BUSYBOX_IMAGE: the image to use as a base; typically this will be the results
+    of building base-busybox in a previous run of this script.
+
+EXAMPLE USAGE
+=============
+
+  IMAGE_NAME=base-glibc-debian-bash \
+  IMAGE_DIR=../../../images/base-glibc-debian-bash \
+  TYPE="base-debian" \
+  TAGS="0.1.1 0.1" \
+  ARCHS="arm64 amd64" \
+  DEBIAN_VERSION="12.2" \
+  ./generic_build.bash
+
+'
+# ------------------------------------------------------------------------------
+# Handle required env vars
+[ -z "$IMAGE_NAME" ] && echo -e "$USAGE error: please set IMAGE_NAME" && exit 1
+[ -z "$IMAGE_DIR" ] && echo "error: please set IMAGE_DIR, where Dockerfile is found." && exit 1
+[ -z "$TYPE" ] && echo "error: please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" && exit 1
+[ -z "$ARCHS" ] && echo "error: please set ARCHS" && exit 1
+
+if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then
+  [ -n "$TAGS" ] && echo "error: TAGS should not be set for build-env or create-env; use BIOCONDA_UTILS_VERSION instead" && exit 1
+  [ -z "$BIOCONDA_UTILS_VERSION" ] && echo "error: please set BIOCONDA_UTILS_VERSION for build-env and create-env" && exit 1
+
+  TAGS="$BIOCONDA_UTILS_VERSION" # Set TAGS to BIOCONDA_UTILS_VERSION from here on
+
+  if [ "$TYPE" == "build-env" ]; then
+    [ -z "$BIOCONDA_UTILS_FOLDER" ] && echo "error: please set BIOCONDA_UTILS_FOLDER for build-env" && exit 1
+    [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for build-env" && exit 1
+  fi
+
+  if [ "$TYPE" == "create-env" ]; then
+    [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1
+    [ -z "$CONDA_VERSION" ] && echo "error: please set CONDA_VERSION for create-env" && exit 1
+    [ -z "$MAMBA_VERSION" ] && echo "error: please set MAMBA_VERSION for create-env" && exit 1
+  fi
+fi
+
+if [ "$TYPE" == "base-debian" ] || [ "$TYPE" == "base-busybox" ]; then
+  [ -z "${DEBIAN_VERSION}" ] && echo "error: please set DEBIAN_VERSION" && exit 1
+fi
+
+if [ "$TYPE" == "base-busybox" ]; then
+  [ -z "$BUSYBOX_VERSION" ] && echo "error: please set BUSYBOX_VERSION" && exit 1
+fi
+# ------------------------------------------------------------------------------
+
+set -xeu
+
+# Dockerfile lives here
+cd $IMAGE_DIR
+
+# One manifest per tag
+for tag in ${TAGS} ; do
+  buildah manifest create "${IMAGE_NAME}:${tag}"
+done
+
+# Read space-separated archs input string into an array
+read -r -a archs_and_images <<<"$ARCHS"
+
+# ------------------------------------------------------------------------------
+# BUILD_ARGS: Incrementally compose build args array, depending on which inputs
+# were provided. This will eventually be provided to buildah bud.
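+# For instance, a create-env build typically ends up with something like the
+# following (values are illustrative):
+#   --build-arg=BUSYBOX_IMAGE=localhost/tmp-busybox
+#   --build-arg=CONDA_VERSION=conda=23.11.0
+#   --build-arg=MAMBA_VERSION=mamba=1.5.6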
+# +BUILD_ARGS=() +if [ "$TYPE" == "base-debian" ]; then + BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base +fi + +if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then + + if [ "$TYPE" == "create-env" ]; then + BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base + BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install + BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install + fi + + if [ "$TYPE" == "build-env" ]; then + BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base + BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=$BIOCONDA_UTILS_FOLDER") # git clone, relative to Dockerfile + BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as tag + fi +fi + +if [ "$TYPE" == "base-busybox" ]; then + BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base for building busybox + BUILD_ARGS+=("--build-arg=busybox_version=$BUSYBOX_VERSION") # busybox version to build and use + + # Make a busybox image that we'll use further below. As shown in the + # Dockerfile.busybox, this uses the build-busybox script which in turn + # cross-compiles for x86_64 and aarch64, and these execuables are later + # copied into an arch-specific container. + # + # Note that --iidfile (used here and in later commands) prints the built + # image ID to the specified file so we can refer to the image later. + iidfile="$( mktemp )" + echo $BUILD_ARGS + buildah bud \ + --iidfile="${iidfile}" \ + --file=Dockerfile.busybox \ + ${BUILD_ARGS[@]} + busybox_image="$( cat "${iidfile}" )" + rm "${iidfile}" + + BUILD_ARGS+=("--build-arg=busybox_image=${busybox_image}") # just-built image from which busybox executable will be copied +fi + +# ------------------------------------------------------------------------------ +# BUILDING: +# - Build each arch's image. +# - Extract info +# - Add info as labels +# - Add tags to image +# - Add image to manifest +# +for arch in $ARCHS; do + + # For build-env, need to use different base image from upstream conda-forge + # depending on arch. + BASE_IMAGE_BUILD_ARG="" + if [ "$TYPE" == "build-env" ]; then + if [ "$arch" == "amd64" ]; then + BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-cos7-x86_64" + fi + if [ "$arch" == "arm64" ]; then + BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-aarch64" + fi + fi + + # Actual building happens here. + iidfile="$( mktemp )" + buildah bud \ + --arch="${arch}" \ + --iidfile="${iidfile}" \ + ${BUILD_ARGS[@]} \ + $BASE_IMAGE_BUILD_ARG + image_id="$( cat "${iidfile}" )" + rm "${iidfile}" + + # Extract various package info and version info, then store that info + # as labels. Container is removed at the end to avoid e.g. having these + # commands in the history of the container. 
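+  # (The labels can be read back later with, e.g.,
+  # `podman image inspect --format '{{ .Labels }}' IMAGE_NAME:TAG`.)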
+ container="$( buildah from "${image_id}" )" + run() { buildah run "${container}" "${@}" ; } + LABELS=() + LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") + LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") + LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") + LABELS+=("--label=debian=$( run cat /etc/debian_version | sed '1!d' )") + LABELS+=("--label=bash=$( run bash --version | sed '1!d' )") + if [ "$TYPE" == "build-env" ]; then + bioconda_utils="$( + run sh -c '. /opt/conda/etc/profile.d/conda.sh && conda activate base && bioconda-utils --version' \ + | rev | cut -f1 -d " " | rev + )" + LABELS+=("--label=bioconda-utils=${bioconda_utils}") + fi + + if [ "$TYPE" == "base-busybox" ]; then + LABELS+=("--label=busybox-version=${BUSYBOX_VERSION}") + fi + buildah rm "${container}" + + # Add labels to a new container... + container="$( buildah from "${image_id}" )" + buildah config "${LABELS[@]}" "${container}" + + # ...then store the container (now with labels) as a new image. + # This is what we'll use to eventually upload. + image_id="$( buildah commit "${container}" )" + buildah rm "${container}" + + # Add images to manifest. Note that individual image tags include arch; + # manifest does not. + for tag in ${TAGS} ; do + buildah tag \ + "${image_id}" \ + "${IMAGE_NAME}:${tag}-${arch}" + buildah manifest add \ + "${IMAGE_NAME}:${tag}" \ + "${image_id}" + + buildah inspect -t image ${IMAGE_NAME}:${tag}-${arch} + done # tags +done # archs_and_images + +for tag in ${TAGS}; do + buildah inspect -t manifest ${IMAGE_NAME}:${tag} +done + +# Extract image IDs from the manifest built in the last step +ids="$( + for tag in $TAGS ; do + buildah manifest inspect "${IMAGE_NAME}:${tag}" \ + | jq -r '.manifests[]|.digest' \ + | while read id ; do + buildah images --format '{{.ID}}{{.Digest}}' \ + | sed -n "s/${id}//p" + done + done + )" + +# Run the tests; see Dockerfile.test in the relevant image dir for the +# actual tests run +# +# N.B. need to unique since one image can have multiple tags +ids="$( printf %s "${ids}" | sort -u )" +for id in ${ids} ; do + podman history "${id}" + buildah bud \ + --build-arg=base="${id}" \ + --file=Dockerfile.test +done + +# Clean up +buildah rmi --prune || true diff --git a/.github/workflows/scripts/generic_build.bash b/.github/workflows/scripts/generic_build.bash deleted file mode 100644 index fa423bc7ce2..00000000000 --- a/.github/workflows/scripts/generic_build.bash +++ /dev/null @@ -1,181 +0,0 @@ -#!/bin/bash - -set -xeu - -[ -z $IMAGE_NAME ] && echo "Please set IMAGE_NAME" && exit 1 -[ -z $IMAGE_DIR ] && echo "Please set IMAGE_DIR" && exit 1 -[ -z $TAGS ] && echo "Please set TAGS" && exit 1 -[ -z $ARCHS ] && echo "Please set ARCHS" && exit 1 -[ -z $TYPE ] && echo "Please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" - -# Dockerfile lives here -cd $IMAGE_DIR - -for tag in ${TAGS} ; do - buildah manifest create "${IMAGE_NAME}:${tag}" -done - -# Read space-separated archs input string into an array -read -r -a archs_and_images <<<"$ARCHS" - -# ---------------------------------------------------------------------- -# Incrementally compose build args, depending on which inputs were -# provided. 
-BUILD_ARGS=() -if [ "$TYPE" == "base-debian" || "$TYPE" == "base-busybox" ]; then - [ -z "${DEBIAN_VERSION}" ] && echo "Please set DEBIAN VERSION" && exit 1 - BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") -fi - -if [ "$TYPE" == "build-env" || "$TYPE" == "create-env" ]; then - - [ -z "${BIOCONDA_UTILS_VERSION}" ] && echo "Please set BIOCONDA_UTILS_VERSION" && exit 1 - - # Due to different nomenclature used by conda-forge and buildah, we - # need to map archs to base images, so overwrite archs_and_images. - archs_and_images=( - "amd64=quay.io/condaforge/linux-anvil-cos7-x86_64" - "arm64=quay.io/condaforge/linux-anvil-aarch64" - ) - - # FIXME: build-env should export its own conda version immediately after - # running (or maybe as a label on the image?) so we can just use that as - # a build arg for create-env. - # - # build-env uses bioconda-utils that's local; create-env uses the build-env - # tagged after this version. - if [ "$TYPE" == "create-env" ]; then - BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") - fi -fi - -if [ "$TYPE" == "base-busybox" ]; then - [ -z "$BUSYBOX_VERSION" ] && echo "Please set BUSYBOX_VERSION" && exit 1 - BUILD_ARGS+=("--build-arg=busybox_version=$BUSYBOX_VERSION") - - # Make a busybox image that we'll use further below. As shown in the - # Dockerfile.busybox, this uses the build-busybox script which in turn - # cross-compiles for x86_64 and aarch64, and these execuables are later - # copied into an arch-specific container. - # - # Note that --iidfile (used here and in later commands) prints the built - # image ID to the specified file so we can refer to the image later. - iidfile="$( mktemp )" - buildah bud \ - --iidfile="${iidfile}" \ - --file=Dockerfile.busybox \ - ${BUILD_ARGS[@]} - busybox_image="$( cat "${iidfile}" )" - rm "${iidfile}" - - # And then extend the build args with this image. - BUILD_ARGS+=("--build-arg=busybox_image=${busybox_image}") -fi - -# ---------------------------------------------------------------------- - -# Build each arch's image using the array of archs. -# -for arch_and_image in "${archs_and_images[@]}" ; do - arch=$(echo $arch_and_image | cut -f1 -d "=") - base_image=$(echo $arch_and_image | cut -f2 -d "=") - - # build-env is the only one that needs an arch-specific base image from - # conda-forge; this needs to be set within this loop rather than adding to - # BUILD_ARGS array. - BASE_IMAGE_BUILD_ARG="" - if [ "$TYPE" == "build-env" ]; then - BASE_IMAGE_BUILD_ARG="--build-arg=base_image="${base_image}"" - fi - - # Actual building happens here. - iidfile="$( mktemp )" - buildah bud \ - --arch="${arch}" \ - --iidfile="${iidfile}" \ - ${BUILD_ARGS[@]} \ - $BASE_IMAGE_BUILD_ARG - image_id="$( cat "${iidfile}" )" - rm "${iidfile}" - - # Extract various package info and version info, then store that info - # as labels. Container is removed at the end to avoid e.g. having these - # commands in the history of the container. 
- container="$( buildah from "${image_id}" )" - run() { buildah run "${container}" "${@}" ; } - LABELS=() - LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") - LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") - LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") - LABELS+=("--label=debian=$( run cat /etc/debian_version | sed '1!d' )") - LABELS+=("--label=bash=$( run bash --version | sed '1!d' )") - if [ "$TYPE" == "build-env" ]; then - bioconda_utils="$( - run sh -c '. /opt/conda/etc/profile.d/conda.sh && conda activate base && bioconda-utils --version' \ - | rev | cut -f1 -d " " | rev - )" - LABELS+=("--label=bioconda-utils=${bioconda_utils}") - - # save conda/mamba versions to install in create-env - conda_version=$( - run sh -c '/opt/conda/bin/conda/list --export "^(conda|mamba)$"' \ - | sed -n 's/=[^=]*$//p' - ) - fi - - if [ ! -z "${BUSYBOX_VERSION}" ]; then - LABELS+=("--label=busybox-version=${BUSYBOX_VERSION}") - fi - buildah rm "${container}" - - # Add labels to a new container... - container="$( buildah from "${image_id}" )" - buildah config ${LABELS[@]} "${container}" - - # ...then store the container (now with labels) as a new image. This - # is what we'll use to eventually upload. - image_id="$( buildah commit "${container}" )" - buildah rm "${container}" - - # Add images to manifest. Individual image tags include arch; manifest does not. - for tag in ${TAGS} ; do - buildah tag \ - "${image_id}" \ - "${IMAGE_NAME}:${tag}-${arch}" - buildah manifest add \ - "${IMAGE_NAME}:${tag}" \ - "${image_id}" - - buildah inspect -t image ${image_name}:${tag}-${arch} - done # tags -done # archs_and_images -buildah inspect -t manifest ${image_name} - -# Extract image IDs from the manifest built in the last step -ids="$( - for tag in ${{ inputs.tags }} ; do - buildah manifest inspect "${image_name}:${tag}" \ - | jq -r '.manifests[]|.digest' \ - | while read id ; do - buildah images --format '{{.ID}}{{.Digest}}' \ - | sed -n "s/${id}//p" - done - done - )" - -# Run the tests; see Dockerfile.test in the relevant image dir for the -# actual tests run -ids="$( printf %s "${ids}" | sort -u )" -for id in ${ids} ; do - podman history "${id}" - buildah bud \ - --build-arg=base="${id}" \ - --file=Dockerfile.test \ - "${IMAGE_DIR}" -done - -# Clean up -buildah rmi --prune || true - -# TODO: what should be exported here? Image IDs? Manifest? How do we access -# this stuff outside the job? From acb1d84c8880321a620b39d816ba93fb46042ef0 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:14:58 -0500 Subject: [PATCH 004/143] parameterize busybox image to copy from --- images/bioconda-utils-build-env-cos7/Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index f90b0a696c4..7619cdb7822 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -1,9 +1,8 @@ ARG base_image - FROM ${base_image} as base -# Copy over C.UTF-8 locale from our base image to make it consistently available during build. -COPY --from=quay.io/bioconda/base-glibc-busybox-bash /usr/lib/locale/C.utf8 /usr/lib/locale/C.utf8 +ARG BUSYBOX_IMAGE +COPY --from=${BUSYBOX_IMAGE} /usr/lib/locale/C.utf8 /usr/lib/locale/C.utf8 # Provide system deps unconditionally until we are able to offer per-recipe installs. 
# (Addresses, e.g., "ImportError: libGL.so.1" in tests directly invoked by conda-build.) From 988ee014c10851d7b79a9532e10761b9f4aa391b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:15:22 -0500 Subject: [PATCH 005/143] git checkout parameterized branch --- images/bioconda-utils-build-env-cos7/Dockerfile | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 7619cdb7822..55c2b309d7d 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -11,6 +11,8 @@ RUN yum install -y mesa-libGL-devel \ && \ yum install -y openssh-clients \ && \ + yum install -y git \ + && \ yum clean all && \ rm -rf /var/cache/yum/* @@ -28,8 +30,12 @@ RUN . /opt/conda/etc/profile.d/conda.sh && \ FROM base as build WORKDIR /tmp/repo -ARG BIOCONDA_UTILS_FOLDER=./bioconda-utils/ +ARG BIOCONDA_UTILS_FOLDER=./bioconda-utils COPY ${BIOCONDA_UTILS_FOLDER} ./ + +# Make sure we're using the configured version of bioconda-utils for this +# build. +RUN git checkout ${bioconda_utils_version} RUN . /opt/conda/etc/profile.d/conda.sh && conda list RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ pip wheel . && \ From a4801a5b8e56a227fd55f0bb953532bfac6764f3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:15:59 -0500 Subject: [PATCH 006/143] parameterize busybox image --- images/create-env/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/images/create-env/Dockerfile b/images/create-env/Dockerfile index 93b839481b5..c09d00795ab 100644 --- a/images/create-env/Dockerfile +++ b/images/create-env/Dockerfile @@ -7,7 +7,8 @@ RUN /opt/conda/bin/conda list \ > /tmp/requirements.txt -FROM quay.io/bioconda/base-glibc-busybox-bash as build +ARG BUSYBOX_IMAGE +FROM ${BUSYBOX_IMAGE} as build WORKDIR /tmp/work COPY --from=bioconda-build-env /tmp/requirements.txt ./ @@ -19,10 +20,9 @@ RUN arch="$( uname -m )" \ RUN ./install-conda ./requirements.txt /opt/create-env - -FROM quay.io/bioconda/base-glibc-busybox-bash - +FROM ${BUSYBOX_IMAGE} COPY --from=build /opt/create-env /opt/create-env + # Copy (Bioconda-specific) Conda configuration created by the install-conda script. COPY --from=build /root/.condarc /root/ From de2635b7c4e6e28fcefd88ce3857863bab077c61 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:16:14 -0500 Subject: [PATCH 007/143] depend on passed-in conda/mamba versions --- images/create-env/Dockerfile | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/images/create-env/Dockerfile b/images/create-env/Dockerfile index c09d00795ab..476d8484c3c 100644 --- a/images/create-env/Dockerfile +++ b/images/create-env/Dockerfile @@ -1,23 +1,17 @@ -# Use the exact conda, mamba versions as used in bioconda-recipes' builds. 
-ARG bioconda_utils_version -FROM quay.io/bioconda/bioconda-utils-build-env-cos7:${bioconda_utils_version} as bioconda-build-env -RUN /opt/conda/bin/conda list \ - --export '^(conda|mamba)$' \ - | sed -n 's/=[^=]*$//p' \ - > /tmp/requirements.txt - - ARG BUSYBOX_IMAGE FROM ${BUSYBOX_IMAGE} as build WORKDIR /tmp/work -COPY --from=bioconda-build-env /tmp/requirements.txt ./ COPY install-conda print-env-activate create-env ./ RUN arch="$( uname -m )" \ && \ wget --quiet -O ./miniconda.sh \ "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" +# Install exact versions of conda/mamba +ARG CONDA_VERSION +ARG MAMBA_VERSION +RUN echo $CONDA_VERSION > requirements.txt && echo $MAMBA_VERSION >> requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env FROM ${BUSYBOX_IMAGE} From 5a00ef3a89c2dc23b9fa0c1efe15bcf5bd3dcbf9 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:16:38 -0500 Subject: [PATCH 008/143] first round of refactoring build-images.yml --- .github/workflows/build-images.yml | 64 +++++++++++++++--------------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 1ae6a9ec7bd..7d0ab48414d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -1,4 +1,4 @@ -name: Build image +name: Build images concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -10,19 +10,21 @@ on: - 'docs/**' - 'test/**' +env: + BIOCONDA_UTILS_FOLDER: bioconda-utils + DEBIAN_VERSION: "12.2" + BUSYBOX_VERSION: "1.36.1" + BASE_TAGS: "0.1.1 latest" + BUILD_ENV_IMAGE_NAME: tmp-build-env + CREATE_ENV_IMAGE_NAME: tmp-create-env + BASE_DEBIAN_IMAGE_NAME: tmp-debian + BASE_BUSYBOX_IMAGE_NAME: tmp-busybox + + jobs: build: name: Build image runs-on: ubuntu-20.04 - strategy: - matrix: - include: - - arch: arm64 - image: bioconda-utils-build-env-cos7-aarch64 - base_image: quay.io/condaforge/linux-anvil-aarch64 - - arch: amd64 - image: bioconda-utils-build-env-cos7 - base_image: quay.io/condaforge/linux-anvil-cos7-x86_64 steps: - uses: actions/checkout@v4 with: @@ -31,9 +33,6 @@ jobs: - id: get-tag run: | tag=${{ github.event.release && github.event.release.tag_name || github.sha }} - - # https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/ - # printf %s "::set-output name=tag::${tag#v}" printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT - name: Install qemu dependency @@ -41,23 +40,24 @@ jobs: sudo apt-get update sudo apt-get install -y qemu-user-static - - name: Build image - id: buildah-build - uses: redhat-actions/buildah-build@v2 - with: - image: ${{ matrix.image }} - arch: ${{ matrix.arch }} - build-args: | - BASE_IMAGE=${{ matrix.base_image }} - tags: >- - latest - ${{ steps.get-tag.outputs.tag }} - dockerfiles: | - ./Dockerfile - - - name: Test built image + - name: Build base-debian run: | - image='${{ steps.buildah-build.outputs.image }}' - for tag in ${{ steps.buildah-build.outputs.tags }} ; do - podman run --rm "${image}:${tag}" bioconda-utils --version - done + IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ + IMAGE_DIR=../../images/base-glibc-debian-bash \ + ARCHS="amd64 arm64" \ + TYPE="base-debian" \ + DEBIAN_VERSION=$DEBIAN_VERSION \ + TAGS=$BASE_TAGS \ + ./generic_build.bash + + - name: Build base-busybox + run: | + IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ + IMAGE_DIR=../../images/base-glibc-busybox-bash \ + ARCHS="amd64 arm64" \ + TYPE="base-busybox" \ + 
DEBIAN_VERSION=$DEBIAN_VERSION \ + BUSYBOX_VERSION=$BUSYBOX_VERSION \ + TAGS=$BASE_TAGS \ + ./generic_build.bash + From 8fb22572a218422d014923d16ec7aa3f6ad962a2 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 13:25:09 -0500 Subject: [PATCH 009/143] move generic build to top, and adjust workflow --- .github/workflows/build-images.yml | 18 +++++++++--------- .../generic_build.bash => generic_build.bash | 0 2 files changed, 9 insertions(+), 9 deletions(-) rename .github/workflows/generic_build.bash => generic_build.bash (100%) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 7d0ab48414d..f32f51f9a86 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -43,7 +43,7 @@ jobs: - name: Build base-debian run: | IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ - IMAGE_DIR=../../images/base-glibc-debian-bash \ + IMAGE_DIR=images/base-glibc-debian-bash \ ARCHS="amd64 arm64" \ TYPE="base-debian" \ DEBIAN_VERSION=$DEBIAN_VERSION \ @@ -52,12 +52,12 @@ jobs: - name: Build base-busybox run: | - IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ - IMAGE_DIR=../../images/base-glibc-busybox-bash \ - ARCHS="amd64 arm64" \ - TYPE="base-busybox" \ - DEBIAN_VERSION=$DEBIAN_VERSION \ - BUSYBOX_VERSION=$BUSYBOX_VERSION \ - TAGS=$BASE_TAGS \ - ./generic_build.bash + IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ + IMAGE_DIR=images/base-glibc-busybox-bash \ + ARCHS="amd64 arm64" \ + TYPE="base-busybox" \ + DEBIAN_VERSION=$DEBIAN_VERSION \ + BUSYBOX_VERSION=$BUSYBOX_VERSION \ + TAGS=$BASE_TAGS \ + ./generic_build.bash diff --git a/.github/workflows/generic_build.bash b/generic_build.bash similarity index 100% rename from .github/workflows/generic_build.bash rename to generic_build.bash From e9864bad7b5d5cc19bc05d84a54d15a413690c07 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 22:17:01 -0500 Subject: [PATCH 010/143] split out base-debian into an independent job --- .github/workflows/build-images.yml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f32f51f9a86..1904f54a141 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -20,21 +20,19 @@ env: BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox - jobs: - build: - name: Build image + + # NOTE: base-debian can be a separate job since it is independent of the + # others. create-env depends on build-env, and both depend on base-busybox, + # so we can't split that out. 
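+  # (If they were split later, the dependent jobs would need an explicit
+  # `needs:` clause plus a way to hand the locally built images between
+  # runners, e.g. pushing to a temporary registry or sharing an artifact.)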
+ build-debian: + name: Build base-debian runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - id: get-tag - run: | - tag=${{ github.event.release && github.event.release.tag_name || github.sha }} - printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT - - name: Install qemu dependency run: | sudo apt-get update From 9ffeda8cf76ec2280ec8549efc856932f9590788 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 22:20:38 -0500 Subject: [PATCH 011/143] factor out archs --- .github/workflows/build-images.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 1904f54a141..4e847e12315 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -19,6 +19,7 @@ env: CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox + ARCHS: "amd64 arm64" jobs: @@ -42,9 +43,9 @@ jobs: run: | IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ - ARCHS="amd64 arm64" \ TYPE="base-debian" \ DEBIAN_VERSION=$DEBIAN_VERSION \ + ARCHS=$ARCHS \ TAGS=$BASE_TAGS \ ./generic_build.bash @@ -52,8 +53,8 @@ jobs: run: | IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-busybox-bash \ - ARCHS="amd64 arm64" \ TYPE="base-busybox" \ + ARCHS=$ARCHS \ DEBIAN_VERSION=$DEBIAN_VERSION \ BUSYBOX_VERSION=$BUSYBOX_VERSION \ TAGS=$BASE_TAGS \ From c7ac9b1dcb7dd8c1756f122662998a81cf3c76af Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 22:20:51 -0500 Subject: [PATCH 012/143] add build-env and create-env --- .github/workflows/build-images.yml | 66 ++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 4e847e12315..4b15bde588f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -49,6 +49,24 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash + build: + name: Build base-busybox, build-env, and create-env images + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - id: get-tag + run: | + tag=${{ github.event.release && github.event.release.tag_name || github.sha }} + printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT + + - name: Install qemu dependency + run: | + sudo apt-get update + sudo apt-get install -y qemu-user-static + - name: Build base-busybox run: | IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ @@ -60,3 +78,51 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash + - name: Build build-env + run: | + # The Dockerfile expects bioconda-utils to be cloned; even though we're + # working in the bioconda-utils repo the code needs to be in the build + # context, which is in the respective image dir. + if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then + git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils + else + (cd ../../../images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) + fi + + # This expects the busybox image to have been built locally. 
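+          # (i.e. localhost/$BASE_BUSYBOX_IMAGE_NAME from the previous step;
+          # the COPY --from in the Dockerfile cannot resolve it otherwise.)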
+ IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ + IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ + ARCHS=$ARCHS \ + TYPE="build-env" \ + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.output.tag }}' \ + BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + ./generic_build.bash + + + - name: Build create-env + run: | + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.output.tag }}' + + # Here we extract the conda and mamba versions from the just-created + # build-env container. This ensures that when creating environments, we + # use the exact same conda/mamba versions used when building the + # package. + CONDA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" + ) + MAMBA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" + ) + + # Remove trailing \r with parameter expansion + export CONDA_VERSION=${CONDA_VERSION%$'\r'} + export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} + + IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ + IMAGE_DIR=images/create-env \ + ARCHS=$ARCHS \ + TYPE="create-env" \ + BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + ./generic_build.bash From 8ffc3ea8da7ddb9cea7115298097ed5aaba59391 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Feb 2024 22:32:32 -0500 Subject: [PATCH 013/143] try using branch name for now --- .github/workflows/build-images.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 4b15bde588f..2ef3a56a5f2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -94,14 +94,14 @@ jobs: IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ TYPE="build-env" \ - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.output.tag }}' \ + BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash - name: Build create-env run: | - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.output.tag }}' + BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ # Here we extract the conda and mamba versions from the just-created # build-env container. This ensures that when creating environments, we @@ -125,4 +125,5 @@ jobs: ARCHS=$ARCHS \ TYPE="create-env" \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ ./generic_build.bash From a971e5df41ca45aa273659f5dd0db68b1e881cbb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 21:02:13 -0500 Subject: [PATCH 014/143] fix path --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 2ef3a56a5f2..f8d31e970d9 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -86,7 +86,7 @@ jobs: if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils else - (cd ../../../images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) + (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi # This expects the busybox image to have been built locally. 
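A note on the conda/mamba version handoff used in the workflow above: since the
values come from `podman run ... conda list --export`, a small guard along these
lines (a sketch; the variable names match the workflow step) would fail fast if
the extraction ever yields an empty string:

    CONDA_VERSION=${CONDA_VERSION%$'\r'}
    MAMBA_VERSION=${MAMBA_VERSION%$'\r'}
    if [ -z "$CONDA_VERSION" ] || [ -z "$MAMBA_VERSION" ]; then
      echo "error: could not read conda/mamba versions from $BUILD_ENV_IMAGE_NAME" >&2
      exit 1
    fi
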
From 976f35055c7a086492c1cf53724e9bff8b27c7a7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 21:02:45 -0500 Subject: [PATCH 015/143] rm redundant if clause --- generic_build.bash | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index ea82f08bd59..601ce65d327 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -121,19 +121,16 @@ if [ "$TYPE" == "base-debian" ]; then BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base fi -if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then - - if [ "$TYPE" == "create-env" ]; then - BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base - BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install - BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install - fi +if [ "$TYPE" == "create-env" ]; then + BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base + BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install + BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install +fi - if [ "$TYPE" == "build-env" ]; then - BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base - BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=$BIOCONDA_UTILS_FOLDER") # git clone, relative to Dockerfile - BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as tag - fi +if [ "$TYPE" == "build-env" ]; then + BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base + BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=$BIOCONDA_UTILS_FOLDER") # git clone, relative to Dockerfile + BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as tag fi if [ "$TYPE" == "base-busybox" ]; then From 5d3acc7dce67b7dd82114afe931f5bf0b2b53d04 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 21:48:51 -0500 Subject: [PATCH 016/143] parameterize create-env test to use local images --- generic_build.bash | 3 +++ images/create-env/Dockerfile.test | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index 601ce65d327..d2e8c2812ca 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -117,6 +117,7 @@ read -r -a archs_and_images <<<"$ARCHS" # were provided. This will eventually be provided to buildah bud. 
# BUILD_ARGS=() +TEST_BUILD_ARGS=() # specifically used when testing with Dockerfile.test if [ "$TYPE" == "base-debian" ]; then BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base fi @@ -125,6 +126,7 @@ if [ "$TYPE" == "create-env" ]; then BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install + TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") fi if [ "$TYPE" == "build-env" ]; then @@ -260,6 +262,7 @@ for id in ${ids} ; do podman history "${id}" buildah bud \ --build-arg=base="${id}" \ + ${TEST_BUILD_ARGS[@]} \ --file=Dockerfile.test done diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index 9c2566aefc3..818ce71123c 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -38,7 +38,9 @@ RUN set -x && \ --strip-files=\* \ /usr/local \ catfasta2phyml -FROM quay.io/bioconda/base-glibc-busybox-bash + +ARG BUSYBOX_IMAGE +FROM ${BUSYBOX_IMAGE} COPY --from=build_bioconda_package /usr/local /usr/local RUN set -x && \ /usr/local/env-execute \ @@ -64,7 +66,7 @@ RUN set -x && \ --remove-paths=\*.pyc \ /opt/conda \ conda -FROM quay.io/bioconda/base-glibc-busybox-bash +FROM ${BUSYBOX_IMAGE} COPY --from=build_conda /opt/conda /opt/conda COPY --from=build_conda /opt/conda/env-activate.sh /usr/local/ RUN set -x && \ From e368e397b6645fa5ef85addd23ebaa4fddb6c3a3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 22:26:27 -0500 Subject: [PATCH 017/143] check for existing tags --- generic_build.bash | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index d2e8c2812ca..207881b2c7a 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -66,7 +66,7 @@ EXAMPLE USAGE ' # ------------------------------------------------------------------------------ -# Handle required env vars +# HANDLE REQUIRED ENV VARS [ -z "$IMAGE_NAME" ] && echo -e "$USAGE error: please set IMAGE_NAME" && exit 1 [ -z "$IMAGE_DIR" ] && echo "error: please set IMAGE_DIR, where Dockerfile is found." && exit 1 [ -z "$TYPE" ] && echo "error: please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" && exit 1 @@ -99,6 +99,39 @@ if [ "$TYPE" == "base-busybox" ]; then fi # ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# CHECK FOR EXISTING TAGS. This is because quay.io does not support immutable +# images and we don't want to clobber existing. +response="$(curl -sL "https://quay.io/api/v1/repository/bioconda/${IMAGE_NAME}/tag/")" + +# Images can be set to expire; the jq query selects only non-expired images. +existing_tags="$( + printf %s "${response}" \ + | jq -r '.tags[]|select(.end_ts == null or .end_ts >= now)|.name' + )" \ + || { + printf %s\\n \ + 'Could not get list of image tags.' \ + 'Does the repository exist on Quay.io?' 
\ + 'Quay.io REST API response was:' \ + "${response}" + exit 1 + } +for tag in $TAGS ; do + case "${tag}" in + "latest" ) ;; + * ) + if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then + printf 'error: tag %s already exists for %s on quay.io!\n' "${tag}" "${IMAGE_NAME}" + exit 1 + fi + esac +done + +#------------------------------------------------------------------------------- +# SETUP + set -xeu # Dockerfile lives here @@ -241,6 +274,9 @@ for tag in ${TAGS}; do buildah inspect -t manifest ${IMAGE_NAME}:${tag} done +# ------------------------------------------------------------------------------ +# TESTING + # Extract image IDs from the manifest built in the last step ids="$( for tag in $TAGS ; do From bbbd9d305ff2ac435bd993fd88ee5a745f142737 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 22:35:04 -0500 Subject: [PATCH 018/143] allow missing repository on quay.io if configured --- .github/workflows/build-images.yml | 1 + generic_build.bash | 14 ++++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f8d31e970d9..5a410eb2262 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -20,6 +20,7 @@ env: BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox ARCHS: "amd64 arm64" + WARN_IF_MISSING: "false" # Used for testing when the repository is known to be missing on quay.io jobs: diff --git a/generic_build.bash b/generic_build.bash index 207881b2c7a..4b2a3c8a374 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -111,12 +111,14 @@ existing_tags="$( | jq -r '.tags[]|select(.end_ts == null or .end_ts >= now)|.name' )" \ || { - printf %s\\n \ - 'Could not get list of image tags.' \ - 'Does the repository exist on Quay.io?' \ - 'Quay.io REST API response was:' \ - "${response}" - exit 1 + if [ ${WARN_IF_MISSING:-true} == "true" ]; then + printf %s\\n \ + 'Could not get list of image tags.' \ + 'Does the repository exist on Quay.io?' \ + 'Quay.io REST API response was:' \ + "${response}" + exit 1 + fi } for tag in $TAGS ; do case "${tag}" in From 36d2fc66316d8c778f3b2e6a12e55b29cd125e6f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Feb 2024 23:01:31 -0500 Subject: [PATCH 019/143] add ARG for next FROM context --- images/create-env/Dockerfile.test | 1 + 1 file changed, 1 insertion(+) diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index 818ce71123c..accca57e193 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -66,6 +66,7 @@ RUN set -x && \ --remove-paths=\*.pyc \ /opt/conda \ conda +ARG BUSYBOX_IMAGE FROM ${BUSYBOX_IMAGE} COPY --from=build_conda /opt/conda /opt/conda COPY --from=build_conda /opt/conda/env-activate.sh /usr/local/ From 03c247258585faa606f9f322c2d786c5cbb62629 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Feb 2024 17:29:07 -0500 Subject: [PATCH 020/143] add build script for local testing (may be moved later) --- build.sh | 86 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 build.sh diff --git a/build.sh b/build.sh new file mode 100644 index 00000000000..4f6e27c0d98 --- /dev/null +++ b/build.sh @@ -0,0 +1,86 @@ +#!/bin/bash + +# create-env depends on base-busybox and build-env (which in turn also depends +# on base-busybox). base-debian is independent. +# +# This can be run locally for testing, and can be used as a template for CI. 
+# +# base-busybox base-debian +# | | +# build-env | +# \ | +# \ | +# create-env + +set -euo + +# Used for build-env. bioconda-utils will be cloned to this folder inside the +# image dir (where the Dockerfile is) and the version will be checked out. +export BIOCONDA_UTILS_FOLDER=bioconda-utils +export BIOCONDA_UTILS_VERSION=v2.11.1 + +export DEBIAN_VERSION="12.2" +export BUSYBOX_VERSION="1.36.1" + +# Use same tags for base-busybox and base-debian +export BASE_TAGS="0.1.1 0.1 latest" +export WARN_IF_MISSING=false + +# Store as separate vars so we can use these for dependencies. +BUILD_ENV_IMAGE_NAME=tmp-build-env +CREATE_ENV_IMAGE_NAME=tmp-create-env +BASE_DEBIAN_IMAGE_NAME=tmp-debian +BASE_BUSYBOX_IMAGE_NAME=tmp-busybox + +# # Build base-busybox------------------------------------------------------------ +IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ +IMAGE_DIR=images/base-glibc-busybox-bash \ +ARCHS="arm64" \ +TYPE="base-busybox" \ +TAGS=$BASE_TAGS \ +./generic_build.bash + +# Build base-debian------------------------------------------------------------- +IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ +IMAGE_DIR=images/base-glibc-debian-bash \ +ARCHS="amd64" \ +TYPE="base-debian" \ +TAGS=$BASE_TAGS \ +./generic_build.bash + +# Build build-env--------------------------------------------------------------- + + # Clone bioconda-utils into same directory as Dockerfile + if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then + git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils + else + (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) + fi + + IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ + IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ + ARCHS="amd64" \ + TYPE="build-env" \ + BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + ./generic_build.bash + +# # Build create-env-------------------------------------------------------------- +# Get the exact versions of mamba and conda that were installed in build-env. +CONDA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" +) +MAMBA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" +) +# Remove trailing \r with parameter expansion +export CONDA_VERSION=${CONDA_VERSION%$'\r'} +export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} + +IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ +IMAGE_DIR=images/create-env \ +ARCHS="arm64" \ +TYPE="create-env" \ +BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ +./generic_build.bash From b7a6f4a7026c4214a5a258f9dcde861b4da175bb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Feb 2024 18:40:53 -0500 Subject: [PATCH 021/143] add docs --- generic_build.bash | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index 4b2a3c8a374..c6f9d524047 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -23,10 +23,10 @@ Set env vars immediately before running. REQUIRED ARGS FOR ALL TYPES =========================== -TYPE: base-busybox | base-debian | build-env | create-env -IMAGE_DIR: Location of Dockerfile. -IMAGE_NAME: Image name to upload. -ARCHS: Space-separated architectures e.g. "amd64 arm64" + TYPE: base-busybox | base-debian | build-env | create-env + IMAGE_DIR: Location of Dockerfile. + IMAGE_NAME: Image name to upload. 
+ ARCHS: Space-separated architectures e.g. "amd64 arm64" REQUIRED for base-busybox ------------------------- @@ -53,6 +53,14 @@ REQUIRED for create-env BUSYBOX_IMAGE: the image to use as a base; typically this will be the results of building base-busybox in a previous run of this script. +OPTIONAL args +------------- + + WARN_IF_MISSING: true | false + If true (default), will exit if there is no remote repository yet. Set to + false when testing with custom image names. + + EXAMPLE USAGE ============= From 3ff8d680f16b13e6aba2b68f42856d5debf998c1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Feb 2024 18:42:57 -0500 Subject: [PATCH 022/143] parameterize archs --- build.sh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/build.sh b/build.sh index 4f6e27c0d98..57b06aee84c 100644 --- a/build.sh +++ b/build.sh @@ -25,6 +25,7 @@ export BUSYBOX_VERSION="1.36.1" # Use same tags for base-busybox and base-debian export BASE_TAGS="0.1.1 0.1 latest" export WARN_IF_MISSING=false +export ARCHS="arm64 amd64" # Store as separate vars so we can use these for dependencies. BUILD_ENV_IMAGE_NAME=tmp-build-env @@ -35,18 +36,18 @@ BASE_BUSYBOX_IMAGE_NAME=tmp-busybox # # Build base-busybox------------------------------------------------------------ IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-busybox-bash \ -ARCHS="arm64" \ TYPE="base-busybox" \ TAGS=$BASE_TAGS \ ./generic_build.bash + ARCHS=$ARCHS \ # Build base-debian------------------------------------------------------------- IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ -ARCHS="amd64" \ TYPE="base-debian" \ TAGS=$BASE_TAGS \ ./generic_build.bash + ARCHS=$ARCHS \ # Build build-env--------------------------------------------------------------- @@ -56,10 +57,10 @@ TAGS=$BASE_TAGS \ else (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi + ARCHS=$ARCHS \ IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ - ARCHS="amd64" \ TYPE="build-env" \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash @@ -80,7 +81,7 @@ export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ IMAGE_DIR=images/create-env \ -ARCHS="arm64" \ TYPE="create-env" \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash + ARCHS=$ARCHS \ From b580a38dee28c2ca05b8ac8cfd4bbfb536f2ed5a Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Feb 2024 18:43:14 -0500 Subject: [PATCH 023/143] rm manifests by default before building locally --- build.sh | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/build.sh b/build.sh index 57b06aee84c..0d47de67913 100644 --- a/build.sh +++ b/build.sh @@ -33,6 +33,22 @@ CREATE_ENV_IMAGE_NAME=tmp-create-env BASE_DEBIAN_IMAGE_NAME=tmp-debian BASE_BUSYBOX_IMAGE_NAME=tmp-busybox +REMOVE_MANIFEST=true + +# buildah will complain if a manifest already exists. 
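+# Note that `buildah manifest rm` only deletes the manifest list itself; the
+# per-arch images it referenced stay in local storage until pruned (e.g. via
+# `buildah rmi --prune`, as done at the end of generic_build.bash).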
+if [ ${REMOVE_MANIFEST:-true} == "true" ]; then + for imgname in \ + $BUILD_ENV_IMAGE_NAME \ + $CREATE_ENV_IMAGE_NAME \ + $BASE_DEBIAN_IMAGE_NAME \ + $BASE_BUSYBOX_IMAGE_NAME; do + for tag in ${BASE_TAGS} $BIOCONDA_UTILS_VERSION; do + buildah manifest rm "${imgname}:${tag}" || true + done + done +fi + + # # Build base-busybox------------------------------------------------------------ IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-busybox-bash \ From cfd7b854976e601abda29e20bad12b1cad5cb43a Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Feb 2024 18:43:31 -0500 Subject: [PATCH 024/143] add switches to build each container --- build.sh | 94 ++++++++++++++++++++++++++++++++------------------------ 1 file changed, 53 insertions(+), 41 deletions(-) diff --git a/build.sh b/build.sh index 0d47de67913..674873284ef 100644 --- a/build.sh +++ b/build.sh @@ -33,6 +33,11 @@ CREATE_ENV_IMAGE_NAME=tmp-create-env BASE_DEBIAN_IMAGE_NAME=tmp-debian BASE_BUSYBOX_IMAGE_NAME=tmp-busybox + +BUILD_BUSYBOX=true +BUILD_DEBIAN=true +BUILD_BUILD_ENV=true +BUILD_CREATE_ENV=true REMOVE_MANIFEST=true # buildah will complain if a manifest already exists. @@ -50,54 +55,61 @@ fi # # Build base-busybox------------------------------------------------------------ -IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ -IMAGE_DIR=images/base-glibc-busybox-bash \ -TYPE="base-busybox" \ -TAGS=$BASE_TAGS \ -./generic_build.bash +if [ $BUILD_BUSYBOX == "true" ]; then + IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ + IMAGE_DIR=images/base-glibc-busybox-bash \ ARCHS=$ARCHS \ + TYPE="base-busybox" \ + TAGS=$BASE_TAGS \ + ./generic_build.bash +fi # Build base-debian------------------------------------------------------------- -IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ -IMAGE_DIR=images/base-glibc-debian-bash \ -TYPE="base-debian" \ -TAGS=$BASE_TAGS \ -./generic_build.bash +if [ $BUILD_DEBIAN == "true" ]; then + IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ + IMAGE_DIR=images/base-glibc-debian-bash \ ARCHS=$ARCHS \ + TYPE="base-debian" \ + TAGS=$BASE_TAGS \ + ./generic_build.bash +fi # Build build-env--------------------------------------------------------------- - # Clone bioconda-utils into same directory as Dockerfile - if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then - git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils - else - (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) - fi +if [ $BUILD_BUILD_ENV == "true" ]; then + # Clone bioconda-utils into same directory as Dockerfile + if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then + git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils + else + (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) + fi + IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ + IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ - - IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ - IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ - TYPE="build-env" \ - BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ - ./generic_build.bash - + TYPE="build-env" \ + BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + ./generic_build.bash +fi # # Build create-env-------------------------------------------------------------- -# Get the exact versions of mamba and conda that were installed in build-env. 
-CONDA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ - bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" -) -MAMBA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ - bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" -) -# Remove trailing \r with parameter expansion -export CONDA_VERSION=${CONDA_VERSION%$'\r'} -export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} - -IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ -IMAGE_DIR=images/create-env \ -TYPE="create-env" \ -BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ -./generic_build.bash + +if [ $BUILD_CREATE_ENV == "true" ]; then + # Get the exact versions of mamba and conda that were installed in build-env. + CONDA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" + ) + MAMBA_VERSION=$( + podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" + ) + # Remove trailing \r with parameter expansion + export CONDA_VERSION=${CONDA_VERSION%$'\r'} + export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} + + IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ + IMAGE_DIR=images/create-env \ ARCHS=$ARCHS \ + TYPE="create-env" \ + BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + ./generic_build.bash +fi From 073c3dfdee158f09c6eefb71e68c2b65400ba61e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Thu, 15 Feb 2024 14:11:54 -0500 Subject: [PATCH 025/143] fix arg --- images/create-env/Dockerfile | 1 + images/create-env/Dockerfile.test | 9 ++++----- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/images/create-env/Dockerfile b/images/create-env/Dockerfile index 476d8484c3c..ea72c88d931 100644 --- a/images/create-env/Dockerfile +++ b/images/create-env/Dockerfile @@ -14,6 +14,7 @@ ARG MAMBA_VERSION RUN echo $CONDA_VERSION > requirements.txt && echo $MAMBA_VERSION >> requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env +ARG BUSYBOX_IMAGE FROM ${BUSYBOX_IMAGE} COPY --from=build /opt/create-env /opt/create-env diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index accca57e193..cae8f9ce5c6 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -1,5 +1,5 @@ ARG base - +ARG BUSYBOX_IMAGE FROM "${base}" RUN set -x && \ CONDA_PKGS_DIRS="/tmp/pkgs" \ @@ -39,8 +39,7 @@ RUN set -x && \ /usr/local \ catfasta2phyml -ARG BUSYBOX_IMAGE -FROM ${BUSYBOX_IMAGE} +FROM "${BUSYBOX_IMAGE}" COPY --from=build_bioconda_package /usr/local /usr/local RUN set -x && \ /usr/local/env-execute \ @@ -66,8 +65,8 @@ RUN set -x && \ --remove-paths=\*.pyc \ /opt/conda \ conda -ARG BUSYBOX_IMAGE -FROM ${BUSYBOX_IMAGE} + +FROM "${BUSYBOX_IMAGE}" COPY --from=build_conda /opt/conda /opt/conda COPY --from=build_conda /opt/conda/env-activate.sh /usr/local/ RUN set -x && \ From 33e6ff4e65b1c80065b5373bbaa7eacc5cdb3ae8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Thu, 15 Feb 2024 14:12:06 -0500 Subject: [PATCH 026/143] use python (which will come from conda-forge) rather than bioconda package, which is not yet built for arm --- images/create-env/Dockerfile.test | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index cae8f9ce5c6..5de59c76993 100644 --- a/images/create-env/Dockerfile.test +++ 
b/images/create-env/Dockerfile.test @@ -37,13 +37,13 @@ RUN set -x && \ --conda=mamba \ --strip-files=\* \ /usr/local \ - catfasta2phyml + python FROM "${BUSYBOX_IMAGE}" COPY --from=build_bioconda_package /usr/local /usr/local RUN set -x && \ /usr/local/env-execute \ - catfasta2phyml --version \ + python --version \ && \ [ ! "${CONDA_PREFIX}" = /usr/local ] \ && \ @@ -51,7 +51,7 @@ RUN set -x && \ && \ [ "${CONDA_PREFIX}" = /usr/local ] \ && \ - catfasta2phyml --version + python --version FROM "${base}" as build_conda From 0475a921c2bff7f97fe73925980650f6d3107eb6 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 16 Feb 2024 22:28:10 -0500 Subject: [PATCH 027/143] improve comments in build.sh --- build.sh | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/build.sh b/build.sh index 674873284ef..68ce0e8f47c 100644 --- a/build.sh +++ b/build.sh @@ -14,8 +14,9 @@ set -euo -# Used for build-env. bioconda-utils will be cloned to this folder inside the -# image dir (where the Dockerfile is) and the version will be checked out. +# Used for build-env. +# bioconda-utils will be cloned to this folder inside the image dir (where the +# Dockerfile is) and the version will be checked out. export BIOCONDA_UTILS_FOLDER=bioconda-utils export BIOCONDA_UTILS_VERSION=v2.11.1 @@ -23,8 +24,14 @@ export DEBIAN_VERSION="12.2" export BUSYBOX_VERSION="1.36.1" # Use same tags for base-busybox and base-debian -export BASE_TAGS="0.1.1 0.1 latest" -export WARN_IF_MISSING=false +export BASE_TAGS="latest" + +# If the repository doesn't already exist on quay.io, by default this is +# considered an error. Set to false to avoid this (e.g., when building images +# with new names, or local test ones). +export ERROR_IF_MISSING=false + +# Architectures to build for (under emulation) export ARCHS="arm64 amd64" # Store as separate vars so we can use these for dependencies. @@ -33,15 +40,17 @@ CREATE_ENV_IMAGE_NAME=tmp-create-env BASE_DEBIAN_IMAGE_NAME=tmp-debian BASE_BUSYBOX_IMAGE_NAME=tmp-busybox - -BUILD_BUSYBOX=true -BUILD_DEBIAN=true -BUILD_BUILD_ENV=true -BUILD_CREATE_ENV=true -REMOVE_MANIFEST=true - -# buildah will complain if a manifest already exists. -if [ ${REMOVE_MANIFEST:-true} == "true" ]; then +BUILD_BUSYBOX=false # build busybox image? +BUILD_DEBIAN=false # build debian image? +BUILD_BUILD_ENV=false # build build-env image? +BUILD_CREATE_ENV=true # build create-env image? + +# buildah will complain if a manifest exists for these images. If you do set +# REMOVE_MANIFEST=true, you'll need to recreate them all again. You can instead +# remove individual images like `buildah rm $BUILD_ENV_IMAGE_NAME`. You may +# need to run it several times. +REMOVE_MANIFEST=false +if [ ${REMOVE_MANIFEST:-false} == "true" ]; then for imgname in \ $BUILD_ENV_IMAGE_NAME \ $CREATE_ENV_IMAGE_NAME \ From fd219470c690e8eeb60bd35a2e2cc06f9e78c6bb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 16 Feb 2024 22:28:35 -0500 Subject: [PATCH 028/143] warn -> error --- generic_build.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index c6f9d524047..349ea2d0788 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -119,7 +119,7 @@ existing_tags="$( | jq -r '.tags[]|select(.end_ts == null or .end_ts >= now)|.name' )" \ || { - if [ ${WARN_IF_MISSING:-true} == "true" ]; then + if [ ${ERROR_IF_MISSING:-true} == "true" ]; then printf %s\\n \ 'Could not get list of image tags.' \ 'Does the repository exist on Quay.io?' 
\ From 739990555abbdaa08e8b74848454c44cbd85262c Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 16 Feb 2024 22:28:58 -0500 Subject: [PATCH 029/143] move test build-args closer to actual test --- generic_build.bash | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index 349ea2d0788..acaea602343 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -160,7 +160,6 @@ read -r -a archs_and_images <<<"$ARCHS" # were provided. This will eventually be provided to buildah bud. # BUILD_ARGS=() -TEST_BUILD_ARGS=() # specifically used when testing with Dockerfile.test if [ "$TYPE" == "base-debian" ]; then BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base fi @@ -169,7 +168,6 @@ if [ "$TYPE" == "create-env" ]; then BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install - TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") fi if [ "$TYPE" == "build-env" ]; then @@ -286,6 +284,12 @@ done # ------------------------------------------------------------------------------ # TESTING +# +# Args used specifically used when testing with Dockerfile.test +TEST_BUILD_ARGS=() +if [ "$TYPE" == "create-env" ]; then + TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") +fi # Extract image IDs from the manifest built in the last step ids="$( From be2b3f0530b143ed7321405d4dc03f3a987f9ff3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 16 Feb 2024 22:29:32 -0500 Subject: [PATCH 030/143] clean up buildah inspect output --- generic_build.bash | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index acaea602343..6c3a60919c2 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -226,6 +226,7 @@ for arch in $ARCHS; do buildah bud \ --arch="${arch}" \ --iidfile="${iidfile}" \ + --file=Dockerfile \ ${BUILD_ARGS[@]} \ $BASE_IMAGE_BUILD_ARG image_id="$( cat "${iidfile}" )" @@ -274,7 +275,17 @@ for arch in $ARCHS; do "${IMAGE_NAME}:${tag}" \ "${image_id}" - buildah inspect -t image ${IMAGE_NAME}:${tag}-${arch} + # Inspect image details, but remove the most verbose (like history) and + # redundant (just need one of Docker or OCIv1) fields. 
+  buildah inspect -t image ${IMAGE_NAME}:${tag}-${arch} \
+    | jq 'del(
+      .History,
+      .OCIv1.history,
+      .Config,
+      .Manifest,
+      .Docker,
+      .NamespaceOptions)'
+
 done # tags
 done # archs_and_images

From 324d8ee97e86183e5528999bbe41a84c00901e08 Mon Sep 17 00:00:00 2001
From: Ryan Dale
Date: Fri, 16 Feb 2024 22:29:54 -0500
Subject: [PATCH 031/143] overhaul how tests are run, plus notes & comments

---
 generic_build.bash | 79 +++++++++++++++++++++++++++++++++++-----------
 1 file changed, 60 insertions(+), 19 deletions(-)

diff --git a/generic_build.bash b/generic_build.bash
index 6c3a60919c2..0416639625c 100755
--- a/generic_build.bash
+++ b/generic_build.bash
@@ -302,30 +302,71 @@ if [ "$TYPE" == "create-env" ]; then
   TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE")
 fi
 
-# Extract image IDs from the manifest built in the last step
-ids="$(
-  for tag in $TAGS ; do
-    buildah manifest inspect "${IMAGE_NAME}:${tag}" \
-      | jq -r '.manifests[]|.digest' \
-      | while read id ; do
-          buildah images --format '{{.ID}}{{.Digest}}' \
-          | sed -n "s/${id}//p"
-        done
-  done
-  )"
-
-# Run the tests; see Dockerfile.test in the relevant image dir for the
-# actual tests run
+# Turns out that buildah cannot use --arch and provide an image ID as the
+# `base` build-arg at the same time, because we get the error:
+#
+# "error creating build container: pull policy is always but image has been
+# referred to by ID".
 #
-# N.B. need to unique since one image can have multiple tags
-ids="$( printf %s "${ids}" | sort -u )"
-for id in ${ids} ; do
-  podman history "${id}"
+# This happens even when using --pull-never. This may be fixed in later
+# versions, in which case we can use the code below in the "EXTRA" section.
+#
+# Since the rest of this script builds a single image and assigns possibly
+# multiple tags, we just use the first tag as the `base` build-arg.
+
+tag=$(echo $TAGS | cut -f1 -d " ")
+for arch in $ARCHS; do
+  echo "[LOG] Starting test for ${IMAGE_NAME}:${tag}, $arch."
   buildah bud \
-    --build-arg=base="${id}" \
+    --arch="$arch" \
+    --build-arg=base="localhost/${IMAGE_NAME}:${tag}" \
     ${TEST_BUILD_ARGS[@]} \
     --file=Dockerfile.test
 done
+
+# EXTRA ------------------------------------------------------------------------
+# The following demonstrates how to extract images from corresponding manifest
+# digests. This may be a better approach in the future, but as noted above we
+# cannot use FROM and --arch and instead use name:tag.
+#
+# It may be useful in the future but it is disabled for now.
+#
+if [ "" ] ; then
+  # Manifests provide a digest; we then need to look up the corresponding image
+  # name for that digest.
+  ids="$(
+    for tag in $TAGS ; do
+      buildah manifest inspect "${IMAGE_NAME}:${tag}" \
+        | jq -r '.manifests[]|.digest' \
+        | while read id ; do
+            buildah images --format '{{.ID}}{{.Digest}}' \
+            | sed -n "s/${id}//p"
+          done
+    done
+    )"
+
+  # N.B. need to unique since one image can have multiple tags. In general,
+  # this should be one image for each arch, no matter how many tags.
+  ids="$( printf %s "${ids}" | sort -u )"
+
+  # Run the tests; see Dockerfile.test in the relevant image dir for the
+  # actual tests that are run.
+  for id in ${ids} ; do
+
+    podman history "${id}"
+
+    # Make sure we're explicit with the arch so that the right image is pulled
+    # from the respective container.
+ arch=$(buildah inspect "${id}" | jq -r '.OCIv1.architecture' | sort -u) + + buildah bud \ + --arch="$arch" \ + --build-arg=base="localhost/${IMAGE_NAME}" \ + ${TEST_BUILD_ARGS[@]} \ + --file=Dockerfile.test + done +fi + # Clean up buildah rmi --prune || true From 06b138ca8b0e90bbcdc30b06234c53ffe4fc12da Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 16 Feb 2024 22:54:03 -0500 Subject: [PATCH 032/143] warn -> error in workflow --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 5a410eb2262..a07f92d8f44 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -20,7 +20,7 @@ env: BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox ARCHS: "amd64 arm64" - WARN_IF_MISSING: "false" # Used for testing when the repository is known to be missing on quay.io + ERROR_IF_MISSING: "false" # Used for testing when the repository is known to be missing on quay.io jobs: From 3f623904247ecc4a237fee91e4a4c7eee48f50dc Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:03:29 -0500 Subject: [PATCH 033/143] rm build docker container from prev workflow --- .github/workflows/GithubActionTests.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index bb319d1a28d..d59a85ccc7d 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -33,13 +33,6 @@ jobs: conda activate bioconda python setup.py install - - name: Build docker container - run: | - docker build -t quay.io/bioconda/bioconda-utils-build-env-cos7:latest ./ - docker history quay.io/bioconda/bioconda-utils-build-env-cos7:latest - docker run --rm -t quay.io/bioconda/bioconda-utils-build-env-cos7:latest sh -lec 'type -t conda && conda info -a && conda list' - docker build -t quay.io/bioconda/bioconda-utils-test-env-cos7:latest -f ./Dockerfile.test ./ - - name: Run tests '${{ matrix.py_test_marker }}' run: | eval "$(conda shell.bash hook)" From 315604e8ddd82dce8447a1f5102b35730d0d52c4 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:04:08 -0500 Subject: [PATCH 034/143] first attempt at pushing base-debian --- .github/workflows/build-images.yml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a07f92d8f44..69914899382 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -50,7 +50,19 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash - build: + - name: Push base-debian + if: ${{ github.ref == "refs/heads/unify-containers" }} + id: push-base-debian + uses: redhat-actions/push-to-registry@v2 + with: + image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} + tags: ${{ env.BASE_TAGS }} + registry: ${{ secrets.QUAY_BIOCONDA_REPO }} + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + + build-others: + if: false # disable for now name: Build base-busybox, build-env, and create-env images runs-on: ubuntu-20.04 steps: From 5f91c86699015dda940a7115e0a0dd169df393c5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:06:22 -0500 Subject: [PATCH 035/143] for now always try pushing --- .github/workflows/build-images.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build-images.yml 
b/.github/workflows/build-images.yml index 69914899382..e6638f7e81f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -51,7 +51,6 @@ jobs: ./generic_build.bash - name: Push base-debian - if: ${{ github.ref == "refs/heads/unify-containers" }} id: push-base-debian uses: redhat-actions/push-to-registry@v2 with: From 8b019bbe4eb777895255de40469f30f1702ca670 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:23:55 -0500 Subject: [PATCH 036/143] hard code registry like bioconda-containers --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e6638f7e81f..903748a41ca 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -56,7 +56,7 @@ jobs: with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: ${{ env.BASE_TAGS }} - registry: ${{ secrets.QUAY_BIOCONDA_REPO }} + registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From 072c987ed9e05f3454b2e2a46156cce266a4e876 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:25:41 -0500 Subject: [PATCH 037/143] add comments and ids to workflow --- .github/workflows/build-images.yml | 15 +++++++++++---- .gitignore | 1 + 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 903748a41ca..561792b6be0 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -20,14 +20,14 @@ env: BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox ARCHS: "amd64 arm64" - ERROR_IF_MISSING: "false" # Used for testing when the repository is known to be missing on quay.io + ERROR_IF_MISSING: "false" # Set to false when testing when the repository is known to be missing on quay.io jobs: # NOTE: base-debian can be a separate job since it is independent of the # others. create-env depends on build-env, and both depend on base-busybox, # so we can't split that out. - build-debian: + build-base-debian: name: Build base-debian runs-on: ubuntu-20.04 steps: @@ -41,6 +41,7 @@ jobs: sudo apt-get install -y qemu-user-static - name: Build base-debian + id: base-debian run: | IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ @@ -50,6 +51,9 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash + # NOTE: a repository must first exist on quay.io/bioconda and that + # repository must also be configured to allow write access for the + # appropriate service account. - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 @@ -80,6 +84,7 @@ jobs: sudo apt-get install -y qemu-user-static - name: Build base-busybox + id: base-busybox run: | IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-busybox-bash \ @@ -91,6 +96,7 @@ jobs: ./generic_build.bash - name: Build build-env + id: build-env run: | # The Dockerfile expects bioconda-utils to be cloned; even though we're # working in the bioconda-utils repo the code needs to be in the build @@ -101,7 +107,8 @@ jobs: (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi - # This expects the busybox image to have been built locally. + # This expects the busybox image to have been built locally, as in the + # above step. 
IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ @@ -110,8 +117,8 @@ jobs: BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash - - name: Build create-env + id: create-env run: | BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ diff --git a/.gitignore b/.gitignore index 1b98ca9bb87..93bb35b8c6d 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ docs/source/developer/_autosummary # Mac OS Files .DS_Store +env From 8e264432d8f03a9c38fe2968de23e2473b80d281 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:26:08 -0500 Subject: [PATCH 038/143] start support for logging --- .github/workflows/build-images.yml | 2 ++ generic_build.bash | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 561792b6be0..845a1b2813e 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -51,6 +51,8 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash + cat "base-debian.log" >> $GITHUB_OUTPUT + # NOTE: a repository must first exist on quay.io/bioconda and that # repository must also be configured to allow write access for the # appropriate service account. diff --git a/generic_build.bash b/generic_build.bash index 0416639625c..31d0d58eb15 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -60,6 +60,9 @@ OPTIONAL args If true (default), will exit if there is no remote repository yet. Set to false when testing with custom image names. + LOG: filename + Write info here so other jobs can read from it. Defaults to $TYPE.log + EXAMPLE USAGE ============= @@ -105,6 +108,10 @@ fi if [ "$TYPE" == "base-busybox" ]; then [ -z "$BUSYBOX_VERSION" ] && echo "error: please set BUSYBOX_VERSION" && exit 1 fi + +LOG=${LOG:="${TYPE}.log"} +touch $LOG + # ------------------------------------------------------------------------------ @@ -134,6 +141,7 @@ for tag in $TAGS ; do * ) if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then printf 'error: tag %s already exists for %s on quay.io!\n' "${tag}" "${IMAGE_NAME}" + echo "TAG_EXISTS=true" >> $LOG exit 1 fi esac From c545c66482d762b9e5ebb1bcc0330be1ee5e50d9 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:42:43 -0500 Subject: [PATCH 039/143] attempt to control github actions via log --- .github/workflows/build-images.yml | 3 ++- generic_build.bash | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 845a1b2813e..beb2c3b1928 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -51,7 +51,7 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash - cat "base-debian.log" >> $GITHUB_OUTPUT + cat "${TYPE}.log" >> $GITHUB_OUTPUT # NOTE: a repository must first exist on quay.io/bioconda and that # repository must also be configured to allow write access for the @@ -59,6 +59,7 @@ jobs: - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 + if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian != "true" }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: ${{ env.BASE_TAGS }} diff --git a/generic_build.bash b/generic_build.bash index 31d0d58eb15..6dad0440f05 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -141,7 +141,7 @@ for tag in $TAGS ; do * ) if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then printf 'error: tag %s already exists for %s on quay.io!\n' 
"${tag}" "${IMAGE_NAME}" - echo "TAG_EXISTS=true" >> $LOG + echo "TAG_EXISTS_${TYPE}=true" >> $LOG exit 1 fi esac From f8783f45c35aec7f55a21ead889778e3acdaadf0 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 12:59:59 -0500 Subject: [PATCH 040/143] mess with conditionals --- .github/workflows/build-images.yml | 6 ++++-- generic_build.bash | 4 +++- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index beb2c3b1928..edd5574fd08 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -43,13 +43,15 @@ jobs: - name: Build base-debian id: base-debian run: | + # Will exit 64 if the tag exists. We don't want the entire Actions + # workflow to fail because of it. IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ TYPE="base-debian" \ DEBIAN_VERSION=$DEBIAN_VERSION \ ARCHS=$ARCHS \ TAGS=$BASE_TAGS \ - ./generic_build.bash + ./generic_build.bash || [ $? == 64 ] cat "${TYPE}.log" >> $GITHUB_OUTPUT @@ -59,7 +61,7 @@ jobs: - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian != "true" }} + if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian == "false" }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: ${{ env.BASE_TAGS }} diff --git a/generic_build.bash b/generic_build.bash index 6dad0440f05..92c6b64daf4 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -142,11 +142,13 @@ for tag in $TAGS ; do if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then printf 'error: tag %s already exists for %s on quay.io!\n' "${tag}" "${IMAGE_NAME}" echo "TAG_EXISTS_${TYPE}=true" >> $LOG - exit 1 + exit 64 fi esac done +echo "TAG_EXISTS_${TYPE}=false" + #------------------------------------------------------------------------------- # SETUP From 66926b410a0b3bb2e75e1b165dff882809e349a3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 13:00:46 -0500 Subject: [PATCH 041/143] single-quote to match others --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index edd5574fd08..34208a5fa8f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -61,7 +61,7 @@ jobs: - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian == "false" }} + if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian == 'false' }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: ${{ env.BASE_TAGS }} From 55919a1ffb52543e5f1343a4692437729b91b7a5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 13:03:48 -0500 Subject: [PATCH 042/143] no env var --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 34208a5fa8f..0373a0065a2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -53,7 +53,7 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash || [ $? 
== 64 ] - cat "${TYPE}.log" >> $GITHUB_OUTPUT + cat "base-debian.log" >> $GITHUB_OUTPUT # NOTE: a repository must first exist on quay.io/bioconda and that # repository must also be configured to allow write access for the From 7b15c02763ba518862076c7fbb89d572acc7ecd3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 13:05:04 -0500 Subject: [PATCH 043/143] now try version bump --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0373a0065a2..2a72108d05d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -14,7 +14,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAGS: "0.1.1 latest" + BASE_TAGS: "0.1.2 latest" BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian From 2aec81cc622d28c7d62c9afa5cb23ce5aaef8d0d Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 13:13:33 -0500 Subject: [PATCH 044/143] more conditional --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 2a72108d05d..212a23dfaa2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -61,7 +61,7 @@ jobs: - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian == 'false' }} + if: ${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: ${{ env.BASE_TAGS }} From b4d4cbf0192fba3354b84b0c8e157d4d3613efa6 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 15:36:06 -0500 Subject: [PATCH 045/143] enable builds and pushes for other containers --- .github/workflows/build-images.yml | 46 +++++++++++++++++++++++++++--- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 212a23dfaa2..af6ae7e5a48 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -70,7 +70,6 @@ jobs: password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} build-others: - if: false # disable for now name: Build base-busybox, build-env, and create-env images runs-on: ubuntu-20.04 steps: @@ -98,7 +97,20 @@ jobs: DEBIAN_VERSION=$DEBIAN_VERSION \ BUSYBOX_VERSION=$BUSYBOX_VERSION \ TAGS=$BASE_TAGS \ - ./generic_build.bash + ./generic_build.bash || [ $? == 64 ] + + cat "base-busybox.log" >> $GITHUB_OUTPUT + + - name: Push base-busybox + id: push-base-busybox + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} + with: + image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} + tags: ${{ env.BASE_TAGS }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - name: Build build-env id: build-env @@ -120,7 +132,20 @@ jobs: TYPE="build-env" \ BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ - ./generic_build.bash + ./generic_build.bash || [ $? == 64 ] + + cat "build-env.log" >> $GITHUB_OUTPUT + + - name: Push build-env + id: push-build-env + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! 
steps.build-env.outputs.TAG_EXISTS_build-env }} + with: + image: ${{ env.BUILD_ENV_IMAGE_NAME }} + tags: ${{ github.head_ref || github.ref_name }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - name: Build create-env id: create-env @@ -150,4 +175,17 @@ jobs: TYPE="create-env" \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - ./generic_build.bash + ./generic_build.bash || [ $? == 64 ] + + cat "create-env.log" >> $GITHUB_OUTPUT + + - name: Push create-env + id: push-create-env + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} + with: + image: ${{ env.CREATE_ENV_IMAGE_NAME }} + tags: ${{ github.head_ref || github.ref_name }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From 5298c43efdc43a254fc03195f7efb67a7f1376ea Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 16:16:36 -0500 Subject: [PATCH 046/143] swap out registry depending on if tag exists --- .github/workflows/build-images.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index af6ae7e5a48..853be99ab73 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -124,6 +124,13 @@ jobs: (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi + # If the busybox image was not built in this CI run (e.g. if its tags + # have not changed) then we'll get it from quay.io. + REGISTRY="localhost" + if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then + REGISTRY="quay.io/bioconda" + fi + # This expects the busybox image to have been built locally, as in the # above step. IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ @@ -131,7 +138,7 @@ jobs: ARCHS=$ARCHS \ TYPE="build-env" \ BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ - BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash || [ $? == 64 ] cat "build-env.log" >> $GITHUB_OUTPUT From 250b47b56ac6ed0d7130eaf3ce78e34c97af883f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 16:38:23 -0500 Subject: [PATCH 047/143] do registry swap for create-env --- .github/workflows/build-images.yml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 853be99ab73..9b75ed758a0 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -163,12 +163,16 @@ jobs: # build-env container. This ensures that when creating environments, we # use the exact same conda/mamba versions used when building the # package. 
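As a rough sketch of what that version-pinning pipeline produces (the version and build strings below are hypothetical, not taken from a real image): `conda list --export` prints `name=version=build` for the matched package, and the `sed` expression only prints lines where it could strip a trailing `=build`, which also filters out the `--export` header comments.

    # hypothetical transcript inside the build-env image
    $ /opt/conda/bin/conda list --export '^conda$' | sed -n 's/=[^=]*$//p'
    conda=24.1.2
    # (the raw --export line here would have been: conda=24.1.2=py310hff52083_0)
    # Because `podman run -t` allocates a pseudo-TTY, the captured value
    # typically ends in a carriage return, which is why it is trimmed below
    # with ${CONDA_VERSION%$'\r'}.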
+ REGISTRY="localhost" + if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then + REGISTRY="quay.io/bioconda" + fi CONDA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) @@ -176,11 +180,16 @@ jobs: export CONDA_VERSION=${CONDA_VERSION%$'\r'} export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} + REGISTRY="localhost" + if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then + REGISTRY="quay.io/bioconda" + fi + IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ IMAGE_DIR=images/create-env \ ARCHS=$ARCHS \ TYPE="create-env" \ - BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ + BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ ./generic_build.bash || [ $? == 64 ] From ac3d8aad986a8482967c76ba50431b7b7d805990 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 16:39:01 -0500 Subject: [PATCH 048/143] lots o' comments --- .github/workflows/build-images.yml | 53 +++++++++++++++++++++--------- 1 file changed, 38 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 9b75ed758a0..c3e4e065f14 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -1,3 +1,7 @@ +# Build all container images. +# +# Most of the work is done in generic_build.bash, so see that file for details. + name: Build images concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -27,6 +31,9 @@ jobs: # NOTE: base-debian can be a separate job since it is independent of the # others. create-env depends on build-env, and both depend on base-busybox, # so we can't split that out. + # + # Later steps for other containers are similar, so comments are only added to + # this first job. build-base-debian: name: Build base-debian runs-on: ubuntu-20.04 @@ -35,6 +42,7 @@ jobs: with: fetch-depth: 0 + # Required for emulating ARM - name: Install qemu dependency run: | sudo apt-get update @@ -43,8 +51,9 @@ jobs: - name: Build base-debian id: base-debian run: | - # Will exit 64 if the tag exists. We don't want the entire Actions - # workflow to fail because of it. + # See generic_build.bash for expected env vars. The script will exit 64 + # if the tag exists. That's OK, and we don't want the entire Actions + # workflow to fail because of it, so we check the exit code. IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ TYPE="base-debian" \ @@ -53,14 +62,24 @@ jobs: TAGS=$BASE_TAGS \ ./generic_build.bash || [ $? == 64 ] + # generic_build.bash will write key=val lines to the log ($TYPE.log); + # these lines are added to $GITHUB_OUTPUT so that later steps can use + # ${{ steps..outputs.key }} to get the value. See + # generic_build.bash for what it's writing to the log (and therefore + # which keys are available via the step's outputs). cat "base-debian.log" >> $GITHUB_OUTPUT - # NOTE: a repository must first exist on quay.io/bioconda and that - # repository must also be configured to allow write access for the - # appropriate service account. 
+ # Here, and in the subsequent steps that also push images, a repository + # must first exist on quay.io/bioconda AND that repository must also be + # configured to allow write access for the appropriate service account. + # This must be done by a user with admin access to quay.io/bioconda. - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 + + # generic_build.bash reported whether the tag exists to the log; that was + # added to GITHUB_OUTPUT and is used here to determine if we should + # upload. if: ${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} @@ -69,6 +88,9 @@ jobs: username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + # Other containers are interdependent, we so build them sequentially. + # The steps are largely similar to base-debian above, so check there for + # comments on common parts. build-others: name: Build base-busybox, build-env, and create-env images runs-on: ubuntu-20.04 @@ -115,24 +137,24 @@ jobs: - name: Build build-env id: build-env run: | - # The Dockerfile expects bioconda-utils to be cloned; even though we're - # working in the bioconda-utils repo the code needs to be in the build - # context, which is in the respective image dir. + # The build-env Dockerfile expects bioconda-utils to be cloned; even + # though this CI is operating in the bioconda-utils repo, the code + # needs to be available in the build context, which is in the + # respective image dir. if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils else (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi - # If the busybox image was not built in this CI run (e.g. if its tags - # have not changed) then we'll get it from quay.io. + # If the busybox image was not built in this CI run (e.g. if the + # specified tags already exist on quay.io) then we'll get it from + # quay.io. Otherwise use the just-built one. REGISTRY="localhost" if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then REGISTRY="quay.io/bioconda" fi - # This expects the busybox image to have been built locally, as in the - # above step. IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ @@ -160,9 +182,10 @@ jobs: BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ # Here we extract the conda and mamba versions from the just-created - # build-env container. This ensures that when creating environments, we - # use the exact same conda/mamba versions used when building the - # package. + # build-env container (or, if it was not created in this CI run because + # it already exists, then pull from quay.io). This ensures that when + # creating environments, we use the exact same conda/mamba versions + # that were used when building the package. 
REGISTRY="localhost" if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then REGISTRY="quay.io/bioconda" From daddbd4030513565677a56f3d8efc608cdb006dc Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 16:41:03 -0500 Subject: [PATCH 049/143] TIL GHA expressions work in comments --- .github/workflows/build-images.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index c3e4e065f14..f154bc3289c 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -64,9 +64,9 @@ jobs: # generic_build.bash will write key=val lines to the log ($TYPE.log); # these lines are added to $GITHUB_OUTPUT so that later steps can use - # ${{ steps..outputs.key }} to get the value. See - # generic_build.bash for what it's writing to the log (and therefore - # which keys are available via the step's outputs). + # steps.id.outputs.key to get the value. See generic_build.bash for + # what it's writing to the log (and therefore which keys are available + # via the step's outputs). cat "base-debian.log" >> $GITHUB_OUTPUT # Here, and in the subsequent steps that also push images, a repository From bbecba3147326d53b7487174bae7c77c4d4fb848 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 16:59:03 -0500 Subject: [PATCH 050/143] missing will now become error --- .github/workflows/build-images.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f154bc3289c..a7dc3fb4895 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -24,7 +24,6 @@ env: BASE_DEBIAN_IMAGE_NAME: tmp-debian BASE_BUSYBOX_IMAGE_NAME: tmp-busybox ARCHS: "amd64 arm64" - ERROR_IF_MISSING: "false" # Set to false when testing when the repository is known to be missing on quay.io jobs: From 59d8b30773c66c19edcefcf7c2bbf67a9a40daae Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 17:04:41 -0500 Subject: [PATCH 051/143] bump just base version to test behavior --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a7dc3fb4895..375669509ff 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -18,7 +18,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAGS: "0.1.2 latest" + BASE_TAGS: "0.1.3 latest" BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian From 860adc00ecf56a2e3d5cec68c7ca3acfa872bf55 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 17:45:07 -0500 Subject: [PATCH 052/143] explicitly specify (single) tag, and add to bioconda-utils version --- .github/workflows/build-images.yml | 19 ++++++++++++------- generic_build.bash | 10 +++++----- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 375669509ff..9069c067b0a 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -18,7 +18,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAGS: "0.1.3 latest" + BASE_TAG: "0.1.3" # "latest" will always be added during the build. 
BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian @@ -58,7 +58,7 @@ jobs: TYPE="base-debian" \ DEBIAN_VERSION=$DEBIAN_VERSION \ ARCHS=$ARCHS \ - TAGS=$BASE_TAGS \ + TAG=$BASE_TAG \ ./generic_build.bash || [ $? == 64 ] # generic_build.bash will write key=val lines to the log ($TYPE.log); @@ -103,6 +103,7 @@ jobs: tag=${{ github.event.release && github.event.release.tag_name || github.sha }} printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT + - name: Install qemu dependency run: | sudo apt-get update @@ -117,7 +118,7 @@ jobs: ARCHS=$ARCHS \ DEBIAN_VERSION=$DEBIAN_VERSION \ BUSYBOX_VERSION=$BUSYBOX_VERSION \ - TAGS=$BASE_TAGS \ + TAG=$BASE_TAG \ ./generic_build.bash || [ $? == 64 ] cat "base-busybox.log" >> $GITHUB_OUTPUT @@ -154,12 +155,15 @@ jobs: REGISTRY="quay.io/bioconda" fi + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ TYPE="build-env" \ - BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ - BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME \ + BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ + TAG=$BIOCONDA_UTILS_VERSION-$BASE_TAG \ + BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME:$BASE_TAG \ ./generic_build.bash || [ $? == 64 ] cat "build-env.log" >> $GITHUB_OUTPUT @@ -178,7 +182,7 @@ jobs: - name: Build create-env id: create-env run: | - BIOCONDA_UTILS_VERSION='${{ github.head_ref || github.ref_name }}' \ + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' # Here we extract the conda and mamba versions from the just-created # build-env container (or, if it was not created in this CI run because @@ -211,8 +215,9 @@ jobs: IMAGE_DIR=images/create-env \ ARCHS=$ARCHS \ TYPE="create-env" \ - BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ + TAG=$BIOCONDA_UTILS_VERSION-$BASE_TAG \ + BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME:$BASE_TAG \ ./generic_build.bash || [ $? == 64 ] cat "create-env.log" >> $GITHUB_OUTPUT diff --git a/generic_build.bash b/generic_build.bash index 92c6b64daf4..62d1c764415 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -27,16 +27,15 @@ REQUIRED ARGS FOR ALL TYPES IMAGE_DIR: Location of Dockerfile. IMAGE_NAME: Image name to upload. ARCHS: Space-separated architectures e.g. "amd64 arm64" + TAG: image tag REQUIRED for base-busybox ------------------------- - TAGS: Space-separated tags. DEBIAN_VERSION BUSYBOX_VERSION REQUIRED for base-debian ------------------------ - TAGS: Space-separated tags. DEBIAN_VERSION REQUIRED for build-env @@ -82,13 +81,11 @@ EXAMPLE USAGE [ -z "$IMAGE_DIR" ] && echo "error: please set IMAGE_DIR, where Dockerfile is found." 
&& exit 1 [ -z "$TYPE" ] && echo "error: please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" && exit 1 [ -z "$ARCHS" ] && echo "error: please set ARCHS" && exit 1 +[ -z "$TAG" ] && echo "error: please set TAG" && exit 1 if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then - [ -n "$TAGS" ] && echo "error: TAGS should not be set for build-env or create-env; use BIOCONDA_UTILS_VERSION instead" && exit 1 [ -z "$BIOCONDA_UTILS_VERSION" ] && echo "error: please set BIOCONDA_UTILS_VERSION for build-env and create-env" && exit 1 - TAGS="$BIOCONDA_UTILS_VERSION" # Set TAGS to BIOCONDA_UTILS_VERSION from here on - if [ "$TYPE" == "build-env" ]; then [ -z "$BIOCONDA_UTILS_FOLDER" ] && echo "error: please set BIOCONDA_UTILS_FOLDER for build-env" && exit 1 [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1 @@ -112,6 +109,9 @@ fi LOG=${LOG:="${TYPE}.log"} touch $LOG +# Also add "latest" tag. +TAGS="$TAG latest" + # ------------------------------------------------------------------------------ From 14c43c5468344fab78bd1ccfc8ee924c5c76ebff Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 19:28:36 -0500 Subject: [PATCH 053/143] be better about tags --- .github/workflows/build-images.yml | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 9069c067b0a..2e772c2fcb2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -82,7 +82,7 @@ jobs: if: ${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} - tags: ${{ env.BASE_TAGS }} + tags: ${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -98,12 +98,14 @@ jobs: with: fetch-depth: 0 + # Get an appropriate tag to represent the version of bioconda-utils being + # used, and make it available to other steps as outputs. This will be used + # as BIOCONDA_UTILS_VERSION in later steps. - id: get-tag run: | - tag=${{ github.event.release && github.event.release.tag_name || github.sha }} + tag=${{ github.event.release && github.event.release.tag_name || github.head_ref || github.ref_name }} printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT - - name: Install qemu dependency run: | sudo apt-get update @@ -129,7 +131,7 @@ jobs: if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} with: image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} - tags: ${{ env.BASE_TAGS }} + tags: ${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -162,8 +164,8 @@ jobs: ARCHS=$ARCHS \ TYPE="build-env" \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG=$BIOCONDA_UTILS_VERSION-$BASE_TAG \ - BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME:$BASE_TAG \ + TAG="${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] cat "build-env.log" >> $GITHUB_OUTPUT @@ -174,7 +176,7 @@ jobs: if: ${{ ! 
steps.build-env.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: ${{ github.head_ref || github.ref_name }} + tags: ${{ steps.get-tag.outputs.tag }}-${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -194,11 +196,11 @@ jobs: REGISTRY="quay.io/bioconda" fi CONDA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}" \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}" \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) @@ -206,6 +208,7 @@ jobs: export CONDA_VERSION=${CONDA_VERSION%$'\r'} export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} + # See build-env for explanation REGISTRY="localhost" if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then REGISTRY="quay.io/bioconda" @@ -216,8 +219,8 @@ jobs: ARCHS=$ARCHS \ TYPE="create-env" \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG=$BIOCONDA_UTILS_VERSION-$BASE_TAG \ - BUSYBOX_IMAGE=$REGISTRY/$BASE_BUSYBOX_IMAGE_NAME:$BASE_TAG \ + TAG="${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] cat "create-env.log" >> $GITHUB_OUTPUT @@ -228,7 +231,7 @@ jobs: if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} - tags: ${{ github.head_ref || github.ref_name }} + tags: ${{ steps.get-tag.outputs.tag }}-${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From 0d6602e8617ece9e91650e21cdfe50cf4ae4b7dc Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 19:49:28 -0500 Subject: [PATCH 054/143] fix tags --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 2e772c2fcb2..74f1a498999 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -196,11 +196,11 @@ jobs: REGISTRY="quay.io/bioconda" fi CONDA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}" \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}" \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) From 9b62f67b51df98c7ef0b16f457c358555c9ec9ce Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 20:17:21 -0500 Subject: [PATCH 055/143] add "base" to base version --- .github/workflows/build-images.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 74f1a498999..a2819b235db 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -164,7 +164,7 @@ jobs: ARCHS=$ARCHS \ TYPE="build-env" \ 
BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG="${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] @@ -196,11 +196,11 @@ jobs: REGISTRY="quay.io/bioconda" fi CONDA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) @@ -219,7 +219,7 @@ jobs: ARCHS=$ARCHS \ TYPE="create-env" \ BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG="${BIOCONDA_UTILS_VERSION}-${BASE_TAG}" \ + TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] From f9b87200f8496ca0182d9d72c741c62e8ac39775 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 20:30:39 -0500 Subject: [PATCH 056/143] add base prefix to upload steps --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a2819b235db..e17e48c923e 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -176,7 +176,7 @@ jobs: if: ${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: ${{ steps.get-tag.outputs.tag }}-${{ env.BASE_TAG }} + tags: ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -231,7 +231,7 @@ jobs: if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} - tags: ${{ steps.get-tag.outputs.tag }}-${{ env.BASE_TAG }} + tags: ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From ab4189012430de8668a8be0f046ba3490cd3a705 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 17 Feb 2024 22:06:19 -0500 Subject: [PATCH 057/143] bump version to ensure 'latest' tags get pushed --- .github/workflows/build-images.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e17e48c923e..3aba92ebad4 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -18,7 +18,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAG: "0.1.3" # "latest" will always be added during the build. + BASE_TAG: "0.1.4" # "latest" will always be added during the build. BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian @@ -82,7 +82,7 @@ jobs: if: ${{ ! 
steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} - tags: ${{ env.BASE_TAG }} + tags: latest ${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -131,7 +131,7 @@ jobs: if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} with: image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} - tags: ${{ env.BASE_TAG }} + tags: latest ${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -176,7 +176,7 @@ jobs: if: ${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} + tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -231,7 +231,7 @@ jobs: if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} - tags: ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} + tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From efe0154656ac862adc4452a0a06c1bbfe1465812 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:01:57 -0500 Subject: [PATCH 058/143] move pushes to end --- .github/workflows/build-images.yml | 95 ++++++++++++++++++------------ 1 file changed, 56 insertions(+), 39 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 3aba92ebad4..424a3722251 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -35,6 +35,8 @@ jobs: # this first job. build-base-debian: name: Build base-debian + outputs: + TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 @@ -68,32 +70,20 @@ jobs: # via the step's outputs). cat "base-debian.log" >> $GITHUB_OUTPUT - # Here, and in the subsequent steps that also push images, a repository - # must first exist on quay.io/bioconda AND that repository must also be - # configured to allow write access for the appropriate service account. - # This must be done by a user with admin access to quay.io/bioconda. - - name: Push base-debian - id: push-base-debian - uses: redhat-actions/push-to-registry@v2 - - # generic_build.bash reported whether the tag exists to the log; that was - # added to GITHUB_OUTPUT and is used here to determine if we should - # upload. - if: ${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }} - with: - image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} - tags: latest ${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} # Other containers are interdependent, we so build them sequentially. # The steps are largely similar to base-debian above, so check there for # comments on common parts. 
build-others: name: Build base-busybox, build-env, and create-env images + outputs: + TAG_EXISTS_base-busybox: ${{ steps.base-debian.outputs.TAG_EXISTS_base-busybox }} + TAG_EXISTS_build-env: ${{ steps.base-debian.outputs.TAG_EXISTS_build-env }} + TAG_EXISTS_create-env: ${{ steps.base-debian.outputs.TAG_EXISTS_create-env }} + runs-on: ubuntu-20.04 steps: + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -125,16 +115,6 @@ jobs: cat "base-busybox.log" >> $GITHUB_OUTPUT - - name: Push base-busybox - id: push-base-busybox - uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} - with: - image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} - tags: latest ${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - name: Build build-env id: build-env @@ -170,16 +150,6 @@ jobs: cat "build-env.log" >> $GITHUB_OUTPUT - - name: Push build-env - id: push-build-env - uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }} - with: - image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - name: Build create-env id: create-env @@ -225,10 +195,57 @@ jobs: cat "create-env.log" >> $GITHUB_OUTPUT + push: + needs: [build-base-debian, build-others, test] + steps: + + # Here, and in the subsequent steps that also push images, a repository + # must first exist on quay.io/bioconda AND that repository must also be + # configured to allow write access for the appropriate service account. + # This must be done by a user with admin access to quay.io/bioconda. + # + # generic_build.bash reported whether the tag exists to the log; that was + # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and + # those jobs are dependencies of this job. So now we can use those + # outputs to determine if we should upload. + + - name: Push base-debian + id: push-base-debian + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! needs.build-base-debian.outputs.TAG_EXISTS_base-debian }} + with: + image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} + tags: latest ${{ env.BASE_TAG }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + + - name: Push base-busybox + id: push-base-busybox + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! needs.build-others.outputs.TAG_EXISTS_base-busybox }} + with: + image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} + tags: latest ${{ env.BASE_TAG }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + + - name: Push build-env + id: push-build-env + uses: redhat-actions/push-to-registry@v2 + if: ${{ ! needs.build-others.outputs.TAG_EXISTS_build-env }} + with: + image: ${{ env.BUILD_ENV_IMAGE_NAME }} + tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} + registry: quay.io/bioconda + username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} + password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + - name: Push create-env id: push-create-env uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} + if: ${{ ! 
needs.build-others.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} From 64226bf4b7653542940679030c29f049f0db7fde Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:04:19 -0500 Subject: [PATCH 059/143] runs-on --- .github/workflows/build-images.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 424a3722251..4206c38b55f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -197,6 +197,7 @@ jobs: push: needs: [build-base-debian, build-others, test] + runs-on: ubuntu-20.04 steps: # Here, and in the subsequent steps that also push images, a repository From ece22a28773a22e184dbc20ab3afeafcab163625 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:05:20 -0500 Subject: [PATCH 060/143] don't depend on test --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 4206c38b55f..654d550b9dc 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -196,7 +196,7 @@ jobs: cat "create-env.log" >> $GITHUB_OUTPUT push: - needs: [build-base-debian, build-others, test] + needs: [build-base-debian, build-others] runs-on: ubuntu-20.04 steps: From 1b28410ad8896eee48da4ef61bb519c22e53f063 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:09:05 -0500 Subject: [PATCH 061/143] fix job outputs --- .github/workflows/build-images.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 654d550b9dc..2c81bff2736 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -77,9 +77,9 @@ jobs: build-others: name: Build base-busybox, build-env, and create-env images outputs: - TAG_EXISTS_base-busybox: ${{ steps.base-debian.outputs.TAG_EXISTS_base-busybox }} - TAG_EXISTS_build-env: ${{ steps.base-debian.outputs.TAG_EXISTS_build-env }} - TAG_EXISTS_create-env: ${{ steps.base-debian.outputs.TAG_EXISTS_create-env }} + TAG_EXISTS_base-busybox: ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} + TAG_EXISTS_build-env: ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} + TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} runs-on: ubuntu-20.04 steps: From 9bcd6c78ca14617121a27fd09567674509818ed1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:23:07 -0500 Subject: [PATCH 062/143] better message when tag exists --- generic_build.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index 62d1c764415..1857536eea9 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -140,7 +140,7 @@ for tag in $TAGS ; do "latest" ) ;; * ) if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then - printf 'error: tag %s already exists for %s on quay.io!\n' "${tag}" "${IMAGE_NAME}" + printf 'Tag %s already exists for %s on quay.io! 
Logging, and exiting with code 64\n' "${tag}" "${IMAGE_NAME}" >&2 echo "TAG_EXISTS_${TYPE}=true" >> $LOG exit 64 fi From bd16b6a797411302c4c8d753f326346b4bb4a6e5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:23:42 -0500 Subject: [PATCH 063/143] comments cleanup --- .github/workflows/build-images.yml | 33 +++++++++++++----------------- 1 file changed, 14 insertions(+), 19 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 2c81bff2736..b60c623fc08 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -27,18 +27,19 @@ env: jobs: - # NOTE: base-debian can be a separate job since it is independent of the - # others. create-env depends on build-env, and both depend on base-busybox, - # so we can't split that out. - # - # Later steps for other containers are similar, so comments are only added to - # this first job. build-base-debian: + # NOTE: base-debian can be a separate job since it is independent of the + # others. create-env depends on build-env, and both depend on base-busybox, + # so we can't split that out. + # + # Later steps for other containers are similar, so comments are only added to + # this first job. name: Build base-debian outputs: TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} runs-on: ubuntu-20.04 steps: + - uses: actions/checkout@v4 with: fetch-depth: 0 @@ -70,11 +71,10 @@ jobs: # via the step's outputs). cat "base-debian.log" >> $GITHUB_OUTPUT - - # Other containers are interdependent, we so build them sequentially. - # The steps are largely similar to base-debian above, so check there for - # comments on common parts. build-others: + # Other containers are interdependent, we so build them sequentially. + # The steps are largely similar to base-debian above, so check there for + # comments on common parts. name: Build base-busybox, build-env, and create-env images outputs: TAG_EXISTS_base-busybox: ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} @@ -88,10 +88,10 @@ jobs: with: fetch-depth: 0 - # Get an appropriate tag to represent the version of bioconda-utils being - # used, and make it available to other steps as outputs. This will be used - # as BIOCONDA_UTILS_VERSION in later steps. - id: get-tag + # Get an appropriate tag to represent the version of bioconda-utils being + # used, and make it available to other steps as outputs. This will be used + # as BIOCONDA_UTILS_VERSION in later steps. run: | tag=${{ github.event.release && github.event.release.tag_name || github.head_ref || github.ref_name }} printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT @@ -112,10 +112,8 @@ jobs: BUSYBOX_VERSION=$BUSYBOX_VERSION \ TAG=$BASE_TAG \ ./generic_build.bash || [ $? == 64 ] - cat "base-busybox.log" >> $GITHUB_OUTPUT - - name: Build build-env id: build-env run: | @@ -147,20 +145,17 @@ jobs: TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] - cat "build-env.log" >> $GITHUB_OUTPUT - - name: Build create-env id: create-env run: | - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - # Here we extract the conda and mamba versions from the just-created # build-env container (or, if it was not created in this CI run because # it already exists, then pull from quay.io). This ensures that when # creating environments, we use the exact same conda/mamba versions # that were used when building the package. 
+ BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' REGISTRY="localhost" if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then REGISTRY="quay.io/bioconda" From 1ffb34d308ff95e5d866cca1a8f5bfa75209d0a2 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:27:43 -0500 Subject: [PATCH 064/143] add test before push --- .github/workflows/build-images.yml | 76 +++++++++++++++++++++++++++--- 1 file changed, 70 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index b60c623fc08..28dd57280aa 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -187,23 +187,87 @@ jobs: TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ ./generic_build.bash || [ $? == 64 ] - cat "create-env.log" >> $GITHUB_OUTPUT - push: + test: + name: Test using images needs: [build-base-debian, build-others] runs-on: ubuntu-20.04 steps: - # Here, and in the subsequent steps that also push images, a repository - # must first exist on quay.io/bioconda AND that repository must also be - # configured to allow write access for the appropriate service account. - # This must be done by a user with admin access to quay.io/bioconda. + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + # Clone bioconda-recipes to use as part of the tests. + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + repository: bioconda/bioconda-recipes + path: /recipes + + - name: set path + run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH + + - name: Install bioconda-utils + run: | + export BIOCONDA_DISABLE_BUILD_PREP=1 + wget https://raw.githubusercontent.com/bioconda/bioconda-common/master/{common,install-and-set-up-conda,configure-conda}.sh + bash install-and-set-up-conda.sh + eval "$(conda shell.bash hook)" + mamba create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt + conda activate bioconda + python setup.py install + + - name: test + run: | + # Decide, for each image, whether it was just built as part of this run + # (in which case we use localhost) or otherwise pull from quay.io. + if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then + DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' + else + DEST_BASE_IMAGE_REGISTRY=localhost + fi + + if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then + BUILD_ENV_REGISTRY='quay.io/bioconda' + else + BUILD_ENV_REGISTRY=localhost + fi + + if [ ${{ needs-built-others.outputs.TAG_EXISTS_create-env }} ]; then + BUILD_ENV_REGISTRY='quay.io/bioconda' + else + BUILD_ENV_REGISTRY=localhost + fi + + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + + cd /recipes + + # Run a test build, specifying the exact images to use. + DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ + bioconda-utils build \ + --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --packages seqtk \ + --docker \ + --mulled-test \ + --force + + push: + needs: [build-base-debian, build-others, test] + runs-on: ubuntu-20.04 + # For these push steps, a repository must first exist on quay.io/bioconda + # AND that repository must also be configured to allow write access for the + # appropriate service account. 
This must be done by a user with admin + # access to quay.io/bioconda. # # generic_build.bash reported whether the tag exists to the log; that was # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and # those jobs are dependencies of this job. So now we can use those # outputs to determine if we should upload. + steps: - name: Push base-debian id: push-base-debian From 9a02abc4295987c728758e054c96b07c80e9dc0d Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:34:48 -0500 Subject: [PATCH 065/143] typo --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 28dd57280aa..091d23abbad 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -235,7 +235,7 @@ jobs: BUILD_ENV_REGISTRY=localhost fi - if [ ${{ needs-built-others.outputs.TAG_EXISTS_create-env }} ]; then + if [ ${{ needs-build-others.outputs.TAG_EXISTS_create-env }} ]; then BUILD_ENV_REGISTRY='quay.io/bioconda' else BUILD_ENV_REGISTRY=localhost From b39c87c85e22e8a665f0d817cfa98759e4941048 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:35:28 -0500 Subject: [PATCH 066/143] typo --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 091d23abbad..d915209ff13 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -235,7 +235,7 @@ jobs: BUILD_ENV_REGISTRY=localhost fi - if [ ${{ needs-build-others.outputs.TAG_EXISTS_create-env }} ]; then + if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then BUILD_ENV_REGISTRY='quay.io/bioconda' else BUILD_ENV_REGISTRY=localhost From b654430536024aebce2339a681b9c4247b8b0382 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:38:20 -0500 Subject: [PATCH 067/143] keep recipes in work dir --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index d915209ff13..0243f5422f0 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -204,7 +204,7 @@ jobs: with: fetch-depth: 0 repository: bioconda/bioconda-recipes - path: /recipes + path: recipes - name: set path run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH @@ -243,7 +243,7 @@ jobs: BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - cd /recipes + cd recipes # Run a test build, specifying the exact images to use. DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ From f47c2dea1efa84a85d28575145802f0053ebffe1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:43:03 -0500 Subject: [PATCH 068/143] activate env in test --- .github/workflows/build-images.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0243f5422f0..a66472c9966 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -246,6 +246,7 @@ jobs: cd recipes # Run a test build, specifying the exact images to use. 
+ conda activate bioconda DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ bioconda-utils build \ --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ From cb8157dad1dcf51cc13c2b42a679e981a779fb87 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:49:35 -0500 Subject: [PATCH 069/143] eval hook --- .github/workflows/build-images.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a66472c9966..37f6fc482f0 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -246,6 +246,7 @@ jobs: cd recipes # Run a test build, specifying the exact images to use. + eval "$(conda shell.bash hook)" conda activate bioconda DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ bioconda-utils build \ From 3166fbd9034777a081f2951a24cc132783888d13 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 10:58:52 -0500 Subject: [PATCH 070/143] include output for bioconda-utils version in job --- .github/workflows/build-images.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 37f6fc482f0..359612e430a 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -80,6 +80,7 @@ jobs: TAG_EXISTS_base-busybox: ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} TAG_EXISTS_build-env: ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} + BIOCONDA_UTILS_TAG: ${{ steps.get-tag.outputs.tag }} runs-on: ubuntu-20.04 steps: @@ -241,7 +242,7 @@ jobs: BUILD_ENV_REGISTRY=localhost fi - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' cd recipes From a428b4b0748b0bbd880bff63b044eebfbb8ab925 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 11:12:40 -0500 Subject: [PATCH 071/143] fix names --- .github/workflows/build-images.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 359612e430a..f4ec32480ad 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -237,9 +237,9 @@ jobs: fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then - BUILD_ENV_REGISTRY='quay.io/bioconda' + CREATE_ENV_REGISTRY='quay.io/bioconda' else - BUILD_ENV_REGISTRY=localhost + CREATE_ENV_REGISTRY=localhost fi BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' @@ -249,7 +249,10 @@ jobs: # Run a test build, specifying the exact images to use. 
eval "$(conda shell.bash hook)" conda activate bioconda - DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ + + # Used to tell mulled-build which image to use + export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + bioconda-utils build \ --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ From aedd0329526d3b5e0fc4aca1ed45afd887ff72f8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 11:30:02 -0500 Subject: [PATCH 072/143] bump version for end-to-end testing --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f4ec32480ad..563f1509e5d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -18,7 +18,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAG: "0.1.4" # "latest" will always be added during the build. + BASE_TAG: "0.1.5" # "latest" will always be added during the build. BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian From 26325e96d57e2019352c0498a40879e87c4b90b1 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 12:36:58 -0500 Subject: [PATCH 073/143] don't use "localhost" as registry for docker --- .github/workflows/build-images.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 563f1509e5d..e5bb91d8e4d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -225,21 +225,21 @@ jobs: # Decide, for each image, whether it was just built as part of this run # (in which case we use localhost) or otherwise pull from quay.io. 
if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then - DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' + DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda/' else - DEST_BASE_IMAGE_REGISTRY=localhost + DEST_BASE_IMAGE_REGISTRY="" fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then - BUILD_ENV_REGISTRY='quay.io/bioconda' + BUILD_ENV_REGISTRY='quay.io/bioconda/' else - BUILD_ENV_REGISTRY=localhost + BUILD_ENV_REGISTRY="" fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then - CREATE_ENV_REGISTRY='quay.io/bioconda' + CREATE_ENV_REGISTRY='quay.io/bioconda/' else - CREATE_ENV_REGISTRY=localhost + CREATE_ENV_REGISTRY="" fi BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' From ec3704c8b7328686ae1e078359f3e11335baacbf Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 16:46:51 -0500 Subject: [PATCH 074/143] registry includes the slash --- .github/workflows/build-images.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e5bb91d8e4d..6dfc031cdca 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -251,11 +251,11 @@ jobs: conda activate bioconda # Used to tell mulled-build which image to use - export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" bioconda-utils build \ - --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --docker-base-image "${BUILD_ENV_REGISTRY}${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --mulled-conda-image "${CREATE_ENV_REGISTRY}${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ --packages seqtk \ --docker \ --mulled-test \ From 56b1fe2ef6c38ec5f456a18f9d00a54abbc20d95 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 20:56:43 -0500 Subject: [PATCH 075/143] convert back to steps rather than jobs because containers can't (easily) be passed between jobs --- .github/workflows/build-images.yml | 55 ++++++++++++++---------------- 1 file changed, 25 insertions(+), 30 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 6dfc031cdca..df01b69fc15 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -190,15 +190,9 @@ jobs: ./generic_build.bash || [ $? == 64 ] cat "create-env.log" >> $GITHUB_OUTPUT - test: - name: Test using images - needs: [build-base-debian, build-others] - runs-on: ubuntu-20.04 - steps: - - - uses: actions/checkout@v4 - with: - fetch-depth: 0 + # END OF BUILDING IMAGES + # ---------------------------------------------------------------------- + # START TESTING # Clone bioconda-recipes to use as part of the tests. - uses: actions/checkout@v4 @@ -224,25 +218,25 @@ jobs: run: | # Decide, for each image, whether it was just built as part of this run # (in which case we use localhost) or otherwise pull from quay.io. 
- if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then + if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda/' else DEST_BASE_IMAGE_REGISTRY="" fi - if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then + if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then BUILD_ENV_REGISTRY='quay.io/bioconda/' else BUILD_ENV_REGISTRY="" fi - if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then + if [ ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} ]; then CREATE_ENV_REGISTRY='quay.io/bioconda/' else CREATE_ENV_REGISTRY="" fi - BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.BIOCONDA_UTILS_TAG }}' cd recipes @@ -253,6 +247,7 @@ jobs: # Used to tell mulled-build which image to use export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + # Build a package with containers. bioconda-utils build \ --docker-base-image "${BUILD_ENV_REGISTRY}${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ --mulled-conda-image "${CREATE_ENV_REGISTRY}${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ @@ -261,24 +256,24 @@ jobs: --mulled-test \ --force - push: - needs: [build-base-debian, build-others, test] - runs-on: ubuntu-20.04 - # For these push steps, a repository must first exist on quay.io/bioconda - # AND that repository must also be configured to allow write access for the - # appropriate service account. This must be done by a user with admin - # access to quay.io/bioconda. - # - # generic_build.bash reported whether the tag exists to the log; that was - # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and - # those jobs are dependencies of this job. So now we can use those - # outputs to determine if we should upload. - steps: + # END TESTING + # ------------------------------------------------------------------------ + # START PUSHING IMAGES + + # For these push steps, a repository must first exist on quay.io/bioconda + # AND that repository must also be configured to allow write access for the + # appropriate service account. This must be done by a user with admin + # access to quay.io/bioconda. + # + # generic_build.bash reported whether the tag exists to the log; that was + # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and + # those jobs are dependencies of this job. So now we can use those + # outputs to determine if we should upload. - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-base-debian.outputs.TAG_EXISTS_base-debian }} + if: ${{ !steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: latest ${{ env.BASE_TAG }} @@ -289,7 +284,7 @@ jobs: - name: Push base-busybox id: push-base-busybox uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-others.outputs.TAG_EXISTS_base-busybox }} + if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} with: image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} tags: latest ${{ env.BASE_TAG }} @@ -300,7 +295,7 @@ jobs: - name: Push build-env id: push-build-env uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-others.outputs.TAG_EXISTS_build-env }} + if: ${{ ! 
steps.build-env.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} @@ -311,7 +306,7 @@ jobs: - name: Push create-env id: push-create-env uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-others.outputs.TAG_EXISTS_create-env }} + if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} From 81aca2742f0a681b17781612a3397eed73f2cf9f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 20:57:06 -0500 Subject: [PATCH 076/143] need to podman push images to docker-daemon so docker can use --- .github/workflows/build-images.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index df01b69fc15..64bbbd7ae59 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -218,22 +218,35 @@ jobs: run: | # Decide, for each image, whether it was just built as part of this run # (in which case we use localhost) or otherwise pull from quay.io. + # + # If localhost, we need to get the container from podman to docker, + # using podman push, so that bioconda-utils (which uses docker) can see + # the local image. if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda/' else DEST_BASE_IMAGE_REGISTRY="" + podman push \ + localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} \ + docker-daemon:${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} fi if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then BUILD_ENV_REGISTRY='quay.io/bioconda/' else BUILD_ENV_REGISTRY="" + podman push \ + localhost/${BUILD_ENV_IMAGE_NAME}:${BASE_TAG} \ + docker-daemon:${BUILD_ENV_IMAGE_NAME}:${BASE_TAG} fi if [ ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} ]; then CREATE_ENV_REGISTRY='quay.io/bioconda/' else CREATE_ENV_REGISTRY="" + podman push \ + localhost/${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} \ + docker-daemon:${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} fi BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.BIOCONDA_UTILS_TAG }}' From 15c1954b58cf0abf6e6489c871376de47cb43332 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 20:57:45 -0500 Subject: [PATCH 077/143] update build.sh for local tests --- build.sh | 48 ++++++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/build.sh b/build.sh index 68ce0e8f47c..58194ee52e7 100644 --- a/build.sh +++ b/build.sh @@ -24,7 +24,7 @@ export DEBIAN_VERSION="12.2" export BUSYBOX_VERSION="1.36.1" # Use same tags for base-busybox and base-debian -export BASE_TAGS="latest" +export BASE_TAG="0.1" # If the repository doesn't already exist on quay.io, by default this is # considered an error. Set to false to avoid this (e.g., when building images @@ -40,46 +40,36 @@ CREATE_ENV_IMAGE_NAME=tmp-create-env BASE_DEBIAN_IMAGE_NAME=tmp-debian BASE_BUSYBOX_IMAGE_NAME=tmp-busybox -BUILD_BUSYBOX=false # build busybox image? -BUILD_DEBIAN=false # build debian image? -BUILD_BUILD_ENV=false # build build-env image? +BUILD_BUSYBOX=true # build busybox image? +BUILD_DEBIAN=true # build debian image? +BUILD_BUILD_ENV=true # build build-env image? BUILD_CREATE_ENV=true # build create-env image? -# buildah will complain if a manifest exists for these images. If you do set -# REMOVE_MANIFEST=true, you'll need to recreate them all again. 
You can instead -# remove individual images like `buildah rm $BUILD_ENV_IMAGE_NAME`. You may -# need to run it several times. -REMOVE_MANIFEST=false -if [ ${REMOVE_MANIFEST:-false} == "true" ]; then - for imgname in \ - $BUILD_ENV_IMAGE_NAME \ - $CREATE_ENV_IMAGE_NAME \ - $BASE_DEBIAN_IMAGE_NAME \ - $BASE_BUSYBOX_IMAGE_NAME; do - for tag in ${BASE_TAGS} $BIOCONDA_UTILS_VERSION; do - buildah manifest rm "${imgname}:${tag}" || true - done - done -fi - - # # Build base-busybox------------------------------------------------------------ if [ $BUILD_BUSYBOX == "true" ]; then + + buildah manifest rm "${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" || true + buildah manifest rm "${BASE_BUSYBOX_IMAGE_NAME}:latest" || true + IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-busybox-bash \ ARCHS=$ARCHS \ TYPE="base-busybox" \ - TAGS=$BASE_TAGS \ + TAG=$BASE_TAG \ ./generic_build.bash fi # Build base-debian------------------------------------------------------------- if [ $BUILD_DEBIAN == "true" ]; then + + buildah manifest rm "${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" || true + buildah manifest rm "${BASE_DEBIAN_IMAGE_NAME}:latest" || true + IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ IMAGE_DIR=images/base-glibc-debian-bash \ ARCHS=$ARCHS \ TYPE="base-debian" \ - TAGS=$BASE_TAGS \ + TAG=$BASE_TAG \ ./generic_build.bash fi @@ -92,16 +82,25 @@ if [ $BUILD_BUILD_ENV == "true" ]; then else (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) fi + + buildah manifest rm "${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" || true + buildah manifest rm "${BUILD_ENV_IMAGE_NAME}:latest" || true + IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ ARCHS=$ARCHS \ TYPE="build-env" \ + TAG=$BASE_TAG \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash fi # # Build create-env-------------------------------------------------------------- if [ $BUILD_CREATE_ENV == "true" ]; then + + buildah manifest rm "${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" || true + buildah manifest rm "${CREATE_ENV_IMAGE_NAME}:latest" || true + # Get the exact versions of mamba and conda that were installed in build-env. CONDA_VERSION=$( podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ @@ -119,6 +118,7 @@ if [ $BUILD_CREATE_ENV == "true" ]; then IMAGE_DIR=images/create-env \ ARCHS=$ARCHS \ TYPE="create-env" \ + TAG=$BASE_TAG \ BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ ./generic_build.bash fi From 01122df9e4715a665d83534df921bd577115bdbb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 20:58:25 -0500 Subject: [PATCH 078/143] comments and cleanup --- generic_build.bash | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index 1857536eea9..8562e860199 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -1,7 +1,7 @@ #!/bin/bash -# This single script builds the following containers depending on the value of -# the env var TYPE: +# This single script builds the following images depending on the value of the +# env var TYPE: # # - build-env: contains conda + conda-build + bioconda-utils, used for building # package @@ -9,12 +9,16 @@ # expected to have been built beforehand). Used for creating env from # package + depdendencies # - base-busybox: the minimal container into which created conda envs are -# copied. 
This is the container uploaded to quay.io -# - base-debian: an extended version of the busybox container for special cases +# copied. This is the image uploaded to quay.io +# - base-debian: an extended version of the busybox image for special cases # -# Built containers are added to a manifest. If multiple architectures are -# provided, they will all be added to a manifest which can be subsequently -# uploaded to a registry. +# Built images are added to a manifest. If multiple architectures are provided, +# they will all be added to a manifest which can be subsequently uploaded to +# a registry. +# +# After images are built, they are tested. +# +# This script does NOT upload anything, that must be handled separately. USAGE=' Builds various containers. @@ -117,7 +121,10 @@ TAGS="$TAG latest" # ------------------------------------------------------------------------------ # CHECK FOR EXISTING TAGS. This is because quay.io does not support immutable -# images and we don't want to clobber existing. +# images and we don't want to clobber existing. `latest` will likely always be +# present though, so don't consider that existing. If you know that the +# repository doesn't exist (e.g., you're testing using different names) then +# set ERROR_IF_MISSING=false. response="$(curl -sL "https://quay.io/api/v1/repository/bioconda/${IMAGE_NAME}/tag/")" # Images can be set to expire; the jq query selects only non-expired images. @@ -157,7 +164,7 @@ set -xeu # Dockerfile lives here cd $IMAGE_DIR -# One manifest per tag +# One manifest per tag; multiple archs will go in the same manifest. for tag in ${TAGS} ; do buildah manifest create "${IMAGE_NAME}:${tag}" done @@ -183,7 +190,7 @@ fi if [ "$TYPE" == "build-env" ]; then BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=$BIOCONDA_UTILS_FOLDER") # git clone, relative to Dockerfile - BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as tag + BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as part of tag fi if [ "$TYPE" == "base-busybox" ]; then @@ -192,7 +199,7 @@ if [ "$TYPE" == "base-busybox" ]; then # Make a busybox image that we'll use further below. As shown in the # Dockerfile.busybox, this uses the build-busybox script which in turn - # cross-compiles for x86_64 and aarch64, and these execuables are later + # cross-compiles for x86_64 and aarch64, and these executables are later # copied into an arch-specific container. # # Note that --iidfile (used here and in later commands) prints the built @@ -271,11 +278,11 @@ for arch in $ARCHS; do buildah config "${LABELS[@]}" "${container}" # ...then store the container (now with labels) as a new image. - # This is what we'll use to eventually upload. + # This is what we'll eventually upload. image_id="$( buildah commit "${container}" )" buildah rm "${container}" - # Add images to manifest. Note that individual image tags include arch; + # Add images to manifest. Note that individual **image** tags include arch; # manifest does not. 
for tag in ${TAGS} ; do buildah tag \ @@ -306,7 +313,7 @@ done # ------------------------------------------------------------------------------ # TESTING # -# Args used specifically used when testing with Dockerfile.test +# Args to be used specifically when testing with Dockerfile.test TEST_BUILD_ARGS=() if [ "$TYPE" == "create-env" ]; then TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") @@ -377,6 +384,7 @@ if [ "" ] ; then --file=Dockerfile.test done fi +# ------------------------------------------------------------------------------- # Clean up buildah rmi --prune || true From bd9001400602ffa163f5da1b900478279a54b367 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 18 Feb 2024 22:36:22 -0500 Subject: [PATCH 079/143] tags need bioconda-utils version --- .github/workflows/build-images.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 64bbbd7ae59..6294b752036 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -216,6 +216,7 @@ jobs: - name: test run: | + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' # Decide, for each image, whether it was just built as part of this run # (in which case we use localhost) or otherwise pull from quay.io. # @@ -236,7 +237,7 @@ jobs: else BUILD_ENV_REGISTRY="" podman push \ - localhost/${BUILD_ENV_IMAGE_NAME}:${BASE_TAG} \ + localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG} \ docker-daemon:${BUILD_ENV_IMAGE_NAME}:${BASE_TAG} fi @@ -245,7 +246,7 @@ jobs: else CREATE_ENV_REGISTRY="" podman push \ - localhost/${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} \ + localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG} \ docker-daemon:${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} fi From 7d6f5c8542ea8deb3bef317496dc5527768c8116 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 09:51:45 -0500 Subject: [PATCH 080/143] use correct output --- .github/workflows/build-images.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 6294b752036..3563ea1d44a 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -250,8 +250,6 @@ jobs: docker-daemon:${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} fi - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.BIOCONDA_UTILS_TAG }}' - cd recipes # Run a test build, specifying the exact images to use. 
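The test step shown in the diff above has to bridge two container tools: the images are built with buildah/podman, but bioconda-utils drives docker, so anything built locally must be copied into the Docker daemon before the test build can see it. A minimal sketch of that handoff, using an illustrative image name and tag rather than the workflow's variables:

    # Copy a single-arch image from podman's local storage into the Docker daemon.
    podman push localhost/tmp-busybox:0.1.5 docker-daemon:tmp-busybox:0.1.5

    # Confirm that docker can now resolve the image.
    docker image inspect tmp-busybox:0.1.5 --format '{{.Id}}'

The docker-daemon transport expects a single image reference; a multi-arch manifest list may not transfer cleanly this way, so it is the per-arch image rather than the manifest that gets handed over.
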
From 7ce9bd170a8fa08623110a6bcaf10143e44cff13 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 16:19:19 -0500 Subject: [PATCH 081/143] attempt pushing to docker-daemon --- generic_build.bash | 3 +++ 1 file changed, 3 insertions(+) diff --git a/generic_build.bash b/generic_build.bash index 8562e860199..bd56f4b11e9 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -386,5 +386,8 @@ if [ "" ] ; then fi # ------------------------------------------------------------------------------- +podman manifest push --all localhost/${IMAGE_NAME} docker-daemon:${IMAGE_NAME} +docker run ${IMAGE_NAME} ls -l + # Clean up buildah rmi --prune || true From 8b7b1ff0f024974631f7ba0d9eaac236020bac4d Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 16:34:27 -0500 Subject: [PATCH 082/143] disable some jobs so we can do some container pushing tests --- .github/workflows/GithubActionTests.yml | 3 +++ .github/workflows/build-images.yml | 1 + 2 files changed, 4 insertions(+) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index d59a85ccc7d..1df118499e1 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -6,6 +6,7 @@ concurrency: jobs: test-linux: + if: false name: Linux tests runs-on: ubuntu-latest strategy: @@ -43,6 +44,7 @@ jobs: echo "Skipping pytest - only docs modified" fi test-macosx: + if: false name: OSX tests runs-on: macos-latest steps: @@ -74,6 +76,7 @@ jobs: fi autobump-test: + if: false name: autobump test runs-on: ubuntu-latest steps: diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 3563ea1d44a..7a336f1ae44 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -72,6 +72,7 @@ jobs: cat "base-debian.log" >> $GITHUB_OUTPUT build-others: + if: false # Other containers are interdependent, we so build them sequentially. # The steps are largely similar to base-debian above, so check there for # comments on common parts. From dc4514a7b017ef3890a9f42788fcb1f457ba273e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 16:37:59 -0500 Subject: [PATCH 083/143] try pushing to ghcr.io --- .github/workflows/build-images.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 7a336f1ae44..4c0d7affdcf 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -71,6 +71,10 @@ jobs: # via the step's outputs). cat "base-debian.log" >> $GITHUB_OUTPUT + - name: push to ghcr + run: | + podman push ghcr.io/bioconda/$BASE_DEBIAN_IMAGE_NAME:${BASE_TAG} + build-others: if: false # Other containers are interdependent, we so build them sequentially. 
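The push to ghcr.io added just above assumes the runner is already authenticated against that registry; without a login the push is denied. A sketch of the authentication and push, assuming GITHUB_TOKEN and GITHUB_ACTOR are exported in the environment (in the workflow they come from the secrets and github contexts) and using a placeholder image name:

    # Authenticate podman against the GitHub container registry.
    echo "$GITHUB_TOKEN" | podman login ghcr.io -u "$GITHUB_ACTOR" --password-stdin

    # Push a locally built image, naming both the source and the destination.
    podman push localhost/tmp-debian:0.1.5 ghcr.io/bioconda/tmp-debian:0.1.5

Both references are needed because the image only exists locally under the localhost/ prefix; giving just the ghcr.io name would ask podman to push an image it does not have.
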
From 6a177d2ebd569be0ed913e8a3ec79565fe75fc59 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 16:39:26 -0500 Subject: [PATCH 084/143] rm push to docker daemon --- generic_build.bash | 3 --- 1 file changed, 3 deletions(-) diff --git a/generic_build.bash b/generic_build.bash index bd56f4b11e9..8562e860199 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -386,8 +386,5 @@ if [ "" ] ; then fi # ------------------------------------------------------------------------------- -podman manifest push --all localhost/${IMAGE_NAME} docker-daemon:${IMAGE_NAME} -docker run ${IMAGE_NAME} ls -l - # Clean up buildah rmi --prune || true From 3188ea3985551f76f0f548e5ff4edec5f2f754ba Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 16:48:37 -0500 Subject: [PATCH 085/143] login to ghcr --- .github/workflows/build-images.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 4c0d7affdcf..0d6345a9eb7 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -73,7 +73,10 @@ jobs: - name: push to ghcr run: | - podman push ghcr.io/bioconda/$BASE_DEBIAN_IMAGE_NAME:${BASE_TAG} + echo "${{ secrets.GITHUB_TOKEN }}" | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman push \ + localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} \ + ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} build-others: if: false From 39ab36a50f9ff3eebba53c3726d806ac9df83ff7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 17:17:52 -0500 Subject: [PATCH 086/143] test pull from ghcr --- .github/workflows/build-images.yml | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0d6345a9eb7..e503496145f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -35,8 +35,6 @@ jobs: # Later steps for other containers are similar, so comments are only added to # this first job. name: Build base-debian - outputs: - TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} runs-on: ubuntu-20.04 steps: @@ -73,11 +71,26 @@ jobs: - name: push to ghcr run: | - echo "${{ secrets.GITHUB_TOKEN }}" | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin podman push \ localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} \ ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + test-pull: + runs-on: ubuntu-20.04 + steps: + + - test-pull: + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman pull ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + podman run ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + + echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin + docker pull ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + docker run ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + + build-others: if: false # Other containers are interdependent, we so build them sequentially. 
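The ghcr.io pushes above are gated on the TAG_EXISTS_* outputs, which originate in generic_build.bash: before building, it asks quay.io whether the tag is already published, because quay.io tags are not immutable and existing ones should not be clobbered. A rough approximation of that check, with an illustrative repository, tag, and log file name (the script's actual jq filter additionally excludes expired tags):

    # List the tags currently published for a repository on quay.io.
    existing_tags="$(
      curl -sL "https://quay.io/api/v1/repository/bioconda/base-glibc-busybox-bash/tag/" \
        | jq -r '.tags[].name'
    )"

    # If the tag is already there, record that fact and bail out with status 64,
    # which the workflow interprets as "tag exists, skip the rebuild and upload".
    if printf %s "$existing_tags" | grep -qxF "0.1.5"; then
        echo "TAG_EXISTS_base-busybox=true" >> build.log
        exit 64
    fi
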
From 93cb439f346906362a9443326bb6d782e91665ac Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 17:20:07 -0500 Subject: [PATCH 087/143] yaml syntax --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index e503496145f..a58d8a18ac2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -77,10 +77,10 @@ jobs: ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} test-pull: + name: test pulling runs-on: ubuntu-20.04 steps: - - - test-pull: + - name: test-pull run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin podman pull ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} From ce4534531b6533576e477b7caf564dad4298dec8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 17:57:23 -0500 Subject: [PATCH 088/143] try building everything and pushing/pulling to/from ghcr.io --- .github/workflows/build-images.yml | 115 +++++++++++++++++++---------- 1 file changed, 74 insertions(+), 41 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a58d8a18ac2..5950dd51266 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -35,6 +35,8 @@ jobs: # Later steps for other containers are similar, so comments are only added to # this first job. name: Build base-debian + outputs: + TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} runs-on: ubuntu-20.04 steps: @@ -70,27 +72,13 @@ jobs: cat "base-debian.log" >> $GITHUB_OUTPUT - name: push to ghcr + if: '${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }}' run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin podman push \ localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} \ ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - test-pull: - name: test pulling - runs-on: ubuntu-20.04 - steps: - - name: test-pull - run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - podman pull ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - podman run ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - - echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin - docker pull ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - docker run ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - - build-others: if: false # Other containers are interdependent, we so build them sequentially. @@ -136,6 +124,14 @@ jobs: ./generic_build.bash || [ $? == 64 ] cat "base-busybox.log" >> $GITHUB_OUTPUT + - name: push base-busybox to ghcr + if: '${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }}' + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman push \ + localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} \ + ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} + - name: Build build-env id: build-env run: | @@ -169,6 +165,14 @@ jobs: ./generic_build.bash || [ $? == 64 ] cat "build-env.log" >> $GITHUB_OUTPUT + - name: push build-env to ghcr + if: '${{ ! 
steps.build-env.outputs.TAG_EXISTS_build-env }}' + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman push \ + "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + - name: Build create-env id: create-env run: | @@ -211,10 +215,28 @@ jobs: ./generic_build.bash || [ $? == 64 ] cat "create-env.log" >> $GITHUB_OUTPUT + - name: push create-env to ghcr + if: '${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }}' + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman push \ + "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + # END OF BUILDING IMAGES # ---------------------------------------------------------------------- # START TESTING + test: + name: test bioconda-utils with images + runs-on: ubuntu-20.04 + needs: [build-base-debian, build-others] + steps: + + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + # Clone bioconda-recipes to use as part of the tests. - uses: actions/checkout@v4 with: @@ -237,38 +259,46 @@ jobs: - name: test run: | + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + + # bioconda-utils uses docker, so log in to ghcr.io with docker. + echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin + + # we also want to use podman to push to quay.io, but we need the images + # locally to this runner to do so, hence also logging in with podman. + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + # Decide, for each image, whether it was just built as part of this run - # (in which case we use localhost) or otherwise pull from quay.io. + # (in which case it would have been just uploaded to ghcr.io) or + # otherwise pull from quay.io. # - # If localhost, we need to get the container from podman to docker, - # using podman push, so that bioconda-utils (which uses docker) can see - # the local image. - if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then - DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda/' + # If ghcr.io, then also pull the image with podman so it will be + # available to upload to quay.io in subsequent steps. We do this even + # for base-debian, even if it's not used for the test. + if [ ${{ ! 
needs.base-debian.outputs.TAG_EXISTS_base-debian }} ]; then + podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" + fi + + if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then + DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' else - DEST_BASE_IMAGE_REGISTRY="" - podman push \ - localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} \ - docker-daemon:${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} + DEST_BASE_IMAGE_REGISTRY="ghcr.io/bioconda" + podman pull "${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" fi - if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then - BUILD_ENV_REGISTRY='quay.io/bioconda/' + if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then + BUILD_ENV_REGISTRY='quay.io/bioconda' else - BUILD_ENV_REGISTRY="" - podman push \ - localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG} \ - docker-daemon:${BUILD_ENV_IMAGE_NAME}:${BASE_TAG} + BUILD_ENV_REGISTRY="ghcr.io/bioconda" + podman pull "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" fi - if [ ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} ]; then - CREATE_ENV_REGISTRY='quay.io/bioconda/' + if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then + CREATE_ENV_REGISTRY='quay.io/bioconda' else - CREATE_ENV_REGISTRY="" - podman push \ - localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG} \ - docker-daemon:${CREATE_ENV_IMAGE_NAME}:${BASE_TAG} + CREATE_ENV_REGISTRY="ghcr.io/bioconda" + podman pull "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" fi cd recipes @@ -278,12 +308,12 @@ jobs: conda activate bioconda # Used to tell mulled-build which image to use - export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" # Build a package with containers. bioconda-utils build \ - --docker-base-image "${BUILD_ENV_REGISTRY}${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - --mulled-conda-image "${CREATE_ENV_REGISTRY}${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ --packages seqtk \ --docker \ --mulled-test \ @@ -302,11 +332,14 @@ jobs: # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and # those jobs are dependencies of this job. So now we can use those # outputs to determine if we should upload. + # + # Note that "latest" is built by generic_build.bash as well, and we're + # including it here in the upload. - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ !steps.base-debian.outputs.TAG_EXISTS_base-debian }} + if: ${{ ! 
steps.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: latest ${{ env.BASE_TAG }} From 9967d446777b471b60e15d73578bd563424730a2 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 17:58:09 -0500 Subject: [PATCH 089/143] re-enable build-others job --- .github/workflows/build-images.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 5950dd51266..8233c8a6414 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -80,7 +80,6 @@ jobs: ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} build-others: - if: false # Other containers are interdependent, we so build them sequentially. # The steps are largely similar to base-debian above, so check there for # comments on common parts. From c4554f74a8408d212511f1648bed5862c22c441f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 18:19:04 -0500 Subject: [PATCH 090/143] get bioconda-utils version from step output --- .github/workflows/build-images.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 8233c8a6414..150b4ff9993 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -168,6 +168,7 @@ jobs: if: '${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }}' run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' podman push \ "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" @@ -218,6 +219,7 @@ jobs: if: '${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }}' run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' podman push \ "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" From 73aef1cb51df71367651ad507360bca36afa9f17 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 19:19:43 -0500 Subject: [PATCH 091/143] get bioconda-utils version from build-other job --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 150b4ff9993..1c204a07104 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -261,7 +261,7 @@ jobs: - name: test run: | - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.get-tag.outputs.tag }}' # bioconda-utils uses docker, so log in to ghcr.io with docker. 
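The churn in patches 091-092 (and again in 099-100 further down) comes down to one GitHub Actions rule: a step output such as steps.get-tag.outputs.tag is only visible inside the job that produced it. For the separate test job to see the bioconda-utils tag, build-others has to re-export it as a job-level output, and test has to read it through the needs context. A minimal sketch of that plumbing, assuming build-others maps the get-tag step output to an output named BIOCONDA_UTILS_TAG (the actual outputs: block is not shown in these hunks):

    jobs:
      build-others:
        runs-on: ubuntu-20.04
        outputs:
          # re-export the step output so that dependent jobs can read it
          BIOCONDA_UTILS_TAG: ${{ steps.get-tag.outputs.tag }}
        steps:
          - id: get-tag
            # hypothetical value; the real workflow derives it from the bioconda-utils checkout
            run: echo "tag=v3.2.0" >> "$GITHUB_OUTPUT"
      test:
        runs-on: ubuntu-20.04
        needs: [build-base-debian, build-others]
        steps:
          - run: |
              BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}'

The same rule is why patch 100 below switches the quay.io push steps' if: conditions from steps.*.outputs.TAG_EXISTS_* to needs.*.outputs.TAG_EXISTS_*.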
echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin From 2ce4b8c4bd8563a514b8be5c51c90332b6d6e2c8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 19:57:06 -0500 Subject: [PATCH 092/143] aaaand use right output --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 1c204a07104..f665fec5b49 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -261,7 +261,7 @@ jobs: - name: test run: | - BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.get-tag.outputs.tag }}' + BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' # bioconda-utils uses docker, so log in to ghcr.io with docker. echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin From c5eb65089ab223c764809768659446cc97a7258b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 21:23:25 -0500 Subject: [PATCH 093/143] push the right image --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index f665fec5b49..0dede367854 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -221,8 +221,8 @@ jobs: echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' podman push \ - "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + "localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" # END OF BUILDING IMAGES # ---------------------------------------------------------------------- From f45e2ffb990a17f6ce632a0ae6bd7cde7ca6cd22 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 19 Feb 2024 22:12:41 -0500 Subject: [PATCH 094/143] add additional label to tie to bioconda-utils --- generic_build.bash | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/generic_build.bash b/generic_build.bash index 8562e860199..140af43a282 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -255,6 +255,12 @@ for arch in $ARCHS; do container="$( buildah from "${image_id}" )" run() { buildah run "${container}" "${@}" ; } LABELS=() + + # See + # https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry; + # this allows the container visibility to inherit that of the linked repo + # (public in the case of bioconda-utils) + LABELS+=("org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils") LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") From e9c6b7f2425ddbc9f3134c4d95da086bab1e0dcb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 09:06:22 -0500 Subject: [PATCH 095/143] fix label --- generic_build.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index 140af43a282..df6a7b184db 100755 --- a/generic_build.bash 
+++ b/generic_build.bash @@ -260,7 +260,7 @@ for arch in $ARCHS; do # https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry; # this allows the container visibility to inherit that of the linked repo # (public in the case of bioconda-utils) - LABELS+=("org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils") + LABELS+=("--label=org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils") LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") From c8cb13e784455963a5ab04f610bb3b8d6fceeef6 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 09:55:51 -0500 Subject: [PATCH 096/143] new version number to see if ghcr will make public --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0dede367854..86a9f019199 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -18,7 +18,7 @@ env: BIOCONDA_UTILS_FOLDER: bioconda-utils DEBIAN_VERSION: "12.2" BUSYBOX_VERSION: "1.36.1" - BASE_TAG: "0.1.5" # "latest" will always be added during the build. + BASE_TAG: "0.1.6" # "latest" will always be added during the build. BUILD_ENV_IMAGE_NAME: tmp-build-env CREATE_ENV_IMAGE_NAME: tmp-create-env BASE_DEBIAN_IMAGE_NAME: tmp-debian From e291d36d03e13e0a58c67bd846bc67524b532aa9 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 15:41:00 -0500 Subject: [PATCH 097/143] ensure all tags are pulled --- .github/workflows/build-images.yml | 4 ++++ .gitignore | 1 + build.sh | 6 +++--- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 86a9f019199..8f52866de39 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -279,6 +279,7 @@ jobs: # for base-debian, even if it's not used for the test. if [ ${{ ! 
needs.base-debian.outputs.TAG_EXISTS_base-debian }} ]; then podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" + podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:latest" fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then @@ -286,6 +287,7 @@ jobs: else DEST_BASE_IMAGE_REGISTRY="ghcr.io/bioconda" podman pull "${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + podman pull "${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:latest" fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then @@ -293,6 +295,7 @@ jobs: else BUILD_ENV_REGISTRY="ghcr.io/bioconda" podman pull "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman pull "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:latest" fi if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then @@ -300,6 +303,7 @@ jobs: else CREATE_ENV_REGISTRY="ghcr.io/bioconda" podman pull "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman pull "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:latest" fi cd recipes diff --git a/.gitignore b/.gitignore index 93bb35b8c6d..8e7c1e872d1 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ docs/source/developer/_autosummary # Mac OS Files .DS_Store env +recipes/ diff --git a/build.sh b/build.sh index 58194ee52e7..9b5de3a1fe1 100644 --- a/build.sh +++ b/build.sh @@ -40,10 +40,10 @@ CREATE_ENV_IMAGE_NAME=tmp-create-env BASE_DEBIAN_IMAGE_NAME=tmp-debian BASE_BUSYBOX_IMAGE_NAME=tmp-busybox -BUILD_BUSYBOX=true # build busybox image? +BUILD_BUSYBOX=false # build busybox image? BUILD_DEBIAN=true # build debian image? -BUILD_BUILD_ENV=true # build build-env image? -BUILD_CREATE_ENV=true # build create-env image? +BUILD_BUILD_ENV=false # build build-env image? +BUILD_CREATE_ENV=false # build create-env image? # # Build base-busybox------------------------------------------------------------ if [ $BUILD_BUSYBOX == "true" ]; then From 8c42d0fdfc896fea616e508972b1bf6306808585 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 16:40:46 -0500 Subject: [PATCH 098/143] ensure latest manifest is pushed in the first place --- .github/workflows/build-images.yml | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 8f52866de39..69270949e01 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -75,9 +75,8 @@ jobs: if: '${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }}' run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - podman push \ - localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} \ - ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + podman push localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} + podman push localhost/${BASE_DEBIAN_IMAGE_NAME}:latest ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:latest build-others: # Other containers are interdependent, we so build them sequentially. @@ -127,9 +126,8 @@ jobs: if: '${{ ! 
steps.base-busybox.outputs.TAG_EXISTS_base-busybox }}' run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - podman push \ - localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} \ - ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} + podman push localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} + podman push localhost/${BASE_BUSYBOX_IMAGE_NAME}:latest ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:latest - name: Build build-env id: build-env @@ -169,9 +167,8 @@ jobs: run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - podman push \ - "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman push "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman push "localhost/${BUILD_ENV_IMAGE_NAME}:latest" "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:latest" - name: Build create-env id: create-env @@ -220,9 +217,8 @@ jobs: run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - podman push \ - "localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman push "localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" + podman push "localhost/${CREATE_ENV_IMAGE_NAME}:latest" "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:latest" # END OF BUILDING IMAGES # ---------------------------------------------------------------------- From 00f458eb2a1b1059fad0ed1286fa36e82c14b8a5 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 17:31:35 -0500 Subject: [PATCH 099/143] use job-level tag --- .github/workflows/build-images.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 69270949e01..54af8148f01 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -365,7 +365,7 @@ jobs: if: ${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} + tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} @@ -376,7 +376,7 @@ jobs: if: ${{ ! 
steps.create-env.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} - tags: latest ${{ steps.get-tag.outputs.tag }}-base${{ env.BASE_TAG }} + tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} registry: quay.io/bioconda username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} From a0554c49b7b23027b558e6ce72f7817721fb2038 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 20 Feb 2024 18:18:51 -0500 Subject: [PATCH 100/143] use jobs not steps --- .github/workflows/build-images.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 54af8148f01..7daf2f80aed 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -273,7 +273,7 @@ jobs: # If ghcr.io, then also pull the image with podman so it will be # available to upload to quay.io in subsequent steps. We do this even # for base-debian, even if it's not used for the test. - if [ ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} ]; then + if [ ${{ ! needs.build-base-debian.outputs.TAG_EXISTS_base-debian }} ]; then podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:latest" fi @@ -340,7 +340,7 @@ jobs: - name: Push base-debian id: push-base-debian uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }} + if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} with: image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} tags: latest ${{ env.BASE_TAG }} @@ -351,7 +351,7 @@ jobs: - name: Push base-busybox id: push-base-busybox uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} + if: ${{ ! needs.build-others.outputs.TAG_EXISTS_base-busybox }} with: image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} tags: latest ${{ env.BASE_TAG }} @@ -362,7 +362,7 @@ jobs: - name: Push build-env id: push-build-env uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }} + if: ${{ ! needs.build-others.outputs.TAG_EXISTS_build-env }} with: image: ${{ env.BUILD_ENV_IMAGE_NAME }} tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} @@ -373,7 +373,7 @@ jobs: - name: Push create-env id: push-create-env uses: redhat-actions/push-to-registry@v2 - if: ${{ ! steps.create-env.outputs.TAG_EXISTS_create-env }} + if: ${{ ! 
needs.build-others.outputs.TAG_EXISTS_create-env }} with: image: ${{ env.CREATE_ENV_IMAGE_NAME }} tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} From ce17266f8577e3e46d28651c2bca05eb92794485 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 21 Feb 2024 19:25:02 -0500 Subject: [PATCH 101/143] typo in missing argument handling --- generic_build.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/generic_build.bash b/generic_build.bash index df6a7b184db..5ed7b4a381d 100755 --- a/generic_build.bash +++ b/generic_build.bash @@ -95,7 +95,7 @@ if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1 fi - if [ "$TEYPE" == "create-env" ]; then + if [ "$TYPE" == "create-env" ]; then [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1 [ -z "$CONDA_VERSION" ] && echo "error: please set CONDA_VERSION for create-env" && exit 1 [ -z "$MAMBA_VERSION" ] && echo "error: please set MAMBA_VERSION for create-env" && exit 1 From 59ef090ca4635ebbabf8bc40b0a37f8b3a1b35c3 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 21 Feb 2024 19:26:18 -0500 Subject: [PATCH 102/143] re-enable library tests --- .github/workflows/GithubActionTests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index 1df118499e1..d59a85ccc7d 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -6,7 +6,6 @@ concurrency: jobs: test-linux: - if: false name: Linux tests runs-on: ubuntu-latest strategy: @@ -44,7 +43,6 @@ jobs: echo "Skipping pytest - only docs modified" fi test-macosx: - if: false name: OSX tests runs-on: macos-latest steps: @@ -76,7 +74,6 @@ jobs: fi autobump-test: - if: false name: autobump test runs-on: ubuntu-latest steps: From dbc13b7b18aeb994811feaeecf7a19119b8da73b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:26:51 -0500 Subject: [PATCH 103/143] create & store local version of locale to reduce inter-image dependencies --- .../bioconda-utils-build-env-cos7/Dockerfile | 3 +-- images/locale/C.utf8/LC_ADDRESS | Bin 0 -> 127 bytes images/locale/C.utf8/LC_COLLATE | Bin 0 -> 1406 bytes images/locale/C.utf8/LC_CTYPE | Bin 0 -> 353616 bytes images/locale/C.utf8/LC_IDENTIFICATION | Bin 0 -> 258 bytes images/locale/C.utf8/LC_MEASUREMENT | Bin 0 -> 23 bytes .../locale/C.utf8/LC_MESSAGES/SYS_LC_MESSAGES | Bin 0 -> 48 bytes images/locale/C.utf8/LC_MONETARY | Bin 0 -> 270 bytes images/locale/C.utf8/LC_NAME | Bin 0 -> 62 bytes images/locale/C.utf8/LC_NUMERIC | Bin 0 -> 50 bytes images/locale/C.utf8/LC_PAPER | Bin 0 -> 34 bytes images/locale/C.utf8/LC_TELEPHONE | Bin 0 -> 47 bytes images/locale/C.utf8/LC_TIME | Bin 0 -> 3360 bytes images/locale/Dockerfile | 19 ++++++++++++++++++ images/locale/generate_locale.sh | 5 +++++ 15 files changed, 25 insertions(+), 2 deletions(-) create mode 100644 images/locale/C.utf8/LC_ADDRESS create mode 100644 images/locale/C.utf8/LC_COLLATE create mode 100644 images/locale/C.utf8/LC_CTYPE create mode 100644 images/locale/C.utf8/LC_IDENTIFICATION create mode 100644 images/locale/C.utf8/LC_MEASUREMENT create mode 100644 images/locale/C.utf8/LC_MESSAGES/SYS_LC_MESSAGES create mode 100644 images/locale/C.utf8/LC_MONETARY create mode 100644 images/locale/C.utf8/LC_NAME create mode 100644 images/locale/C.utf8/LC_NUMERIC create mode 100644 
images/locale/C.utf8/LC_PAPER create mode 100644 images/locale/C.utf8/LC_TELEPHONE create mode 100644 images/locale/C.utf8/LC_TIME create mode 100644 images/locale/Dockerfile create mode 100644 images/locale/generate_locale.sh diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 55c2b309d7d..2a287e2d17d 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -1,8 +1,7 @@ ARG base_image FROM ${base_image} as base -ARG BUSYBOX_IMAGE -COPY --from=${BUSYBOX_IMAGE} /usr/lib/locale/C.utf8 /usr/lib/locale/C.utf8 +COPY ./C.utf8 /usr/lib/locale/C.utf8 # Provide system deps unconditionally until we are able to offer per-recipe installs. # (Addresses, e.g., "ImportError: libGL.so.1" in tests directly invoked by conda-build.) diff --git a/images/locale/C.utf8/LC_ADDRESS b/images/locale/C.utf8/LC_ADDRESS new file mode 100644 index 0000000000000000000000000000000000000000..c4b6714c7a4c3711b61514786ede83632e486987 GIT binary patch literal 127 zcmZ9?u?|E~6olcMs1nf71>TR;Rp~Q#0}wJ1cmMzZ literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_COLLATE b/images/locale/C.utf8/LC_COLLATE new file mode 100644 index 0000000000000000000000000000000000000000..b36405433f1ea96046ee836cecdcf4782f867189 GIT binary patch literal 1406 zcmbW#_gBwh6u|LsX(LI=Y#EVKMkKUUL?jwWWXsA(MO#Zz6d4(nvfD<5q>PlUva&)U zGetw>{rQ1Ez&+=6o^zk)+;h*l&s9;7RPQ1ZiR>}sKg_1)A2E^0Tat^vAtv5VLQ+av zMpmwUhmP`{6gn$*QBv-z(ye=sp1pdjs;O&e_UYTNzt(_(+Jkg-2kYq%89HpZ!HAKD zql`w68Eb4}YGyvp!qRH|1nY^DY$i{!ooY92y1j#=(~Oz3oM*etnd>@l{(^;z7B5-q zwru%|m8;xWuUWg!W4))B_lAu=zJC4zfkB&sLqfyCH%CNn*%}qSZF@}Yj-7G4cJJ96 zzinARj*#ZsjhkZ?tSftkDoq&`C9j_zTx|i#-`?$ zpRK=sx5+REMj{eUA==Oly@Z4i-o()k5)eiw1!)L1vXDc2bU;VQqZ1U+8Hy18PYKHC z3Ka-_x}yhrLg?8Ws!)SEG@yw-=!<^n4=oJ9KxktSbfAmD(1SjPU?_%RI1DfXBVmY9 zFv4hz!B`l>1g0>9ImW>PmaxKjOn@~eViIgH8B<`3sj$N|Ooy<)4se7Mgk7GAS#XB1 zqb`_(xp2ii%*O&O#3C%l5-f!qmSH(oU?o<;9jmbhYq1U_i-PVK??*FXFKe`*8pVk$^-b;SiFM zg2On1qd11+IDwN$MHes@XK)rd$VDE`;XE!N9~V)8ODIGUF5?QW;u?x^ z9VIBm4cx>n+{PW;#Xa0d8Ol+C2Y84_c#J2g#8W)ObG*PyRN)m~;|;1&gSU8x_o&4O oe8eYw#ut1=9loI+4fu{9Xhaj5(So07#V`Cu8yxJdbPYv+0BaWr!2kdN literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_CTYPE b/images/locale/C.utf8/LC_CTYPE new file mode 100644 index 0000000000000000000000000000000000000000..cba1025364b1da45c249d622c01f17327c0b166b GIT binary patch literal 353616 zcmeF)4M3LdVh~_HKIX{Ck&%&^k(rU1ImQ?>a%5!AnUOIg#~6p>9LE@kh#5J?7-P&a z=9puQIp&yiX3iX$nVA`xBQs}Y%*f1$$jHbMV`fI4uIK&Xefbsj`_4IQKHqoo-2eCe z+|Td(fROQ(zb)ghBqStMenb*0BT}scY4Z(Ao8ZnrNJzNHVBjC|PZAP_c+4;(jPjH* zo-xjICYWT3mrRq;mynP|GAX2zMmp=rU;`W3L?)Zr!dAACMK-zQkxv1;*-Ifs6jMSO zy?^kdgu>B}5_Nm-SzK3&{`T{_9U()_>9Z ziNDo%e?p@f_&%EWewz6KTKGYZ^Fy@q!?f`uoZv@k=f^n7kJG_V(8*8I#ZPgHpQf9i z;WR%>4?o8lex9@Z0_XT!oab+IfnVeze}_x_621IgF7x-e!r$jAE`bTx_=jA__q@NXF8S9!?4Wr%;rBmO;) z`42qdKQhdJVub(9DF210{8z^KZ#?6_GtU3uIlsmP|C336ohg2U7yK_?^1pe-|6!W{ z%WJ-1u}b)eL_Q{ouV4jVNix5UmHc*6_#Ld`t4QT{vYOvT8o!%0{2tQzy{zT;v5w!* zdj0?z{6RMGhuFv;W)pveO#UdF`D1M1kF%9O!8X2{EWUjqKr@*vp@1AAf;D{v!MNOBC_V9N;fg%wORk-$DuBN-5t) z8DFgazgu7Rd=CwLFO7U3O?*Gi`~WTdAjkP3TKQqx_z_OTl_O_^UL(}&$+|D;4c4?d;ALb`Bx0^uX(_~VUSq-!LTgz1uq3Ep|DUYCqX6dqExrl0QL!lBQ7$LDGj>cXqPUj3QRetSZ7&F89W6TYM7Gu0Id<&~eQ`l|__t*Wi5OsM{@ zs?XM{R`p%qnef@qe`opUsyk+?dNMhS6dm} zX=9%uzf$-MABFEr#i>3!WspvCP-=Ngac1~4SFz&7+dlX(2Oo}s568fVW8lLv@ZlKv zEp-f>@$Sw2|I1PFp5;Bud?@u{20qNdhZ*=V10QDK!wkIr3=EaK|9km9_qk{2pfoQC zg7l$X)&xEB~CH!;r(2W=OLtgwB`$j;qm;6K3)7~} z;y-Hr4Fv6+h^&Kc3dYf5{eyV9AKFgwnf{DzC@9|%OB)KpLHj=`@5c!GSYUjiwu15h 
zg>e4RpRxX0%w2IXU%3SJeuhTjAF%e#k7D0!81fC0An$_@bMWC9_%b;Lg6mlHJZ}>A zO~N2P6r8(1EbJOm99(DG#5=9`6I|=Qi?Nyo~V|`(far z&&-{P5AovsxcTdf3%f3TQp=F{sm0#kez`5h-kSv1)ZqLX^6q%ZePAef2fUYWknS7_ zmg3+XIl$p?*`R!JBs}D}3dMu8sGkycU47Fs6uAcsx%ULi(7y0G_*xtnj-}2I4F!D# z{Ra0epRE^Kirou`N|o_0I9Q529?6HV zA7Ry#20qNdr#%B7eTkREFYz*a^S|)XM+vi2=Ej!TA74=Wt(PU*`=VsM z^?<__f?taeS{AM!&5cb-EM<0RPJL4t&W~>VoctxC(He8tAI(je6?mLy1er9ONDz=Jranl=55jJ>`y;tq(LY12fRee+m>IVsc2U8 zIF806>-cen=VDHQH${W_jT~K}U*!_l;Ii5a7mAyoXu@*W3k)!N5G;`O=A_8R%qbWf zO)Mk2snK;TCkTaVSTFOW3UwQs$=Pkqw5QSeIrCD2eKIrP#jihKSUl@Jb7neU-gpf+ zdI$xx6<>Q<+Z&yLnMNWBez8zAYdO;vtre{^XCSkOwd~kCTvLPX2&N=5knnjG>d=s8 zMmIMQ8Br`boDi9mFU(9%G!Xu^OJcIZxy#ImlEWvK5DqOkVd1F?eVt!pZVL;?XGRbXL<$8>E?hpE8oqMO z3}D`PqvhtT-_-1!;`5@R$%&-S8v0Dh$dG2@Zw|~FQ*?`hj%Ifdi3RONwl-KP7OnEB zt{2*jPqo80G#;D~(L1~K8;+KRM!%f2nK=&!Lem`tms9s0%ETQBk)Gc$3$g6^y)8K< zp=}6`uyDP&dvC1Cw+hYg{~c38!=9Ol==1!N8}p7my=%45yv(XBC5SIINO_@;eQ{+X zb%Nwz39W)cp>|^B!zs~tTsM*Wp_EWK7{eE72^~M-VZU3C%N+2`f$(m<%&!$1b1>WQ zute+5ss2uF&2%4WbSA!VAg+&~!or2-rbe0$t>)y-Yhq55%e0BZ|F>#5^#j-+m-cWJjUgr-bw6}E> z&RHb?Ef0yfkN(l-7u%GX^qDSZQWgv>yS>OE_ZH=6^%p)~!w++zGbG53jxkm;nzHmd zR{o0~3JvUyA5fNGVCfBABsXrWLx<)37XgvW!-6Y2A-o4>9)#lBn;Q&95KfC5OYkH$ zb7Tkk(K7RDM-qY}p`ejkhaSXc>IsC~4zHqBq8lYIv>9>zM~Vci=rCq_2tB*UOt0{mv@*Hsc5?&XkX|GN-vn z{;V!$MdnS&CzCMK$D++eN-UcH4u|qgr*9EhqW=Z6LT|T1?^A+oC*tB4-+A?CPTP-S z*O7UJV)wxXvX)F*)>g+_2p5mV-#Ik5-?wV$3$dw~+fFDRx!!$htI!inbjQS9okJTj zH@@hWMIL+HhZimKMa+nw$l2XSt``b~t{c(y%thqW4#ZurB3;cM-|}P810pyFgOhNf z#RquF@v|{MywDcMx3y6IB56UjP}Y+5gF?}QO2?O-7Y^OeLkCs(z?yY*h2OUYb!H{c zYh>w!`7`o%DNDEhY2*fbK0F=4j$e4shx-l6EoX_0GB%;JhW$RXJFB60h{Rt{!dtSO zMuL)WU2a)5LT6iO2LxYYCthrpm+bcq<67>x7wRKAf>>TSB^F;ew8*&}mmF<5ypGHG zM1tXY{NjC$&cZT}kZAEG*TKxpE--t}mWoB|Max9v3$KHIq9s3>^}_wUXX>2USWat^ ztqGk}@#WulD}#eH{xUfC^TGSJ|JL>6H*Oi>H?_ZvCW4Dr&`f;MShFr-v78_ji^W%% zNtPWh;j?&9Aav>m@4}=8_oZ0Np-{N<_?99C!WEageZ^Or9iG$2`$QuR$2Sp53MDM^ zT%|x{3W9a`|Jegve(e1&qOa%R z1Un|Y%VJf-@wfdNtN+Cd#gB1j|HmhO5yP_{AcLbid}J?YGQw4Z9TKe<*(Um_?Fuh8}HnP-ZB1`{V%_GWIQv+w?L3I`)%Ls zo6@|-m7kaPNhL(aH>bJT{RRC;$}M*_r@f`4!ToaCOXwb}{BlRKymHIy{%yv&ycTDd z3Qa?-ljy8O*Ru;pV&OWWNN6v`CofpxOPA=n7`ide+TY>*6Fdglv!O_E`3VLYY}t~V z?idKFSwiJQ8y0RZR&8!PROl1=5)3m~f{ls{IMSI_tcP$~EG`r)HWP}!J}elHZ(ydt zqK`Qt?*LURZHw(+@Ju-y8Q?OSLU z&)W3mw)ZyW!iQt{8xgU3LAMJYagd=78+6HC27!F zBpz2rFfzO#F%k@|B8B3UKAG^$=ReH_eMUMCLgD!fjUzmo$RI+ip!d*qEmkkMz6HrM zt8gULZhX~nIFvbSQl-p_FFz9PC^Y){T`iun{4U<6S8+*MFyZrU5;@$D`aUUKm7mKopGyR{G;tDJy=7>AdqHDzRY!KNXsl6Uur2&XO)_{M88XaGs9YFytA(VGbMu)AQqV&ie&^3Tai5Fqo-$ZKM1{%3uT2jBi2Hw zMmRC_HMsoFT!MqvqAkX%ghTVvLl^Jxl{55dG`LPg8Wspz3Xd;HjWrf25leW#kmkY- zEVz~%Iqst)h;(liTxMd|nOORqP;_|F_M`Eo*5R25PfciU=d>9gjcX^UHnS_DV~flG zWP_1GggTA;61z@>3&drGg0cKXL(w^pFAz!#S`2MObZ@-<7R?MNxD7-$;k{OGKZHeU zyw@leue?ZapICCJ=lDJKhVVi=_YGCPe6!zpYl}Z0Kba|ywiw;xOHWR;)Tg&zy8rjj zebdO_FstFqbnpxkn6oX5L=84Jc9Z;uD|Ar8CP&_CYjbAvV(rf)Eg6WF zUpy3^*>E$lQcE_w?94fn_o-ao;s-OQKOg;qtqZm)zSQF3*!Is4EmUiMsZh#H(YfPU zJRXd5UhYTlJ7J5A?_H8Z+Y}tS;e&T(6Xyh$YTE9IZQ=q2=QOqK=mJeGoHYBq)BeK6 z;!@{~A=pbxE_2Gqwe-f|oT{Pd8!OE&5DYDL{dC+f)aECdHoNfB;MsZd@lBNhtfMQhG(@5?7Js`?binU$3>s%JPr=u@ zeZ~zhbf84?=arkO7p-6&4bJ2(90>0s!w>z~R5<2rjBZb?QvCcZ-h8y)r?rlaXC@S$ zwn&FFsS5`_tzpfyyKuM5PmSz=cU~>Or}r!wTs|}re8PMCmZF=tXw7H=>sfc6rLS-H z!lErMkP+=9IP>OLou9Hm6Z4XmzN*fA|77W6?~yxqK#N^rgI`7!T7s)i?15|MIDXf_ z-2P+ncWv?Q>V^6X9g%ZBZ^i}^D>5f-Zq&d22$F&Wec8u&FsF0dc~d;t0YSBJ#klG= zeTllEN}-CeLJOATHvUNB*k*Vg=ebFYB!pybSUUcG$~Xryn&Ig;p;`X z{;czAR+H}&nRkB9%x9$4x$z)*vAgrk*k_G(Ig$ADW|8o6nwcr1jhS#X5VRW~j_+q? 
zww-3y}Q`=r<54#OX=oE&lIM<^#0j}B)}-=XcB`vG}QnNK_# z8SkgLnl`VtzoO6rT4`&4fpR%=68|C?`93Zhje}~Yx#Yw_`ymx{>eV*_Qd6At1}h9~&b>Iwcld4fMrp73py@x|)@ zyY*Gi_t3!i(#ZGG#P`$8575F7a-1Kcl^>>!AK?T)N;^NsNq(FTeu7SZk}iIVQ~WgD z{0yi0S$g<6&hYb`@~b@L-!jC%;}QR! z$NUGL@E_^p)9U|!X?v;u>poJz608z2ZNDYY|IgU&az4u;zMaE-2NisdBm7k=`A(|% zJk|U)YWVBa@?9L|Z&1hI>7GKh69AE&OIZ z|Np+RFZcW(R`x$J!hdFz|H4!LD`WgOp7Gxq=YQ~=Ut@y*$t1td6u-d>g7g0+|C?9* zAEx=gye2sRhxxSn56=Ik`u~Fb-5lT}wz-pLxr=7IgXXw<=DKr+)0cOTU8w#x?S3^` zd=1%rEjfH0xqLlE>AqNk>rfIaNMnJsK37)vmY zU<^V3LEqcSV+Z-{q<~!nbF+uN?4ywV6cNl>F$XE3lrqXW#9=BpLM2sHQ$sCBspA;+ zG|)&B&9rcwR@yj0J16O&lP*rt&1rf#!&%O8o(o*$61`mJ3Rk(tb#8EzK5lWFe(rFW zd)#M$2MqF%As+FVCk!*fC{G#V8RI->f=Q-$!Ao8-&1-^#Es-QvkjzR_SVbzUNn;J^ ztYsbR$zTH;*+eFr*}_(~kwrE++?=k9Wpphn;Y2i4nv~hxVPSQaqU7Vtu)AVqLvz+5R7r4kJdb!LM zu5yj*+~6jC+~PL<+~F?wxX%C&7~~;CJmN7=7-ob~o-)QW#(B;JlT7h~m%L({*90fO z-yE5c#0rvGNeZh-Wi@H6A)U3XV?7ycU?ZEzWHVdX$~LmdCWl#{ZA4rNMbInHx|i(I0Y%Ut0q*SO9NZqmmsZqv^l?sAX&4Df(K z9x}uu9`l4@Mi}KOV?1M==S(ok6fbzmE2eo(LX!PY5-Uh%B`K^TmDQxNhIH1lj`d`) zfsJe;lg(^lE8EB-n;df4P98hRXD0>hVmEu(%RUO(PZ0+w<{%}MQbsw4I7|gcsHBQ& zYN+KXbsVFf1{!IinHG-IN*gC==Oi6;(#0vdIZY2|ILkTCbAgLoqL<5D;VRd-&JAwT z$1QHt&mHb^kNXVpfI%KI#3LT_gkeS)Ds*#9K4f@D^b z!YWc(O&V)RXD#bkPX-&<$R;w`%oet?jV!XsA(!psv4ebeQot^DvxmLxqmca+ae!hD zQbH+Zlyiu~RB(h!s;H)hT8>i3G3sfcktUjH;W(|dae{VE(m^L(oT8i4^l*l=oZ~zf zxX2}Xxy%)=a*gZU;3j?C;x_%<;V$>M&j1e?m;xSJcW`t3mGR8B;dCmlrO!0!3 zykeTyB&@LiNn!=btR#h1q_Ubc){xFx*0G)pHn5RRWU`qpY-Jl+WRpWK+sR`G`Rt^C zUF>ELd)Y@J`zhi8#T=xBQpzaj5QnMY2$fV(O%1girH*6N(?BClG}FRyT5011?VO~8 zPP#ZnH>c_03}-pVc`k5~OZ0M?D_rFo*SWz>`nbhy`nkhh?s1<19x%v5hIqtdo-oV^ zqdaAdXN>cl2_~811uuEUG_OhcO8cKAR*=j}QdmVQt4U)G>8xcP>&ajP8`(rAo7uuv zwvk0PIpngPJa&-JP72t?ZuYR3eH5~vA`Vc@K}sm4jB*Zfm;gFIx2M?B^U!;CP>Q^t74IM112k||#Bl2=Ugn&1O}B1xpB!yL^vYIs3kj`4xv7QVzu#rt}C&p*+(J! zDdGUd9HfL&$|&a$hpFHQl~hqp4YeGlj$_o*KqE~w)539DY2yU#oTP(Jx;RBQr|IDg zXF11tE^v`c^m3UiT;&?qxxr2PxW#Syxx-!Vai0MmFvvrOc*J9#Fw6*}JY|e$jPslc zCYj;|FL}i@uL)lEC6dGnl37U#t4L)vX{;fgwX9=38Ejx9o5*A{TiD7rvdAWfT(*

@2LiSU{0g5?D38j=#&LIv{!4WE{qM90NIZ7SJsHcHOnrNnlW%wDyvCj4e6|99qY+p0~^^y zCY#y9R<@BvHaX<7oji7s&rS;1#cuYnmwgnnpCS%W%t1;hrHpb8ahM8@P)QZl)KJS& z>NrL{4K&h3Gc6pal{QY$&Ph7xq>EE@bDAE`aF%nN=K>eGL@$@Q!d0$uog3Vwk6YZP zpF7;;9`_mG0fRhbh(|o;3B!yq%2UR8#yHQJV3H|b@RCYx zlg1j-S<5=slfecyvWZMKvxTi}Ba3Wu$Yncu>>!_=6tIik>|rnaC}clH9H5wklu$|; zbPbkIo`r|9N1J)Gez=Qz&=E^>)p zE^~#eT;n=7xJe(kxJ^HIxXV56Gr$7|dB_ltc+3-q8DW&CjPZwewv)#W^4UoNyV%Vh z_Og#c_EW?GiaAILrIb<5Ar4c)5h|&oni^_3N*%|jr-4SAXr_hZw9>{2+Br!Fopf=E zZcfv~8P0N!^IYH}m+0j(SGdYGu5*K%^l^*Z^mB*1+~Yn2JYbNA4DpD^JYkp-MtRB@ z&lu-96HGG23tsYyX8xcP>&ajP8`(rAo7uuvwvk0P zIpngPJa&-JP72t?ZuYR3eH5~vA`Vc@K}sm4jB*Zfm; zgFIx2M?B^U!;CP>Q^t74IM112k||#Bl2=Ugn&1b@5=mkO$*d%WRiv_-G}e<Gp|i z3^uTlO=Pl}Eo@~QS!9z#F5Agt2l?!zfL-in4|~~1A^R!f0L2`ngi^{V=Maaf;0Tpe zQB4iC9How9)YCvCO*GTOaaw8P1nr!pgHF0QMK`DE;S6Uv$9XPrkxTS)nJZl78rQkO zP5QXSZTh*xUG8z80Uj{OLxy<7W1cX~2%|h@jAxAVoCzkG;sq~x#Wb%8zQLbJ5-Uh% zB`K^TmDQxNhIH1lj`d`)fsJe;lg(^lE8EB-n;df4P98hRXD0>hVmEu(%RUO(PZ0+w z<{%}MQbsw4I7|gcsHBQ&YN+KXbsVFf1{!IinHG-IN*gC==Oi6;(#0vdIZY2|ILkTC zbAgLoqL<5D;VRd-&JAwT$1QHt&mHb^kNXVpfI%KI#3LT_gkeS)i3G3sfcktUjH;W(|dae{VE z(m^L(oT8i4^l*l=oZ~zfxX2}Xxy%)=a*gZU;3j?C;x_%<;V$>M&j1e?m;xSJc zW`t3mGR8B;dCmlrO!0!3ykeTy1V7x9ND?bZW+f@CB9+yov4(WkvX1p+uz`(iB9qN* zVJq9nBAXm?*-joi$Y&=7>|!^2*vmc&*-sG%DCQs~lu|}Hhd4|HN2sKVYHFzED0Lj8 zo(39eqL~(s(@GmBXy+sybkfBsx;ae`XE@6_&U1l_T%woDT;VF$xXul3(#I`s)6X65 za*z8A@PI)cGQ=Yu^MqkW809HrJY$^aOfbn5FL=o-rg=^91LTP$v4Uh)lENxdSxp*i zNM|kUSWgBU*vKX_*~}KUvW+aV$sw2R z!&GpDN~)-)hFXqN$1&<@pphn;Y2i4nv~hxVPSQaqU7Vtu)AVqLvz+5R7r4kJdb!LM zu5yj*+~6jC+~PL<+~F?wxX%C&7~~;CJmN7=7-ob~o-)QW#(B;JlT7h~m%L({*91S< zmq-#TNMbInHx|i(I0Y%Ut0q*SO9NZqmmsZqv^l?sAX&4Df(K9x}uu9`l4@Mi}KOV?1M= z=S(ok6fbzmE2eo(@DE24Nn!=btR#h1q_Ubc){xFx*0G)pHn5RRWU`qpY-Jl+WRpWK z+sR`G`Rt^CUF>ELd)Y@J`zhi8#T=xBQpzaj5QnMY2$fV(O%1girH*6N(?BClG}FRy zT5011?VO~8PP#ZnH>c_03}-pVc`k5~OZ0M?D_rFo*SWz>`nbhy`nkhh?s1<19x%v5 zhIqtdo-oV^qdaAdXN>cl2_~811uuEUG_MJMs5g-$R*=j}QdmVQt4U)G>8xcP>&ajP z8`(rAo7uuvwvk0PIpngPJa&-JP72t?ZuYR3eH5~vA`Vc@K}sm4jB*Zfm;gFIx2M?B^U!;CP>Q^t74IM112k||#Bl2=Ugn&2OdC6dGnl37U# zt4L)vX{;fgwX9=38Ejx9o5*A{TiD7rvdAWfT(*@2LiSU{0g5?D z38j=#&LIv{!4WE{qM90NIZ7SJsHcHOnrNnl7bJ?PSMS2dN{*b&T*a#T;vkHT;>W_xyE&FaFaf6ahrbbaF=`BXMhI`@{l1O@t7wJ zGr}lO8RHq_JZFMQrg*_iUNOyUf`2HQND?bZW+f@CB9+yov4(WkvX1p+uz`(iB9qN* zVJq9nBAXm?*-joi$Y&=7>|!^2*vmc&*-sG%DCQs~lu|}Hhd4|HN2sKVYHFzED0Lj8 zo(39eqL~(s(@GmBXy+sybkfBsx;ae`XE@6_&U1l_T%woDT;VF$xXul3(#I`s)6X65 za*z8A@PI)cGQ=Yu^MqkW809HrJY$^aOfbn5FL=o-rg=^9k9iVFVg<>pB!yL^vYIs3 zkj`4xv7QVzu#rt}C&p*+(J!DdGUd9HfL&$|&a$ zhpFHQl~hqp4YeGlj$_o*KqE~w)539DY2yU#oTP(Jx;RBQr|IDgXF11tE^v`c^m3Ui zT;&?qxxr2PxW#Syxx-!Vai0MmFvvrOc*J9#Fw6*}JY|e$jPslcCYj;|FL}i@uL=Hv zcp^!xAeohYxlg1j-S<5=slfecyvWZMKvxTi}Ba3Wu$Yncu>>!_=6tIik>|rna zC}clH9H5wklu$|;bPbkIo`r|9N1 zJ)Gez=Qz&=E^>)pE^~#eT;n=7xJe(kxJ^HIxXV56Gr$7|dB_ltc+3-q8DW&CjPZ}4N??5BtW6myUgN-3k9LmZ}pBUDmFH8s?7lsb-4PXmoK(M$`+ zX{C)5v~!XUI_csR-JGU}Go0ld=efW|F44Ejl+>E{l2xyOA5c)%bJ z8R8L-dBQLwjPjH*o-xjICYWT37rf*Z)4V45#Sn=kv4Uh)lENxdSxp*iNM|kUSWgBU z*vKX_*~}KUvW+aV$sw2R!&GpDN~)-) zhFXqN$1&<@pphn;Y2i4nv~hxVPSQaqU7Vtu)AVqLvz+5R7r4kJdb!LMu5yj*+~6jC z+~PL<+~F?wxX%C&7~~;CJmN7=7-ob~o-)QW#(B;JlT7h~m%L({*98BFH<2V(kjzR_ zSVbzUNn;J^tYsbR$zTH;*+eFr*}_(~kwrE+Y9H*5wPSDOtI_RW}Q*?8h9?o!wewv)#W^4UoN zyV%Vh_Og#c_EW?GiaAILrIb<5Ar4c)5h|&oni^_3N*%|jr-4SAXr_hZw9>{2+Br!F zopf=EZcfv~8P0N!^IYH}m+0j(SGdYGu5*K%^l^*Z^mB*1+~Yn2JYbNA4DpD^JYkp- zMtRB@&lu-96HGG23tsYyXbhr8V4 zJ_9^pkcSNMh{rr(m=Q*K${5cW=Q$HhGQ|sC@``C*6Z|5mM3PuRGAl`86{)NyjWwjR zmUXNrgAHtC6Pavg3tQPn7TM&G%XaeEK|VVvU>CdD!(R4L$bO1AKrsg?p_DSpImBTq 
zI6@^=R8vDON2%i&^)%2(6V0@6oL1U6K|3eupp!06(amXkIKx@aah?lY7n||(amwVi2fCminkRcxNm?sP~!YEG};~C>TXM#zlc)?3vG0kg&U*3^O z5-Uh%B`K^TmDQxNhIH1lj`d`)fsJe;lg(^lE8EB-n;df4P98hRXD0>hVmEu(%RUO( zPZ0+w<{%}MQbsw4I7|gcsHBQ&YN+KXbsVFf1{!IinHG-IN*gC==Oi6;(#0vdIZY2| zILkTCbAgLoqL<5D;VRd-&JAwT$1QHt&mHb^kNXVpfI%KI#3LT_gkeS)nJsK(8(CzNLoVCN zV+Z-{q<~%QW)FMWMER4#ImdY}aFI*&a+xb!$y!A<(O#clez!(Hxip8*~)$U}yB z#ABW?%m|}AWsGNx^PCALnc@X6dBrrZ34U=;B1xEaaKoTi5}oaG$nxxhs( z(aUA7aFuIZ=LR?F;}*B+=MHzd$9)EPz#tD9;t`K|!Z0I@@{}>2G0t-)m}H6i3G3sfcktUjH;W(|dae{VE(m^L( zoT8i4^l*l=oZ~zfxX2}Xxy%)=a*gZU;3j?C;x_%<;V$>M&j1e?m;xSJcW`t3m zGR8B;dCmlrO!0!3ykeTy1i$b#kt9}-%t}&NMJlUFV-4x7WgY9uU;`W3L?)Zr!dAAC zMK(F)vYkA3kk3vE*u`%4u$O%lvY#RjP|QI}D5Z>Y4sn>&Ty7o$Nrk^|97bJ?PSMS2dN{*b z&T*a#T;vkHT;>W_xyE&FaFaf6ahrbbaF=`BXMhI`@{l1O@t7wJGr}lO8RHq_JZFMQ zrg*_iUNOyUf?w8|ND?bZW+f@CB9+yov4(WkvX1p+uz`(iB9qN*VJq9nBAXm?*-joi z$Y&=7>|!^2*vmc&*-sG%DCQs~lu|}Hhd4|HN2sKVYHFzED0Lj8o(39eqL~(s(@GmB zXy+sybkfBsx;ae`XE@6_&U1l_T%woDT;VF$xXul3(#I`s)6X65a*z8A@PI)cGQ=Yu z^MqkW809HrJY$^aOfbn5FL=o-rg=^93*Hk+Vg<>pB!yL^vYIs3kj`4xv7QVzu#rt< zvY9PxWgA&!lS3}s$zuok?4*EQ>}C&p*+(J!DdGUd9HfL&$|&a$hpFHQl~hqp4YeGl zj$_o*KqE~w)539DY2yU#oTP(Jx;RBQr|IDgXF11tE^v`c^m3UiT;&?qxxr2PxW#Sy zxx-!Vai0MmFvvrOc*J9#Fw6*}JY|e$jPslcCYj;|FL}i@uL*upZz4&oAeohYx zlg1j-S<5=slfecyvWZMKvxTi}Ba3Wu$Yncu>>!_=6tIik>|rnaC}clH9H5wklu$|; zbPbkIo`r|9N1J)Gez=Qz&=E^>)p zE^~#eT;n=7xJe(kxJ^HIxXV56Gr$7|dB_ltc+3-q8DW&CjPZ}4N??5BtW6myUgN-3k9LmZ}pBUDmFH8s?7lsb-4PXmoK(M$`+X{C)5v~!XUI_csR z-JGU}Go0ld=efW|F44Ejl+>E{l2xyOA5c)%bJ8R8L-dBQLwjPjH* zo-xjICYWT37rf*Z)4V45CEbZ6v4Uh)lENxdSxp*iNM|kUSWgBU*vKX_*~}KUvW+aV z$sw2R!&GpDN~)-)hFXqN$1&<@pphn; zY2i4nv~hxVPSQaqU7Vtu)AVqLvz+5R7r4kJdb!LMu5yj*+~6jC+~PL<+~F?wxX%C& z7~~;CJmN7=7-ob~o-)QW#(B;JlT7h~m%L({*93piC6OdnkjzR_SVbzUNn;J^tYsbR z$zTH;*+eFr*}_(~kwrE+Y9H*5wPSDOtI_RW}Q*?8h9?o!gSD6LnG{8gX| zR-r0PMX1s$Qk7HDs)DMhDyyohx~i#atGcSbYN#5iIMq}&Qwgf2YOUI;_Ns&Gq`IhX zDoOQJy;Wb;Uky}))lfBDjZ~x67&T5!P?OXYHBHS>v(y|lSIt)o)nc_&EjN$#OKN6O z8WxLj2`88#yz+f_u+m#fCupq9>ybh6p!I? zJb@?i6rRRZJcDQP9G=Guco8q*WxRq{@fu#o8+a3M;cdKwckv$H#|QWjAK_zsf=}@o zKF1gM5?|qKe1mWC9lpm8_z^$hXZ(U+@f&`}ANUi0;cxs8|KMNzhZgD5zK{+rF+FC$ zjA(_{myhEV*_l6vDgS3V;nZYrWlXSusJ4R3v7w4ur;>9 zw%88aVD!}YiUH{xd8f?IJLZpWi| z43FapJc+09G^XMiJd5Y>JYK+ycnL4#6}*bq@H*bWn|KRv;~l(*_wfNf#7FoTpWst` zhR^W@zQkAf8sFese24Gx1AfF$_!+<8SNwrL@fZHa|L_m~#eZmF$?G3lVtUMg8PN)@ zF%#OLEoMeLw8t!%6&)}eI-(OgV|H}G9GDYb(G7E99?XmRumF0YHx|S~SQv|7QS`xL z=!?a%1p1*r24EltVK9bZD3-)9495s8g{83!Mq(63V+D-CidYG&Vl}LeHLwoW!}{0& z8)G~+!{(TPt+5TZ#rBwp9k3&I#ctRgldvcD#{oDH2jfs2jmbC`C*mZWjI%KX=i)qE zfD3U6F2m)x8rR@P+=QEP3vR`2xE*)kPTYmNaS!greRv#C;we0hsdyGI;2pe+5AY$H zTTNyjqq*Dk|4E-lI<&;}m;p1Q6`SuiU)U^a9_Cv?W_=z=*gC%U2= z=0bPOjd?IHdSE{E#QazQz0eyAVj(PyMX)IPU@`Q?;#dOx&>sUZ5Q8unLogIeVi<;F z1eU_mSOz1pESAG4jK=a<0b{TtR>I0y1*>8;td2FXCf35*SO@E3J*9bZFXI)wir4T1e#B4s1;64?w8+56D@>2pXovQg z1zoWu*2G#^2OD4OY6MLb#zh!0_ntNMjw&5W(x3)J!tNJnc0U2@DQ3? 
zU}nrcFf+&S1e$wZW?VdtcKOG z2G+z{SR3nLU95-ou>m&3SZsuiF%FwxQ;f$h*aLfFFYJv6(cI=Ta~ZGUb-aN$(cI=T za~sW_E;IM>0X{@?yUWaDGV$nVV>C z8kxC`=Dv}cyJ&75nYoYV?va^?_!!L{Br{LZ+(a_-9L;?sGcVEHN;2~r&D|t3Z_(UP zGV>nIJtZ?A(cD%t^BK*ZB{N^q+*~sA9nJkEGe6PXVlwj^&0QulfAIe|n)LV|j|7-;DaKZ8@V&=5(Uj?gT z95z97U&V~MePSjR%^ego746bA_cF|!Kyxp{jJcU%CKW5&r#-HMRngo|Fk|i~n7NDQ zmV%jpENN~*vuEGTU5v___P83>#s(ON?XWxcz@FF(dt)CQf>UuGnjQ0IR-oB0Z)O*I zJET1}d%ew=y+dX;q1i!XW(%5qL}s?3*-d0-2bw)aW_F?3S!8Apn*Bv)_MzEjWaa>x zy+&pZq1ka{<_Ma7M`n(p*?nZ@1e!faW=^5miDV`f&3+^^XVL6R`v2rkBORv4444tE z&>C$qGuoj&X2qPC3*FHJ^PwmDU@`Q?66lBi7=VEoguxhsp;!{bFak?qX)J@0SQg7+ z6h>ottbj3C5i4P3tb$ds8rHy?SPN@o9juG>us$}xh8T;DurbDA6Ksm{*bJLv0=B@G z*a}-?8*Gd1ustSX2keNQurqeSuGkH`V-ohjp4c1vVm};!19318!J#+|hvNtwiKB2d zCgT_!hvRVqPQ=ML1*hT+oQ)|s7w6%8T!0I45iZ6hxD=P+a$JEcaTTt{HMkbn;d{5Fg=Ve1cE$89v7s_!3{?YkY%m@g2U$ z5BL#3;b;7UU-27$#~=6;f8lTZ5C7m_{D&51Cyyo_T4H+4fEm#WtuYhYpe<%bJG93v zm=zr`8#Xq^48t)3OJQj&gOOMk%V88oV|lEAF<22RVP&j>Rk0dY#~N4@ zYhi7ygLSbU*2f0e5M!|sHpVz?f=w|Vn_+WIz!umNTVZQ#gKe=Lw#P*5fE}?DcE&E) z6}w?~Ou`=66MJEA?1O!=ANI!qI1mTnU>t%&aTpHA5jYY@;b=_8F*p{-;dq>Y6LAtw z#wj=zr{Q#*firOy&c->If^%^m&c}tg2p8iLT#CzZIj+E!xC&R}8eEI(a6N9ojkpOn z;}+bC+i*MXz@4}YcjF%1i~Ddt9>9Zm2oK{CJc`HgIG(_hcnVKrDxSf!cn;6w1-yut z@G@S(t9T8s;|;utx9~RJ!Mk`5@8bh}h>!3wKEbE>44>l*e2K5{HNL^O_zvIW2mFYi z@H2kFulNnW;}86azwkHyhkx)d{zHqryzZkVrpFAJ5v|Y~GocOIVrH~Md(47a(E+oe zBRZioX2+c9if)(--7zIV4AuNnVuqgUqG4#daSOWdf9|JHDgD@CF zFceE-7=~j6mcr6l1|zX7mcuBF#`0JZD`91p5^*2G#^8|z?QtcUfn0mfouY=ZIF z44Y#Dw!oIy3R`0vY>Vx%JtkrY?1-JPGj_qQ*bTd5686BJ*b94OAMA_$us;sK!8inm z;xHVJBXA^+!qJ$FV{j~v!|^x)C*mZWj8kwbPQ&Rq183qaoQ-oZ1?S>CoR14|Auhtj zxCEEtGF*-;a3!w7)wl-N;yPT98*n3T!p*n^x8io(fje;*?#4a17x&{KJd8*1C?3ZX zcoI+HX-vg4coxs$dAxuZ@e*FfYj_=R;7z=RxA89C!~6IEAL1i?j8E_>KEvnu0$<`Q ze2s7LExyC|_z6Gb7yO3b@dy6IU-%pU!#`-@!RsViVtUMg8PN(ep$*z%W^}-8=!j0} zjM*_Kx}qEALU+uKc`z?}peGhUFZ9OZ=#K#yh(Q>PAy^W_Fao171}kDEtc(pX9-Cov zOu!b{1-oJr_Q0Ol3wvW9?2G-dKMufwI0y&h5FCoba5#>@kvIxRV=|7xu{aLL;{=?9 zlW_`8#c4PlXW&eng|l%Err=y$h>LJBF2SX^0$1WHT#ajREpEU~xD|KeF5Hd#a6ewa zi+Bky;}yJ$*YG;tz?*mrZ{r=ji}&z8KEQ|g2p{7Ue2UNTIljP`_zGX+8+?oJ@I8J& zi+mRUpLA%6=`kZ(p*7l|EoMeLw8t!%6&)}eI-(P1M;CNOH_V0Zm>ct;2j)XhEP&ov z2#a7*^uc22i^Z`7`e6VDVh{#n2!>)w495s8g{83!Mq+uafH7DRD`8Eng|)E`*2Q{Q z9~)ppjKxOS7~`-BHpO^shRrbnTVP9Ug{`p-w#9bX9uu(xcEnED8M|Ot?1tSj3435q z?1jCt5B9}=*dGVrKpce0I0>iVG@OGexE{~qIXsUS@Dg6Z44!H4w>D^pj+h-?FbBF~ zek_1q=#4&D42xq448{m7i_sW^m9QBmU`uR;?J*HMVkhj3U9c-AVGrz$eQ+=i!QnUp zM`JRs#dWwIH{db6fj991KEy}(7+>LQe1mWCJr>TN_PiEBivoOnMoUbO889PSp*3bg z8??pDXovQg1+$_9W#zqAR*#E_BD-mCb`I2EVibew@RaTd)1?~cbegtS}^VLSu{IQ%{)i5 zAJxoDG`mvGyhgJ()y!KoJ52Bh8FJvqRF%C>)K+I0hf%Q~Zi%KcktS zX!bOk`Gf!8*{H|=cx+eX|KI%o{f6dCJ$T#`&7MFr{e99LfSrq_Jzn6O=4Z4hp7uB$ zT4H+4fEm#WtuYhYpe<%bJG93vm=zr`8#Xq^48t)3OJQj&gOOMk%V88o zV|lEAF<22RVP&j>Rk0dY#~N4@Yhi7ygLSbU*2f0e5M!|sHpVz?f=w|Vn_+WIz!umN zTVZQ#gKe=Lw#P*5fE}?DcE&E)6}w?~Ou`=66MJEA?1O!=ANI!qI1mTnU>t%&aTpHA z5jYY@;b=_8F*p{-;dq>Y6LAtw#wj=zr{Q#*firOy&c->If^%^m&c_9~5EtQMT!Kq+ z87{{axDr?4YFvYBaUHJ54Y(0E;bz=|TX7q1#~rv6cj0c_gL`ow?#Bao5D(#DJc38@ z7#_zHcoI+HX-vg4coxs$dAxuZ@e*FfD|i(xN~FDBWkMS?yT;6zePd=yqS-xWrYuHb zd5pnIX!ekqF}ujjn0;hs%uX^hW-pl;vzyF}*-vK1>?kv1_LP}1yUNU%ePw3M&N4GC zuobq!c4+pOndyq%(d;ubV|JRE>4kmJ>^L)H_MDj+fWvSEjzY8R%#7J{W@b8?oo8ld zquG6CCI!tNG&5!wnwbS?_M(|tgiCN4uE15e7T4oO+>BdsJMP5YxEJ^1K|G8{@ED%J zQ+NR{;$^&o*YG;t#9Md=@8V;8iqG*Ke!;K!4Zq_L{E5HtFaAS6zqDUxe+us$}x zh8T;DurbDA6Ksm{*bJLv0=B@G*a}-?8*Gd1ustSX2keNQurqeSuGkH`V-ohjp4ba} zV;}5`{jfg{z=1dj2jdVNioGiFB@%z-)472Plwx?^t4gC3X;O(V-YNh zK3EKWu{f4MKlH}{48$M|#t;m}k{E{J7=fj*G?u|gEQ{qZ3Zt<+R=^mniM6mc*1@`1 
z59?zCY>2Vg2peM@Ho>MCkIk?-CSVI}iLJ0Tw!ya84%=fQcEFC<2|HsK?26s6J0@Wd z?1{awH}=84*bn>T033*eaR?5@VK^K|;7A;WqcIuB;8+}o<8cB`#7Q_Ar{GkahSPBd z&csLkg}ZSN?!|q$9}nO`JcNhw2p+{_cpOjQNj!z8F%{3?Sv-g5@d94NOL!Tt;8nba z*YO74#9Me9@8Dg$hxhRTKEy}(7@y!%e1^~Q1-`^r_!{5fTYQJ_@dJLuPxu+X;8*;H z-|+|j#9#Ou|HD7{7yqF}0I%0*iRm!|W<)Er#!P5~wwM|1&>pj3R&>B@=!j0}jM>oz zb6`$%MK{ca?wA|%U|#gVeCUb!u>g9ZHx|S~SQv|7QS`xL=!?a%1p1*r24EltVK9bZ zD3-)9495s8g{83!Mq*hkhfx@f<*@?BU`4Eim9Yv|#cEg`YhX>Rg|)E`*2Q{Q9~)pp zjKxOS7~`-BHpO^shRrbnTVP9Ug{`p-w#9bX9uu(xcEnED8M|Ot?1tSj3435q?1jCt z5B9}=*dGVrKpcdFaR?5@VK^K|;7A;WqcIuB;8+}o<8cB`#7Q_Ar{GkahSPBd&csLk zg}ZSN?!|q$9}nO`JcNhw2p+{_cpOjQNj!z8F%{3?Sv-g5@d94NOL!Tt;8nba*YO74 z#9Me9@8Dg$hxhRTKEy}(7@y!%e1^~Q1-`^r_!{5fTYQJ_@dJLuPxu+X;8*;H-|+|j z#9#Ou|HD7{7ysehKQR9ct8Ui83x=!yBU0D7S} z7Q{kW7>i(0^uc22i^Z`7`k_AtU?2uzFos|#mc%d&#|SKirLhb~Vp%MQQ5cQou>!_m zMXZFCu?kkjYFHg>U`?!rwXqJ?#d=sD8(>3>#YWf|p6R`t!#7@{5yI@!BhTSmrsL98cg$ zJcXw*70=*VJcsA;0$#*Rcp0zYRlJ7R@dn<+TX-Aq;9b0j_wfNf#7FoTpWst`hR^W@ zzQkAf8sFese24Gx1AfF$_!+<8SNw+G@dy6IU-%pU!$0^J|DnY%UjNY&(_;qAh*oHg zna~DpF*DksJvyKxW=9vyfjQ9?-7purV{XiY9_Wc)=#53NDEeSA^u^*>0{ze*127PS zFc?EH6iZ?lhGPVl!qQjl_5up(B%%2)-fVl}LeHLxbu!rE8|>ta2u zj}5RP#$qFEjB(fmn_@gR!{(TPEwClF!q(UZ+hRLxkBQg;J7O2?irug~CSeckiM_Bl z_QAf`5BuW)9EgK(Fb=_?I1Gp52pox{a5N_47#xe^a6C@Hi8u)-;}o2V({MV@z?nD; zXX6}9!MQjO=i>rgh>LJBF2SX^442~yT#2i2HLk(6xDMCj2Hc37a5HYft+)-h;||=3 zyKpz|!M(T-_u~OPh==en9>Jq{43FapJc+09G^XMiJd5Y>JYK+ycnL4#6}*bq@H*bW zn`rTy*B7+J^q2uNq7^!!GiFB@%z?Qv59UP=%!i(s9}A!tdSgK>goUvP7DXQ{hQ3%F zOQ0Y6V*mzX5C&r?mc%d&#|SKqkysX^Fd8dh3|7QSSQ)EeRjh{9u?AZF<@rQQOph5b zBU+&~WGd_j3F3`B{2-cF#=0rX)J@0SQg7+6h>nVR>VqJ8LMDbtcKOG2G+z{ zSR3nNJ*f#xfX*Ww9JaVKkP<3K)YG zu@Y9sDp(b(VRfv5HL(`f#yVIR>tTItfDJJg8)0LN!zS1i_fjzMo_QpQg7yDs<9DoCH5Dvy6I24EBa2$anaTJcm zWE_KIaU71v2{;ia;bfeGQ*jzj#~C;iXW?v|gDE%{=iz)@fD3UEF2*Ie6qn(0T!AZb z6|TlLxE9ypdfb2;aT9LFEw~l8;db1CJ8>88#yz+f_u+m#j7RV&9>e2!0#9Qqp24$t z4$tEyyo^`yDqh3ucnfdi9ejWf@ew}8C-@Yf;R}3;ukba#!MFGh|D;Rz{};lZ>C^0m zy|EAW#Ua*d-)|@m!{ImrN8xBp#xXb+$KiOKfD>^NPR1!X6{q2JoPjfO7S6^wn1XY0 z9?r)FxDXfNVqAhtaTzYh6}S>t;c8riYjGW}#|^jWvqf#u^Lv#8dwvD;xHVJBQV@4?Z*vrPBR!oFceE- z7=~j6mcr6l1|zX7mcuBF#`0JJo1#z7wEwSS=!?a%1p1*r24EltVK9bZD3-)9Ea95= zPAsC7!F&rbX6qd#qtcA6)4%WqbSRWf;LyW~n*civ) zSR9AraRN@nNjMp&;8dK3({TpQ#925Si@T>i4<*nK{V@OoF$jY(6iZ?lhGPVl!qQj< zr{GkahSPBd&csGiFB@%z-)472Plwx?^t4gL%;d^Pwl^#{%eu-dGR|VPPzSMbQU~VF~m@e+vtU+qz-;JMDhF~a`#4rrU2rPxAu?$9HSuBT97>(tz0>)rPtb~=Z z3RcBxSRHF%O{|5ru@2V7`q%&)Vk|bo#u$f9uqno4Gi;6t*aBN(D{PHzur0R3_Lztr zup@TD&e#RJVmIuLN!SB>VlV8CeXuX~!~Qq`2jUe**FJNa4ycr1-KZO;c{Gst8opk#r3!WH{vGTj9YLk zZo}=k19##s+=KgYKOVq?cnA;U5j=*+@dTd4Q+OIv@eH2Db9f#v;6=QIm+=Z-#cOyS zZ{SV5g}3nz-o<-(A0OaDe1wnj2|mSV_#9v0OMHc|@eRJkclaJZ;79y~pYaQR#c%i> zf8bC2g}?DX{DXh-A6i`H^%*TOJ!Zg+XpNcB25r$EvtU+qz-;JGd_j3F3`B{3W$uoRZY zNGyxxFbbow0#?Qbb*zCku{PGhx>yhEV*_l6vDgS3V;nZYrWlXSusJ4R3v7w4 zur;>9w%88aVUcifZ2`}Rnyo%TGI^MvWcnfdi9lVS8@IF4khxiB|;}d*} z&+s|Ez?b+6U*j8mi|_C~e!!3T2|wc({EFZ3JO03*_zQpIfA|Og;y<*w#_Ka$VtUMg z8PN)@F%#OLEoMeLw8t!%6&)}eI-(OgV|H}G9GDYb(G7EpqpiX|}&!!ZI&VQDObkysYXVH8GVd8~jj zSP?5>Wvqf#u^Lv#8dwu+VQs8~b+I1S#|GFCW3dr7#yD((O)(yuVRKBt7T6M7VQXxI zZLuA;$3*Oa9kCO3#xB?uyJ2@s!XDTYdtqH{x)|d%x z&=xbJ9ol0S%!&?}4IR-5oiRJQU=GZQuIPrj&>eGQ9?Xj#m=8TMKNdhQ^u~f%2n%Bo zEQ&r@41KXUmOwxB#{dk(APmM348@WdhT#~2rLZ)X!ALBN757_5kuurgM` zs#p!HV-2i{wXinU!Ma!v>th3Kh_ToR8)Fe4nJ7X8@irug~CSeckiM_Bl_QAf`5BuW)9EgK(Fb=_?I1Gp52pox{a5N_47#xe^ za6C@Hi8u)-;}o2V({MV@z?nD;XX6}9!MQjO=i>rgh>LJBF2SX^442~yT#2i2HLk(6 zxDMCj2Hc37a5HYft+)-h;||=3yKpz|!M(T-_u~OPh==en9>Jq{43FapJc+09G^XM? 
zJdYRfB3{DFcm=QGHN1{D@Fw2E+js}>;yt{N5AY#A!pHaopW-uojxX>fzQWh|2H)a4 ze2*XSBYwiq_yxb>H~fx2@F)Jl-}oQ?!N2$qEgtZCkCvDoGhjxv!c1s`wwM|1&>pj3 zR&>B@=!j0}jM>ozb6`%)h3=Re^I%@|zP z>mQ~4cW>}5zQgwz{W9(QmB$JggB7t7Rz{0=JdbFJ=`kZ(p*3bg8??pDXovQg1+$_9 zW#zqAR*#E_BD-mKJ>%_SP%*^^v6I9!e9)+ zk{FKVFa|4O6|9EUu?E({+E^DGU@SJqCK!*+F#%g*8*Gd1uswFbj@Su1V;Ag--7pDz zU{CCYeXuX~!~QrB2jdVNioT~}9w*>LoP?8c3QomoI2~u;Oq_+Y zaSo>7B3z71a2YPg6}S>t;Tl|v>u^18z>T;GH{%xEira8I?!cY63wPrlwD`!!8MMUo zm;p1Q6&zE z7kXnsEQE!z2o^;jEQY>V97~`d`eOhFVh{#n2!>)w48w4Y#Ijfpqc9rFV+D-CidYkC z;fLR8&*w+{grD&Xe#LM29e?0YwD@89|9=OS4lOY~X26VSh1Qq}ZO|4oqaE5~7R-tc zm<=7#37s)Jx?m2>iLU5|xzHVRV;;*^ z^v3`U#2^gD5DdkV7>3~(fu*oCmcd9Yi{&s1qp>_zz!2Vg2peM@Ho>MCkIk?-CSVI}iLJ0Tw!ya84%=fQcEFC<2|HsK?26s6 zJ0@Wd?1{awH}=84*bn>T033*ea4-(Rp*ReO;|Lsyqi{4P;}{%^<8VAqz==2sC*u^H ziqmj9&cK;C3uogTOu@N059i|oT!@QsF)qQSxD1!$3S5b+a5b*MwYUz~;|AP_n{YF3 z!L7Irx8n}niMwz&?!mpd5BK8%Jcx(zFdo69cnpu@2|S6X@HD3489a;U@H}3?i+Bky z;}yJ$*YG;tz?*mrZ{r=ji}&z8KEQ|g2p{7Ue2UNTIljP`_zGX+8+?oJ@I8LOkN62c z;}`sj-|##Bz@PXFf8&4n2mj(fwD`&EKU!jX%zzou3av2{+Mq3FMmw~}ESMD?FdI6e z6FOsdbio{$6J5~_bD=xt#ypr8Jun}7Vty=uUg(Vlu@DxPAsC7!F$}{o0!v|OEQ66)7RzB2Mq_!bfH7DRD`91p5^R>vAx6Ki2@tb=v2 z9@fVO*brl}5jMs+Y=TWO9-CovOu!b{5?f(wY=dpF9k$0r?0_Ay6L!Wf*cH2BcTB<_ z*b{qUZ|sA8u^;xw0XPr`;b0tsLva`m#}POZN8xBp#xXb+$KiOKfD>^NPR1!X6{q2J zoPjfO7S6^wn1XY09?r)FxDXfNVqAhtaTzYh6}S>t;c8riYjGW}#|^j@fE(tH~1Fc;d}gmAMq1@#xM94 zzu|ZMfj{vV{>K0C5B|k}Xz`2Jf3(E(m;p1Q6`SuiU)U^a9_Cv?W_ z=z=*gC%U2==0bPOjpmab>6MkTQCXCea#6Wd9_6XLR3T-~#G6xufht6WsWPgZDz7T4 zDyoKxQ|(kw)kpPLgVazpLXB2q)kHN#O;@v2ikhz$sin%CLRhWVsf}uj+OBq~z3PBE ztd6OZDpj3R7u6MYL)}sL)g$#(y-=^!JM~F@RX>zjk7!oIrPHUCGAOey#a5ZMC=SZ3 zBXLn?Ert0f_j#44@=}FVQRS=rRG=04L^VZCSF==#ny(hArD}y*t=6fHYKz*gcB#GUfI6&> zsgo*Iol_T;`Da!))E#wSnZIUys$Qtq%6wYie5&<}`mTPdKgz;VZ)eI%*(f`eRXM8c zDyPb&@+wc|r3$H{%2)ZRKoz3GRT))Il~)y26;)l;Qgu}W)krl_%~T83TD4OhRA<#q z^-#T4KQ&MdQNz_JHAanBlhjl-L(Num)dICxEmJGi8ns?+Qd`vywOj2|2h|aET%A&9 z)OmGDT~#;KZFNsQR8Q1%^-8@}AJrH2U72^=Kgzt*rdL+VM%gI`<*afjHrUxSCyoCslIA}8mxw?kt$h@ zQxnw`HC@e8DQb~gp;oJPYNOhswyRxguR5R(t7Gb%67C3Q{RQg_t@^;kVqFV!3M zN&QfNlw}4Bi;OCh%B-@eY|2^XP;M%>@=*Dew<@fBRB`36f>fvqSEW^16|G`aWmQep zRCQE+6|3S@yh>25R9ls(I;pNIN%c~F)c`eE4O1gkvKpr*swrx^nx#_I0<}afSF6-o zwLxuG+tg0AN9|XK)KPUpomOYn1$9|nQ#aKebzePFPt^{M3es9cn*a#wkkr}9#TR8i%t{8XR{QDLf-id0dmf~usdsamR@id9WiGu1-1 zR_#;=)me2@JydTsKn+tP)mSw_O;*#?Of^T%Qw!A+wOp-M>(oZIMQvBR)NZv$?N$5K zesw?{REN}IbwnLi$JB9kLY-8n)M=Hf&Zx8MoI0;AsEg{7x~#6KtLmD%u5PHC>Xy2# z?x?%!p1Q9dsE6v2daRzPr|Ow{u3o5@>Xmw}-l(_goqDf6sE_KC`mDaFuj-rnu70SW z>X-Vh{!@R{U-eI!-!P<8mMXoO0`vqs*~!fl2k9%R}D~u)i5aR*?t>;Tw zD_do+9F&uCQLf5eSda2&3kLs)X zss3t!8mI=T!D@&as)niIYJ?i8Myb&%S&dO+)kHN_%~Z40Y&A!vsJUvMny(h9g=&#n ztd^;jYK>a2HmS{Oi`uHTsqJcq+NpM_-D;28tM;k=>VP_^4ynWHh&rl{spIN|N>%68 zWp!QMQg_t@^-w)hkJS_PR6SGA)eH4fy;85$8}(MbQ}5LW^-+CNpVb%jRee+6)erSk z{ZhZxf9j9=tNtmAOnUoMmMXoTvS zScR$5s+=mXDySG$QB_ivRTWiLRa4bf4OLUsQnghbRaezh^;HAaP{pc7sYTc$uBhwkmb$ARsK@G=da2&1_v(}Ss(z^7>aR*?tM>_I zt!$OOa!^jnMY$?>l~;KxFI7ktRldqk1*#Agrb?+u6{RYuN~)@=q3WpmDptj*c$J`9 zskSOnby8hblIo@Usex*U8m>mEF>1V;q^7DFYPOoI7O2H)nOdpVsP$@-+NyS_-D;mY zsE(-P>XbU8&Z|r6s=A?Wt9$C9dZM1ISL&_$pgyZ_>Zkfo{Zp2i^)W_eQkhj2l}$OT z9Li1QRvs$9@>YeFk1DQ8C_m+|0#u+1Qo$-jg{qP&OogimRZ5jsWmKdptIDY;6|Ksv z3MxibRFza^RYg@*)l_v=L)BEZRBcs9)m8OWebqoURI#d&YOLZ^6V+73t7fXXN>D9S zOVvuXR&7*U)lRiniK>I@s5+_6s+;PedaHhFpc&r)YOz|T zR;o2>z1pOXN#uZm8Spo_eUBsORdHdaFLD&+41{ss2;{ zl%<`Xf0apPR#{Xw<*afjH7Cwr#7lB zYP;H{_NoKwusWtrs#JAOT~t@pb#+VKRS(o-^-R4~Z`6DBNqtp6)Nl1yrL)(^DP^r} zmA!IMPRd2ODtDDvc`7eeNEKDS%1;HV5EZ6MsYn&2DyT}TnyRVlsQM~a#i@9epjxT6 
zDp7S(T~(6mrTVG?YOornMyg~rPEAx()O0mVrKtI8ky@%&sMTtn+Nidu?P{0Ws}88c z>X_upsbaxvR4kuNx3Lj z<*xE7PvxZwsiMkP`Kdq^qQX=u6{(_B1yxB^RW(#?RZlfkja9sAp<1hUs)OpRx~U$j zx9X<`sv&B)8l}dl@oJKqs%EI!YOY$K7OQ1yrCOubt4(UF+M#xOm>Wn(C zE~%^PhPtipsfX%`dahonx9WrXtiGwA>Ob{QS!UJyzRILBt1K#;a#lH%o64;`RDR{H z3M(H~T=}aY6{^BjX;oH5s~A;TRZ}%p9aUe&syG#|5>zYIRwb%Vs;f#;y;NT{Kn+&I z)JT=A#;J*FikhxwsT4I|EmBL>3bk6TQybM5wO#E}d({DTSRGR*RjN9tE~+c)y1J$A zst4+^dZu2gH|o9mq`s;j>bLr<(mCkuR9P!qWv?8RlX6k6%3bAEp2|xVQbm=o@>78- zM1`qRDpEzM3aXN-s%og(s-9}78mp$NxoWA}sP?L(>Y}=Zy96 zUaNQNqxz!0t6%DmvdE@C4=F2UqwG{x<*2f&oGO>fqw=dls;Kf+ekxFfs4!JZMXItY zTE(c!s+y{$>Z%5+k!qrvsTQiWYOgw~E~>lgsrsrxYPcGuCa9Tej#{Kvt95Fl+NyS_ zJ?eluqK>OG>b$z3E~-oFvbv^jsyphwdZeDJ7wWZor#`AL>bv@-{wNDa{dy@YWuxp= zR^_O&tDGvA%A@kB0;-@YqKc^!DnJFRk}5)#QRP&5RZ&$@)m1H3PsOSxDnYeYiK??o zQhn5Lm8>SHscNoTq?W7IYKz*bcB$QJkJ_vDsr~AJI;ak*!|I4Ss*b7S>V!I}PN~x> zRh?01)j4%uT~HU*C3RU{QCHP9bzR+1H`P7$SUpp()O+u)@VShm~WI$e&LUlAjdvroK^hSRS!Z5swahQZDn1MN%hlN;-WmtiC zu^yZ89(LkG9K;bE$4Q*QS$v60xPn`_je7_em+gV)5f!nJ07;M>FC!f?AsccbKMJ8J zN}wz%pbBcB4jP~-TB02~q6>PUF9u;aUd32U#1zcHY|O(VEX7K!!A87?UD$&I_z1^w z5@&E8mv9x|;yP~PHhx1mK7@V_kq{Lz5f?8a2~r?6G9W8*AwP4ppb1)_ z4LYC;dY~@`VK_!%942ErW@A3y!rOQUYq1equ>&7q9}eOOKE`RB#YJ4jclZ&%;2y%q z=XnXDAr2BE6seE_*^mc?P#k4Y5!Fx|4bT*=&>mgU8v`*MqcH(f@doB&307h?)?+ia zVHZBcejLUzoWvQN$7Ni@54eRp_#MwAU>OhPVI#I+8+PGC?88AE!3mtiX?%e%aT#CZTl|1qxQ%-VNys?`5fKG35C`#* z5J?bFSlj*%FR zv6z6#n2H&A18-tJ7GVkA#!9@4wb+2ocn>@9K0d@g9K=UBhL7IeL2;BqSyVt} zR6|YFL47nrQ?x*9v_nU9L3i{*Ukt!t48sVF!WfLlBuv3{%)%VZ!$K^^GOWNmScCQ0 zgss?)UD%DiIDo@AiW4}A)A$_cZ~>Qb72n`{+`ujTg1h()A&EJ+AOa#G3Zf$x;vxYO zAt{pMC8S0=WJDHZM=s<=0Tf0tltdYnMwIwf>5MHDx^gQWJWgRL>}ZvArwUkltwvJ zL={v=E!0H=G)5CNMN70r2XsMq^g>?@z+eo+2#mrQjK?HQ!F0^R9L&Q)EXFddz&luj z_1J{1*p6M;jlDR4!#Ii)IEmBv9OrNWmvI%};CtM_E!@Vh_#NT+lH9X+9xos&Vjwo+ z;YB1yGNi!ENQ3mqgsjMc+{lN5D1zcBg|euC%BY5#sDt`wgr;bL)@X;0=z{L(g}xYo z!5D@S7=6nE%n1_W}jAdAXcd!QQu?btT9lNj_dvO4VaTF(T5~uMw&fx+s z<0`(v_qc&u_yu?I8$yzC{38M)Aqt`+7UCiS5+Ny)<0YgF_c6Z zlt(30MGe$OJv2lUG)F76MF(_6FZ9O{jKFJ{fGL=XH?a^)u@Y;r3EQz72XGW8@i{Kw zD!#`p+(k$z%Z(_AizG;a)X0FW$c6kUf|4kQ%BX?5XoTiygO2Ei-WY(P7>O~Mh^d%` zxmbi{ScSFNgl%{qdvOTIa1v*59+z} zM;$aoGqgqrbVV=p#}JIbYnX^>n2iNkid9&LE!c@YIE3Rkg|oPVAMg|I;txEVf_)9q z5E}`Q1Syaj8ITpZkRL@*66H`CHBc9g&>U^h5#7)m127aLF$NPc6|*oGi?9rS4skp8oGGZYf5+emt zBLlJ`H}azhN}(L8q893*5t^YD+MyG=p%?mL5QbwE#$ht1V>aevF_z<9tj8AYz;5iv zM>vj?ID>Pzh%2~(U-1W?NzL*gGNL0cUPKb4MtWpMcH~BW6hSGJLnYKfGqgfGbV7IZ z!9Wbd1WdymEW%Q(zy@r=cD#>6IEk~kiXU+g&!*w{MSP?|24qDpdyY|O`EEXTW8k1g1N z-Pn(hZ~~|B1uozUuHide$4&fYAU+Zz2||$)sgM>KkQv#K z6M2vyg-{eFP#Wb>5mitfwNMug&=}3o5^c~PozN9M&>Q_Q5JNB=BQY9dF#(e?6*KS# z-o$(?!VqU$9(Le;e29HGh>vg#ALCPehA(g)7x5Lo#<%zZKjJ6c#;^Dt z;nK6;;CZ}&sEC2sh=&)E7|DV6Ltj8v7#dhq%ZtTSY z9L736Ka$ksL1}HPRs? zvLHKhAukG`Fp8lh%Ah$dJ6y+2{ER!e zhd&TLBga1?A~K>OCgLDI5+VsgkrJtp78#Hk*^m=?kROFm6eUm^o%86+O@!{V)(iFdQQ>8e=g5lQ9)D@CM$*d@RBeyp5H37i+NroADlY;C+0E zeK?4Za10;gQ+$Rma2^-&6~4x|_yIrSC)~!b_#NRgas1~f2+7Rxj|hl_D2R?&h>HYBgrrE0myjCikP%ss9l4Mf1yC5pP!eTO9+glPHBcM% z&=5_~9Ien69ncxw&3L_A7Bsm;}DME zI6lECoWWUqiA%VGYxoY=aT7n|4({O(gwMjUj);hiXo!h8h>wIwf>5MHDx^gQWIt^6hU#6LRnNmWmH2=)IohTK~pqCbF@H9v_fmNL0hy# zdvriYbV4`uL?86WAPmJTconZ<946v*Ov6mf##}7GTUd(aScTPChmF{RZPRWBLNa2DU#zQq((Yq zL>6R6F62c46h<+WL>ZJvB~(QX)J8otL=!YeE3`!ibVfJyL?86WAPmJTconZ<946v* zOv6mf##}7GTUd(aScTPChmF{RZPRWBO#I?6e*DkX^{b$kqtSK2l-J5MNtB!Q4SSR1=Udtb%*P@u!P{7gcd-^5uo>@R2j0hr z*oTAo2*>a-KE-GF0_SlNU*T(fiy!bKe!^}1ir*0~JJ&coj~5UXF%TQ^@FEf;8B*Y7 zq(ORQLRREJZsbEj6hU#6LRnNmWmH2=)IohTLQ}LrYqUd0bU}CYLSGEPU<|_ujKUa< z$0SU_bj-pW%)>$~#xktHJ6MDD*o3Wk58JREJFpYGup4`E0EclDCvXy{@j1@n0xsh! 
zzQOmnfm`?mckvtShv?5B0wN&_q9YdKA^{R1DU#zQq((YqL>6R6F62c46h<+WL>ZJv zB~(QX)J8otL=!YeE3`#Bv_}VYL??7c7j#88bVm>LL@)G4AM`~(^v3`U#2^gD5DdjI z496=NfsuF>qc9q;VGPD%9L8e;CSnpM<8@5IR7}Hk%)m^{!W)>4Id~IuF%R>x01L4Q zZ(%W(U@4a2Z7jzMti&q3gLknSYp@pUupS$*5u30XTd)=HVH>t%2XnjSsO0 zd$AAuaR3K#2#4_zj^HSc;W$p>_!Osb8lT||KF1e0i*q=SFL41EaS5046|Uea zzQ#3tgKzO2zQ+%^jvM$9H*pI;;b;7U+qi?f_!al?8-B+h2+7I$1mW=vp2c&Bfaehr zk?;Z{BMPD-8locxVj>n|BM#ys9^xYbUPMA9LSiIAQY1qtk|PCD;w8L{R7j09NQ-nx zj||9&OvsEZ$ck*pjvUB|T*!?)$cuc)j{+!&LMV(PD2iezjuI$|QYeiwD2s9^j|!-W zN~nw~sETT+jvAZ#Sj#n@OBk?LmVKiRD7>va@jK>5_#Os)b znV5~aSb(>%6w9#+tFaCnu?5?(6CYp?_Tvza;5a_PDV)Jse2GiAf@}B=*Krd+;|}iO z4}{OfevF8SjA)37IEasgNPoL|xA7}}N4VUaAMiY0Kvcv)Y{bKhNQ`7iftQg6 z>5&OpkpsDr4+T*K#Zd}nQ2~`v4K+~*_0b4T(E_c}4js`2-O&quF#v-x3?ncKV=x|* zFa^^w3v)0J3$Yl>umbO34c225wqiSWVK?^T01o3QPT(X?<8z$D1zg5ee1q?C1Gn%C z?&3Fulo@iv&o7q)3jJkQ(Wb5m}HOxsVqHP#DEf5@k>xl~5HmP#g8o z5KYh=t7v(6MfJhgD@1Y;8nbaahQnLF%2^@8*{M$Z(%8xV-;3o9X4VMwqYke zz#iCVs{p+`}ITpO@nw5fK^D5EF3_9|@5Jp-72T zNQ(@}jBLn>JjjniD2fs&jdG}nDyWWHsEY<@jAm$wHfWDd=!zcbjeZ!2AsCL47>%)* zfXSGO8F&M4Vm=mO3Esv^yovL$@hLvT7dVfL_zGX+Tl|0@ z@e^+2SNx7}`8fXZJYGOl#6WDs!;46aWJrORkp}6J30aW?xseYAQ3S_V=#N1didXO| zUc)#{#Os)bnV5~aSb(>%6w9#+tFaCnu?5?(6CYp?_Tvza;5a_PDV)Jse2GiAf@}B= z*Krd+;|}iO4}>qk@sEgzjA)37IEasgNPoL|xA7}}N4SC<|9BoRASz-YHsaw$ zBt|l%z{^O3^vHy)$bsC*hk_`A;wXi(sDR3-hMK5@`e=lvXo1#fhmPoi?&yWS7=XbT zh7lNrF&K|Yn1bn;g*ljqg;6R6F62c46h<+WL>ZJv zB~(QX)J8otL=!YeE3`!ibVfJyL?86WAPmJTconZ<946v*Ov6mf##}7GTUd(aScTPC zhmF{RZPY@P}qZwMF4cem-MMq?}{U^1p+2HwD%n2$wRg14~}?_w=BU^Cvs4!n;Ku@49F5su+we2UNT1$~#xktH zJ6MDD*o3Xvj$PP|y*PlwIEoWEiPQKT=Wqd+aTVX-d)&Y+{DQmq4IxE2{t*F@5Czc@ z3vrPEiI5b@@e)!a9Wo*dvLhGrq5uk_5DKFRilP{bqXbH#6iTBE%Ay>~qXH_T5-Ot# zs-haIqXufC7HXpo>Y^U%qX8PC5gMZjnxYw+qXk-`6{x}qDp zqX&AT5Bg&ehT;{xiq|j>6Y)BxVJ2o{E*9V|EX8uH!fLF;Mr^@0?8FDygZ(&!BRGyv za0+K|7GL5LuHYKJ!*$%m&$xqo_yggKab7}1L`F2kL>$CNLL@;bQX&=7A_Fob8*(BK z@}m%nq6A8#94ev;s-qU_q5&GC8Cs$Z+M^S?q6d1T9|mFwhGQf~V=N|MGNxh%-oTrf zk40F5x3LoMVl6gcGv31vypIpD4+rrPj^Sf`iqG%`&f_A!!q@l~Kj26FgxmNPzaw06 z&P#Y6FCZ#nAU5LRMI=TjUP2mVKo;ac9uzH~1bma0|cSE`CEu3CNXoRL{f!1h; zj_88!=!L!*fWa7s5g3Ir7>`Mqg6WurIhcoqSd3*@fp@S5>#+%2u^qdx8+&m8hjA1q za1y8SInLn%F5@b`!S}dIXh1FPxjo5;1*ohCY2m5gdM{pdU;1tf_EWX4gT){PbhwHeBpK%BG@CU+| z=DL80h>U26i8zRlgh+x=q(myDMFwO>HsnMe6rbS>oX16cg|G1~e!!3T3Agbpen+@69PfA@FCZ#nAU5LRMI=Ttq`=EagY?LR ztjK}f$cKU`g5oHJvZ#Q{sD_%TgZgNMrf7lIXorsIg6`;rz8HYP7={rTg)tb9NtlA^ zn1wl*hlN;-WmtiCumD!#$@xPe>v1$Xfq zLdtUdBLX5J3Zf$x;vxYOAt{pMC8S0=WJDHZM=s<=0Tf0tltdYnMr8ltNilKxI@zP1Hes zG(uCfKx?!^M|44V^g>?@z+eo+2#mrQjK?HQ!E}Uj=(!=e3*0uB%qcH~r^6vtq{eEv z->xgQ0g}^@?I~qisWp|%+kPR&4f@!8-S**N)2RlXPI}tfC)>l`oC74MmTZ5U-G-hI zZtnkUUqN*PUz>h7@b$8XL+6KyeV=UH5K6GVB?$JF2X5{Uc5C&_&^pcgecU)U-=S7y z2!$I*>+U{Vu0W4t^+1oq+bR!-q7M_t?mx`-RLu>xechn-zmqvNJskc{<|+2Wq0+;| zG4c;`pCIGbl=|PvdJNQHT)UjPp*z92p{Wbp{?pu2rlrTL*8fg!E7SQqxrOxaBw3G@ zN6oemyFtg*wS70=yrJ2x*h|%4IWor_j_IM!RmVK>9&8;b-PBm zp|uO#KCZeRQ@Txq)%BdB`}Dt}Zr4UP*kiYI4Q};5t#c`UIJAG5c&;S5g5(a8pLAW% zw|cxkYTd5Ke`oy4vTS-T_}|oZzqe`WF=5Xu+|bPhZXdN?UB=aO{o|_JHN_2fPIJSf z*7aJZ=hFYHdTqBL-1e5N*Hb-bYrUTI?$<&~$#m@bt{XIKpJ3;BJvRr-dM?*} zl(%di4y_)hKc7eK{MSqF({q~UVCNJ)cLY0z|EFV3AH$9*JIA>}Un6vJMQ$H;zYVre z){<%J{-FCwuzkR`#lUdh1gt$s+O|!!ZRb@zUt3$x*|r|FZR=9|TU+n9bDy?@J=f6n ztLrw{HVL+_o^(Cf{0FP=Vfk&a`*olH>VEaX^I<(dKk9RbVEcQp^%ktYFt?`;xYUElaTD?{TfeQ(?2O%X&Mk);a9fZh}2tu)1zP-Dbh+!S?B3 zb)8?G&tUaOy=Fh^u@UTA9_$(%?0Ko}7jDq~NB5OSUFX5p$>XXAJJ0HM#LjJQ&~v>W zGq&t{{j}${dX3QK4t8y|bFdq9-RpH+*QH*&b(;s2?d2JIdqgw$-fXF5B01o$B-HU|H9xZa>|Bddht0 za~@sqy1g~)w%7Hg$EL1p-M930mu)AVuD@;btMhN`)Zf|Z2(x<=V 
zp_29avpyHf7W!cN>%4~v$u2dmCkU%s*B8TbYHNfIi9%iCeP}O+XrFhauG4NJ^~RF( zNggEGrm53yF3YO>tgdH$KX0n^m&rbj{~xY_&9W`|3iiWc>BGcl8qau*jb6O}|F^p4 zbm=ow@?6PpNnR#-vt%7t`>&Qh>m_fLtkd~W>Uy3F)@O(G*KzkteU0Qzl65|hOI`CG zsqd7$Me;7m?@OK`S(kmG)Z;$uwgjK)N{%f#g5-pf6G%=eS?hVEo-;_!E_M4qZ{3hj zvQDSC)O$$oDLDyQm!TkSeVi1uQ%V0~0Psy}T zOAa<)@ug30$-%ZouzFG%w~*wklE0RmQF2Aex*N*RNC-$|I``Rgd?exBO zW?y?R?d@=bwLg{ieDZtJUf^rL<7;Pof#ruA^7z{EeC_zYc4A*Uy{}z5GT-Te8?0T( z*RCz?wdYH_ zSaiNm1~*jjwX6Et_W!Uz-ku7a%-T@OuV`c-6_RV9Z{;&p1~#gG<~TS~4gn9;c9wb#nNB>JxA>B~NdF#^_4RBL>EBtVsqg2#EPeDf>{@yKxK6UZR^2Q0wlWX; z9-7XNK5nqb($~4c9?Pbw+oPF0wr;alQrGp~N$R>Db-p$0dtJJ&^}R)V?db;Vqwh89 z@u82W$MN4;hG6?t2lv3-KI(Gnvg!8G_w;l*^?1^Ct=mJh?iac*RFnO&xy*k}sb816 z9$UIy8%zJDl3Pk{Be|{Qc9Q$caWF^f9VL&DK69nsRdN@}-6VIH+(U9t$$cablRRAV zaye&=milXw6U%&#k@{H4x=klaeUfDT3|8OMlFjm88H>tN>+w)b>dhoSYQ4COtH(wO zsq3-#r0XT+arM}K()CjExO&X%^AJ5J>gP*({(aJQJ-_SuE?8Zk2kLW!N3H8~NWBI; zuDU)i)$7Wm*7Z4_UV9%`U7t7V^QgyFuOj=QUcZCY^*O3Ok9b`5>M}oi{eIGQeZH;F zgC191pAYGCg(qFt*HHQ#C|F%zU-Xu3@ucf@uW=O9`LAjJ;&<) zpzA~*SFe?W<#~+m!)_pqIb@yrg!H)5yug6b%`oXT3!Rn9e{QjRhohN*&mx7(=gFU`}-{47P{Z7S`$of5jf0C!l`#=9A>-RLCMAq-0Jc+E| z8+j6WrmW{Dk@fp6|0L`8dHzY(@7O$vtl!D`J6XTy^CYr<-|0!@VCSe{$9}M5Fxb8w zY@Z3X?SietVDtI7>QCZas?!X%-X2%|Nu0ZMn!&cqDoz?9aEbmC>%|x)Q^ZZZp%tyRt{VwwsEC)NE1gq<3Nt%Q8e@o{1@8tQ? zU$?L3Cf+I!`6tIwugOYJ7a zt>z3t>M5kI+4}1^dS7}ON8e}6Bz4Py+FF|z);~1t)^2ngW#bNGgVh7w=f7Y3G_euG zhMb{%7Zo-%31is?cc1_MCim}s@Q?nj82uY8$+_FOA+rnI+TTDiZm_ID-+#{`^_-G(NzN@fkL0|Pb-Txs9ipK0Nh3L}n1bKj+Zb-W{cnt}A_?y}R`3A-OnN-;=VoZWC+k z`|tL7lC||%tl(?gd;0bm`k3~szrEl-r?SuL zY~9(ix|;R3<*@ftY`ShJO|Z6pzGw4S26pQP`_6k2xbGm?bc(|Ixxwby-rud^3f$T} zSAssK-EYg`2Ai&Zw>v9ru)a3^V%7*7Y(8!Nbv$?9|K+XiyLMh`E;He3x;nGYAmdm)*f?%^?0HQnYzT#09<96kY<_Lt^gf#Rm%|=M z>ogxu&&ILmN!;8Y?AASw*6mKa&#eQ!&(-a5v`+K>IBwqbJ{!mC!N#%uH_*IUJ=i#I zy@k@phETX=vFrvt*IEwL_Sb5;k-7C}?Q8e>r>QmVYq^e0*Ppeo-RGaKUMKW?spl3w z7q*qhvK*-Gul15~{8{_jeg0``P5WBzBh&R~?Q8e>r>ob*<}#fYk~>NcCEK~n_9s2( zxVr66TBmvcZ)#?=5H?uW*Gu*~%3qt9-1X+~ok+GAsK2Z0b6l%4aouzVxd-Cb_4C(kdj7|-wtqhQ`R22~uRYAy9`0+~{r-8h zKK|MQ-*`)X?Pb39a%t;+YV+?9?+>o+iOW% zr)l8^tJymB*Rltw**f+2v*(unTCM=K+yQEN0@Uoak$-wN&vs}9ss~ygs|8vns|Q*x zs|Q*(s|Q*>s|Q*}s|Q+6s|Q+Es|Q+MoA+SL>t6?fmNU>Y23o#A%NA(40xeUZF;o7Dp?pVb2`qtyc~r_}>3tJMQ7Z=h{weKWxg)<@g6 zT-vtf(Y7szwr&2kZS$>dn_q3)d}`a~$F_;T=ATb%`{&ha{`s|kN$pD0@SJos8tJ4s~(_MBS5WYfLg5pwb}t{bpq7t2B_Kl-~MHLdOmpYxtV{x z{I{JC{Le%5J%<78H*QGp0=G{(XO_omAlbH!8|=Ll-FE(VuxZq1JY6o!4c$b!Eg)H6 zvs&GMpWbiRSmOqpH=RHKeXgH9zT;~B!)mT=_YZUpZvW0>**I>ntn*TUw*P)zmXvNp zZZ$WMda(6t({_VSQ}-F&$Mn54eLu~%r_uLWEC*_9EjfL@kT(PEIfNVRyy}LgG82c} zyDIw&dutbElA;M{+00T_vAs=%&v9lPCFxqYko>CT>5>;o-YogN zb^hNx$)DQ`hp^!*$qDSmL)ef^avI55B#)6iS#mFX z!4WnLl)Osv4$0q3K4ULT!iEcyZ%WSB*Y)84o|L>@a*uvqpM#R`NbWws>vLOjuEAct zs^r%sZ<8Euh}S=gJ=n6mpo{+*Jq66*Z3CYfA1Td5+{3`FO9- z0m*Mn@apeMUN3plM6XZTNnYL~`Hik_$+#C;9Sh zZ`_`5dU>$qwis)@}1p!b}<0 z+OwoRN3uQMYtk-AUpFj}`Xb4TB`=lyw&dlKS4w_I@@mOzC9jvfQSxTVTP1IkyhHLX z$-9H(JyPE%`ACp_TY6V}{VT~=CI28Xu_QN@+)Q$F$t@(el-x>kYsqaSx0T#Za(l@g zBzKhDNpfe&T_ksv+)Z+K$vq_Zl-xve9LaGd@0Gky@*&9|Nj@R@Q^}_#Czkw$>qxFE`4!0{ zB#)9jN%CaLQzXABdA?+Qzf0e3l8un&fXJe=GSr$!Tn{Snv9ttkvus z?yuQ7+h4QuwZCTPSby#L05v#xNMP_y-7 z=V5bCv@)omRHs@r-D zRJU~*sBZJq3I6qI^KaK9yWg&3%Mq-PO*_yu?0n%L$Icu6nw>xVH9L>^Yf}RB>sa-# z{hkTXuYG`i4FmME*Jl35wf7$VHGBWhU$gfI{WW{9(qFUp8~inUU)x`^_d@-(bZ!1x zixl873ibVKzo>k`?4Mp=-e>gJ=JQ^Szt%9o^ww4TYrl&D*6Dx%kK3w#fN^{MwHB`O zUu$~<%wLTF+pS}Od5#%idiEZje|hb_I)ANl&A-+LH2Q1J-v9QGJE8xdYgIU(vs(W2 z9ZoyPXG2!_$G5r(RL>ryo+C&-XOMcXAobiq>Uo0H^9HEfJlZABKmS$_w7gaiw7ga~ zfu<8^d981t<$Zd64GDRAeGLhD($`maUEdg!9T9d-_3yjx8f@2Gd;R6V-|A1VuY6zf z>Gd__RDf$*ZeBzA_ZjtxqdA|q 
z`p31`i~gFuZuHme^`pP`6`!B`Yc~TdNB#h{=>eXL{41WHBo45R?0OaGI&Aeo*I}y% zx(-`C&~@0Zk%6wmR?iCmI<$JA>#)`BF#`3s`vTPiU5Bl2pyjoCpyjoCpyjoCpyjp4 z^e?Y1r+%NnmQUNZT-vs2Yul!&ZJU2>+k9)==2zP`pW3$hv3c~@{PSsT|Gbu=CSQlR z|9_Qq3s9>?%?%X@HDKS@b%Q@Yn49ZGUZPfSP^w;O}Sehx=>xS%kl4 z@8S4s_WrrQX77djYxY@%zh>_P`D^ysg}-K>VfbtIS%$x6pK17O_SuHNW}k8RYxenu zzh>`8`D+IP)D8xy+51=iaSsQmeH5T(?|1pfwfDXJHT(R;U$ghY{5AXB#9uobpmr`m z?R4k-m~-9?7cgG&ECWF*X+GKf9;0=HT&Gh z-_Jh(@z?BgAgx{E`Zv}N;IP4t+he!~eP7e+HqW;0=EDtEzlqOa*$tL$8a9p_?D)5R z&icRWt6NP!&$IKwGFaUWc3!aK%#LT9_I|j*?z7tRuv=HR^M*}BKU=h#^|gJ(4K|+5 zpKU8QSk1QQ2heq4b=xl1*QVo!6=8LqXRBG;_FFfsB3s=J);gNTvuj)GXuP0kBJEywAvVYwlp=QgrTl%?UHPC#y`?UX2 z#+vLQTV`8t zOW>cLP0O~28|<24^JUjho0j&s^O1gUz}g3)pTpSrHjb_z>t~Ol+tO+_{Y`Mg>9Cr9 zAHnuR+dekFF4yO@?Qyhb*EU@i8`s*po~)05xm-=hwR4L-uB{6_zFb|`i`8r$+rH!m zt63i#-wjr?$FOa2B`mx9?0V}SOXtJtb{x6EwcS3a^I_*6JqNnBts|XguyfQU`q{eC z_2p{%ICkB3*C;n%HjnPu(!N%=easDZpY7-NIBu|-wQXP3aqW20bBeWXIc#~>gk?94 zA85PBcQr~Oo4dHygV#^mw&&{Fw$~Nfw$~Nfw$~Nfw$~Nfw$~Nfw$~Nfw$~NfcCRb^ zHLLq;{&}@N{`s|tq}Uk7AH`p{gx(2tdy(X;Y+I*~V ze_32tS6>^TpDmvo$`W)QyECB}HltxfM!r+8&mHYI5%yk;8=`UlR^IDzgMHoUoL zLOwUK&y?I?%Vq1s4V?(ue(xEtyL z#HJajZp%;#Zm?z0wk?Be+cLOp%iyw&YyI47aO>x?^>f+!*|KWe=1bf5T3)`^et*9G zHCujvt#p8zy*%*uv-ymjoI4mSyLDvq?6S?X%hu24*^Oga@3Vc<-Dg?vvt@JlS=Ren z(Y9&1!PdLBZQfkl=FMfBH<+YuomJ*S7uNW!wKf_n8{MRhGl-~dUn{8g* zVCQYyuiem=VC&QkBoTg^Y;R`buh)%^2sHUBbL&A%KrZ~o=5n)TJ?wVHo9 ztnFV8tNEA1YX0T0ntwU0=3fr0`Ip0L{^hXw@Gpne{L5j-mmTMNe3W~ZLfBA2a_1$k z`q#gQ=I*KL0=KEcd5!9lQ_FO6FZcSi5AWq_QXePvGGo0yx9mw<*r311+b4aF$w?(! zCD)s6S>4O}8!F9J>w10c`*Ch~wWdp)N2Jfswj#rZw!K^m2^lfi%Y{dJdCRL_zCFsz z6DE2&y!D8;q@58^blWf>Vdy{x}cJSo3@Jy*x;^DM88-OxmSdpu}}t8z_}(bM!$CqR3S6R0^WdB?y z+bF6$)*(3&_u)I6Zb%`^_<>Bnk!+W6y?(edGE)Vmj zIj^~wTgvjTlKF2d+i1V!p>4czugE^xS*Cwn=JUuTum4Z7ZxpNH)%EXXd@JXT4`sdm zF8g6RIo4KA^`_HKepftQ&Tk({*1tcpr5Z zx0LM?Th?JESwBl<`j_Qg8bQ|21zA6RWuH7E$JCdS56OOYLFTitod37Twn!rLd{p+6 z88Q!dM|kUbp-i*(Y_C37&U@2k84t=jzben)^lzYq_V=bUS&s3Y-MsotIiJ)Sy`;Y!Dl8Ul^^zVv9iyth3TTot&=@^zz0nAm^4wl3PliAi0*DueQsw>)(-yHPV~@ZMn|ok#ogw zl7Eon^q%B2Grj4oo8#rPGVVrMZ>!{-d8LZizoM*%{jyCjNv)*qg(!{H`k#og($v;Z2Ea&74k|)Xb9U7*OF|%mvX~6$>Sv#k>}MzCGV8uTAzDF8sw(P`&N>> z$@%TMAzq*FhI+a0u)w*tT+3p}>&I!5!^wFgujCW*`X%BxZ~7bL7}tINqU@`G$Z?ck z_W7Poyy<*jEd0Yp0-66*a_(9$`*s`2L*!T}sddSFW&gh`S@-{%axBc0d_l6Ff7Z&e zP@?X`*$=59xmi80o<`1@$K_miT5?;t&aRYvs);w9N^!+c9`$^y?;#|TYgsMH`ghs%^^rd3IU~pKLCJb7C+*@byB^m|dVBSpa*V$t zS+50M<=BtV(;GLxgB<()<@!=zp4aI)p-_-x zznWare~{ewRc~2}$}v7tvL5?8E`RrPTd(W=wmcoa-?sk#K33Cy_S8rYgZrPe zXuHRAe9i(lSi7aK-Co**$o~Ev<^I!>ZJIxZwXLrAU66kI|0C-EgZQ!ZIV1UV$zMpm zD*2k^^lss}{aWgOC+oZg%YWzpbkt?}MCRe7b*syw>p|yJAIsLEZa>*H|Erim#tN6kYlN4a&nt(>Vf?56OS-W1i6`U6 zmz;?{j&%QDMgMf-gnE;SExEB*4$+)O>S-nSmn@f+kT|j&aV5u)98>ZOk|RrwC^?ek zD3YT}jwU&}p-}IRzms*H1k1tp5!(iC(EUR9m%^?Cx5*{z_Ig~~BBe|} zmE?4i^<0)w>IEbhl3Y--ynG9JN$M|4E+W~YE^`XGPv1dKnY*i=3*74YMfZC>*T~DTKb>2f$aLgom44*#r|%TW%cuLl(R8ERb1^r_ z%cT3?337FP{?^VFxot1GgXE5qJ4x;=xr^kklI3O6pT29R&j))-e|f1BA}??L^xYwO z%i~YqZ|yJB86bI}D;hX^4pS^OI|5ie-~oE&2qz9$@cpyH`s4R z+@Qas*(&vIlJz$9$>k+ikX%u6CCQZ~SCL#*ay7}-CD)K#Q*tfIwI$b)Tvu{E$@L{S zklavmBgu^=H<8>_ax=-zB`@%Hw~&RBS4e(G@?yzvNnRG}HNwl2O8x(-C&?2^{lBVT zm-;*z_n>5N>VHm0y|-6Fj!FJXy3Le&b;%1Qw_5Iv@RrmUNv3sP4gA}zfub@e#Uwv{f8oK(kdJml^!!!Z{9_)SgM}0s2 zNxz5wsOKDgzxzpS_b#$8>FddV<@?%C&n-{SE&mSt+oOKw7wnw#xIWu^66c~vJ^!?n z^Mjs?{@Xr#3wDlr)P1I`tj|Z?fAsU|7M=d2&hFrlln7^XLDD@2@<)_C476kNUlnr~BK3_4e=Z{?e0p4fyZ&J*TJp z+tdB+-{JM()9dMj{qX7MH4oOqXG)$W`3=dlCC`!krsTPj=SiL~`Ih8`k{3ySOY&mLOC&FqtbZHx z`4rx|S}lFnNM0*>o#geBH%Q(nd6VSLlDA0SD%t)ykf_d_(d{$xrv&2V4H>e*0iOJl$^}tcR!P+Xw66 
z>G}4-dU$%ieXt&$K2LbC9-cl=c(5LxKEHji9-cnGeXt(>-JaimF3)wpkSsrE`sc1%KQ^Y0;4NAj$r&VPmRv@1Hpv+!=ayVfa&gHe zB$t$2N^*9|c_kN>oKtc!$yp`mkep6(ddZn2XOWysavsU~BzKa$Me=;foh1ib&!wf0 zuFwCgb@-_3P1lvKAKjONEtl>Oe`g;Ew%v7}b(;p8H{D+UWE=gRef~-8=eoRqXW!QK z@OSoS-G?9bT%r5(KRIvczVRg1;lJ`+5$xE1T<6&Tq;+N2ef_&T)|Q{u-hZyAZF?S~ zZF?@FZF@eVZTtH>+P1&JqwUB|xoe zfLgTxHTw>pe|j|n)a-Bk`1{%4`tjH7J1qX1{p}xr%|7$>*X+A{TC=}@X7i_i`^@%( zK-)afvRS`C`-07*|FNEaX39!oc+~G8*|A~!fg5Z;*0yaw{SLFWjT>y=H9C&XgKaxE z*fjlZou=Jy+usd#pMLkbKl@R6xWWGBPO!hbQ@)w&#k!C`NOeeNdaq1tto#5dp?s0y z;%IL=X{EkM`iHji`XrYh$LRlmP*E0ato*UOadv=*4e!dA=#xnQV)BQZlF63~mW_2| zhJ;*~Kg^Za4*akoj(llrs;rZm&0P;}WmzIgE;rHZ^OgMZu8h*>rmWBBWtv^adE-`> z#~s?&tM`*1r$&@{9@5F{6G?tR5K(e=Sr6T0U2U)vT-Z=?l1m}?{{~|Q#@BO@otJER z{WaSc{I${SL;l+4E`P1rvikdd+Uu{iFPLY4zuIhnt=TzpI{S*AORe?;>s)KH8~;n+ z?fx(A>>koj@=VG4H~5c8{Y`s96E@_F;61MX zo&FkfytRo?NPF6@=|IYfkN#5iBBz>Nd{rjBE zPkyPFlB~x=Z>h)4>DEq2i2jZCVE=c4{_XbX9bby`;z0ygRBbPkbEhQ zw=I5@`Y)0r$n{mP!6~HPO!Crr-ZZaC{#N=Vk{^gimApZ+ z{(p><<&Td2E`745^ro*l+%RwcQ|S7ayjSLDQ#NlppEdPzGwI)6@Kx$l@*6MXBGG*CAykznI^fAN_xRB1-*|Jnm$9 z?lD!e{zulcW%?_Jc+)>!+{?lK-;(6=Jid|S>XJW@|No(^%;#O{GeUB;Hs0gv|6kNG zhqtcAOP^$tNoV1`f4>_csxs&%;og`;3=+*WAvDqtqjurBzc}J$%NnQ^(D(v+cFL{sj$t%w@ z_5a36B=u=BKjox8P;zg{D@ca?a9TpEzZ_+^@Sg{V|gNUwhvJ9?4PNIa;kF0}c}8%*j~ZQG&q61ibvWgYA*A z4Hv^7ntX6%$Usb)X*FD|SGoz6-+U=U}b-jA^-m6!ytE;=KYpVX+ zrJnv-MgO{@Yy6de>iMrxxZWe^1B!m1!q2XF*^xi<_)A{x?YvacKc?t%Ec=D%oef&dch(*Zz~De_P>tAHSd7;LEz;GLP#$*{*t* zr(dn`cfa13`?)uyHkyV=PLfH+9T-u@Amw=6wgl-{h-22 z@A3Q}-0I7Hi=y|a``}(h-*A(cy;9*XQvB!rq1X9Jg}+|${7})~6?mPOzQxOaQqg~< z@YAZE|3_*u+-Lk-FS||gd|uJ7Q}nIs9(}*E?MsTM_r1QXCtcz3OBMYqZ}9ZbsYP|a zqVSKavTjp&&-=XWMumU+7EgbhD)*y`=feuWUEyE;eJ^{brYrnzg+HqBi{G{(68_(c ze$M+Na_KD!|67GWL*aKTd_m!QU%`K)==Ul7pA>#T;V)4O5WnqDyq)(e`g5-IcK+gK zj~`L^(>~zo_x(q&XF}0``&v(bqtf|ih2N=ozJHzPf1BbN`=BpN?^pj;MgR8-|E~)F zbA>;7yVvt$Mc4bbou_!-rtl9c{6S^gBZ}Vh=U)Hgif5amzeVA1eTT2_gNpvJ!e9If zuk%-me%FgU{qL^#dj8!F9{=x3cCW&BeaOq|edP4saa%s(c|QMcuk#CveviV>{!7pQ zPK93(c-f7od;B|!=bH*Ytnh~w{}2Ak>(_hPeg6Yq=g$@Xi5;H*7YcvMM?L*{+r8}B zihkk8JkNs)pHcX`75@0gJ^z`X@c6Gj?Dc>6lb-&`-}U&{6#bol>G>a3JbP~S{9k&b z$Di@n9=}qJ8$bPHUeDXF@;vWQ_?(h`;wL=+mlV&fimvzc)A+M@di`Jhq}TIfMStO| zJkPT~;xrj!;AX`;hJ1L%>vi;S`9C>!q&m^7H-zi=4oTQ(R z_)MgqHF&R13(efW)8y8ImVAy_3mZCBCZBW?2f9~%+y6q3Pd}a6U=eq1)ef$Hv*~>8;;-QEA9eYd(!v^`# z#if$%gTvq;WG^_&=2IjzWo`JpRr%WcooW*)E_TXI1*5X*XPBl`)-zR-By-bHL}NwD zL0EJxE&kJKP4kaVe1Hlo=2)5?Jy6<*^Z`-h;tf^Yo3z_YrEj6(R0m#AyPPax$a_I^ z0Y*1{)3O!(!!v(W(Y8N*>~!8Kn|h8la_5&yh<_((-{*JR#D5j~BDBl#LVroYQ;&?=Zn`*PBK$rO0A(rkE)|BQG6)Soa%^%Nwvs8|* zkW^?o;WD3uxQT$16*R%tkOh{fHH4T9+gDl}Je5=D6KyQV38W{$H|08=dBm;2$WvP5 zZ$u z222q^pGQ1`cijm9eL2M#kPIVFv-#wi0NyAVJvls$B9wzkmjM*gMep8&qx zFhzZUk->UR6DhQME>9Ub13m=kFpu*!1wHdl*9e}drWDTv((@`UvQzHKi|JPUQ}E{a zi~erV-bS8|a|md}qw}U(;MH>Tkjo0>8&OXXF}V!eClr5{-h#fqJrH|_z&jw{|12PC zBkaKoM0w2nWZ0my-yD4~#AA_!X+fg6mk&I30oH}V217RQfZ_C4`8>A0&%9$G?{PCe zX5}i3riV@9s5v%sxFop5u$dzhfs?XiB+PR&A2eMCqDsZ$K`A_Fu!?)nuEhcfgw$FSvBHr?VOXUNK`C&-NR@HKGo`_U!?&0?IHJk`94?!d_|Hu2#$n78e)!3E7q#m#Wyd)K988;Jw>~{u0N}zZU9uFq?y;^Q%ou zyL2&Z&{k^P$;Zx0q_ekF4SsnolGC4prbvf#$fEA%MMmo;F7$A_!>az>>`A3wpC8T& z>+HnCA>N^Em|mD=2FA}%I7)t00SmZ!?Ez*H0Rlt>s$u3n#0`QpY+86`*LVGn_M^ztV#rhJIiY2?FIbRq+ zIu@fePQX5V;9z5)HjMf2@ZuA&2ZE=^VV^5|Dcs{Q6CElqJrYhz3o6a&JlAgeHnV?Q z@4=Le_H?kVjHOPd}u{69Z9c~%Yl8%qCY?EO*>*1YUHTBy+xCUORtlLPh6}TAJX}To;$yhue2e)}ioF zIpPn)BbGXCN#GW5v>wkvWU2R3cveC%=kf69oFOYpVB421Nn}<(mYUX;jB7#JR;kNR z5T==ifF<)h+i-ztb&EeSDRVx}y`*e&kA}D{^6Cm~y0s%q32gU+J8$FAn&wRIiqw$Z zg$bruw7o;75?@3A3m(tqG(}^THXS)P(`fLOEw8<$5;uL9y(W09kk%hwYIIag_b~lY zkm(irKM>_%_)?@vmxdnMR}-FV4 
zm#apaY$7eXjlD*M7SvED=iQ!-;6?A@df>g`VWz`Znj^7Iwa|=k*r)NyQEF!h?hdEj zGpv!f{WbgF_s*p?7V2WSt1o;%t+uH!{IY4kMrG_^^D6;}G+FqAr#OC>56i6>(hIn+ z^im)#SpEfMTMWlxg~y(&QsvG04}ulobeVV44 zk|qQ9W>I^K!8ZpI=v2^(;WY;Du9gcCZSAUX^1SZ_(%K`uBL3l=(Xba? z-kg8>wWdn>ZN_o`$CRLO1bHv8rFHsv=-OD38425o_LXcOkinEr&TTd^u|D*FJ>li8 z)G`Re#~+G%`FcsCzg(>}uN4w<-utB&sn^>HN^or$;Pw^Xx^(o3H~;k3-M1b+`lj%B z*#8)y6Z-RYs1|P=6+ciuPH8)3pDWYFE9vC+srs$7eJ%Nt=Gk6u-ucc><;X9^aWrV- zI7_iUT^EEQS#!7GXEyecY*-xbALNAiBAntL$Zj{yIVSAI>w1Zh3`M(3dZb8u4`jC+ z&y#`SwaY*K*7J0t-HoU8jXP6ly7ZZxe9c?nYv6&LcGIdwjoM(Eu7LM%dzY@J<>%2_ zEiX-nW z0%ib%m=`h*7{nZeallUCGfZP{$T(mpXfuF8%p+pnxQmGomO*10d^6-{8uXJH1~ETo z26Y}sJdP8!B6UrfgVRRQLo}IXEW&nf3(8Id8`ivvZFv#l^UpdFkmrmp(o?%CUuyYtV(t|wL zz*(=s-vlpU@Emg3AoVR`>!aoPVH|#KD3AP^JRa3g4sOcPr+9U_r z%|iBNWmx*8`{*2T^@n@JZXKKaSewR|V00Dy@zYNvuaSz?P>X2rZbfE2St6WxjBQoT z5A-vsM9#GoNkWGX=6sp?fnjofAZLm|ebWKKKnEk*Q7U@!mv zmIMv6n*mnk`2pdOIdPfu>)5cv@D_742RtA2P}J7coHLVj2k?|Fvw3zf_Vv$!V%rpt z)6DPZBn*U#nP0{Eb%xBWpc-0bm|w?Qvhep?N_0J;-Pn4#`E}dz++^D>n8}L>K9aLo zK?A6O!s2^3m6k+XJf;W$vwNdkrLCnsf`~kmh1fiEh<>Ciw1?N44SmCiqxp5SFp6}= zkCd5D8=B@Gh$Uuzom`Qa*XHKe9iP1Dy>t7@WnqhAYIWfdO_K!5^OcCWCBH(p3dzD> z20texy0n}9Ytjq6$|3`mE-i2CXQjsX&@@&?CS zm|vGQyGovGd#fz5#SrsrOg4ZQGeYbEO*^1vTo>Rx9c0xrU%6<@I=^`uoIJ0+K>a@; zFuh-;68QtzakTU~|MX}zOZjcak^Xvl)4Wf``E^HHX=_1K^Xt%g?#XKjl8-Zi#^lb& z`-Qp^asK?e*rl3kf^Rptt5(Y8fR>lTKD5`~5HqTLzF_>@5`uktlb+hr5=R!8Ahnx} z?rkXwDk-#Ok)o&bWogM5=FI_I_CifPJNd-?QXEI@t=8*pTHY}3!*Mz(5+n(&q`f8S zn9)_?B^B2y*La+oOraGc#Gi;24}2o2w385jLN*rE-Ooiniu+5)mcsP9@OHhi}wAFp*1*gT9M9< zD((=SSC7YK>+^Fd#;@Xaa0kiq`8gP$fQxw}8Q)6Rdc3}vH{x+MQ}`=H&W%78T$z)DCUjK(p5T4s*RREl{c(j+jZR?Hg_XT;-amt4@~ zXk^+_EBNLVhQ+)Q?V>hu^jBpiiti04_q-A5pCwL?lFu1u_*~P{oP%Pcz~Z#BMa(F< z(DLG9-bnO{0aul+yNY=u#k`UH`3%Lpk?7KC649MeF>geBBaYiojrlFGNw=r6b>+HD z#k`SdT_*qiHudy@=-W&&Z$x|%J)H|-h2mn~NOBfkeJeGtukeF@{3I0fMy#)jc_Z<5 z(PFB?4Y%ti1F2@nc_Yl%>y2Q!hnChQE7s{z4n0}WoOaV{H5;|TG+lw82UtzZ&!e?k zUQ!>gGexT^+@PJfZjw5gPP>-U_zGHgdLY?u66xlQ&VQ_Js0Z^pxl8gf!1YPSM)L%d zym@I^kBVzjajmJHfEmCbZYJY^oq!p@pj(4#C*m2vAolYb2kZpQ00y}xm0F*Q>r@e< zHL27ZRCzWf^SNHtdJ0@mi|b2q%_gqZv|2%Y-gFGxxJDG$UXnG7xF!+TA!0CVV{uI) z0M{=X2kZpQa4jRu(HwX4G-nWVtuYB)b1WISzL*5AGqzfRb2b^c)>pTNolV#_!!^L% z`d_RE*Z$&p=K5a}WDPJ$b1g6iSr1Is0yA=WKpe!}&2fMP&gCq^sq2BJ`wiE(7yR}b zJQ@5TbGMVv9pUlW<91V-YTFma#n=gB?u&eivSHRl!=rb^G>*nnOe|(ULoFKSNBPaL zH2S+HF^JKZXp`uk(#3}84g+&Nn@ZclX`8hFC)4@BkL^2%V{d8L{4e7*CbtKFOJi8< zFZJ0HnHne!Zz^r}Z{$dmym&2-2rfNp5vCxfm`<3@!_3ShfyXl&%@H(<%GmQ zjlSu%Ps8TwI=yOncsw2R!s={;a8<6iDu&T&rPAeIghIN9We)lm@k3Uu6|{}>PmHm! 
zs5|diPDY|u*UwcAC)bB@|FP!!Ty@e_^=xp=U6m12fc3eeLq#7igWI6{|MX@WR*rVY>%caW$o(lBf&3xAB;B(6AC`eRLfwNBLIRn%2e zs~j$h%qi8x0?Tu{db-h49;bMHSj0`qhHcjiref?->cXIEGNsu_O0x6gDlR1K<0;)w zy}*kiQZ>Tzk!t2eVy?chH=!*1{N$I_4u>?54$TIsC2f#q8HStjw4d!=F9*Lgeflzn zwC+gsS6+D=m^n`sUfA}G$x+IXbb4GdQY*PWK~|M;0rJ@eFpLOLMMU70=`w*OYK2-X zYkdQ1ObbDM51z&*v{8o~uIu-OBR8(Pu2>ka>#KX-kSrDRZH>WShmIl*S@AZ5=C^oL zoUE~66DjL6qAcEt41md5Qv|)>y+nfI&V=7% zon`7UfW$@sKIcUHmyG})!8JI7j1hFfBe+T@08@Yw+zBTDQ-FEE2s)a1(xG4i_!QHC z5h$JjZ3-|?<ovzzDwSO@TgwZ#xqNrV-BrMsTy6089};pGQ1`?-LUM)^mz6AQ?vR ztwQ`Tp}t!r`It6(a(L3@k~CAE(=$(7TWEJ9cw-d)3HL1}slS!<6!if{2J59JQfT#D zo-%^>&xyo4=seEX6!gqHT_gCOGo^SYke*j*(Ke;N`$&C@*q{5heL;UWXm2Cfw`Ky+ zh)3s5wZN<8<{RvfL%tEUzfDXo!}bZqFKwuZPbLo}o*_04Z{%Q&g~U@wlgT|8awv~^ z9}OFH_M4-zJqD)hwDqQr`|?RR*kG8=TWvV~RX$y9cTb>U@VJa*fm$J-Ti4z#C`d|9 z$lqhYyc_Z<&uhXzxJ|$t+ChU<@c$x)1ykLJuZ17e1M{nR)=$sXR9gm)`oH2^50v-y zhvfrhQ%-c)P=$3d+Rcj>Tsn4lcx3K!XLshPK%@jn%^HyZ3enL1^7Ut!e80y4ZAk1f zP)1RBrZfcxhi|b3%aJx!-CyAy?Qs{oJqEPZrP8bjN(^84wp|k{j96S;9HOizMOekQ zCG%or-;Nv?M)X_VW57!4Z+CHnrTo}gi7e?Y$sPlrLQ9X%hI7cG?&d`%p{s~^{|IqQM+J^{w}E)2tq^&SI9RUc!;u!gin+_4f% zne%1#7&wvKV}My)ylm@mc!Vv57bbIHI)-MJc)|_FJO(B@7AB2yIUixT zmv%{rh1ki3)vqkyVA&st!x;kEeYI~x1Hl0gqC_Qg=ZxMa~=qs zcizUKHO-ma6{(?oJ7b+e>(ARGL9DZyBj*aaE(TxO^4eP}anpC%Yl6oLY5k%7d|BcB zDC8aqyjB99hd`8Kj|8O6HRSKTWDmod@VGq^OcL}mfeH2!M36?H4g$|%q{_%74~G40 z&x3TiYNW|2(xThgYeXu@t)Wit2G+9?y#BxLd&9#_hp#k8Vw-BA8R4)~7A8ljox$vp zV0#fG@AgPQ|NGwLXG6nJT?}{ih3}`;HWh|nHtpB2#11yULLbv);SZkT_+5Uq4*}8( zxUck5AT3z_1!P+c$6`ZP^7B~1qI z&5EpU9`TI>nqp$w3$YB|JvtS%VmnjFUoowhA=0uhlH&w;9L%A5((D5jymB(;8cM?8^VTUtVWs147W$9tqV-*tAxh1Uc{hIh}0- zyPC<$-y@+JEz99iEk4%vY=W9o$nB8ij*uo>Nh2%U5f2Mo%aI<65l?%&D=3(-x$u#4no4-{YVes zTiF<32Vg&70N;(q06PHt0R#BfGzRF#u-~=09bIZ4eYU>|+Xis4j{$b@qE}(RN)O=r z8&KbY$1<)*B_2b^lx!@cNE9A`2cr9xLGoRaKe5h?-QatT1iS<@;9*?92jT5?a`F;p z@WzJ1Ef)!RuXXQB$NGnnI)2E$x=YK49t6C$9fz3f3J%~U&P>^MO#3G08Cn*fS z#OA>kv&UbS)do9wzuR@Mr7wlEb9TIORI?xB8&_@5?UhZX#p(DzpEO&0_{9+8rxDsJ z_HwFt|1b!p($~!~I&34BcoxCnigQE6rjV-3n+u?H(q;g2{wN#5VLOO&4FAJIdZ}W$ zYECVzv4q$WpBi%hGlNpn1icoS0j($=wbzDDm#Rl;SRyIrmkX3d;4J%Zx;?7Z_|lo;I@bs6u9{_T0;69UJW|93c2QbR@J7%KG#o}_F6^j z-DZSNH)(cuJr64_hr>~o612)bPx=q`SN~KlOI{+LWBH?=81;Leq%picJ`EzsT zayeoQ?44t+GL{=lEXGoJi)}ve4v=#fXkJoj*Ee^t z@7Mx6q+}-u5}y#-3C2G8`cxj~e`A-ir=hc8)Wt1#{Pi`k_GKOG`I8jXr{m)_d}CugV>YDo-90VveV(CP-|4M&ojr1Vfa6#)R@GVUozuo~C5Egq1CBXK;Mfz# zcQ~e#HjX2F-4t^SgJB?}ZQ9(mJ&rYD8^>ODxVmU-_ovV#zt!q_I#Cic?Xxg`*;;uSf9MFV7z5ck9Wv;i;REl@8aGH zyQ8JJ@fJw1l`K`PXyfsgfznd_k{D8E6gL|008^EWH5A&~cn75rR$;v3?PI2-_nBiG z2Yd11ZPK_5x>!~->j29?!F3u)0&s~jiV#CzRjuk}xaYGv#WtunDN@w2p>xaMXm^ZZJYYjrD-q+JA zBm<~vc@1dBuFM0+-0bHGo_XFS*$?w>JB0V-A!H8Wzpq31zvvMD(>jE4;352}Vh9XF zn*hUr-vW#Po&}*52t((kD;+vNo!%;S=(l0}^8lLxF95s<@Djjw%$H;9CAyfU5wn&Ng%CHORNm&Dl&vPT_$y^1#sV zH&^h`YtfEhXL*Olo0Ca$4ZR^r>1=xFn$GGgv=ts$DGv<2X{9t5dJ7K}9#|_63~gJh zl8bWJy9eelcC#9=-u=|9{R`rp<}r=v@4%w`w!4yvhr4w&axQVtzS~kYD+%~FLNvSzCd8m zbW8Cx$xDmNkmq-bO@QZt;ROK7zYsBj_)A>;V#HT}mJ}bz^D0MszT-O&=_?%V0??id zVEX8@OQp*kof=4Id;;|6IGpM870z@rEP!9avmEc`&?~s)o$qLOf#+NY-^kGY%nZ8d z((#$l`&3mAomb$62iDpH81h_K0heB^9~a|=E@?h<2?%Yy#Xmx&9#TO2=a615Jkq3& z8u;#Uz_RZJz864T!uOEA55Nfnq#;_dmm! zv&iXo51i`nk461gX8bAH?!OI3CuvL<{$CmYH{)0Q z1xz-oO^wsem!@UOYo11Umb}m#$udv)vf_5-^W=0s%Vx-mZEpH8%Wk2kOL^*5Kna|p zuVDgdLe;@_hzU12KxBfHlNAeJR-7d#^ej1{3tv_&d|9!`G5-l!Oeyq-4%YWYt;mZ? 
zUB1v-iPPj;|A($Eu^szcv9;j!9+=O#mwArW!4AWYu6t6kq_?JD?AEf2KY4#i9D z%CC1hsehw5Sx53FX<2ElS9sEB%)=5}Q4rov#gXD1WVz*O^+{biPjKOpw8TP}Sm>(} zuSJ=wQLk<;V;i>NSZ(_@{}Haa0^0V$YM-y7yzcYB96HO@fbOe#_ggfBbJNYx{T?hV zTuTphTiabtU;Gf)ZnHqUn&nO7%ahmn#EVdPfF7u!jh-w}oYB@juo};Ojml`<;zHtN z@<5}uu^M^Z;>(B84|fA}i$7M&jsX)!+1KZbk7$eu6jzeYt3PDKwi zyH`=Od!aP)A@@mH$x!UUvXb5{ba&DN&F;z4$ubLFg$D``tgHvPhf)#Nrw4{%_G-ZT z^v=m`^-7+1dajXv=C5_YRAAY_kV0~?uBoyYv-4eL2OJ0!kuu^fd{BODblyoWcanJ-Ooqevr7}CpuVaTr~ zp!jtidr-8xsuPlH#M{>FLk~RdI+Mo z*;wRvK$gI8&$@>|1e-NFQ&4%GW03@A^GhWq_Y+aWgfJMuJ zPF6_n8J*Nw=qfz079Myy3SA9Yi@J5P%x)YvZN;We`eH3~NuT?K2g2~_*lhSL#A1i! zCF6Ol8{G@OM-czmtsyNul-0QCO2+p5u-jMx_BK*SJsrZ{H z^G?850Mh`bS=M8S9{~Ieu&dWU`?Omw_w~3Ny8bc6M^ZN(OF5cGT<8)f^H(yx7G*A{ zKEn`R4OotiC#&pvs8~0{$?Er_UY+qkx3%5Q)T+=@c%V5CoC~&Qff}3TP2Zmu3hP%7>6S^i%-Xh2{FNSQ|ENU0AN)+Nrf#QFK#5uj$&9Kn7ECJy=->Rtf$sAF*VNSOWoB78^lI>}@S2!eWf%0q19={3c0Eiby@W9G?VCb5a)n8~YJWzOG^*u23rqwUADDmX*z-sId z#WGF~Z{^owfrDM^B&_7QrwbbCzXADsl%LQd^AqPE9Ay(2kAkNp3L5 zpPi^nCv>dUdC0Q;2TPr8hwavn_A#_8pOmxO(vQN++zQr*B`|2ZrFfd;rNw2)^ZUXk z!1KWH0s!S-h?qe9B`$t3;;TPPiVx&@m7_i1@tueC6^?cRXwL;Oee~I-(q)cL4Wu(Z z0s3UN_)O@1s;Y<1EAYYt zYwdycw#^4Fz1Z`F_)qAP<};Uo(AHc0BUI`k1+;$->E*&BP3owD?;Zy%`(EIC0mLPI z59#{=T-Sp%L@O3^-I6;1{I>rQSI#aM>#~zL(A@@=+A2p z-C}2_?P$eEx@`QFzbnq0wyE8^tY^8rcH8aw{6AthLbv;^u&?ky;eo~;khSHKu}FPv zAq+nbxDD`W2e%`h0z3q`_b-IAhzk!C9$1ea*!ZEi)sH?oo^B^~VM1p+ItrQY@W845 z{!r9^W!__xt^3<>bdtt&;s2HKe=~l?Uw}%DAnklCP+D1vG8TZS#mO9>rc|2GxRBO1+DNvo(GcG)Otzh z)vMY3)vj-|b`^Z5mIvBxhvKDn<=4BM)W1=jtRs1ow5+t&D?Dj5=51CP!rQ4>scwia zw>+&psY~YxED04OH)y-vWTTFjmT}hia{}Haa0^0V$N}iiS_xkX_ z96HO@fc4>x?y_p06kDc u8$DT|IHRq5U^Slm8kNzy#f8MlR-+#g`AGAMOU|7Jsak9seH)%}3Y( literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_IDENTIFICATION b/images/locale/C.utf8/LC_IDENTIFICATION new file mode 100644 index 0000000000000000000000000000000000000000..fa899b0a7e1b07ebb48af320a62ac6c2ccc54db0 GIT binary patch literal 258 zcmb0XWL6MhU|{e7;y@q{0^(pG4gumkAkGKk0w68~;vyg}2I3MRE(PLBAZ`KTKS1oP zkdvRBn3D=*G9;Cz>!#;qCMD}a#EKo#^Gfydi_$@oAYi0t$Y5k(WTb0gsB3J%kZEX< SXJuqyXhdgLXo#Dx1p@%V|1yC9 literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_MEASUREMENT b/images/locale/C.utf8/LC_MEASUREMENT new file mode 100644 index 0000000000000000000000000000000000000000..5325e19558016a16743afa807724927d0afc6650 GIT binary patch literal 23 bcmb0WWL98eU|)IVq+jS0b)}iHUnaFAhrNv)dJNL)pQ`v1>!WI01$+RxanFj005!{ B2O9tY literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_NUMERIC b/images/locale/C.utf8/LC_NUMERIC new file mode 100644 index 0000000000000000000000000000000000000000..23ba63a16af392fb317a8c85f572c44bdf61feb5 GIT binary patch literal 50 pcmWd;WL98fU|>)HVkIC}24WQ;)&OE1Al8FoAjJ?G;-+iC001>o0^9%q literal 0 HcmV?d00001 diff --git a/images/locale/C.utf8/LC_PAPER b/images/locale/C.utf8/LC_PAPER new file mode 100644 index 0000000000000000000000000000000000000000..18cfad8afdf5c0c0a5470f118af0305896b0ebe8 GIT binary patch literal 34 jcmWdJU+pvC?mlMoW>01n$B_k3Xkv@wuZl^EnGi9Cwfps z1$E5g8~lcQ*tV8D3}F<l!FAlk z0=92t%^1KiYB+;=T*GZ_+GMs9o#?`D>_HFuFn~c6F@#}!h8j*`7W24_tGIz%xC?vN z>^&5+s^urER?HGRlvJ!Znptl;W`$B+b(K;>)w+-|r7Kz0rHI^g@evJFLz^sB&|j)r zZ&J2=DYaZJ<;pV#>d(e4mmRf2QnLdib=yCA%zBa$ggUrP_@a4E>;Z7^?fDT>QcoJ4V7cbxBz3sHR7;)#y9(h)bz zA6mT6Yin%I>njC@%1~_%nddqg+!u*epFrgh6jyXZQT-^Cqtc5MW4fWJ{}SrDp@ie9 z57jkPK?%ohtqbQdMQ&?-I4`Y5b6hSjoUi7&Tu!)#)^Rj{Uys(TK3a47n>oIx>QD12 zLq+RUA8M`Go7yoR`b-HK<Wrv_YtFW&l zLC>_t3Ftj2_Wcg`NBh!RAMV#meS~|sQjX@Tb*c=)XYile)e(ou~r(_&zjo2EpP4)=--a+A9uE`XPx&N)=z3U9=8?F-m TZs=a$64m>6I7j>@{SH0>W+Y$F literal 0 HcmV?d00001 diff --git 
a/images/locale/Dockerfile b/images/locale/Dockerfile new file mode 100644 index 00000000000..d13aa4fb6fd --- /dev/null +++ b/images/locale/Dockerfile @@ -0,0 +1,19 @@ +FROM "debian:12.1-slim" +RUN apt-get update -qq \ + && \ + # Add en_US.UTF-8 locale. + printf '%s\n' 'en_US.UTF-8 UTF-8' \ + >> /etc/locale.gen \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get install --yes --no-install-recommends \ + locales \ + && \ + # Remove "locales" package, but keep the generated locale. + sed -i \ + 's/\s*rm .*locale-archive$/: &/' \ + /var/lib/dpkg/info/locales.prerm \ + && \ + DEBIAN_FRONTEND=noninteractive \ + apt-get remove --yes \ + locales diff --git a/images/locale/generate_locale.sh b/images/locale/generate_locale.sh new file mode 100644 index 00000000000..9b91b2bfbeb --- /dev/null +++ b/images/locale/generate_locale.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +iidfile="$( mktemp )" +buildah bud --iidfile="${iidfile}" --file=Dockerfile +podman run -v $(pwd):/tmp "$( cat $iidfile )" cp -r /usr/lib/locale/C.utf8 /tmp From 4c89ac7dbe16f3e3b9e92dfa79a2df3d2fab20e4 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:28:51 -0500 Subject: [PATCH 104/143] overhaul how everything is configured --- generic_build.bash | 396 ------------------ images/README.md | 16 + images/base-glibc-busybox-bash/prepare.sh | 22 + images/base-glibc-debian-bash/prepare.sh | 5 + .../bioconda-utils-build-env-cos7/prepare.sh | 20 + images/build.sh | 87 ++++ images/create-env/prepare.sh | 44 ++ images/versions.sh | 63 +++ 8 files changed, 257 insertions(+), 396 deletions(-) delete mode 100755 generic_build.bash create mode 100644 images/README.md create mode 100644 images/base-glibc-busybox-bash/prepare.sh create mode 100644 images/base-glibc-debian-bash/prepare.sh create mode 100644 images/bioconda-utils-build-env-cos7/prepare.sh create mode 100644 images/build.sh create mode 100644 images/create-env/prepare.sh create mode 100644 images/versions.sh diff --git a/generic_build.bash b/generic_build.bash deleted file mode 100755 index 5ed7b4a381d..00000000000 --- a/generic_build.bash +++ /dev/null @@ -1,396 +0,0 @@ -#!/bin/bash - -# This single script builds the following images depending on the value of the -# env var TYPE: -# -# - build-env: contains conda + conda-build + bioconda-utils, used for building -# package -# - create-env: contains the exact version of conda from build-env (which is -# expected to have been built beforehand). Used for creating env from -# package + depdendencies -# - base-busybox: the minimal container into which created conda envs are -# copied. This is the image uploaded to quay.io -# - base-debian: an extended version of the busybox image for special cases -# -# Built images are added to a manifest. If multiple architectures are provided, -# they will all be added to a manifest which can be subsequently uploaded to -# a registry. -# -# After images are built, they are tested. -# -# This script does NOT upload anything, that must be handled separately. - -USAGE=' -Builds various containers. - -Set env vars immediately before running. - -REQUIRED ARGS FOR ALL TYPES -=========================== - TYPE: base-busybox | base-debian | build-env | create-env - IMAGE_DIR: Location of Dockerfile. - IMAGE_NAME: Image name to upload. - ARCHS: Space-separated architectures e.g. 
"amd64 arm64" - TAG: image tag - -REQUIRED for base-busybox -------------------------- - DEBIAN_VERSION - BUSYBOX_VERSION - -REQUIRED for base-debian ------------------------- - DEBIAN_VERSION - -REQUIRED for build-env ----------------------- - BIOCONDA_UTILS_VERSION - BIOCONDA_UTILS_FOLDER: relative to the Dockerfile - -REQUIRED for create-env ------------------------ - BIOCONDA_UTILS_VERSION - BIOCONDA_UTILS_FOLDER: relative to the Dockerfile - CONDA_VERSION: conda version to install, typically of the form "conda=x.y.z" extracted from build-env - MAMBA_VERSION: mamba version to install, typically of the form "mamba=x.y.z" extracted from build-env - BUSYBOX_IMAGE: the image to use as a base; typically this will be the results - of building base-busybox in a previous run of this script. - -OPTIONAL args -------------- - - WARN_IF_MISSING: true | false - If true (default), will exit if there is no remote repository yet. Set to - false when testing with custom image names. - - LOG: filename - Write info here so other jobs can read from it. Defaults to $TYPE.log - - -EXAMPLE USAGE -============= - - IMAGE_NAME=base-glibc-debian-bash \ - IMAGE_DIR=../../../images/base-glibc-debian-bash \ - TYPE="base-debian" \ - TAGS="0.1.1 0.1" \ - ARCHS="arm64 amd64" \ - DEBIAN_VERSION="12.2" \ - ./generic_build.bash - -' -# ------------------------------------------------------------------------------ -# HANDLE REQUIRED ENV VARS -[ -z "$IMAGE_NAME" ] && echo -e "$USAGE error: please set IMAGE_NAME" && exit 1 -[ -z "$IMAGE_DIR" ] && echo "error: please set IMAGE_DIR, where Dockerfile is found." && exit 1 -[ -z "$TYPE" ] && echo "error: please set TYPE: [ base-debian | base-busybox | build-env | create-env ]" && exit 1 -[ -z "$ARCHS" ] && echo "error: please set ARCHS" && exit 1 -[ -z "$TAG" ] && echo "error: please set TAG" && exit 1 - -if [ "$TYPE" == "build-env" ] || [ "$TYPE" == "create-env" ]; then - [ -z "$BIOCONDA_UTILS_VERSION" ] && echo "error: please set BIOCONDA_UTILS_VERSION for build-env and create-env" && exit 1 - - if [ "$TYPE" == "build-env" ]; then - [ -z "$BIOCONDA_UTILS_FOLDER" ] && echo "error: please set BIOCONDA_UTILS_FOLDER for build-env" && exit 1 - [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1 - fi - - if [ "$TYPE" == "create-env" ]; then - [ -z "$BUSYBOX_IMAGE" ] && echo "error: please set BUSYBOX_IMAGE for create-env" && exit 1 - [ -z "$CONDA_VERSION" ] && echo "error: please set CONDA_VERSION for create-env" && exit 1 - [ -z "$MAMBA_VERSION" ] && echo "error: please set MAMBA_VERSION for create-env" && exit 1 - fi -fi - -if [ "$TYPE" == "base-debian" ] || [ "$TYPE" == "base-busybox" ]; then - [ -z "${DEBIAN_VERSION}" ] && echo "error: please set DEBIAN VERSION" && exit 1 -fi - -if [ "$TYPE" == "base-busybox" ]; then - [ -z "$BUSYBOX_VERSION" ] && echo "error: please set BUSYBOX_VERSION" && exit 1 -fi - -LOG=${LOG:="${TYPE}.log"} -touch $LOG - -# Also add "latest" tag. -TAGS="$TAG latest" - -# ------------------------------------------------------------------------------ - - -# ------------------------------------------------------------------------------ -# CHECK FOR EXISTING TAGS. This is because quay.io does not support immutable -# images and we don't want to clobber existing. `latest` will likely always be -# present though, so don't consider that existing. If you know that the -# repository doesn't exist (e.g., you're testing using different names) then -# set ERROR_IF_MISSING=false. 
-response="$(curl -sL "https://quay.io/api/v1/repository/bioconda/${IMAGE_NAME}/tag/")" - -# Images can be set to expire; the jq query selects only non-expired images. -existing_tags="$( - printf %s "${response}" \ - | jq -r '.tags[]|select(.end_ts == null or .end_ts >= now)|.name' - )" \ - || { - if [ ${ERROR_IF_MISSING:-true} == "true" ]; then - printf %s\\n \ - 'Could not get list of image tags.' \ - 'Does the repository exist on Quay.io?' \ - 'Quay.io REST API response was:' \ - "${response}" - exit 1 - fi - } -for tag in $TAGS ; do - case "${tag}" in - "latest" ) ;; - * ) - if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then - printf 'Tag %s already exists for %s on quay.io! Logging, and exiting with code 64\n' "${tag}" "${IMAGE_NAME}" >&2 - echo "TAG_EXISTS_${TYPE}=true" >> $LOG - exit 64 - fi - esac -done - -echo "TAG_EXISTS_${TYPE}=false" - -#------------------------------------------------------------------------------- -# SETUP - -set -xeu - -# Dockerfile lives here -cd $IMAGE_DIR - -# One manifest per tag; multiple archs will go in the same manifest. -for tag in ${TAGS} ; do - buildah manifest create "${IMAGE_NAME}:${tag}" -done - -# Read space-separated archs input string into an array -read -r -a archs_and_images <<<"$ARCHS" - -# ------------------------------------------------------------------------------ -# BUILD_ARGS: Incrementally compose build args array, depending on which inputs -# were provided. This will eventually be provided to buildah bud. -# -BUILD_ARGS=() -if [ "$TYPE" == "base-debian" ]; then - BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base -fi - -if [ "$TYPE" == "create-env" ]; then - BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base - BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") # conda version to install - BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # mamba version to install -fi - -if [ "$TYPE" == "build-env" ]; then - BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") # which image to use as base - BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=$BIOCONDA_UTILS_FOLDER") # git clone, relative to Dockerfile - BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION") # specify version to checkout and install, also used as part of tag -fi - -if [ "$TYPE" == "base-busybox" ]; then - BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION") # version of debian to use as base for building busybox - BUILD_ARGS+=("--build-arg=busybox_version=$BUSYBOX_VERSION") # busybox version to build and use - - # Make a busybox image that we'll use further below. As shown in the - # Dockerfile.busybox, this uses the build-busybox script which in turn - # cross-compiles for x86_64 and aarch64, and these executables are later - # copied into an arch-specific container. - # - # Note that --iidfile (used here and in later commands) prints the built - # image ID to the specified file so we can refer to the image later. - iidfile="$( mktemp )" - echo $BUILD_ARGS - buildah bud \ - --iidfile="${iidfile}" \ - --file=Dockerfile.busybox \ - ${BUILD_ARGS[@]} - busybox_image="$( cat "${iidfile}" )" - rm "${iidfile}" - - BUILD_ARGS+=("--build-arg=busybox_image=${busybox_image}") # just-built image from which busybox executable will be copied -fi - -# ------------------------------------------------------------------------------ -# BUILDING: -# - Build each arch's image. 
-# - Extract info -# - Add info as labels -# - Add tags to image -# - Add image to manifest -# -for arch in $ARCHS; do - - # For build-env, need to use different base image from upstream conda-forge - # depending on arch. - BASE_IMAGE_BUILD_ARG="" - if [ "$TYPE" == "build-env" ]; then - if [ "$arch" == "amd64" ]; then - BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-cos7-x86_64" - fi - if [ "$arch" == "arm64" ]; then - BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-aarch64" - fi - fi - - # Actual building happens here. - iidfile="$( mktemp )" - buildah bud \ - --arch="${arch}" \ - --iidfile="${iidfile}" \ - --file=Dockerfile \ - ${BUILD_ARGS[@]} \ - $BASE_IMAGE_BUILD_ARG - image_id="$( cat "${iidfile}" )" - rm "${iidfile}" - - # Extract various package info and version info, then store that info - # as labels. Container is removed at the end to avoid e.g. having these - # commands in the history of the container. - container="$( buildah from "${image_id}" )" - run() { buildah run "${container}" "${@}" ; } - LABELS=() - - # See - # https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry; - # this allows the container visibility to inherit that of the linked repo - # (public in the case of bioconda-utils) - LABELS+=("--label=org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils") - LABELS+=("--label=deb-list=$( run cat /.deb.lst | tr '\n' '|' | sed 's/|$//' )") - LABELS+=("--label=pkg-list=$( run cat /.pkg.lst | tr '\n' '|' | sed 's/|$//' )") - LABELS+=("--label=glibc=$( run sh -c 'exec "$( find -xdev -name libc.so.6 -print -quit )"' | sed '1!d' )") - LABELS+=("--label=debian=$( run cat /etc/debian_version | sed '1!d' )") - LABELS+=("--label=bash=$( run bash --version | sed '1!d' )") - if [ "$TYPE" == "build-env" ]; then - bioconda_utils="$( - run sh -c '. /opt/conda/etc/profile.d/conda.sh && conda activate base && bioconda-utils --version' \ - | rev | cut -f1 -d " " | rev - )" - LABELS+=("--label=bioconda-utils=${bioconda_utils}") - fi - - if [ "$TYPE" == "base-busybox" ]; then - LABELS+=("--label=busybox-version=${BUSYBOX_VERSION}") - fi - buildah rm "${container}" - - # Add labels to a new container... - container="$( buildah from "${image_id}" )" - buildah config "${LABELS[@]}" "${container}" - - # ...then store the container (now with labels) as a new image. - # This is what we'll eventually upload. - image_id="$( buildah commit "${container}" )" - buildah rm "${container}" - - # Add images to manifest. Note that individual **image** tags include arch; - # manifest does not. - for tag in ${TAGS} ; do - buildah tag \ - "${image_id}" \ - "${IMAGE_NAME}:${tag}-${arch}" - buildah manifest add \ - "${IMAGE_NAME}:${tag}" \ - "${image_id}" - - # Inspect image details, but remove the most verbose (like history) and - # redundant (just need one of Docker or OCIv1) fields. 
- buildah inspect -t image ${IMAGE_NAME}:${tag}-$arch} \ - | jq 'del( - .History, - .OCIv1.history, - .Config, - .Manifest, - .Docker, - .NamespaceOptions)' - - done # tags -done # archs_and_images - -for tag in ${TAGS}; do - buildah inspect -t manifest ${IMAGE_NAME}:${tag} -done - -# ------------------------------------------------------------------------------ -# TESTING -# -# Args to be used specifically when testing with Dockerfile.test -TEST_BUILD_ARGS=() -if [ "$TYPE" == "create-env" ]; then - TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") -fi - -# Turns out that buildah cannot use --arch and and provide an image ID as the -# `base` build-arg at the same time, because we get the error: -# -# "error creating build container: pull policy is always but image has been -# referred to by ID". -# -# This happens even when using --pull-never. This may be fixed in later -# versions, in which case we can use the code below in the "EXTRA" section. -# -# Since the rest of this script builds a single image and assigns possibly -# multiple tags, we just use the first tag to use as the `base` build-arg. - -tag=$(echo $TAGS | cut -f1 -d " ") -for arch in $ARCHS; do - echo "[LOG] Starting test for ${IMAGE_NAME}:${tag}, $arch." - buildah bud \ - --arch="$arch" \ - --build-arg=base="localhost/${IMAGE_NAME}:${tag}" \ - ${TEST_BUILD_ARGS[@]} \ - --file=Dockerfile.test -done - - -# EXTRA ------------------------------------------------------------------------ -# The following demonstrates how to extract images from corresponding manifest -# digests. This may be a better approach in the future, but as noted above we -# cannot use FROM and --arch and instead use name:tag. -# -# It may be useful in the future but it is disabled for now. -# -if [ "" ] ; then - # Manifests provide a digest; we then need to look up the corresponding image - # name for that digest. - ids="$( - for tag in $TAGS ; do - buildah manifest inspect "${IMAGE_NAME}:${tag}" \ - | jq -r '.manifests[]|.digest' \ - | while read id ; do - buildah images --format '{{.ID}}{{.Digest}}' \ - | sed -n "s/${id}//p" - done - done - )" - - # N.B. need to unique since one image can have multiple tags. In general, - # this should be one image for each arch, no matter how many tags. - ids="$( printf %s "${ids}" | sort -u )" - - # Run the tests; see Dockerfile.test in the relevant image dir for the - # actual tests that are run. - for id in ${ids} ; do - - podman history "${id}" - - # Make sure we're explicit with the arch so that the right image is pulled - # from the respective container. - arch=$(buildah inspect "${id}" | jq -r '.OCIv1.architecture' | sort -u) - - buildah bud \ - --arch="$arch" \ - --build-arg=base="localhost/${IMAGE_NAME}" \ - ${TEST_BUILD_ARGS[@]} \ - --file=Dockerfile.test - done -fi -# ------------------------------------------------------------------------------- - -# Clean up -buildah rmi --prune || true diff --git a/images/README.md b/images/README.md new file mode 100644 index 00000000000..00eea08d295 --- /dev/null +++ b/images/README.md @@ -0,0 +1,16 @@ +The intended use is to run `build.sh`, providing it an image directory. + +Each image directory contains a `prepare.sh` script, a `Dockerfile` for +building, and a `Dockerfile.test` for testing. + +Each `prepare.sh` should at least in turn source `versions.sh`. + +`build.sh` sources `prepare.sh` to populate the env vars needed for that +particular image or do any other needed work in preparation for building. 
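+
+For example, a minimal local invocation might look like this (a sketch: it
+assumes `buildah` and `podman` are installed, and mirrors what the GitHub
+Actions workflow runs):
+
+    cd images
+    bash build.sh base-glibc-debian-bash
+
+Pushing the resulting manifest to a registry is handled separately (see
+`build.sh` for details).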
+
+To avoid inter-image dependencies, we prepare the C.utf8 locale ahead of time
+in `locale/generate_locale.sh`. This can be copied over to image dirs if/when
+needed by `prepare.sh`.
+
+`build.sh` will write to a `metadata.txt` file in the image dir with the name
+of the manifest created, so that subsequent jobs can use it.
diff --git a/images/base-glibc-busybox-bash/prepare.sh b/images/base-glibc-busybox-bash/prepare.sh
new file mode 100644
index 00000000000..b11f305bfaa
--- /dev/null
+++ b/images/base-glibc-busybox-bash/prepare.sh
@@ -0,0 +1,22 @@
+source ../versions.sh
+IMAGE_NAME="${BASE_BUSYBOX_IMAGE_NAME}"
+TAG="$BASE_TAG"
+
+BUILD_ARGS=()
+BUILD_ARGS+=("--build-arg=debian_version=${DEBIAN_VERSION}")
+BUILD_ARGS+=("--build-arg=busybox_version=${BUSYBOX_VERSION}")
+
+# Build busybox binaries for each arch; respective busybox base containers will
+# extract the relevant binary from this image.
+iidfile="$( mktemp )"
+buildah bud \
+  --iidfile="${iidfile}" \
+  --file=Dockerfile.busybox \
+  ${BUILD_ARGS[@]}
+busybox_image="$( cat "${iidfile}" )"
+rm "${iidfile}"
+
+# Override build args for what's needed in main Dockerfile
+BUILD_ARGS=()
+BUILD_ARGS+=("--build-arg=debian_version=${DEBIAN_VERSION}")
+BUILD_ARGS+=("--build-arg=busybox_image=${busybox_image}")
diff --git a/images/base-glibc-debian-bash/prepare.sh b/images/base-glibc-debian-bash/prepare.sh
new file mode 100644
index 00000000000..d3459adb0fc
--- /dev/null
+++ b/images/base-glibc-debian-bash/prepare.sh
@@ -0,0 +1,5 @@
+source ../versions.sh
+IMAGE_NAME="${BASE_DEBIAN_IMAGE_NAME}"
+TAG="$BASE_TAG"
+BUILD_ARGS=()
+BUILD_ARGS+=("--build-arg=debian_version=$DEBIAN_VERSION")
diff --git a/images/bioconda-utils-build-env-cos7/prepare.sh b/images/bioconda-utils-build-env-cos7/prepare.sh
new file mode 100644
index 00000000000..cb222c47b43
--- /dev/null
+++ b/images/bioconda-utils-build-env-cos7/prepare.sh
@@ -0,0 +1,20 @@
+source ../versions.sh
+IMAGE_NAME="${BUILD_ENV_IMAGE_NAME}"
+TAG="${BIOCONDA_UTILS_VERSION}_base$BASE_TAG"
+
+# See ../locale/generate_locale.sh for how it was generated in the first place.
+cp -r ../locale/C.utf8 .
+
+# Copy everything we need to install into this image. We need all requirements.
+mkdir -p bioconda-utils
+for item in setup.py setup.cfg versioneer.py bioconda_utils; do
+  cp -ar ../../$item bioconda-utils
+done
+
+# build script needs to special-case base images depending on archs when
+# building the build-env
+IS_BUILD_ENV=true
+
+BUILD_ARGS=()
+BUILD_ARGS+=("--build-arg=BIOCONDA_UTILS_FOLDER=bioconda-utils")
+BUILD_ARGS+=("--build-arg=bioconda_utils_version=$BIOCONDA_UTILS_VERSION")
diff --git a/images/build.sh b/images/build.sh
new file mode 100644
index 00000000000..36433205036
--- /dev/null
+++ b/images/build.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+
+# This script builds multi-arch images and a manifest that points to them. The
+# manifest can then be pushed to a registry with e.g. podman manifest push.
+#
+# Usage:
+#
+#   build.sh <image-dir>
+#
+# The only arg directly provided to this script is the image directory,
+# containing at least a Dockerfile. In that directory, if prepare.sh exists it
+# will be sourced to get all other env vars used here, as well as do any
+# image-specific prep work.
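+#
+# Example invocation (a sketch, not run by this script itself; it assumes the
+# working directory is images/, that buildah/podman are installed, and the
+# quay.io destination shown is only illustrative):
+#
+#   bash build.sh base-glibc-debian-bash
+#   podman manifest push localhost/tmp-debian:0.1 docker://quay.io/bioconda/tmp-debian:0.1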
+# +# Expected env vars populated by prepare.sh +# ----------------------------------------- +# TAG: tag to build +# ARCHS: space-separated string of archs to build +# IMAGE_NAME: name of image; created manifest will be IMAGE_NAME:tag +# BUILD_ARGS: array of arguments like ("--build-arg=argument1=the-value", "--build-arg=arg2=a") +# +# After successfully building, a metadata.txt file will be created in the image +# directory containing the manifest names that can be used to upload to +# a registry. + +set -xeu + +IMAGE_DIR=$1 + +cd $IMAGE_DIR + +[ -e prepare.sh ] && source prepare.sh + +# Add "latest" to tags +TAGS=$(echo "$TAG latest") + +for tag in ${TAGS} ; do + buildah manifest rm "${IMAGE_NAME}:${tag}" || true + buildah manifest create "${IMAGE_NAME}:${tag}" +done + +for arch in $ARCHS; do + + # This is specific to the build-env: we need to decide on the base image + # depending on the arch. + BASE_IMAGE_BUILD_ARG="" + if [ "${IS_BUILD_ENV:-false}" == "true" ]; then + if [ "$arch" == "amd64" ]; then + BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-cos7-x86_64" + fi + if [ "$arch" == "arm64" ]; then + BASE_IMAGE_BUILD_ARG="--build-arg=base_image=quay.io/condaforge/linux-anvil-aarch64" + fi + fi + + # Actual building happens here. Keep track of the built image in $iidfile. + iidfile="$( mktemp )" + buildah bud \ + --arch="${arch}" \ + --iidfile="${iidfile}" \ + --file=Dockerfile \ + ${BUILD_ARGS[@]} \ + $BASE_IMAGE_BUILD_ARG + image_id="$( cat "${iidfile}" )" + rm "${iidfile}" + + # Add a label needed for GitHub Actions to inherit container permissions from + # repo permissions. Must be set on container, not image. Then save resulting + # image. + container="$( buildah from "${image_id}" )" + buildah config \ + --label="org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils" \ + "${container}" + image_id="$( buildah commit "${container}" )" + buildah rm "${container}" + + # Add image to respective manifests. + for tag in ${TAGS} ; do + buildah tag "${image_id}" "${IMAGE_NAME}:${tag}-${arch}" + buildah manifest add "${IMAGE_NAME}:${tag}" "${image_id}" + done +done + +cat /dev/null > metadata.txt +for tag in ${TAGS} ; do + echo $IMAGE_NAME:$tag >> metadata.txt +done diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh new file mode 100644 index 00000000000..eafa7b76633 --- /dev/null +++ b/images/create-env/prepare.sh @@ -0,0 +1,44 @@ +source ../versions.sh +IMAGE_NAME="${CREATE_ENV_IMAGE_NAME}" +TAG="${BIOCONDA_UTILS_VERSION}_base$BASE_TAG" +BUILD_ARGS=() + + +# Get the exact versions of mamba and conda that were installed in build-env. +# +# If this tag exists on quay.io (that is, this create-env is being built in +# a subsequent run), then use that. 
Otherwise, we assume this tag has already
+# been built locally (and the GitHub Actions job dependency should reflect
+# this)
+if [ $(tag_exists $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG) ]; then
+  REGISTRY=quay.io/bioconda
+else
+  REGISTRY=localhost
+fi
+
+CONDA_VERSION=$(
+  podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${TAG} \
+    bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'"
+)
+MAMBA_VERSION=$(
+  podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${TAG} \
+    bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'"
+)
+# Remove trailing \r with parameter expansion
+export CONDA_VERSION=${CONDA_VERSION%$'\r'}
+export MAMBA_VERSION=${MAMBA_VERSION%$'\r'}
+
+BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION")
+BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION")
+
+# Needs the busybox image to copy some items over; that image is tagged with BASE_TAG.
+if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then
+  REGISTRY=quay.io/bioconda
+else
+  REGISTRY=localhost
+fi
+
+BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}")
+
+TEST_BUILD_ARGS=()
+TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}")
diff --git a/images/versions.sh b/images/versions.sh
new file mode 100644
index 00000000000..5a6e8aeb3f1
--- /dev/null
+++ b/images/versions.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Configures various versions to be used throughout infrastructure
+ARCHS="amd64 arm64"
+DEBIAN_VERSION=12.2
+BUSYBOX_VERSION=1.36.1
+BASE_DEBIAN_IMAGE_NAME="tmp-debian"
+BASE_BUSYBOX_IMAGE_NAME="tmp-busybox"
+BUILD_ENV_IMAGE_NAME="tmp-build-env"
+CREATE_ENV_IMAGE_NAME="tmp-create-env"
+BASE_TAG="0.1"
+
+# This assumes you've already checked out whatever branch/commit to use.
+#
+# Respects a value set outside this script, if e.g. you want GitHub Actions to
+# handle naming based on branch.
+BIOCONDA_UTILS_VERSION=${BIOCONDA_UTILS_VERSION:-$(git describe --tags --dirty --always)}
+
+# Used as the tag for create-env and build-env, which depend on bioconda-utils
+BIOCONDA_IMAGE_TAG=${BIOCONDA_UTILS_VERSION}_base${BASE_TAG}
+
+# FUNCTIONS --------------------------------------------------------------------
+
+function tag_exists () {
+  # Prints "exists" if any of the given tags is present for the image on quay.io;
+  # prints nothing otherwise. Skips "latest" tags (likely they will always be present)
+  # $1: image name
+  # $2: space-separated tags
+  local IMAGE_NAME="$1"
+  local TAGS="$2"
+
+  response="$(curl -sL "https://quay.io/api/v1/repository/bioconda/${IMAGE_NAME}/tag/")"
+
+  # Images can be set to expire; the jq query selects only non-expired images.
+  existing_tags="$(
+    printf %s "${response}" \
+      | jq -r '.tags[]|select(.end_ts == null or .end_ts >= now)|.name'
+    )" \
+    || {
+      printf %s\\n \
+        'Could not get list of image tags.' \
+        'Does the repository exist on Quay.io?'
\ + 'Quay.io REST API response was:' \ + "${response}" >&2 + return 1 + } + for tag in $TAGS ; do + case "${tag}" in + "latest" ) ;; + * ) + if printf %s "${existing_tags}" | grep -qxF "${tag}" ; then + printf 'Tag %s already exists for %s on quay.io!\n' "${tag}" "${IMAGE_NAME}" >&2 + echo "exists" + fi + esac + done +} + +function push_to_ghcr () { + # Log in to GitHub container registry and push image + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + podman push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} +} From 2c16506e33861315c36a6fdd00fa5daea611de6a Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:30:40 -0500 Subject: [PATCH 105/143] cleanup --- .gitignore | 5 +++ build.sh | 124 ----------------------------------------------------- 2 files changed, 5 insertions(+), 124 deletions(-) delete mode 100644 build.sh diff --git a/.gitignore b/.gitignore index 8e7c1e872d1..1c13699dcea 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,8 @@ docs/source/developer/_autosummary .DS_Store env recipes/ + +# files created when building images +images/*/metadata.txt +images/bioconda-utils-build-env-cos7/bioconda-utils +images/bioconda-utils-build-env-cos7/C.utf8 diff --git a/build.sh b/build.sh deleted file mode 100644 index 9b5de3a1fe1..00000000000 --- a/build.sh +++ /dev/null @@ -1,124 +0,0 @@ -#!/bin/bash - -# create-env depends on base-busybox and build-env (which in turn also depends -# on base-busybox). base-debian is independent. -# -# This can be run locally for testing, and can be used as a template for CI. -# -# base-busybox base-debian -# | | -# build-env | -# \ | -# \ | -# create-env - -set -euo - -# Used for build-env. -# bioconda-utils will be cloned to this folder inside the image dir (where the -# Dockerfile is) and the version will be checked out. -export BIOCONDA_UTILS_FOLDER=bioconda-utils -export BIOCONDA_UTILS_VERSION=v2.11.1 - -export DEBIAN_VERSION="12.2" -export BUSYBOX_VERSION="1.36.1" - -# Use same tags for base-busybox and base-debian -export BASE_TAG="0.1" - -# If the repository doesn't already exist on quay.io, by default this is -# considered an error. Set to false to avoid this (e.g., when building images -# with new names, or local test ones). -export ERROR_IF_MISSING=false - -# Architectures to build for (under emulation) -export ARCHS="arm64 amd64" - -# Store as separate vars so we can use these for dependencies. -BUILD_ENV_IMAGE_NAME=tmp-build-env -CREATE_ENV_IMAGE_NAME=tmp-create-env -BASE_DEBIAN_IMAGE_NAME=tmp-debian -BASE_BUSYBOX_IMAGE_NAME=tmp-busybox - -BUILD_BUSYBOX=false # build busybox image? -BUILD_DEBIAN=true # build debian image? -BUILD_BUILD_ENV=false # build build-env image? -BUILD_CREATE_ENV=false # build create-env image? 
- -# # Build base-busybox------------------------------------------------------------ -if [ $BUILD_BUSYBOX == "true" ]; then - - buildah manifest rm "${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" || true - buildah manifest rm "${BASE_BUSYBOX_IMAGE_NAME}:latest" || true - - IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ - IMAGE_DIR=images/base-glibc-busybox-bash \ - ARCHS=$ARCHS \ - TYPE="base-busybox" \ - TAG=$BASE_TAG \ - ./generic_build.bash -fi - -# Build base-debian------------------------------------------------------------- -if [ $BUILD_DEBIAN == "true" ]; then - - buildah manifest rm "${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" || true - buildah manifest rm "${BASE_DEBIAN_IMAGE_NAME}:latest" || true - - IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ - IMAGE_DIR=images/base-glibc-debian-bash \ - ARCHS=$ARCHS \ - TYPE="base-debian" \ - TAG=$BASE_TAG \ - ./generic_build.bash -fi - -# Build build-env--------------------------------------------------------------- - -if [ $BUILD_BUILD_ENV == "true" ]; then - # Clone bioconda-utils into same directory as Dockerfile - if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then - git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils - else - (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) - fi - - buildah manifest rm "${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" || true - buildah manifest rm "${BUILD_ENV_IMAGE_NAME}:latest" || true - - IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ - IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ - ARCHS=$ARCHS \ - TYPE="build-env" \ - TAG=$BASE_TAG \ - BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ - ./generic_build.bash -fi -# # Build create-env-------------------------------------------------------------- - -if [ $BUILD_CREATE_ENV == "true" ]; then - - buildah manifest rm "${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" || true - buildah manifest rm "${CREATE_ENV_IMAGE_NAME}:latest" || true - - # Get the exact versions of mamba and conda that were installed in build-env. - CONDA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ - bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" - ) - MAMBA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION} \ - bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" - ) - # Remove trailing \r with parameter expansion - export CONDA_VERSION=${CONDA_VERSION%$'\r'} - export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} - - IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ - IMAGE_DIR=images/create-env \ - ARCHS=$ARCHS \ - TYPE="create-env" \ - TAG=$BASE_TAG \ - BUSYBOX_IMAGE=localhost/$BASE_BUSYBOX_IMAGE_NAME \ - ./generic_build.bash -fi From 9b404957e028ff3ff02328502427a3b2d525522b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:30:49 -0500 Subject: [PATCH 106/143] copied bioconda-utils version expected to be already checked out --- images/bioconda-utils-build-env-cos7/Dockerfile | 3 --- 1 file changed, 3 deletions(-) diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 2a287e2d17d..39291edfbc4 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -32,9 +32,6 @@ WORKDIR /tmp/repo ARG BIOCONDA_UTILS_FOLDER=./bioconda-utils COPY ${BIOCONDA_UTILS_FOLDER} ./ -# Make sure we're using the configured version of bioconda-utils for this -# build. 
-RUN git checkout ${bioconda_utils_version} RUN . /opt/conda/etc/profile.d/conda.sh && conda list RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ pip wheel . && \ From d5d356002459328981db01017c0a88f58db9327c Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:31:06 -0500 Subject: [PATCH 107/143] simplify creating requirements --- images/create-env/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/images/create-env/Dockerfile b/images/create-env/Dockerfile index ea72c88d931..75e68f15e5d 100644 --- a/images/create-env/Dockerfile +++ b/images/create-env/Dockerfile @@ -11,7 +11,7 @@ RUN arch="$( uname -m )" \ # Install exact versions of conda/mamba ARG CONDA_VERSION ARG MAMBA_VERSION -RUN echo $CONDA_VERSION > requirements.txt && echo $MAMBA_VERSION >> requirements.txt +RUN echo -e "$CONDA_VERSION\n$MAMBA_VERSION" > requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env ARG BUSYBOX_IMAGE From fed48a4485271e337c68ad3423a831fcae70d786 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 16:31:29 -0500 Subject: [PATCH 108/143] first pass overhauling simplified & unified builds GH Actions --- .github/workflows/build-images.yml | 221 ++++++++++------------------- 1 file changed, 76 insertions(+), 145 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 7daf2f80aed..b02d857b53d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -13,38 +13,22 @@ on: - '.circleci/**' - 'docs/**' - 'test/**' - env: - BIOCONDA_UTILS_FOLDER: bioconda-utils - DEBIAN_VERSION: "12.2" - BUSYBOX_VERSION: "1.36.1" - BASE_TAG: "0.1.6" # "latest" will always be added during the build. - BUILD_ENV_IMAGE_NAME: tmp-build-env - CREATE_ENV_IMAGE_NAME: tmp-create-env - BASE_DEBIAN_IMAGE_NAME: tmp-debian - BASE_BUSYBOX_IMAGE_NAME: tmp-busybox - ARCHS: "amd64 arm64" + BIOCONDA_UTILS_VERSION: ${{ github.event.release && github.event.release.tag_name || github.head_ref || github.ref_name }} jobs: build-base-debian: - # NOTE: base-debian can be a separate job since it is independent of the - # others. create-env depends on build-env, and both depend on base-busybox, - # so we can't split that out. - # - # Later steps for other containers are similar, so comments are only added to - # this first job. name: Build base-debian + runs-on: ubuntu-22.04 outputs: TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} - runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - # Required for emulating ARM - name: Install qemu dependency run: | sudo apt-get update @@ -53,57 +37,31 @@ jobs: - name: Build base-debian id: base-debian run: | - # See generic_build.bash for expected env vars. The script will exit 64 - # if the tag exists. That's OK, and we don't want the entire Actions - # workflow to fail because of it, so we check the exit code. - IMAGE_NAME=$BASE_DEBIAN_IMAGE_NAME \ - IMAGE_DIR=images/base-glibc-debian-bash \ - TYPE="base-debian" \ - DEBIAN_VERSION=$DEBIAN_VERSION \ - ARCHS=$ARCHS \ - TAG=$BASE_TAG \ - ./generic_build.bash || [ $? == 64 ] - - # generic_build.bash will write key=val lines to the log ($TYPE.log); - # these lines are added to $GITHUB_OUTPUT so that later steps can use - # steps.id.outputs.key to get the value. See generic_build.bash for - # what it's writing to the log (and therefore which keys are available - # via the step's outputs). 
- cat "base-debian.log" >> $GITHUB_OUTPUT + source images/versions.sh + if [ $(tag_exists $BASE_DEBIAN_IMAGE_NAME $BASE_TAG) ]; then + echo "TAG_EXISTS_base-debian=true" >> $GITHUB_OUTPUT + else + cd images && bash build.sh base-glibc-debian-bash + fi - name: push to ghcr if: '${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }}' run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - podman push localhost/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG} - podman push localhost/${BASE_DEBIAN_IMAGE_NAME}:latest ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:latest - - build-others: - # Other containers are interdependent, we so build them sequentially. - # The steps are largely similar to base-debian above, so check there for - # comments on common parts. - name: Build base-busybox, build-env, and create-env images + source images/versions.sh + push_to_ghcr $BASE_DEBIAN_IMAGE_NAME $BASE_TAG + push_to_ghcr $BASE_DEBIAN_IMAGE_NAME latest + + build-base-busybox: + name: Build base-busybox + runs-on: ubuntu-22.04 outputs: TAG_EXISTS_base-busybox: ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} - TAG_EXISTS_build-env: ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} - TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} - BIOCONDA_UTILS_TAG: ${{ steps.get-tag.outputs.tag }} - - runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - - id: get-tag - # Get an appropriate tag to represent the version of bioconda-utils being - # used, and make it available to other steps as outputs. This will be used - # as BIOCONDA_UTILS_VERSION in later steps. - run: | - tag=${{ github.event.release && github.event.release.tag_name || github.head_ref || github.ref_name }} - printf %s "tag=${tag#v}" >> $GITHUB_OUTPUT - - name: Install qemu dependency run: | sudo apt-get update @@ -112,113 +70,85 @@ jobs: - name: Build base-busybox id: base-busybox run: | - IMAGE_NAME=$BASE_BUSYBOX_IMAGE_NAME \ - IMAGE_DIR=images/base-glibc-busybox-bash \ - TYPE="base-busybox" \ - ARCHS=$ARCHS \ - DEBIAN_VERSION=$DEBIAN_VERSION \ - BUSYBOX_VERSION=$BUSYBOX_VERSION \ - TAG=$BASE_TAG \ - ./generic_build.bash || [ $? == 64 ] - cat "base-busybox.log" >> $GITHUB_OUTPUT - - - name: push base-busybox to ghcr + source images/versions.sh + if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then + echo "TAG_EXISTS_base-busybox=true" >> $GITHUB_OUTPUT + else + cd images && bash build.sh base-glibc-busybox-bash + fi + + - name: push to ghcr if: '${{ ! 
steps.base-busybox.outputs.TAG_EXISTS_base-busybox }}' run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - podman push localhost/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG} - podman push localhost/${BASE_BUSYBOX_IMAGE_NAME}:latest ghcr.io/bioconda/${BASE_BUSYBOX_IMAGE_NAME}:latest + source images/versions.sh + push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG + push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME latest + + build-build-env: + name: Build build-env + outputs: + TAG_EXISTS_build-env: ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install qemu dependency + run: | + sudo apt-get update + sudo apt-get install -y qemu-user-static - name: Build build-env id: build-env run: | - # The build-env Dockerfile expects bioconda-utils to be cloned; even - # though this CI is operating in the bioconda-utils repo, the code - # needs to be available in the build context, which is in the - # respective image dir. - if [ ! -e "images/bioconda-utils-build-env-cos7/bioconda-utils" ]; then - git clone https://github.com/bioconda/bioconda-utils images/bioconda-utils-build-env-cos7/bioconda-utils + source images/versions.sh + if [ $(tag_exists $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG) ]; then + echo "TAG_EXISTS_build-env=true" >> $GITHUB_OUTPUT else - (cd images/bioconda-utils-build-env-cos7/bioconda-utils && git fetch) + cd images && bash build.sh bioconda-utils-build-env-cos7 fi - # If the busybox image was not built in this CI run (e.g. if the - # specified tags already exist on quay.io) then we'll get it from - # quay.io. Otherwise use the just-built one. - REGISTRY="localhost" - if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then - REGISTRY="quay.io/bioconda" - fi - - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' + - name: push to ghcr + if: '${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }}' + run: | + source images/versions.sh + push_to_ghcr $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG + push_to_ghcr $BUILD_ENV_IMAGE_NAME latest - IMAGE_NAME=$BUILD_ENV_IMAGE_NAME \ - IMAGE_DIR=images/bioconda-utils-build-env-cos7 \ - ARCHS=$ARCHS \ - TYPE="build-env" \ - BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ - ./generic_build.bash || [ $? == 64 ] - cat "build-env.log" >> $GITHUB_OUTPUT + build-create-env: + name: Build create-env + needs: [build-env base-busybox] + outputs: + TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 - - name: push build-env to ghcr - if: '${{ ! 
steps.build-env.outputs.TAG_EXISTS_build-env }}' + - name: Install qemu dependency run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - podman push "localhost/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" - podman push "localhost/${BUILD_ENV_IMAGE_NAME}:latest" "ghcr.io/bioconda/${BUILD_ENV_IMAGE_NAME}:latest" + sudo apt-get update + sudo apt-get install -y qemu-user-static - name: Build create-env id: create-env run: | - # Here we extract the conda and mamba versions from the just-created - # build-env container (or, if it was not created in this CI run because - # it already exists, then pull from quay.io). This ensures that when - # creating environments, we use the exact same conda/mamba versions - # that were used when building the package. - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - REGISTRY="localhost" - if [ ${{ steps.build-env.outputs.TAG_EXISTS_build-env }} ]; then - REGISTRY="quay.io/bioconda" - fi - CONDA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" - ) - MAMBA_VERSION=$( - podman run -t "${REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" - ) - - # Remove trailing \r with parameter expansion - export CONDA_VERSION=${CONDA_VERSION%$'\r'} - export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} - - # See build-env for explanation - REGISTRY="localhost" - if [ ${{ steps.base-busybox.outputs.TAG_EXISTS_base-busybox }} ]; then - REGISTRY="quay.io/bioconda" + source images/versions.sh + if [ $(tag_exists $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG) ]; then + echo "TAG_EXISTS_create-env=true" >> $GITHUB_OUTPUT + else + cd images && bash build.sh create-env fi - IMAGE_NAME=$CREATE_ENV_IMAGE_NAME \ - IMAGE_DIR=images/create-env \ - ARCHS=$ARCHS \ - TYPE="create-env" \ - BIOCONDA_UTILS_VERSION=$BIOCONDA_UTILS_VERSION \ - TAG="${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - BUSYBOX_IMAGE="${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" \ - ./generic_build.bash || [ $? == 64 ] - cat "create-env.log" >> $GITHUB_OUTPUT - - - name: push create-env to ghcr + - name: push to ghcr if: '${{ ! 
steps.create-env.outputs.TAG_EXISTS_create-env }}' run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - BIOCONDA_UTILS_VERSION='${{ steps.get-tag.outputs.tag }}' - podman push "localhost/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" - podman push "localhost/${CREATE_ENV_IMAGE_NAME}:latest" "ghcr.io/bioconda/${CREATE_ENV_IMAGE_NAME}:latest" + source images/versions.sh + push_to_ghcr $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG + push_to_ghcr $CREATE_ENV_IMAGE_NAME latest + # END OF BUILDING IMAGES # ---------------------------------------------------------------------- @@ -227,7 +157,8 @@ jobs: test: name: test bioconda-utils with images runs-on: ubuntu-20.04 - needs: [build-base-debian, build-others] + needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env] + if: false steps: - uses: actions/checkout@v4 From fb25c5a8f50a74399aa0f0147a3184232e06aecd Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 17:24:03 -0500 Subject: [PATCH 109/143] reenable --- .github/workflows/build-images.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index b02d857b53d..d57695df8a8 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -158,7 +158,6 @@ jobs: name: test bioconda-utils with images runs-on: ubuntu-20.04 needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env] - if: false steps: - uses: actions/checkout@v4 From b3b2ccbd15ec4867ac2a775b758dca4d35596267 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 17:25:30 -0500 Subject: [PATCH 110/143] disable main tests --- .github/workflows/GithubActionTests.yml | 3 +++ .github/workflows/build-images.yml | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index d59a85ccc7d..1df118499e1 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -6,6 +6,7 @@ concurrency: jobs: test-linux: + if: false name: Linux tests runs-on: ubuntu-latest strategy: @@ -43,6 +44,7 @@ jobs: echo "Skipping pytest - only docs modified" fi test-macosx: + if: false name: OSX tests runs-on: macos-latest steps: @@ -74,6 +76,7 @@ jobs: fi autobump-test: + if: false name: autobump test runs-on: ubuntu-latest steps: diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index d57695df8a8..3f52b84e6d9 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -118,7 +118,7 @@ jobs: build-create-env: name: Build create-env - needs: [build-env base-busybox] + needs: [build-env, base-busybox] outputs: TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} runs-on: ubuntu-22.04 From 00f52a758c3743397f69bde1f2bd073f98f4a9ed Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 17:27:13 -0500 Subject: [PATCH 111/143] fix job names --- .github/workflows/build-images.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 3f52b84e6d9..fe93a51a5a7 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -118,7 +118,7 @@ jobs: build-create-env: name: Build create-env - needs: [build-env, 
base-busybox] + needs: [build-build-env, build-base-busybox] outputs: TAG_EXISTS_create-env: ${{ steps.create-env.outputs.TAG_EXISTS_create-env }} runs-on: ubuntu-22.04 @@ -155,6 +155,7 @@ jobs: # START TESTING test: + if: false name: test bioconda-utils with images runs-on: ubuntu-20.04 needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env] From f889d090bdde82a4a5c382f9d03cb5614446ff8b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 17:34:09 -0500 Subject: [PATCH 112/143] move ghcr login back to main workflow --- .github/workflows/build-images.yml | 4 ++++ images/versions.sh | 2 -- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index fe93a51a5a7..203a37372b7 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -47,6 +47,7 @@ jobs: - name: push to ghcr if: '${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }}' run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BASE_DEBIAN_IMAGE_NAME $BASE_TAG push_to_ghcr $BASE_DEBIAN_IMAGE_NAME latest @@ -80,6 +81,7 @@ jobs: - name: push to ghcr if: '${{ ! steps.base-busybox.outputs.TAG_EXISTS_base-busybox }}' run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME latest @@ -112,6 +114,7 @@ jobs: - name: push to ghcr if: '${{ ! steps.build-env.outputs.TAG_EXISTS_build-env }}' run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG push_to_ghcr $BUILD_ENV_IMAGE_NAME latest @@ -145,6 +148,7 @@ jobs: - name: push to ghcr if: '${{ ! 
steps.create-env.outputs.TAG_EXISTS_create-env }}' run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG push_to_ghcr $CREATE_ENV_IMAGE_NAME latest diff --git a/images/versions.sh b/images/versions.sh index 5a6e8aeb3f1..083d1807609 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -57,7 +57,5 @@ function tag_exists () { } function push_to_ghcr () { - # Log in to GitHub container registry and push image - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin podman push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} } From 8ff41e1b75523dfb17cb554b078ee9f78cfdf87b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 27 Feb 2024 17:51:27 -0500 Subject: [PATCH 113/143] log in to and use ghcr.io, not localhost registry --- .github/workflows/build-images.yml | 1 + images/create-env/prepare.sh | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 203a37372b7..ce1a704f863 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -139,6 +139,7 @@ jobs: id: create-env run: | source images/versions.sh + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin if [ $(tag_exists $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG) ]; then echo "TAG_EXISTS_create-env=true" >> $GITHUB_OUTPUT else diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index eafa7b76633..0b626734dd1 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -13,7 +13,7 @@ BUILD_ARGS=() if [ $(tag_exists $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG) ]; then REGISTRY=quay.io/bioconda else - REGISTRY=localhost + REGISTRY=ghcr.io/bioconda fi CONDA_VERSION=$( @@ -35,7 +35,7 @@ BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $TAG) ]; then REGISTRY=quay.io/bioconda else - REGISTRY=localhost + REGISTRY=ghcr.io/bioconda fi BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${TAG}") From 4713899a079bef235ff138615be5b6ffab6e25e0 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 28 Feb 2024 15:53:22 -0500 Subject: [PATCH 114/143] don't use localhost --- images/create-env/prepare.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index 0b626734dd1..e77cec5f33d 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -21,7 +21,7 @@ CONDA_VERSION=$( bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t localhost/${BUILD_ENV_IMAGE_NAME}:${TAG} \ + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${TAG} \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) # Remove trailing \r with parameter expansion From f9c3216a7f572882b29149a780c6a29f4fe2a533 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 28 Feb 2024 16:43:33 -0500 Subject: [PATCH 115/143] disable test for now --- images/create-env/prepare.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index e77cec5f33d..3eedda17ff6 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -40,5 +40,5 @@ fi 
BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${TAG}") -TEST_BUILD_ARGS=() -TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") +# TEST_BUILD_ARGS=() +# TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") From d6bf0e6f4151427316bbc611e03afb24e25c238e Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 28 Feb 2024 17:36:42 -0500 Subject: [PATCH 116/143] manifest push --- images/versions.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/images/versions.sh b/images/versions.sh index 083d1807609..dc237fd8926 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -57,5 +57,5 @@ function tag_exists () { } function push_to_ghcr () { - podman push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} + podman manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} } From 1242b9a55108e19c7da66a3ef4715e7070b5a6cb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 28 Feb 2024 17:53:38 -0500 Subject: [PATCH 117/143] be explicit about base vs bioconda tags --- images/create-env/prepare.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index 3eedda17ff6..f32282249a5 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -1,6 +1,5 @@ source ../versions.sh IMAGE_NAME="${CREATE_ENV_IMAGE_NAME}" -TAG="${BIOCONDA_UTILS_VERSION}_base$BASE_TAG" BUILD_ARGS=() @@ -17,11 +16,11 @@ else fi CONDA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${TAG} \ + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) MAMBA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${TAG} \ + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" ) # Remove trailing \r with parameter expansion @@ -32,13 +31,13 @@ BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # Needs busybox image to copy some items over -if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $TAG) ]; then +if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then REGISTRY=quay.io/bioconda else REGISTRY=ghcr.io/bioconda fi -BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${TAG}") +BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=${REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}") # TEST_BUILD_ARGS=() # TEST_BUILD_ARGS+=("--build-arg=BUSYBOX_IMAGE=$BUSYBOX_IMAGE") From e55d5fa94490349afccf6fedad22c091a0628edb Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Wed, 28 Feb 2024 18:15:08 -0500 Subject: [PATCH 118/143] still need TAG for build.sh --- images/create-env/prepare.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index f32282249a5..e9fb937e741 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -1,8 +1,10 @@ source ../versions.sh IMAGE_NAME="${CREATE_ENV_IMAGE_NAME}" +TAG=$BIOCONDA_IMAGE_TAG BUILD_ARGS=() + # Get the exact versions of mamba and conda that were installed in build-env. 
# # If this tag exists on quay.io (that is, this create-env is being built in From 552488b167ef71da936dd0920a5b7e27bd3a68dc Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 1 Mar 2024 20:51:51 -0500 Subject: [PATCH 119/143] add function to move manifests from ghcr to quay.io --- images/versions.sh | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/images/versions.sh b/images/versions.sh index dc237fd8926..201da93b4ce 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -57,5 +57,16 @@ function tag_exists () { } function push_to_ghcr () { - podman manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} + buildah manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} +} + +function move_from_ghcr_to_quay () { + local image_name=$1 + local tag=$2 + buildah manifest create "local_${image_name}:${tag}" + for arch in $ARCHS; do + imgid=$(buildah pull --arch=$arch "ghcr.io/bioconda/${image_name}:${tag}") + buildah manifest add "local_${image_name}:${tag}" "${imgid}" + done + buildah manifest push "local_${image_name}:${tag}" "quay.io/bioconda/${image_name}:${tag}" } From 54ed24376b06b27ae511ca8d7402a76304abe326 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 1 Mar 2024 20:52:21 -0500 Subject: [PATCH 120/143] don't use "latest" --- .github/workflows/build-images.yml | 4 ---- images/build.sh | 26 +++++--------------------- 2 files changed, 5 insertions(+), 25 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index ce1a704f863..3ab50be7501 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -50,7 +50,6 @@ jobs: echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BASE_DEBIAN_IMAGE_NAME $BASE_TAG - push_to_ghcr $BASE_DEBIAN_IMAGE_NAME latest build-base-busybox: name: Build base-busybox @@ -84,7 +83,6 @@ jobs: echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG - push_to_ghcr $BASE_BUSYBOX_IMAGE_NAME latest build-build-env: name: Build build-env @@ -117,7 +115,6 @@ jobs: echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $BUILD_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG - push_to_ghcr $BUILD_ENV_IMAGE_NAME latest build-create-env: name: Build create-env @@ -152,7 +149,6 @@ jobs: echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh push_to_ghcr $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG - push_to_ghcr $CREATE_ENV_IMAGE_NAME latest # END OF BUILDING IMAGES diff --git a/images/build.sh b/images/build.sh index 36433205036..d707672aaa2 100644 --- a/images/build.sh +++ b/images/build.sh @@ -18,10 +18,6 @@ # ARCHS: space-separated string of archs to build # IMAGE_NAME: name of image; created manifest will be IMAGE_NAME:tag # BUILD_ARGS: array of arguments like ("--build-arg=argument1=the-value", "--build-arg=arg2=a") -# -# After successfully building, a metadata.txt file will be created in the image -# directory containing the manifest names that can be used to upload to -# a registry. 
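+#
+# Example invocation (mirroring images/README.md and the CI workflow):
+#   cd images && bash build.sh base-glibc-busybox-bash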
set -xeu @@ -31,13 +27,8 @@ cd $IMAGE_DIR [ -e prepare.sh ] && source prepare.sh -# Add "latest" to tags -TAGS=$(echo "$TAG latest") - -for tag in ${TAGS} ; do - buildah manifest rm "${IMAGE_NAME}:${tag}" || true - buildah manifest create "${IMAGE_NAME}:${tag}" -done +buildah manifest rm "${IMAGE_NAME}:${TAG}" || true +buildah manifest create "${IMAGE_NAME}:${TAG}" for arch in $ARCHS; do @@ -74,14 +65,7 @@ for arch in $ARCHS; do image_id="$( buildah commit "${container}" )" buildah rm "${container}" - # Add image to respective manifests. - for tag in ${TAGS} ; do - buildah tag "${image_id}" "${IMAGE_NAME}:${tag}-${arch}" - buildah manifest add "${IMAGE_NAME}:${tag}" "${image_id}" - done -done - -cat /dev/null > metadata.txt -for tag in ${TAGS} ; do - echo $IMAGE_NAME:$tag >> metadata.txt + # Add image to manifest + buildah tag "${image_id}" "${IMAGE_NAME}:${TAG}-${arch}" + buildah manifest add "${IMAGE_NAME}:${TAG}" "${image_id}" done From 628ab7906b1e8d46e5897c08802485e9acd46ac7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 1 Mar 2024 20:57:01 -0500 Subject: [PATCH 121/143] re-enable tests --- .github/workflows/build-images.yml | 136 +++++++++-------------------- 1 file changed, 42 insertions(+), 94 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 3ab50be7501..8244c0c6b0c 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -188,65 +188,24 @@ jobs: - name: test run: | - - BIOCONDA_UTILS_VERSION='${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}' - - # bioconda-utils uses docker, so log in to ghcr.io with docker. - echo '${{ secrets.GITHUB_TOKEN }}' | docker login ghcr.io -u '${{ github.actor }}' --password-stdin - - # we also want to use podman to push to quay.io, but we need the images - # locally to this runner to do so, hence also logging in with podman. - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - - # Decide, for each image, whether it was just built as part of this run - # (in which case it would have been just uploaded to ghcr.io) or - # otherwise pull from quay.io. - # - # If ghcr.io, then also pull the image with podman so it will be - # available to upload to quay.io in subsequent steps. We do this even - # for base-debian, even if it's not used for the test. - if [ ${{ ! 
needs.build-base-debian.outputs.TAG_EXISTS_base-debian }} ]; then - podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:${BASE_TAG}" - podman pull "ghcr.io/bioconda/${BASE_DEBIAN_IMAGE_NAME}:latest" - fi - - if [ ${{ needs.build-others.outputs.TAG_EXISTS_base-busybox }} ]; then - DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' - else - DEST_BASE_IMAGE_REGISTRY="ghcr.io/bioconda" - podman pull "${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" - podman pull "${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:latest" - fi - - if [ ${{ needs.build-others.outputs.TAG_EXISTS_build-env }} ]; then - BUILD_ENV_REGISTRY='quay.io/bioconda' - else - BUILD_ENV_REGISTRY="ghcr.io/bioconda" - podman pull "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" - podman pull "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:latest" - fi - - if [ ${{ needs.build-others.outputs.TAG_EXISTS_create-env }} ]; then - CREATE_ENV_REGISTRY='quay.io/bioconda' - else - CREATE_ENV_REGISTRY="ghcr.io/bioconda" - podman pull "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" - podman pull "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:latest" - fi - - cd recipes - - # Run a test build, specifying the exact images to use. eval "$(conda shell.bash hook)" conda activate bioconda - # Used to tell mulled-build which image to use + source versions.sh + + # Figure out which registry to use for each image, based on what was built. + [ ${{ needs.build-build-env.outputs.TAG_EXISTS_build-env }} ] && BUILD_ENV_REGISTRY='quay.io/bioconda' || BUILD_ENV_REGISTRY="ghcr.io/bioconda" + [ ${{ needs.build-create-env.outputs.TAG_EXISTS_create-env }} ] && CREATE_ENV_REGISTRY='quay.io/bioconda' || CREATE_ENV_REGISTRY="ghcr.io/bioconda" + [ ${{ needs.build-base-busybox.outputs.TAG_EXISTS_base_busybox }} ] && DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' || DEST_BASE_REGISTRY="ghcr.io/bioconda" + + # Tell mulled-build which image to use export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" # Build a package with containers. + cd recipes bioconda-utils build \ - --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ - --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_UTILS_VERSION}-base${BASE_TAG}" \ + --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ + --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ --packages seqtk \ --docker \ --mulled-test \ @@ -269,46 +228,35 @@ jobs: # Note that "latest" is built by generic_build.bash as well, and we're # including it here in the upload. - - name: Push base-debian - id: push-base-debian - uses: redhat-actions/push-to-registry@v2 + push: + name: push images + runs-on: ubuntu-20.04 + needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env, test] + steps: + - name: push base-debian if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} - with: - image: ${{ env.BASE_DEBIAN_IMAGE_NAME }} - tags: latest ${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - - - name: Push base-busybox - id: push-base-busybox - uses: redhat-actions/push-to-registry@v2 - if: ${{ ! 
needs.build-others.outputs.TAG_EXISTS_base-busybox }} - with: - image: ${{ env.BASE_BUSYBOX_IMAGE_NAME }} - tags: latest ${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - - - name: Push build-env - id: push-build-env - uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-others.outputs.TAG_EXISTS_build-env }} - with: - image: ${{ env.BUILD_ENV_IMAGE_NAME }} - tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} - - - name: Push create-env - id: push-create-env - uses: redhat-actions/push-to-registry@v2 - if: ${{ ! needs.build-others.outputs.TAG_EXISTS_create-env }} - with: - image: ${{ env.CREATE_ENV_IMAGE_NAME }} - tags: latest ${{ needs.build-others.outputs.BIOCONDA_UTILS_TAG }}-base${{ env.BASE_TAG }} - registry: quay.io/bioconda - username: ${{ secrets.QUAY_BIOCONDA_USERNAME }} - password: ${{ secrets.QUAY_BIOCONDA_TOKEN }} + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + source versions.sh + push_to_ghcr ${BASE_DEBIAN_IMAGE_NAME}:${TAG} + + - name: push base-busybox + if: ${{ ! needs.base-busybox.outputs.TAG_EXISTS_base-busybox }} + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + source versions.sh + push_to_ghcr ${BASE_BUSYBOX_IMAGE_NAME}:${TAG} + + - name: push create-env + if: ${{ ! needs.create-env.outputs.TAG_EXISTS_create-env }} + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + source versions.sh + push_to_ghcr ${CREATE_ENV_IMAGE_NAME}:${TAG} + + - name: push build-env + if: ${{ ! 
needs.build-env.outputs.TAG_EXISTS_build-env }} + run: | + echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + source versions.sh + push_to_ghcr ${BUILD_ENV_IMAGE_NAME}:${TAG} From 5a959ac6e2c83c543945df8b4a31377e8a1222ad Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 1 Mar 2024 21:05:38 -0500 Subject: [PATCH 122/143] back to podman --- .github/workflows/build-images.yml | 1 - images/versions.sh | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 8244c0c6b0c..0edbf081cd8 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -156,7 +156,6 @@ jobs: # START TESTING test: - if: false name: test bioconda-utils with images runs-on: ubuntu-20.04 needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env] diff --git a/images/versions.sh b/images/versions.sh index 201da93b4ce..77aaa4e34e1 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -57,7 +57,7 @@ function tag_exists () { } function push_to_ghcr () { - buildah manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} + podman manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} } function move_from_ghcr_to_quay () { @@ -68,5 +68,5 @@ function move_from_ghcr_to_quay () { imgid=$(buildah pull --arch=$arch "ghcr.io/bioconda/${image_name}:${tag}") buildah manifest add "local_${image_name}:${tag}" "${imgid}" done - buildah manifest push "local_${image_name}:${tag}" "quay.io/bioconda/${image_name}:${tag}" + podman manifest push "local_${image_name}:${tag}" "quay.io/bioconda/${image_name}:${tag}" } From 699b55fd7c02a715cf99ab5c1352ed53eff15ed7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Fri, 1 Mar 2024 21:39:37 -0500 Subject: [PATCH 123/143] fix path --- .github/workflows/build-images.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0edbf081cd8..05a98e83cf2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -190,7 +190,7 @@ jobs: eval "$(conda shell.bash hook)" conda activate bioconda - source versions.sh + source images/versions.sh # Figure out which registry to use for each image, based on what was built. [ ${{ needs.build-build-env.outputs.TAG_EXISTS_build-env }} ] && BUILD_ENV_REGISTRY='quay.io/bioconda' || BUILD_ENV_REGISTRY="ghcr.io/bioconda" From f30738f0870b5e6e5a81a20cd8503577c218f525 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 12:18:42 -0500 Subject: [PATCH 124/143] fix path --- .github/workflows/build-images.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 05a98e83cf2..c1e189fcf6d 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -236,26 +236,26 @@ jobs: if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - source versions.sh + source images/versions.sh push_to_ghcr ${BASE_DEBIAN_IMAGE_NAME}:${TAG} - name: push base-busybox if: ${{ ! 
needs.base-busybox.outputs.TAG_EXISTS_base-busybox }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - source versions.sh + source images/versions.sh push_to_ghcr ${BASE_BUSYBOX_IMAGE_NAME}:${TAG} - name: push create-env if: ${{ ! needs.create-env.outputs.TAG_EXISTS_create-env }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - source versions.sh + source images/versions.sh push_to_ghcr ${CREATE_ENV_IMAGE_NAME}:${TAG} - name: push build-env if: ${{ ! needs.build-env.outputs.TAG_EXISTS_build-env }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin - source versions.sh + source images/versions.sh push_to_ghcr ${BUILD_ENV_IMAGE_NAME}:${TAG} From d4b8557a801b73d7433f366b2729fc8c5d8e70e6 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 12:53:13 -0500 Subject: [PATCH 125/143] still need to clone repo for versions.sh --- .github/workflows/build-images.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index c1e189fcf6d..1d41c778f1e 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -232,6 +232,11 @@ jobs: runs-on: ubuntu-20.04 needs: [build-base-debian, build-base-busybox, build-build-env, build-create-env, test] steps: + + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: push base-debian if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} run: | From edeea83ee613cab785f2e0f0f431ff954a9bd109 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 13:50:14 -0500 Subject: [PATCH 126/143] now try moving from ghcr to quay --- .github/workflows/build-images.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 1d41c778f1e..40dafdb76a0 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -242,25 +242,25 @@ jobs: run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh - push_to_ghcr ${BASE_DEBIAN_IMAGE_NAME}:${TAG} + move_from_ghcr_to_quay ${BASE_DEBIAN_IMAGE_NAME} ${BASE_TAG} - name: push base-busybox if: ${{ ! needs.base-busybox.outputs.TAG_EXISTS_base-busybox }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh - push_to_ghcr ${BASE_BUSYBOX_IMAGE_NAME}:${TAG} + move_from_ghcr_to_quay ${BASE_BUSYBOX_IMAGE_NAME} ${BASE_TAG} - name: push create-env if: ${{ ! needs.create-env.outputs.TAG_EXISTS_create-env }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh - push_to_ghcr ${CREATE_ENV_IMAGE_NAME}:${TAG} + move_from_ghcr_to_quay ${CREATE_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} - name: push build-env if: ${{ ! 
needs.build-env.outputs.TAG_EXISTS_build-env }} run: | echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin source images/versions.sh - push_to_ghcr ${BUILD_ENV_IMAGE_NAME}:${TAG} + move_from_ghcr_to_quay ${BUILD_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} From e998ce76c884c80a3ddb0d865dcb948581bad620 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 15:15:39 -0500 Subject: [PATCH 127/143] login to quay --- .github/workflows/build-images.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 40dafdb76a0..1097c5db5ee 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -240,27 +240,28 @@ jobs: - name: push base-debian if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BASE_DEBIAN_IMAGE_NAME} ${BASE_TAG} + - name: push base-busybox if: ${{ ! needs.base-busybox.outputs.TAG_EXISTS_base-busybox }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BASE_BUSYBOX_IMAGE_NAME} ${BASE_TAG} - name: push create-env if: ${{ ! needs.create-env.outputs.TAG_EXISTS_create-env }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${CREATE_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} - name: push build-env if: ${{ ! needs.build-env.outputs.TAG_EXISTS_build-env }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login ghcr.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BUILD_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} From 0dc924248418abe241e877a76e18d1217422cf3c Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 15:47:17 -0500 Subject: [PATCH 128/143] use right creds --- .github/workflows/build-images.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 1097c5db5ee..106ead9e373 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -240,7 +240,7 @@ jobs: - name: push base-debian if: ${{ ! needs.base-debian.outputs.TAG_EXISTS_base-debian }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.QUAY_BIOCONDA_TOKEN }}' | podman login quay.io -u '${{ secrets.QUAY_BIOCONDA_USERNAME }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BASE_DEBIAN_IMAGE_NAME} ${BASE_TAG} @@ -248,20 +248,20 @@ jobs: - name: push base-busybox if: ${{ ! 
needs.base-busybox.outputs.TAG_EXISTS_base-busybox }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.QUAY_BIOCONDA_TOKEN }}' | podman login quay.io -u '${{ secrets.QUAY_BIOCONDA_USERNAME }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BASE_BUSYBOX_IMAGE_NAME} ${BASE_TAG} - name: push create-env if: ${{ ! needs.create-env.outputs.TAG_EXISTS_create-env }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.QUAY_BIOCONDA_TOKEN }}' | podman login quay.io -u '${{ secrets.QUAY_BIOCONDA_USERNAME }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${CREATE_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} - name: push build-env if: ${{ ! needs.build-env.outputs.TAG_EXISTS_build-env }} run: | - echo '${{ secrets.GITHUB_TOKEN }}' | podman login quay.io -u '${{ github.actor }}' --password-stdin + echo '${{ secrets.QUAY_BIOCONDA_TOKEN }}' | podman login quay.io -u '${{ secrets.QUAY_BIOCONDA_USERNAME }}' --password-stdin source images/versions.sh move_from_ghcr_to_quay ${BUILD_ENV_IMAGE_NAME} ${BIOCONDA_IMAGE_TAG} From 589df6ba46c225d0f8c7758c055d0725c0c3449b Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 2 Mar 2024 16:30:52 -0500 Subject: [PATCH 129/143] re-enable orig tests --- .github/workflows/GithubActionTests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index 1df118499e1..d59a85ccc7d 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -6,7 +6,6 @@ concurrency: jobs: test-linux: - if: false name: Linux tests runs-on: ubuntu-latest strategy: @@ -44,7 +43,6 @@ jobs: echo "Skipping pytest - only docs modified" fi test-macosx: - if: false name: OSX tests runs-on: macos-latest steps: @@ -76,7 +74,6 @@ jobs: fi autobump-test: - if: false name: autobump test runs-on: ubuntu-latest steps: From 59d5325de0eb9a0771bb4f87c0378b0f538a9c3f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sat, 10 Aug 2024 15:11:22 +0000 Subject: [PATCH 130/143] A round of comments and documentation --- .github/workflows/build-images.yml | 53 +++++++++++++---------- images/README.md | 30 ++++++++----- images/base-glibc-busybox-bash/prepare.sh | 8 ++-- images/build.sh | 30 ++++++++----- images/locale/Dockerfile | 2 + images/versions.sh | 18 +++++--- 6 files changed, 88 insertions(+), 53 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 106ead9e373..9e61c42ff1b 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -1,7 +1,5 @@ # Build all container images. # -# Most of the work is done in generic_build.bash, so see that file for details. - name: Build images concurrency: group: ${{ github.workflow }}-${{ github.ref }} @@ -14,14 +12,24 @@ on: - 'docs/**' - 'test/**' env: + # Used to override BIOCONDA_UTILS_VERSION in images/versions.sh BIOCONDA_UTILS_VERSION: ${{ github.event.release && github.event.release.tag_name || github.head_ref || github.ref_name }} jobs: + # JOBS FOR BUILDING IMAGES + # ---------------------------------------------------------------------- + # These jobs will build images for archs, put them into a manifest, and push + # that to GitHub Container Registry. Later, the testing jobs will test and + # push to quay.io. 
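+  #
+  # For example, a build step records whether its tag already exists:
+  #   echo "TAG_EXISTS_base-debian=true" >> $GITHUB_OUTPUT
+  # and the corresponding push step is skipped when it does:
+  #   if: '${{ ! steps.base-debian.outputs.TAG_EXISTS_base-debian }}'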
+ build-base-debian: name: Build base-debian runs-on: ubuntu-22.04 outputs: + # A note on these TAG_EXISTS_* outputs: these allow subsequent jobs to + # change behavior (e.g., skip building or skip pushing to ghcr) depending + # on whether an image has already been created. TAG_EXISTS_base-debian: ${{ steps.base-debian.outputs.TAG_EXISTS_base-debian }} steps: @@ -34,7 +42,7 @@ jobs: sudo apt-get update sudo apt-get install -y qemu-user-static - - name: Build base-debian + - name: base-debian id: base-debian run: | source images/versions.sh @@ -67,7 +75,7 @@ jobs: sudo apt-get update sudo apt-get install -y qemu-user-static - - name: Build base-busybox + - name: base-busybox id: base-busybox run: | source images/versions.sh @@ -99,7 +107,7 @@ jobs: sudo apt-get update sudo apt-get install -y qemu-user-static - - name: Build build-env + - name: build-env id: build-env run: | source images/versions.sh @@ -151,9 +159,11 @@ jobs: push_to_ghcr $CREATE_ENV_IMAGE_NAME $BIOCONDA_IMAGE_TAG - # END OF BUILDING IMAGES - # ---------------------------------------------------------------------- - # START TESTING + # END OF BUILDING IMAGES + # ---------------------------------------------------------------------- + # START TESTING + # These testing jobs will run the respective Dockerfile.test in each image + # directory. test: name: test bioconda-utils with images @@ -210,22 +220,17 @@ jobs: --mulled-test \ --force - # END TESTING - # ------------------------------------------------------------------------ - # START PUSHING IMAGES - - # For these push steps, a repository must first exist on quay.io/bioconda - # AND that repository must also be configured to allow write access for the - # appropriate service account. This must be done by a user with admin - # access to quay.io/bioconda. - # - # generic_build.bash reported whether the tag exists to the log; that was - # added to GITHUB_OUTPUT, those outputs are exposed to the jobs, and - # those jobs are dependencies of this job. So now we can use those - # outputs to determine if we should upload. - # - # Note that "latest" is built by generic_build.bash as well, and we're - # including it here in the upload. + # END TESTING + # ------------------------------------------------------------------------ + # START PUSHING IMAGES + + # For these push steps, a repository must first exist on quay.io/bioconda + # AND that repository must also be configured to allow write access for the + # appropriate service account. This must be done by a user with admin + # access to quay.io/bioconda. + # + # This uses the TAG_EXISTS_* outputs from previous jobs to determine if + # a push to quay.io should happen. push: name: push images diff --git a/images/README.md b/images/README.md index 00eea08d295..565ca37c550 100644 --- a/images/README.md +++ b/images/README.md @@ -1,16 +1,26 @@ The intended use is to run `build.sh`, providing it an image directory. -Each image directory contains a `prepare.sh` script, a `Dockerfile` for -building, and a `Dockerfile.test` for testing. -Each `prepare.sh` should at least in turn source `versions.sh`. +Image directories must at least contain the following: -`build.sh` sources `prepare.sh` to populate the env vars needed for that -particular image or do any other needed work in preparation for building. +- `prepare.sh` script, where the first line should be `source ../versions.sh` +- `Dockerfile` for building +- `Dockerfile.test` for testing. -To avoid inter-image dependencies, we prepare the C.utf8 locale ahead of time -in `locale.sh`. 
This can be copied over to image dirs if/when needed by -`prepare.sh`. +`build.sh` sources `/prepare.sh`, which sources `versions.sh` to +populate the env vars needed for that particular image. +`/prepare.sh` should also do any other needed work in preparation +for building. -`build.sh` will write to a `metadata.json` file in the image dir with the name -of the manifest created, so that subsequent jobs can use it. +A note on locale: we prepare the C.utf8 locale ahead of time in +`locale/generate_locale.sh`. This can be copied over to image dirs if/when +needed by `prepare.sh`. Previously, we were preparing the locale each time in +an image and copying that out to subsequent image. Since this is expected to +change infrequently, storing it separately like this in the repo allows us to +remove the dependency of building that first image. + +E.g., + +``` +bash build.sh base-glibc-busybox-bash +``` diff --git a/images/base-glibc-busybox-bash/prepare.sh b/images/base-glibc-busybox-bash/prepare.sh index b11f305bfaa..c97209955da 100644 --- a/images/base-glibc-busybox-bash/prepare.sh +++ b/images/base-glibc-busybox-bash/prepare.sh @@ -2,12 +2,14 @@ source ../versions.sh IMAGE_NAME="${BASE_BUSYBOX_IMAGE_NAME}" TAG="$BASE_TAG" +# Build busybox binaries for each arch. +# +# The respective busybox base containers for each arch will later extract the +# relevant binary from this image. + BUILD_ARGS=() BUILD_ARGS+=("--build-arg=debian_version=${DEBIAN_VERSION}") BUILD_ARGS+=("--build-arg=busybox_version=${BUSYBOX_VERSION}") - -# Build busybox binaries for each arch; respective busybox base containers will -# extract the relevant binary from this image. iidfile="$( mktemp )" buildah bud \ --iidfile="${iidfile}" \ diff --git a/images/build.sh b/images/build.sh index d707672aaa2..caa695243eb 100644 --- a/images/build.sh +++ b/images/build.sh @@ -8,12 +8,13 @@ # build.sh # # The only arg directly provided to this script is the image directory, -# containing at least a Dockerfile. In that directory, if prepare.sh exists it -# will be sourced to get all other env vars used here, as well as do any -# image-specific prep work. +# containing at least a Dockerfile. # -# Expected env vars populated by prepare.sh -# ----------------------------------------- +# In that directory, if prepare.sh exists it will be sourced. Use prepare.sh to +# create all required env vars and do any image-specific prep work. +# +# Expected env vars to be populated by prepare.sh +# ----------------------------------------------- # TAG: tag to build # ARCHS: space-separated string of archs to build # IMAGE_NAME: name of image; created manifest will be IMAGE_NAME:tag @@ -27,13 +28,18 @@ cd $IMAGE_DIR [ -e prepare.sh ] && source prepare.sh + +# Clean up any manifests before we start. +# IMAGE_NAME and TAG should be created by prepare.sh buildah manifest rm "${IMAGE_NAME}:${TAG}" || true buildah manifest create "${IMAGE_NAME}:${TAG}" +# ARCHS should be created by prepare.sh for arch in $ARCHS; do - # This is specific to the build-env: we need to decide on the base image - # depending on the arch. + # This logic is specific to the build-env. We need an arch-specific base + # image, but the nomenclature is inconsistent. So we directly map arch names + # to conda-forge base images. BASE_IMAGE_BUILD_ARG="" if [ "${IS_BUILD_ENV:-false}" == "true" ]; then if [ "$arch" == "amd64" ]; then @@ -44,7 +50,8 @@ for arch in $ARCHS; do fi fi - # Actual building happens here. Keep track of the built image in $iidfile. + # Actual building happens here. 
We will keep track of the built image in + # $image_id. iidfile="$( mktemp )" buildah bud \ --arch="${arch}" \ @@ -55,9 +62,10 @@ for arch in $ARCHS; do image_id="$( cat "${iidfile}" )" rm "${iidfile}" - # Add a label needed for GitHub Actions to inherit container permissions from - # repo permissions. Must be set on container, not image. Then save resulting - # image. + # In order for GitHub Actions to inherit container permissions from the repo + # permissions, we need to add a special label. However `buildah config + # --label` operates on a container, not an image. So we add the label to + # a temporary container and then save the resulting image. container="$( buildah from "${image_id}" )" buildah config \ --label="org.opencontainers.image.source=https://github.com/bioconda/bioconda-utils" \ diff --git a/images/locale/Dockerfile b/images/locale/Dockerfile index d13aa4fb6fd..fcc9124e9ae 100644 --- a/images/locale/Dockerfile +++ b/images/locale/Dockerfile @@ -1,3 +1,5 @@ +# Generate UTF-8 local to be used in other images + FROM "debian:12.1-slim" RUN apt-get update -qq \ && \ diff --git a/images/versions.sh b/images/versions.sh index 77aaa4e34e1..a724dedaa60 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -10,13 +10,12 @@ BUILD_ENV_IMAGE_NAME="tmp-build-env" CREATE_ENV_IMAGE_NAME="tmp-create-env" BASE_TAG="0.1" -# This assumes you've already checked out whatever branch/commit to use. -# -# Respects setting outside this script, if e.g. you want GitHub Actions to -# handle naming based on branch. +# Inspect this repo to get the currently-checked-out version, but if +# BIOCONDA_UTILS_VERSION was set outside this script, use that instead. BIOCONDA_UTILS_VERSION=${BIOCONDA_UTILS_VERSION:-$(git describe --tags --dirty --always)} -# Used as the tag for create-env and build-env, which depend on bioconda-utils +# This will be used as the tag for create-env and build-env images, which +# depend on bioconda-utils BIOCONDA_IMAGE_TAG=${BIOCONDA_UTILS_VERSION}_base${BASE_TAG} # FUNCTIONS -------------------------------------------------------------------- @@ -56,17 +55,26 @@ function tag_exists () { done } +# Helper function to push a just-built image to GitHub Container +# Respository, which is used as a temporary storage mechanism. function push_to_ghcr () { podman manifest push localhost/${1}:${2} ghcr.io/bioconda/${1}:${2} } +# Helper function to move an image from gchr to quay.io for public use. function move_from_ghcr_to_quay () { local image_name=$1 local tag=$2 + + # Locally-named manifest to which we'll add the different archs. buildah manifest create "local_${image_name}:${tag}" + + # Expects images for archs to be built already; add them to local manifest. for arch in $ARCHS; do imgid=$(buildah pull --arch=$arch "ghcr.io/bioconda/${image_name}:${tag}") buildah manifest add "local_${image_name}:${tag}" "${imgid}" done + + # Publish podman manifest push "local_${image_name}:${tag}" "quay.io/bioconda/${image_name}:${tag}" } From 5d534eff69f6ae1d1c2d80b1f2b147b73fcdd3ac Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 02:35:13 +0000 Subject: [PATCH 131/143] fix image tag --- test/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_utils.py b/test/test_utils.py index 4ec044dd7f8..ab63850e176 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -39,7 +39,7 @@ # docker, once without). On OSX, only the non-docker runs. 
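+# (DOCKER_BASE_IMAGE below uses the same image name that bioconda_utils/cli.py
+# falls back to for --docker-base-image, just tagged "latest" here.)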
# Docker ref for build container -DOCKER_BASE_IMAGE = "quay.io/bioconda/bioconda-utils-test-env-cos7:latest" +DOCKER_BASE_IMAGE = "quay.io/bioconda/bioconda-utils-build-env-cos7:latest" SKIP_DOCKER_TESTS = sys.platform.startswith('darwin') SKIP_NOT_OSX = not sys.platform.startswith('darwin') From 1f97cf2a1eb0c6a4012f91a2d29305ba8b8746f7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 13:43:58 +0000 Subject: [PATCH 132/143] temporarily disable full tests --- .github/workflows/GithubActionTests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/GithubActionTests.yml b/.github/workflows/GithubActionTests.yml index 7f32ebe0796..9356003b5da 100644 --- a/.github/workflows/GithubActionTests.yml +++ b/.github/workflows/GithubActionTests.yml @@ -6,6 +6,7 @@ concurrency: jobs: test-linux: + if: ${{ false }} # FIXME name: Linux tests runs-on: ubuntu-latest strategy: @@ -44,6 +45,7 @@ jobs: echo "Skipping pytest - only docs modified" fi test-macosx: + if: ${{ false }} # FIXME name: OSX tests runs-on: macos-13 steps: @@ -76,6 +78,7 @@ jobs: fi autobump-test: + if: ${{ false }} # FIXME name: autobump test runs-on: ubuntu-latest steps: From 523acf302f5591f82b7ce3a525341399e6516198 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 13:44:14 +0000 Subject: [PATCH 133/143] first try at enabling docker tests for just-built containers --- .github/workflows/build-images.yml | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 9e61c42ff1b..4afd459a2ec 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -205,20 +205,28 @@ jobs: # Figure out which registry to use for each image, based on what was built. [ ${{ needs.build-build-env.outputs.TAG_EXISTS_build-env }} ] && BUILD_ENV_REGISTRY='quay.io/bioconda' || BUILD_ENV_REGISTRY="ghcr.io/bioconda" [ ${{ needs.build-create-env.outputs.TAG_EXISTS_create-env }} ] && CREATE_ENV_REGISTRY='quay.io/bioconda' || CREATE_ENV_REGISTRY="ghcr.io/bioconda" - [ ${{ needs.build-base-busybox.outputs.TAG_EXISTS_base_busybox }} ] && DEST_BASE_IMAGE_REGISTRY='quay.io/bioconda' || DEST_BASE_REGISTRY="ghcr.io/bioconda" + [ ${{ needs.build-base-busybox.outputs.TAG_EXISTS_base_busybox }} ] && DEST_BASE_REGISTRY='quay.io/bioconda' || DEST_BASE_REGISTRY="ghcr.io/bioconda" + [ ${{ needs.build-base-debian.outputs.TAG_EXISTS_base_debian }} ] && DEST_EXTENDED_BASE_REGISTRY='quay.io/bioconda' || DEST_EXTENDED_BASE_REGISTRY="ghcr.io/bioconda" # Tell mulled-build which image to use export DEST_BASE_IMAGE="${DEST_BASE_IMAGE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}:${BASE_TAG}" + export DEFAULT_BASE_IMAGE="${DEST_BASE_REGISTRY}/${BASE_BUSYBOX_IMAGE_NAME}" + export DEFAULT_EXTENDED_BASE_IMAGE="${DEST_EXTENDED_BASE_REGISTRY}/${BASE_DEBIAN_IMAGE_NAME}" + export BUILD_ENV_IMAGE="${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" + export CREATE_ENV_IMAGE="${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" + + # # Build a package with containers. + # cd recipes + # bioconda-utils build \ + # --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ + # --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ + # --packages seqtk \ + # --docker \ + # --mulled-test \ + # --force - # Build a package with containers. 
cd recipes - bioconda-utils build \ - --docker-base-image "${BUILD_ENV_REGISTRY}/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ - --mulled-conda-image "${CREATE_ENV_REGISTRY}/${CREATE_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG}" \ - --packages seqtk \ - --docker \ - --mulled-test \ - --force + py.test --durations=0 test/ -v --log-level=DEBUG -k "docker" --tb=native # END TESTING # ------------------------------------------------------------------------ From 5ad88428c01812492a52e81ce5d81472a9ffe042 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 13:48:02 +0000 Subject: [PATCH 134/143] consider env var for build image --- bioconda_utils/cli.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 8f75948ce0f..0414d3548ab 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -476,7 +476,12 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, logger.warning(f"Using tag {image_tag} for docker image, since there is no image for a not yet release version ({VERSION}).") else: image_tag = VERSION - docker_base_image = docker_base_image or f"quay.io/bioconda/bioconda-utils-build-env-cos7:{image_tag}" + + docker_base_image = ( + docker_base_image or + os.getenv("BUILD_ENV_IMAGE", None) or + docker_base_image or f"quay.io/bioconda/bioconda-utils-build-env-cos7:{image_tag}" + ) logger.info(f"Using docker image {docker_base_image} for building.") docker_builder = docker_utils.RecipeBuilder( From 9195c9eca642f504b5da76650dcfd3e9e5509db7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 13:56:50 +0000 Subject: [PATCH 135/143] use utils, not recipes, for test --- .github/workflows/build-images.yml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 4afd459a2ec..c79e171a4aa 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -175,15 +175,15 @@ jobs: with: fetch-depth: 0 - # Clone bioconda-recipes to use as part of the tests. - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - repository: bioconda/bioconda-recipes - path: recipes + # # Clone bioconda-recipes to use as part of the tests. 
+ # - uses: actions/checkout@v4 + # with: + # fetch-depth: 0 + # repository: bioconda/bioconda-recipes + # path: recipes - - name: set path - run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH + # - name: set path + # run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH - name: Install bioconda-utils run: | @@ -225,7 +225,6 @@ jobs: # --mulled-test \ # --force - cd recipes py.test --durations=0 test/ -v --log-level=DEBUG -k "docker" --tb=native # END TESTING From f311612d41f530b69fa9220f42e3d6db5c925840 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 14:21:27 +0000 Subject: [PATCH 136/143] rm mamba from images --- .github/workflows/build-images.yml | 12 +----------- bioconda_utils/bioconda_utils-requirements.txt | 1 - images/bioconda-utils-build-env-cos7/Dockerfile | 8 ++++---- images/create-env/Dockerfile | 5 ++--- images/create-env/Dockerfile.test | 3 --- images/create-env/install-conda | 9 ++++----- images/create-env/prepare.sh | 8 +------- 7 files changed, 12 insertions(+), 34 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index c79e171a4aa..a60e446a46f 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -175,23 +175,13 @@ jobs: with: fetch-depth: 0 - # # Clone bioconda-recipes to use as part of the tests. - # - uses: actions/checkout@v4 - # with: - # fetch-depth: 0 - # repository: bioconda/bioconda-recipes - # path: recipes - - # - name: set path - # run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH - - name: Install bioconda-utils run: | export BIOCONDA_DISABLE_BUILD_PREP=1 wget https://raw.githubusercontent.com/bioconda/bioconda-common/master/{common,install-and-set-up-conda,configure-conda}.sh bash install-and-set-up-conda.sh eval "$(conda shell.bash hook)" - mamba create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt + conda create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt conda activate bioconda python setup.py install diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index 1fec6cc898d..e96b6cfd6b2 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -12,7 +12,6 @@ conda-libmamba-solver=24.1.* conda-build=24.5.* conda-index=0.4.* conda-package-streaming=0.9.* -mamba=1.5.* boa=0.17.* argh=0.31.* # CLI diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 39291edfbc4..2e05410fcc4 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -50,14 +50,14 @@ RUN . 
/opt/conda/etc/profile.d/conda.sh && conda activate base && \ sed -nE \ '/^conda([> requirements.txt +RUN echo -e "$CONDA_VERSION" > requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env ARG BUSYBOX_IMAGE diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index 5de59c76993..f04649005ef 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -5,7 +5,6 @@ RUN set -x && \ CONDA_PKGS_DIRS="/tmp/pkgs" \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ /usr/local \ file findutils grep RUN set -x && \ @@ -34,7 +33,6 @@ FROM "${base}" as build_bioconda_package RUN set -x && \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ --strip-files=\* \ /usr/local \ python @@ -58,7 +56,6 @@ FROM "${base}" as build_conda RUN set -x && \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ --env-activate-args='--prefix-is-base' \ --strip-files=\* \ --remove-paths=\*.a \ diff --git a/images/create-env/install-conda b/images/create-env/install-conda index a3b9b33272e..9bae392f935 100755 --- a/images/create-env/install-conda +++ b/images/create-env/install-conda @@ -29,16 +29,16 @@ miniconda_boostrap_prefix="$( pwd )/miniconda" # Only need `strip` executable from binutils. Other binaries from the package # and especially the "sysroot" dependency is only bloat for this container # image. (NOTE: The binary needs libgcc-ng which is explicitly added later.) - mamba create --yes \ + conda create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ binutils cp -aL "${conda_install_prefix}/bin/strip" ./strip conda run --prefix="${conda_install_prefix}" strip -- ./strip - mamba remove --yes --all \ + conda remove --yes --all \ --prefix="${conda_install_prefix}" - mamba create --yes \ + conda create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ \ @@ -116,8 +116,7 @@ conda config \ # This is intentional as it speeds up conda startup time. conda list --name=base conda info --all -mamba --version -# Make sure we have the requested conda, mamba versions installed. +# Make sure we have the requested conda version conda list \ --export '^(conda|mamba)$' \ | sed -n 's/=[^=]*$//p' \ diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index e9fb937e741..01411d31757 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -5,7 +5,7 @@ BUILD_ARGS=() -# Get the exact versions of mamba and conda that were installed in build-env. +# Get the exact versions of conda that wasinstalled in build-env. # # If this tag exists on quay.io (that is, this create-env is being built in # a subsequent run), then use that. 
Otherwise, we assume this tag has already @@ -21,16 +21,10 @@ CONDA_VERSION=$( podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) -MAMBA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ - bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" -) # Remove trailing \r with parameter expansion export CONDA_VERSION=${CONDA_VERSION%$'\r'} -export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") -BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # Needs busybox image to copy some items over if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then From 39838597bd5e223e527f647701c80e023746eba7 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 14:23:31 +0000 Subject: [PATCH 137/143] bump version to trigger container build --- images/versions.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/images/versions.sh b/images/versions.sh index a724dedaa60..ea99c0b00db 100644 --- a/images/versions.sh +++ b/images/versions.sh @@ -8,7 +8,7 @@ BASE_DEBIAN_IMAGE_NAME="tmp-debian" BASE_BUSYBOX_IMAGE_NAME="tmp-busybox" BUILD_ENV_IMAGE_NAME="tmp-build-env" CREATE_ENV_IMAGE_NAME="tmp-create-env" -BASE_TAG="0.1" +BASE_TAG="0.2" # Inspect this repo to get the currently-checked-out version, but if # BIOCONDA_UTILS_VERSION was set outside this script, use that instead. From 4a0b20db3836706c6976c81750d1fc8fc8306887 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 15:18:23 +0000 Subject: [PATCH 138/143] be more clear about passing image name to mulled --- bioconda_utils/pkg_test.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/pkg_test.py b/bioconda_utils/pkg_test.py index 97cea7f38b6..8d78d9f5a18 100644 --- a/bioconda_utils/pkg_test.py +++ b/bioconda_utils/pkg_test.py @@ -18,7 +18,8 @@ logger = logging.getLogger(__name__) -MULLED_CONDA_IMAGE = "quay.io/bioconda/create-env:latest" +# Will be provided to mulled-build via "CONDA_IMAGE" env var. +CREATE_ENV_IMAGE = os.getenv("CREATE_ENV_IMAGE", "quay.io/bioconda/create-env:latest") def get_tests(path): @@ -98,7 +99,7 @@ def test_package( channels=("conda-forge", "local", "bioconda"), mulled_args="", base_image=None, - conda_image=MULLED_CONDA_IMAGE, + conda_image=CREATE_ENV_IMAGE, live_logs = True, ): """ @@ -179,7 +180,10 @@ def test_package( env = os.environ.copy() if base_image is not None: env["DEST_BASE_IMAGE"] = base_image - env["CONDA_IMAGE"] = conda_image + if os.getenv("CONDA_IMAGE", None): + raise ValueError("CONDA_IMAGE env var already exists!") + else: + env["CONDA_IMAGE"] = conda_image with tempfile.TemporaryDirectory() as d: with utils.Progress(): p = utils.run(cmd, env=env, cwd=d, mask=False, live=live_logs) From cf71d12a20702ed6dc4dd4602db9d8d5821a8f8f Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 15:19:19 +0000 Subject: [PATCH 139/143] Revert "rm mamba from images" This reverts commit f311612d41f530b69fa9220f42e3d6db5c925840. 
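(For context on the two preceding changes: the cli.py patch ("consider env var for build image") and the pkg_test.py patch ("be more clear about passing image name to mulled") both reduce to a fallback-and-guard pattern around environment variables. The sketch below is a minimal illustration only, not the verbatim bioconda_utils code: the helper names `resolve_build_image` and `mulled_env` are invented for this note, while `BUILD_ENV_IMAGE`, `CREATE_ENV_IMAGE`, and `CONDA_IMAGE` are the environment variables actually used in the patches above.)

```python
import os

# Sketch of the precedence added in cli.py: an explicit CLI value wins,
# then the BUILD_ENV_IMAGE environment variable, then the pinned default.
def resolve_build_image(cli_value=None, image_tag="latest"):
    return (
        cli_value
        or os.getenv("BUILD_ENV_IMAGE")
        or f"quay.io/bioconda/bioconda-utils-build-env-cos7:{image_tag}"
    )

# Sketch of the guard added in pkg_test.py: the create-env image is passed
# to mulled-build via CONDA_IMAGE, but only if nothing else already set it.
def mulled_env(conda_image=os.getenv("CREATE_ENV_IMAGE",
                                     "quay.io/bioconda/create-env:latest")):
    env = os.environ.copy()
    if env.get("CONDA_IMAGE"):
        raise ValueError("CONDA_IMAGE env var already exists!")
    env["CONDA_IMAGE"] = conda_image
    return env
```

(Presumably this is what lets the CI workflow point builds and mulled tests at the just-built images by exporting these variables, without editing the pinned defaults.)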
--- .github/workflows/build-images.yml | 12 +++++++++++- bioconda_utils/bioconda_utils-requirements.txt | 1 + images/bioconda-utils-build-env-cos7/Dockerfile | 8 ++++---- images/create-env/Dockerfile | 5 +++-- images/create-env/Dockerfile.test | 3 +++ images/create-env/install-conda | 9 +++++---- images/create-env/prepare.sh | 8 +++++++- 7 files changed, 34 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index a60e446a46f..c79e171a4aa 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -175,13 +175,23 @@ jobs: with: fetch-depth: 0 + # # Clone bioconda-recipes to use as part of the tests. + # - uses: actions/checkout@v4 + # with: + # fetch-depth: 0 + # repository: bioconda/bioconda-recipes + # path: recipes + + # - name: set path + # run: echo "/opt/mambaforge/bin" >> $GITHUB_PATH + - name: Install bioconda-utils run: | export BIOCONDA_DISABLE_BUILD_PREP=1 wget https://raw.githubusercontent.com/bioconda/bioconda-common/master/{common,install-and-set-up-conda,configure-conda}.sh bash install-and-set-up-conda.sh eval "$(conda shell.bash hook)" - conda create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt + mamba create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt conda activate bioconda python setup.py install diff --git a/bioconda_utils/bioconda_utils-requirements.txt b/bioconda_utils/bioconda_utils-requirements.txt index e96b6cfd6b2..1fec6cc898d 100644 --- a/bioconda_utils/bioconda_utils-requirements.txt +++ b/bioconda_utils/bioconda_utils-requirements.txt @@ -12,6 +12,7 @@ conda-libmamba-solver=24.1.* conda-build=24.5.* conda-index=0.4.* conda-package-streaming=0.9.* +mamba=1.5.* boa=0.17.* argh=0.31.* # CLI diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 2e05410fcc4..39291edfbc4 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -50,14 +50,14 @@ RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ sed -nE \ '/^conda([> requirements.txt +ARG MAMBA_VERSION +RUN echo -e "$CONDA_VERSION\n$MAMBA_VERSION" > requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env ARG BUSYBOX_IMAGE diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index f04649005ef..5de59c76993 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -5,6 +5,7 @@ RUN set -x && \ CONDA_PKGS_DIRS="/tmp/pkgs" \ /opt/create-env/env-execute \ create-env \ + --conda=mamba \ /usr/local \ file findutils grep RUN set -x && \ @@ -33,6 +34,7 @@ FROM "${base}" as build_bioconda_package RUN set -x && \ /opt/create-env/env-execute \ create-env \ + --conda=mamba \ --strip-files=\* \ /usr/local \ python @@ -56,6 +58,7 @@ FROM "${base}" as build_conda RUN set -x && \ /opt/create-env/env-execute \ create-env \ + --conda=mamba \ --env-activate-args='--prefix-is-base' \ --strip-files=\* \ --remove-paths=\*.a \ diff --git a/images/create-env/install-conda b/images/create-env/install-conda index 9bae392f935..a3b9b33272e 100755 --- a/images/create-env/install-conda +++ b/images/create-env/install-conda @@ -29,16 +29,16 @@ miniconda_boostrap_prefix="$( pwd )/miniconda" # Only need `strip` executable from binutils. 
Other binaries from the package # and especially the "sysroot" dependency is only bloat for this container # image. (NOTE: The binary needs libgcc-ng which is explicitly added later.) - conda create --yes \ + mamba create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ binutils cp -aL "${conda_install_prefix}/bin/strip" ./strip conda run --prefix="${conda_install_prefix}" strip -- ./strip - conda remove --yes --all \ + mamba remove --yes --all \ --prefix="${conda_install_prefix}" - conda create --yes \ + mamba create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ \ @@ -116,7 +116,8 @@ conda config \ # This is intentional as it speeds up conda startup time. conda list --name=base conda info --all -# Make sure we have the requested conda version +mamba --version +# Make sure we have the requested conda, mamba versions installed. conda list \ --export '^(conda|mamba)$' \ | sed -n 's/=[^=]*$//p' \ diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index 01411d31757..e9fb937e741 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -5,7 +5,7 @@ BUILD_ARGS=() -# Get the exact versions of conda that wasinstalled in build-env. +# Get the exact versions of mamba and conda that were installed in build-env. # # If this tag exists on quay.io (that is, this create-env is being built in # a subsequent run), then use that. Otherwise, we assume this tag has already @@ -21,10 +21,16 @@ CONDA_VERSION=$( podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) +MAMBA_VERSION=$( + podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ + bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" +) # Remove trailing \r with parameter expansion export CONDA_VERSION=${CONDA_VERSION%$'\r'} +export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") +BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # Needs busybox image to copy some items over if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then From 46cf3211683779aabf5753b7f25819dc5470f1a8 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Sun, 11 Aug 2024 15:25:32 +0000 Subject: [PATCH 140/143] try reverting truststore removal --- images/bioconda-utils-build-env-cos7/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 39291edfbc4..9b23b16a790 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -54,7 +54,7 @@ RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ # FIXME: "remove truststore" only necessary due to python downgrade. # Updating requirements should fix that. # (Also this removal will break in future.) 
- mamba remove --yes truststore && \ + # mamba remove --yes truststore && \ mamba install --yes --file /opt/bioconda-utils/bioconda_utils-requirements.txt && \ pip install --no-deps --find-links /opt/bioconda-utils bioconda_utils && \ mamba clean --yes --index --tarballs && \ From c946860318d6be3b45d59361c237682b72598758 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Aug 2024 19:02:08 +0000 Subject: [PATCH 141/143] another attempt at removing mamba --- .github/workflows/build-images.yml | 2 +- images/bioconda-utils-build-env-cos7/Dockerfile | 8 ++++---- images/create-env/Dockerfile | 3 +-- images/create-env/Dockerfile.test | 3 --- images/create-env/README.md | 7 ++----- images/create-env/install-conda | 11 +++++------ images/create-env/prepare.sh | 6 ------ 7 files changed, 13 insertions(+), 27 deletions(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index c79e171a4aa..0796c98d1d8 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -191,7 +191,7 @@ jobs: wget https://raw.githubusercontent.com/bioconda/bioconda-common/master/{common,install-and-set-up-conda,configure-conda}.sh bash install-and-set-up-conda.sh eval "$(conda shell.bash hook)" - mamba create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt + conda create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt conda activate bioconda python setup.py install diff --git a/images/bioconda-utils-build-env-cos7/Dockerfile b/images/bioconda-utils-build-env-cos7/Dockerfile index 9b23b16a790..bae6f61430f 100644 --- a/images/bioconda-utils-build-env-cos7/Dockerfile +++ b/images/bioconda-utils-build-env-cos7/Dockerfile @@ -50,14 +50,14 @@ RUN . /opt/conda/etc/profile.d/conda.sh && conda activate base && \ sed -nE \ '/^conda([> requirements.txt +RUN echo -e "$CONDA_VERSION" > requirements.txt RUN ./install-conda ./requirements.txt /opt/create-env ARG BUSYBOX_IMAGE diff --git a/images/create-env/Dockerfile.test b/images/create-env/Dockerfile.test index 5de59c76993..f04649005ef 100644 --- a/images/create-env/Dockerfile.test +++ b/images/create-env/Dockerfile.test @@ -5,7 +5,6 @@ RUN set -x && \ CONDA_PKGS_DIRS="/tmp/pkgs" \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ /usr/local \ file findutils grep RUN set -x && \ @@ -34,7 +33,6 @@ FROM "${base}" as build_bioconda_package RUN set -x && \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ --strip-files=\* \ /usr/local \ python @@ -58,7 +56,6 @@ FROM "${base}" as build_conda RUN set -x && \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ --env-activate-args='--prefix-is-base' \ --strip-files=\* \ --remove-paths=\*.a \ diff --git a/images/create-env/README.md b/images/create-env/README.md index ca9a7ed9a47..cc84f3b4765 100644 --- a/images/create-env/README.md +++ b/images/create-env/README.md @@ -1,6 +1,6 @@ # bioconda/create-env -The `create-env` container image, available as [`quay.io/bioconda/create-env`](https://quay.io/repository/bioconda/create-env?tab=tags), provides [`conda`](https://github.com/conda/conda/) (and [`mamba`](https://github.com/mamba-org/mamba)) alongside a convenience wrapper `create-env` to create small container images based on Conda packages. 
+The `create-env` container image, available as [`quay.io/bioconda/create-env`](https://quay.io/repository/bioconda/create-env?tab=tags), provides [`conda`](https://github.com/conda/conda/) alongside a convenience wrapper `create-env` to create small container images based on Conda packages. ## Options @@ -37,13 +37,12 @@ Post-processing steps are triggered by arguments to `create-env`: ## Usage example: ```Dockerfile FROM quay.io/bioconda/create-env:2.1.0 as build -# Create an environment containing python=3.9 at /usr/local using mamba, strip +# Create an environment containing python=3.9 at /usr/local, strip # files and remove some less important files: RUN export CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY=0 \ && \ /opt/create-env/env-execute \ create-env \ - --conda=mamba \ --strip-files='bin/*' \ --strip-files='lib/*' \ --remove-paths='*.a' \ @@ -87,8 +86,6 @@ RUN /usr/local/env-activate.sh && python -c 'import sys; print(sys.version)' ```sh . /opt/create-env/env-activate.sh export CONDA_ALWAYS_COPY=0 - create-env --conda=mamba /opt/python-3.8 python=3.8 - create-env --conda=mamba /opt/python-3.9 python=3.9 create-env --conda=: --strip-files=\* /opt/python-3.8 create-env --conda=: --strip-files=\* /opt/python-3.9 ``` diff --git a/images/create-env/install-conda b/images/create-env/install-conda index a3b9b33272e..7ce597bc323 100755 --- a/images/create-env/install-conda +++ b/images/create-env/install-conda @@ -22,23 +22,23 @@ miniconda_boostrap_prefix="$( pwd )/miniconda" # Install the base Conda installation. . "${miniconda_boostrap_prefix}/etc/profile.d/conda.sh" - # Install conda, mamba and some additional tools: + # Install conda and some additional tools: # - tini: init program, # - binutils, findutils: tools to strip down image/environment size, # Only need `strip` executable from binutils. Other binaries from the package # and especially the "sysroot" dependency is only bloat for this container # image. (NOTE: The binary needs libgcc-ng which is explicitly added later.) - mamba create --yes \ + conda create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ binutils cp -aL "${conda_install_prefix}/bin/strip" ./strip conda run --prefix="${conda_install_prefix}" strip -- ./strip - mamba remove --yes --all \ + conda remove --yes --all \ --prefix="${conda_install_prefix}" - mamba create --yes \ + conda create --yes \ --prefix="${conda_install_prefix}" \ --channel=conda-forge \ \ @@ -116,8 +116,7 @@ conda config \ # This is intentional as it speeds up conda startup time. conda list --name=base conda info --all -mamba --version -# Make sure we have the requested conda, mamba versions installed. 
+# Make sure we have the requested conda version installed, and no mamba conda list \ --export '^(conda|mamba)$' \ | sed -n 's/=[^=]*$//p' \ diff --git a/images/create-env/prepare.sh b/images/create-env/prepare.sh index e9fb937e741..d98ad01b4fd 100644 --- a/images/create-env/prepare.sh +++ b/images/create-env/prepare.sh @@ -21,16 +21,10 @@ CONDA_VERSION=$( podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ bash -c "/opt/conda/bin/conda list --export '^conda$'| sed -n 's/=[^=]*$//p'" ) -MAMBA_VERSION=$( - podman run -t $REGISTRY/${BUILD_ENV_IMAGE_NAME}:${BIOCONDA_IMAGE_TAG} \ - bash -c "/opt/conda/bin/conda list --export '^mamba$'| sed -n 's/=[^=]*$//p'" -) # Remove trailing \r with parameter expansion export CONDA_VERSION=${CONDA_VERSION%$'\r'} -export MAMBA_VERSION=${MAMBA_VERSION%$'\r'} BUILD_ARGS+=("--build-arg=CONDA_VERSION=$CONDA_VERSION") -BUILD_ARGS+=("--build-arg=MAMBA_VERSION=$MAMBA_VERSION") # Needs busybox image to copy some items over if [ $(tag_exists $BASE_BUSYBOX_IMAGE_NAME $BASE_TAG) ]; then From 9b4c928e1a3c4f1457a5d3c3f5ac3eece1820e40 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Mon, 12 Aug 2024 19:53:55 +0000 Subject: [PATCH 142/143] use new name for image sent to mulled --- bioconda_utils/build.py | 4 ++-- bioconda_utils/cli.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bioconda_utils/build.py b/bioconda_utils/build.py index 9ba35d7495b..358db668f33 100644 --- a/bioconda_utils/build.py +++ b/bioconda_utils/build.py @@ -56,7 +56,7 @@ def build(recipe: str, pkg_paths: List[str] = None, docker_builder: docker_utils.RecipeBuilder = None, raise_error: bool = False, linter=None, - mulled_conda_image: str = pkg_test.MULLED_CONDA_IMAGE, + mulled_conda_image: str = pkg_test.CREATE_ENV_IMAGE, record_build_failure: bool = False, dag: Optional[nx.DiGraph] = None, skiplist_leafs: bool = False, @@ -317,7 +317,7 @@ def build_recipes(recipe_folder: str, config_path: str, recipes: List[str], n_workers: int = 1, worker_offset: int = 0, keep_old_work: bool = False, - mulled_conda_image: str = pkg_test.MULLED_CONDA_IMAGE, + mulled_conda_image: str = pkg_test.CREATE_ENV_IMAGE, record_build_failures: bool = False, skiplist_leafs: bool = False, live_logs: bool = True, diff --git a/bioconda_utils/cli.py b/bioconda_utils/cli.py index 0414d3548ab..eaed2c40f8b 100644 --- a/bioconda_utils/cli.py +++ b/bioconda_utils/cli.py @@ -445,7 +445,7 @@ def build(recipe_folder, config, packages="*", git_range=None, testonly=False, pkg_dir=None, anaconda_upload=False, mulled_upload_target=None, build_image=False, keep_image=False, lint=False, lint_exclude=None, check_channels=None, n_workers=1, worker_offset=0, keep_old_work=False, - mulled_conda_image=pkg_test.MULLED_CONDA_IMAGE, + mulled_conda_image=pkg_test.CREATE_ENV_IMAGE, docker_base_image=None, record_build_failures=False, skiplist_leafs=False, From af16e9a6ee1ea2e49e8c1775691ea0dab9e61437 Mon Sep 17 00:00:00 2001 From: Ryan Dale Date: Tue, 13 Aug 2024 08:44:14 -0400 Subject: [PATCH 143/143] use matched branch on bioconda-common need to be able to override common.sh so that bioconda-utils in host matches that in just-built container --- .github/workflows/build-images.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index 0796c98d1d8..bcd3586e7f2 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -188,7 +188,13 @@ jobs: - name: Install bioconda-utils run: | export 
BIOCONDA_DISABLE_BUILD_PREP=1
-          wget https://raw.githubusercontent.com/bioconda/bioconda-common/master/{common,install-and-set-up-conda,configure-conda}.sh
+          BRANCH=simplify-unify-containers
+          wget https://raw.githubusercontent.com/bioconda/bioconda-common/${BRANCH}/{common,install-and-set-up-conda,configure-conda}.sh
+          source images/versions.sh
+
+          # Ensure install-and-set-up-conda uses the same version as in the container
+          # (which uses images/versions.sh)
+          export BIOCONDA_UTILS_TAG=$BIOCONDA_UTILS_VERSION
           bash install-and-set-up-conda.sh
           eval "$(conda shell.bash hook)"
           conda create -n bioconda -y --file test-requirements.txt --file bioconda_utils/bioconda_utils-requirements.txt