Skip to content

Commit

Permalink
Merge pull request #2602 from ggiguash/optimize_ci_caching
Browse files Browse the repository at this point in the history
USHIFT-1885: Reorganize test image blueprints to maximally build in layer1
  • Loading branch information
openshift-merge-bot[bot] authored Nov 14, 2023
2 parents 0b46b9e + b0283da commit 428c95d
Show file tree
Hide file tree
Showing 9 changed files with 85 additions and 35 deletions.
26 changes: 22 additions & 4 deletions test/bin/build_images.sh
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,14 @@ get_image_parent() {
base=$(basename "${blueprint_filename}" .toml)
if [[ "${base}" =~ '-' ]]; then
base="${base//-*/}"
get_blueprint_name "${IMAGEDIR}/blueprints/${base}.toml"

local name
name=$(find "${TESTDIR}/image-blueprints" -name "${base}.toml")
if [ -n "${name}" ] ; then
get_blueprint_name "${name}"
else
echo ""
fi
else
echo ""
fi
Expand Down Expand Up @@ -251,6 +258,11 @@ do_group() {
record_junit "${groupdir}" "${blueprint}" "depsolve" "FAILED"
fi

if ${COMPOSER_DRY_RUN} ; then
echo "Skipping the composer start operation"
continue
fi

parent_args=""
parent=$(get_image_parent "${template}")
if [ -n "${parent}" ]; then
Expand All @@ -271,7 +283,7 @@ do_group() {
buildid_list="${buildid_list} ${buildid}"
done

if ${BUILD_INSTALLER}; then
if ${BUILD_INSTALLER} && ! ${COMPOSER_DRY_RUN}; then
for image_installer in "${groupdir}"/*.image-installer; do
blueprint=$("${GOMPLATE}" --file "${image_installer}")
echo "Building image-installer from ${blueprint}"
Expand Down Expand Up @@ -382,7 +394,7 @@ usage() {
fi

cat - <<EOF
build_images.sh [-iIs] [-l layer-dir | -g group-dir] [-t template]
build_images.sh [-iIsd] [-l layer-dir | -g group-dir] [-t template]
-h Show this help
Expand All @@ -392,6 +404,8 @@ build_images.sh [-iIs] [-l layer-dir | -g group-dir] [-t template]
-s Only build source images (implies -I).
-d Dry run by skipping the composer start commands.
-l DIR Build only one layer (cannot be used with -g or -t).
The DIR should be the path to the layer to build.
Expand All @@ -408,12 +422,13 @@ EOF

BUILD_INSTALLER=true
ONLY_SOURCE=false
COMPOSER_DRY_RUN=false
LAYER=""
GROUP=""
TEMPLATE=""

selCount=0
while getopts "iIl:g:st:h" opt; do
while getopts "iIl:g:sdt:h" opt; do
case "${opt}" in
h)
usage
Expand All @@ -429,6 +444,9 @@ while getopts "iIl:g:st:h" opt; do
BUILD_INSTALLER=false
ONLY_SOURCE=true
;;
d)
COMPOSER_DRY_RUN=true
;;
l)
LAYER="$(realpath "${OPTARG}")"
selCount=$((selCount+1))
Expand Down
41 changes: 21 additions & 20 deletions test/bin/ci_phase_iso_build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,13 @@ download_build_cache() {
return 1
}

# Run image build for installer images only and update the cache:
# Run image build for the 'base' layer and update the cache:
# - Upload build artifacts
# - Update 'last' to point to the current build tag
# - Clean up older images, preserving the 'last' and the previous build tag
update_build_cache() {
# Build the images to be cached
$(dry_run) bash -x ./bin/build_images.sh -i -l ./image-blueprints/layer1-base
# Build the base layer to be cached
$(dry_run) bash -x ./bin/build_images.sh -l ./image-blueprints/layer1-base

# Upload the images and update the 'last' setting
./bin/manage_build_cache.sh upload -b "${SCENARIO_BUILD_BRANCH}" -t "${SCENARIO_BUILD_TAG}"
Expand All @@ -61,31 +61,32 @@ update_build_cache() {
./bin/manage_build_cache.sh keep -b "${SCENARIO_BUILD_BRANCH}" -t "${SCENARIO_BUILD_TAG_PREV}"
}

# Run image build, skipping the installer images if instructed.
# If 'with_cached_data' argument is 'true', do not build installer images.
# Run image build, potentially skipping the 'base' and 'periodic' layers in CI builds.
# Full builds are run if the 'CI_JOB_NAME' environment variable is not set.
#
# When the 'CI_JOB_NAME' environment variable is set:
# - If the 'with_cached_data' argument is 'true', only dry run the 'base' layer.
# - Always build the 'presubmit' layer.
# - Only build the 'periodic' layer when 'CI_JOB_NAME' contains 'periodic' token.
# Build test images, choosing which blueprint layers to build based on the
# CI job type (see the header comments above this function in the file).
#
# NOTE(review): this copy appears to come from a rendered diff in which the
# pre-change and post-change lines were interleaved without +/- markers, so
# several commands below exist in both an old variant (passing ${build_opts})
# and a new variant (plain per-layer invocations with an optional -d dry run).
# Confirm against the checked-in file before treating this text as runnable.
run_image_build() {
# $1 == 'true' when cached 'base' layer artifacts were already downloaded,
# in which case the composer work for that layer can be skipped.
local -r with_cached_data=$1
# Old-variant local (pre-change); only used by the ${build_opts} lines below.
local build_opts

build_opts=""
if ${with_cached_data} ; then
# Skip installer image builds as they were downloaded from cache
build_opts="-I"
fi

# Build the images
# Image build can be optimized in CI based on the job type
# - Always build 'base' and 'presubmit' layers
# - Only build 'periodic' layer in periodic jobs
if [ -v CI_JOB_NAME ] ; then
# Old-variant invocations (pre-change): note they pass ${build_opts}.
$(dry_run) bash -x ./bin/build_images.sh ${build_opts} -l ./image-blueprints/layer1-base
$(dry_run) bash -x ./bin/build_images.sh ${build_opts} -l ./image-blueprints/layer2-presubmit
# Conditional per-layer builds when running in CI
if ${with_cached_data} ; then
# -d: dry-run the 'base' layer — cached artifacts already exist, so the
# composer start commands are skipped (see build_images.sh -d option).
$(dry_run) bash -x ./bin/build_images.sh -d -l ./image-blueprints/layer1-base
else
$(dry_run) bash -x ./bin/build_images.sh -l ./image-blueprints/layer1-base
fi

# The 'presubmit' layer is always built in CI.
$(dry_run) bash -x ./bin/build_images.sh -l ./image-blueprints/layer2-presubmit

# The 'periodic' layer is only built when the CI job name contains 'periodic'.
if [[ "${CI_JOB_NAME}" =~ .*periodic.* ]]; then
$(dry_run) bash -x ./bin/build_images.sh ${build_opts} -l ./image-blueprints/layer3-periodic
$(dry_run) bash -x ./bin/build_images.sh -l ./image-blueprints/layer3-periodic
fi
else
# Fall back to full build when not running in CI
$(dry_run) bash -x ./bin/build_images.sh ${build_opts}
$(dry_run) bash -x ./bin/build_images.sh
fi
}

Expand Down
53 changes: 42 additions & 11 deletions test/bin/manage_build_cache.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,23 +57,54 @@ EOF
}

# Upload locally built artifacts — bootable ISO images and the ostree commit
# repository — to the S3 build cache under the branch/arch/tag prefix.
#
# NOTE(review): this copy appears to come from a rendered diff with pre- and
# post-change lines interleaved. The src_dir/dst_dir/src_size declarations and
# the first echo+sync pair are the pre-change variant of the iso_* section
# that follows — confirm against the checked-in file.
action_upload() {
# Old-variant paths (pre-change): ISO pool only, sized up-front.
local -r src_dir="${IMAGEDIR}/${VM_POOL_BASENAME}"
local -r dst_dir="s3://${AWS_BUCKET_NAME}/${BCH_SUBDIR}/${UNAME_M}/${TAG_SUBDIR}/${VM_POOL_BASENAME}"
local -r src_size=$(du -csh "${IMAGEDIR}/${VM_POOL_BASENAME}" | awk 'END{print $1}')
# New-variant bases: all artifacts share one branch/arch/tag destination prefix.
local -r src_base="${IMAGEDIR}"
local -r dst_base="s3://${AWS_BUCKET_NAME}/${BCH_SUBDIR}/${UNAME_M}/${TAG_SUBDIR}"

echo "Uploading ${src_size} of data to '${dst_dir}'"
"${AWSCLI}" s3 sync --quiet --include '*.iso' "${src_dir}" "${dst_dir}"
# Upload ISO images
local -r iso_base="${src_base}/${VM_POOL_BASENAME}"
# du -csh prints a grand total on its last line; awk 'END{...}' extracts it.
local -r iso_size="$(du -csh "${iso_base}" | awk 'END{print $1}')"
local -r iso_dest="${dst_base}/${VM_POOL_BASENAME}"

echo "Uploading ${iso_size} of ISO images to '${iso_dest}'"
"${AWSCLI}" s3 sync --quiet --include '*.iso' "${iso_base}" "${iso_dest}"

# Upload ostree commits
local -r repo_base="${src_base}/repo"
local -r repo_size="$(du -csh "${repo_base}" | awk 'END{print $1}')"
local -r repo_dest="${dst_base}/repo"

# Create dummy files in empty directories
# (presumably because 'aws s3 sync' does not recreate empty directories on
# download, which would corrupt the repo layout — TODO confirm)
find "${repo_base}" -type d -empty | while IFS= read -r dir; do
touch "${dir}/.s3-sync-empty-dir"
done

echo "Uploading ${repo_size} of ostree commits to '${repo_dest}'"
"${AWSCLI}" s3 sync --quiet "${repo_base}" "${repo_dest}"
}

# Download cached artifacts — bootable ISO images and the ostree commit
# repository — from the S3 build cache into the local image directory.
#
# NOTE(review): this copy appears to come from a rendered diff with pre- and
# post-change lines interleaved. The src_dir/dst_dir declarations, the
# "Downloading data" echo+sync pair, and the dst_size lines are the pre-change
# variant of the iso_*/repo_* sections — confirm against the checked-in file.
action_download() {
# Old-variant paths (pre-change): ISO pool only.
local -r src_dir="s3://${AWS_BUCKET_NAME}/${BCH_SUBDIR}/${UNAME_M}/${TAG_SUBDIR}/${VM_POOL_BASENAME}"
local -r dst_dir="${IMAGEDIR}/${VM_POOL_BASENAME}"
# New-variant bases: mirror of the upload layout in action_upload.
local -r src_base="s3://${AWS_BUCKET_NAME}/${BCH_SUBDIR}/${UNAME_M}/${TAG_SUBDIR}"
local -r dst_base="${IMAGEDIR}"

# Download ISO images
local -r iso_base="${src_base}/${VM_POOL_BASENAME}"
local -r iso_dest="${dst_base}/${VM_POOL_BASENAME}"

echo "Downloading ISO images from '${iso_base}'"
"${AWSCLI}" s3 sync --quiet --include '*.iso' "${iso_base}" "${iso_dest}"

# Size is measured after the sync completes; du -csh total via awk END.
local -r iso_size="$(du -csh "${iso_dest}" | awk 'END{print $1}')"
echo "Downloaded ${iso_size} of ISO images"

# Download ostree commits
local -r repo_base="${src_base}/repo"
local -r repo_dest="${dst_base}/repo"

echo "Downloading data from '${src_dir}'"
"${AWSCLI}" s3 sync --quiet --include '*.iso' "${src_dir}" "${dst_dir}"
echo "Downloading ostree commits from '${repo_base}'"
"${AWSCLI}" s3 sync --quiet "${repo_base}" "${repo_dest}"

# Old-variant size report (pre-change).
local -r dst_size=$(du -csh "${IMAGEDIR}/${VM_POOL_BASENAME}" | awk 'END{print $1}')
echo "Downloaded ${dst_size} data"
local -r repo_size="$(du -csh "${repo_dest}" | awk 'END{print $1}')"
echo "Downloaded ${repo_size} of ostree commits"
}

action_verify() {
Expand Down

0 comments on commit 428c95d

Please sign in to comment.