# NOTE(review): the lines below are GitHub web-page chrome captured when this
# workflow was copied from the site; they are not workflow content and are
# preserved as comments so the file parses as YAML.
# Skip to content
# a fix for cached to avoid clashing of cached files using different ve… #204
# a fix for cached to avoid clashing of cached files using different ve…
# a fix for cached to avoid clashing of cached files using different ve… #204
# Workflow file for this run

# =================================================================================
# This file is part of pipeVFX.
#
# pipeVFX is a software system initally authored back in 2006 and currently
# developed by Roberto Hradec - https://bitbucket.org/robertohradec/pipevfx
#
# pipeVFX is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pipeVFX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pipeVFX. If not, see <http://www.gnu.org/licenses/>.
# =================================================================================
# boost (2h ws30)
# cmake (1:30h ws30)
# pyilmbase (1:46 ws30)
name: CI
on:
  # NOTE(review): `workflow_dispatch` does not take a `branches` filter; GitHub
  # ignores this key here — confirm the intent was simply "manual run on any branch".
  workflow_dispatch:
    branches:
      - '*'
  push:
    branches:
      - '*'
  pull_request:
    branches:
      - '*'
  release:
    types: [published]
env:
  DISPLAY: ":99.0"
  TRAVIS: "1" # this triggers -t instead of -ti in docker run
  ARNOLD_LICENSE_ORDER: none # Don't waste time looking for a license that doesn't exist
  BUILD_DIR: "pipeline/build/"
  CACHE_DIR: "pipeline/libs/"
  STUDIO: "pipevfx"
  GITHUB_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  # per-phase time budget in hours; converted to TIMEOUT_MINUTES inside the build job
  TIMEOUT_HOURS: 7
# concurrency:
#   group: ${{ github.ref_name }}
#   cancel-in-progress: true
jobs:
  # ================================================================================================================
  # This initial job just runs scons to define how many phases we have, so we can create the same amount of
  # matrix elements.
  # we do this since there's a 6 hour limit for each job in github, but there's a total of 72 hours limit
  # for the whole workflow. Spliting the build in phases allows us to use the full 72 hours!
  # ================================================================================================================
  matrix:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
      matrix_cleanup: ${{ steps.set-matrix.outputs.matrix_cleanup }}
    steps:
      - uses: actions/checkout@v2
        with:
          lfs: false
          submodules: false
      - uses: actions/setup-python@v2
        with:
          python-version: '2.x' # Version range or exact version of a Python version to use, using SemVer's version range syntax
          architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
      - name: 'Install Scons'
        run: python -m pip install scons
      # `make matrix` prints the phase list as JSON; grep keeps only the line
      # containing "name" and drops any Error lines. matrix_cleanup is the same
      # JSON with the trailing "all" entry stripped.
      # NOTE(review): `::set-output` is deprecated by GitHub; migrate these to
      # `>> $GITHUB_OUTPUT` when touching this job next.
      - id: set-matrix
        run: |
          make matrix | tee output.txt
          matrix=$(cat output.txt | grep -v Error | grep name)
          matrix_cleanup=$(echo "$matrix" | sed 's/,..all.//')
          echo $matrix
          echo $matrix | jq .
          echo "::set-output name=matrix::$matrix"
          echo "::set-output name=matrix_cleanup::$matrix_cleanup"
  # sanity check: render both matrix JSON payloads through jq and json2yaml so a
  # malformed `make matrix` output fails fast, before the long build job starts.
  check-matrix:
    runs-on: ubuntu-latest
    needs: matrix
    steps:
      - name: Install json2yaml
        run: |
          sudo npm install -g json2yaml
      - name: Check matrix definition
        run: |
          matrix='${{ needs.matrix.outputs.matrix }}'
          matrix_cleanup='${{ needs.matrix.outputs.matrix_cleanup }}'
          echo $matrix
          echo $matrix_cleanup
          echo $matrix | jq .
          echo $matrix_cleanup | jq .
          echo $matrix | json2yaml
          echo $matrix_cleanup | json2yaml
# ================================================================================================================
# the actual build!!
# ================================================================================================================
build:
needs: matrix
strategy:
# cancel other jobs in the build matrix if one job fails.
fail-fast: true
max-parallel: 1
matrix: ${{fromJson(needs.matrix.outputs.matrix)}}
# runs-on: [self-hosted, linux]
runs-on: ubuntu-latest
outputs:
matrix_cleanup: ${{ steps.set-matrix.outputs.matrix_cleanup }}
# env:
# DO_BUILD: '0'
steps:
# - name: 'self-hosted: Cleanup build folder'
# run: |
# ls -la ./
# rm -rf ./* || true
# rm -rf ./.??* || true
# ls -la ./
# apt update
# apt upgrade -y
- name: calcule and store next phase name
run: |
if [ "${{ matrix.name }}" == "all" ] ; then
export NEXT_MATRIX_NAME=""
else
export NEXT=$(expr $(echo ${{ matrix.name }} | sed 's/phase//') + 1)
export MATRIX='${{ needs.matrix.outputs.matrix }}'
export NEXT_MATRIX_NAME=$(echo $MATRIX | sed -e 's/[{}",:[]//g' -e 's/]//g' -e 's/name//g' | awk '{print $('$NEXT')}')
[ "$NEXT_MATRIX_NAME" == "" ] && export NEXT_MATRIX_NAME="all"
export NEXT_MATRIX_NAME="$NEXT_MATRIX_NAME"
fi
echo $NEXT_MATRIX_NAME ;\
echo "NEXT_MATRIX_NAME=$NEXT_MATRIX_NAME" >> $GITHUB_ENV
# also, update TIMEOUT_MINUTES since we need it in minutes!
let mins=$TIMEOUT_HOURS*60
echo "TIMEOUT_MINUTES=$mins" >> $GITHUB_ENV
# - name: try to download artifact from this phase - if works, skip this phase build (retrying the build)
# continue-on-error: true
# uses: actions/download-artifact@v2
# with:
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.NEXT_MATRIX_NAME }}-${{ github.sha }}.tar.gz
#
# - name: check if we need to run this build phase, or else skip to the next!
# if: always()
# run: |
# if [ -e ${{ env.STUDIO }}-${{ github.ref_name }}-$NEXT_MATRIX_NAME-${{ github.sha }}.tar.gz ] ; then
# rm -rf ${{ env.STUDIO }}-${{ github.ref_name }}-$NEXT_MATRIX_NAME-${{ github.sha }}.tar.gz
# echo "DO_BUILD='0'" >> $GITHUB_ENV
# echo "DO_DOWNLOAD_ARTIFACT='0'" >> $GITHUB_ENV
# else
# echo "DO_BUILD='1'" >> $GITHUB_ENV
# if [ "${{ matrix.name }}" == 'phase1' ] ; then
# echo "DO_DOWNLOAD_ARTIFACT='0'" >> $GITHUB_ENV
# else
# echo "DO_DOWNLOAD_ARTIFACT='1'" >> $GITHUB_ENV
# fi
# fi
- uses: actions/checkout@v2
if: ${{ env.DO_BUILD }} == '1'
with:
lfs: false
submodules: false
- uses: actions/cache@v1
if: ${{ env.DO_BUILD }} == '1'
with:
path: ./apps
key: |
${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}
restore-keys: |
${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}
# rm -rf /usr/local/lib/android # will release about 10 GB if you don't need Android
# rm -rf /usr/share/dotnet ex# will release about 20GB if you don't need .NET
- name: free up disk space removing things we don't need for this build
if: ${{ env.DO_BUILD }} == '1'
run: |
df -h
# python3 -c "print('='*80)"
# du -shc /opt/hostedtoolcache/* | sort -h
# python3 -c "print('='*80)"
sudo rm -rf /usr/local/lib/android* &
sudo rm -rf /usr/local/lib/node_modules* &
sudo rm -rf /usr/local/lib/heroku* &
sudo rm -rf /usr/share/swift* &
sudo rm -rf /usr/share/dotnet* &
sudo rm -rf /usr/share/miniconda* &
sudo rm -rf /opt/hostedtoolcache* &
sudo rm -rf /opt/az* &
sudo rm -rf /opt/microsoft* &
sudo rm -rf /opt/google* &
wait
# python3 -c "print('='*80)"
# du -shc /usr/share/* | sort -h
# python3 -c "print('='*80)"
# du -shc /usr/local/lib/* | sort -h
# python3 -c "print('='*80)"
# du -shc /opt/* | sort -h
# python3 -c "print('='*80)"
# du -shc /mnt/* | sort -h
python3 -c "print('='*80)"
df -h
python3 -c "print('='*80)"
cat /proc/meminfo | grep Mem
# ================================================================================================================
# restore pipeline/libs from previous run
# we use artifacts to pass on the built packages from the previous job to the next!
# this allows us to continue building the libs folder during the whole workflow.
# ================================================================================================================
- name: download artifact from previous run 1/2
uses: actions/download-artifact@v2
# if: ${{ env.DO_DOWNLOAD_ARTIFACT }} == '1'
if: matrix.name != 'phase1'
continue-on-error: true
with:
name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz00
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz
- name: download artifact from previous run 2/2
uses: actions/download-artifact@v2
# if: ${{ env.DO_DOWNLOAD_ARTIFACT }} == '1'
if: matrix.name != 'phase1'
continue-on-error: true
with:
name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz01
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz
- name: extract pipeline/libs from the previous uploaded artifact so we can continue building
# if: ${{ env.DO_DOWNLOAD_ARTIFACT }} == '1'
if: matrix.name != 'phase1'
uses: nick-fields/retry@v2
with:
timeout_minutes: ${{ env.TIMEOUT_MINUTES }}
max_attempts: 3
command: |
if [ "${{ matrix.name }}" != 'phase1' ] ; then
ls -lh --color=yes .
cat /proc/meminfo | grep Mem
df -h
if [ $(du -k ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz00 | cut -f1) -eq 0 ] ; then
echo "artifact has zero size!"
exit -1
fi
cat ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz* | tar xzf -
rm -rf *tar.gz*
ls -lh --color=yes pipeline/libs/linux/x86_64/pipevfx.5.0.0/
df -h
fi
# ================================================================================================================
# run scons and do the actual building
# ================================================================================================================
- name: build PipeVFX ${{ matrix.name }}
# if: ${{ env.DO_BUILD }} == '1'
uses: nick-fields/retry@v2
with:
timeout_minutes: ${{ env.TIMEOUT_MINUTES }}
max_attempts: 3
command: |
mkdir -p pipeline/libs/linux/x86_64/pipevfx.5.0.0/ ; \
ls -l pipeline/libs/linux/x86_64/pipevfx.5.0.0/ ; \
nice -n 19 make build DOCKER=0 PKG=${{ matrix.name }}
# ================================================================================================================
# upload pipeline/libs to an artifact with the next phase name, so we can restore it on the next phase
# ================================================================================================================
# after building, we save the pipeline/libs folder as an artifact with the
# name of the next phase, so the next phase can download and extract!
- name: save out pipeline/libs/ folder as artifacts
# if: ${{ env.DO_BUILD }} == '1'
run: |
df -h
sudo rm -rf pipeline/build/.build
df -h
sudo tar -czf - ${{ env.CACHE_DIR }}/ | split --bytes=5GB -d - ${{ env.STUDIO }}-${{ github.ref_name }}-$NEXT_MATRIX_NAME-${{ github.sha }}.tar.gz
# upload next-phase artifact
- name: upload pipeline/libs as artifact 1/2
# if: ${{ env.DO_BUILD }} == '1'
uses: actions/upload-artifact@v2
with:
name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.NEXT_MATRIX_NAME }}-${{ github.sha }}.tar.gz00
path: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.NEXT_MATRIX_NAME }}-${{ github.sha }}.tar.gz00
# upload next-phase artifact
- name: upload pipeline/libs as artifact 2/2
# if: ${{ env.DO_BUILD }} == '1'
uses: actions/upload-artifact@v2
with:
name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.NEXT_MATRIX_NAME }}-${{ github.sha }}.tar.gz01
path: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.NEXT_MATRIX_NAME }}-${{ github.sha }}.tar.gz01
# remove all tar archive files just in case
- name: Remove archive after upload so it won't be cached
# if: ${{ env.DO_BUILD }} == '1'
run: sudo rm -rf ./*.tar*
# update matrix_cleanup for the cleanup step, with all the phases, but the
# last one!
- id: set-matrix
# if: ${{ env.DO_BUILD }} == '1'
run: |
m='{ "name": [ '
n=$(( $(echo ${{ matrix.name }} | sed 's/phase//') - 1))
for each in $(seq 1 $n) ; do
m="$m \"phase$each\","
done
m="$m ] }"
matrix_cleanup=$(echo "$m" | sed 's/, ]/ ]/')
echo "$matrix_cleanup" | jq .
echo "::set-output name=matrix_cleanup::$matrix_cleanup"
# - name: Cleanup artifacts we don't need!
# if: ${{ matrix.name != 'all' && env.DO_BUILD == '1' }}
# uses: geekyeggo/delete-artifact@v1
# with:
# failOnError: false
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz
# ================================================================================================================
# cleanup artifacts we dont need!
# ================================================================================================================
cleanup:
needs: [build, matrix]
if: ${{ always() }}
strategy:
matrix:
name: ${{fromJson(needs.matrix.outputs.matrix_cleanup)}}
# # we retrieve matrix_cleanup from the build step, so we can only delete
# # the artifacts that where actually uploaded, but the last one!
# # since artifacts are incremental, there's no point on keep previous
# # ones since the last will have everything.
# name: ${{fromJson(needs.build.outputs.matrix_cleanup)}}
runs-on: ubuntu-latest
steps:
- name: Cleanup artifacts we don't need!
uses: geekyeggo/delete-artifact@v1
with:
failOnError: false
name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ matrix.name }}-${{ github.sha }}.tar.gz
# libs_cache:
# needs: build
# if: ${{ always() }}
#
# runs-on: ubuntu-latest
#
# steps:
# - name: Cache
# uses: actions/cache@v1
# with:
# path: ${{ env.CACHE_DIR }}
# key: |
# ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}
# restore-keys: |
# ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}
#
# - name: Publish Cache
# run: tar -czf ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz /pipeline/libs/
#
# - uses: actions/upload-artifact@v2
# with:
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz
# path: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz
#
# libs_release:
# needs: build
#
# runs-on: ubuntu-latest
#
# steps:
# - name: Cache
# uses: actions/cache@v1
# with:
# path: ${{ env.CACHE_DIR }}
# key: |
# ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.PREVIOUS_MATRIX_NAME }}-${{ github.sha }}
# restore-keys: |
# ${{ env.STUDIO }}-${{ github.ref_name }}-${{ env.PREVIOUS_MATRIX_NAME }}-${{ github.sha }}
#
# - name: Publish Cache
# run: tar -czf ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz ${{ env.CACHE_DIR }}/
#
# - uses: actions/upload-artifact@v2
# with:
# name: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz
# path: ${{ env.STUDIO }}-${{ github.ref_name }}-${{ github.sha }}.tar.gz