From 1f2b5563c800d657df7e4caee35e6b17df4f8379 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 17 May 2022 15:23:24 +0000
Subject: [PATCH 01/98] set the environment variables at the end, remove conda
 lib from LD_LIBRARY_PATH

---
 jupyterhub/Dockerfile | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index fd525d04..ad8671de 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -13,18 +13,6 @@ RUN conda install -c conda-forge -c intel -c ccpi -c astra-toolbox --file requir
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
 
-# Switch back to jovyan to avoid accidental container runs as root
-# From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
-# Set environment variables for SIRF
-USER jovyan
-ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH"
-ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:/opt/conda/lib/:$LD_LIBRARY_PATH"
-ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python"
-ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL"
-ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/"
-ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF"
-RUN echo $PATH
-
 # Make sure the image has the same libraries as the standard SIRF docker image
 # Add to the docker image the appropriate stuff
 user root
@@ -57,6 +45,19 @@ RUN chmod +x /usr/bin/tini
 RUN conda install deprecation nibabel nose docopt -c conda-forge -c intel -c astra-toolbox/label/dev -c ccpi
 RUN python -m pip install git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
 
+
+# Switch back to jovyan to avoid accidental container runs as root
+# From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
+# Set environment variables for SIRF
+USER jovyan
+ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH"
+ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:$LD_LIBRARY_PATH"
+ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python"
+ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL"
+ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/"
+ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF"
+RUN echo $PATH
+
 # switch back to
 USER jovyan

From 000ec9a315087257874bd108542e249c0957bfc0 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 17 May 2022 15:47:39 +0000
Subject: [PATCH 02/98] requirements for CIL

---
 docker/requirements_conda_forge.txt | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/docker/requirements_conda_forge.txt b/docker/requirements_conda_forge.txt
index 81e9bbe9..4f9b3d77 100644
--- a/docker/requirements_conda_forge.txt
+++ b/docker/requirements_conda_forge.txt
@@ -1,4 +1,20 @@
+# cil
+# cil-astra
+# ccpi-regulariser
 ipp
 ipp-devel
 ipp-include
-tigre
+tigre
+# tomophantom=1.4.10
+python-wget
+matplotlib<3.5 # CIL
+Cython # CIL
+numpy=1.20 # CIL
+scipy # CIL
+h5py # CIL
+Pillow # CIL
+wget # CIL
+six # CIL
+olefile # CIL
+pywavelets # CIL
+h5py
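> Note (reviewer sketch): PATCH 01 drops `/opt/conda/lib/` from `LD_LIBRARY_PATH` so that the SIRF binaries no longer pick up conda's copies of shared libraries. A minimal way to check this inside an image built from the modified Dockerfile is sketched below; the image tag is illustrative (use whatever tag the build produced), and the wildcard simply covers the SIRF install tree.

```bash
# Hedged sketch: verify the runtime library setup after PATCH 01.
docker run --rm synerbi/sirf:sirf-core bash -c '
  echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH"   # should no longer contain /opt/conda/lib/
  ldd /opt/SIRF-SuperBuild/INSTALL/lib/*.so | grep "not found" || echo "all shared libraries resolved"
'
```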
From 7e42a9543527e4326153ec20a61ae251589d017a Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 17 May 2022 15:48:04 +0000
Subject: [PATCH 03/98] do not run pip install after conda install

---
 docker/user_python-ubuntu.sh | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 docker/user_python-ubuntu.sh

diff --git a/docker/user_python-ubuntu.sh b/docker/user_python-ubuntu.sh
old mode 100755
new mode 100644

From 9560bf5a417e2babef14df649329717789fc0111 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 17 May 2022 16:14:12 +0000
Subject: [PATCH 04/98] do not set LD_LIBRARY_PATH

---
 docker/.bashrc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/.bashrc b/docker/.bashrc
index 6b19b6bc..de6785c7 100644
--- a/docker/.bashrc
+++ b/docker/.bashrc
@@ -26,7 +26,7 @@ export PS1='sirf$ '
 
 # Need to add this as we have built using some of these shared libraries
 # See https://github.com/SyneRBI/SIRF-SuperBuild/issues/573
-export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:PYTHON_INSTALL_DIR/lib
+# export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:PYTHON_INSTALL_DIR/lib
 
 # .local/bin (used by pip for instance)
 export PATH="${PATH}":~/.local/bin

From ec6c98b219de9693e177d4bb9347279b46008bfe Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Wed, 18 May 2022 08:52:31 +0000
Subject: [PATCH 05/98] updates

---
 docker/Dockerfile                   | 1 +
 docker/requirements_conda_forge.txt | 5 +++++
 jupyterhub/README.md                | 2 +-
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index 8d94c9ca..ae6d42fb 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -147,6 +147,7 @@ RUN bash user_sirf-ubuntu.sh \
   && rm user_sirf-ubuntu.sh \
   && chmod -R go+rwX /opt/SIRF-SuperBuild/INSTALL
 
+
 ENV DEBIAN_FRONTEND ''
 
 # go back to root (entrypoint.sh will switch users)
diff --git a/docker/requirements_conda_forge.txt b/docker/requirements_conda_forge.txt
index 4f9b3d77..9591e4ec 100644
--- a/docker/requirements_conda_forge.txt
+++ b/docker/requirements_conda_forge.txt
@@ -18,3 +18,8 @@ six # CIL
 olefile # CIL
 pywavelets # CIL
 h5py
+nose
+docopt
+nibabel
+deprecation
+nose
diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index e3b31800..89be09a8 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -129,4 +129,4 @@ cd ../datascience-notebook
 docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11 .
 ```
 
-Build of SIRF on Ubuntu 20.04 [fails](https://github.com/SyneRBI/SIRF-SuperBuild/issues/649)
\ No newline at end of file
+Build of SIRF on Ubuntu 20.04 [fails](https://github.com/SyneRBI/SIRF-SuperBuild/issues/649)

From 21978c2f6f2e5860478aceed28ff64f60d8988bc Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 19 May 2022 07:49:13 +0000
Subject: [PATCH 06/98] updates for PSMR2022

---
 docker/requirements_conda_forge.txt | 1 +
 jupyterhub/Dockerfile               | 9 +++------
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/docker/requirements_conda_forge.txt b/docker/requirements_conda_forge.txt
index 9591e4ec..825c31cc 100644
--- a/docker/requirements_conda_forge.txt
+++ b/docker/requirements_conda_forge.txt
@@ -17,6 +17,7 @@ wget # CIL
 six # CIL
 olefile # CIL
 pywavelets # CIL
+dxchange
 h5py
 nose
 docopt
 nibabel
 deprecation
 nose
diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index ad8671de..d448e742 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -2,11 +2,6 @@ ARG BASE_IMAGE=ubuntu:18.04
 # paskino/jupyter datascience-notebook-cuda11
 FROM ${BASE_IMAGE} as base
 
-# Install CIL with all packages from conda: https://github.com/TomographicImaging/CIL#installation
-COPY requirements_conda_forge.txt .
-RUN conda install -c conda-forge -c intel -c ccpi -c astra-toolbox --file requirements_conda_forge.txt
-
-
 # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
 # not documented in https://docs.docker.com/engine/reference/builder/#copy
 # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
@@ -42,9 +37,11 @@ RUN cp /tini /usr/bin
 RUN chmod +x /usr/bin/tini
 
 # install requirements skipping CIL ones that are taken care of by conda
-RUN conda install deprecation nibabel nose docopt -c conda-forge -c intel -c astra-toolbox/label/dev -c ccpi
+COPY requirements_conda_forge.txt .
+RUN conda install -c conda-forge -c intel -c astra-toolbox -c ccpi --file requirements_conda_forge.txt
 RUN python -m pip install git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
 
+RUN python -m pip install torch==1.7.1 scikit-image
 
 # Switch back to jovyan to avoid accidental container runs as root
 # From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
 # Set environment variables for SIRF

From 83d0df76aabb513b42113d7491da24726688d6af Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Fri, 20 May 2022 10:39:41 +0000
Subject: [PATCH 07/98] added CIL dependencies

---
 docker/requirements_conda_forge.txt | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/docker/requirements_conda_forge.txt b/docker/requirements_conda_forge.txt
index 825c31cc..0e5829dd 100644
--- a/docker/requirements_conda_forge.txt
+++ b/docker/requirements_conda_forge.txt
@@ -17,8 +17,11 @@ wget # CIL
 six # CIL
 olefile # CIL
 pywavelets # CIL
-dxchange
-h5py
+olefile>=0.46 # CIL
+dxchange # CIL
+h5py # CIL
+tqdm # CIL
+numba # CIL
 nose
 docopt
 nibabel

From 6352d47c6358c329f4cbb219e696723c0ccfcb52 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Fri, 20 May 2022 10:41:32 +0000
Subject: [PATCH 08/98] readme updates

---
 jupyterhub/README.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 89be09a8..b80f825f 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -73,6 +73,9 @@ Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil)
 
 docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DBUILD_ASTRA=ON" --target sirf .
 
+# build for PSMRTBP2022
+ nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/paskino/STIR.git -DSTIR_TAG=bump_parallelproj" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
+
 ```
 # tag as synerbi/sirf:sirf-core
 docker tag cd1ed7d07d11 synerbi/sirf:sirf-core

From 5e9a790f63365e61e18cbe8dba135ff2a74caa35 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Fri, 20 May 2022 12:37:45 +0000
Subject: [PATCH 09/98] add build command

---
 jupyterhub/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index b80f825f..b18f5fe9 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -74,7 +74,7 @@ Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil)
 docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DBUILD_ASTRA=ON" --target sirf .
 
 # build for PSMRTBP2022
- nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/paskino/STIR.git -DSTIR_TAG=bump_parallelproj" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
+ nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/paskino/STIR.git -DSTIR_TAG=bump_parallelproj -DSIRF_TAG=origin/master" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
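> Note (reviewer sketch): the `nohup ... > build_jupyterhub_env.log &` commands above run the SuperBuild in the background. A small helper, assuming only the log file name and tagging convention already used in this README, to follow the build and tag the resulting image:

```bash
# Follow the background build started above
tail -f build_jupyterhub_env.log

# List the most recent images to find the freshly built one, then tag it
docker images --format '{{.ID}}  {{.Repository}}:{{.Tag}}  {{.CreatedSince}}' | head -n 5
docker tag <IMAGE_ID> synerbi/sirf:sirf-core   # replace <IMAGE_ID> with the ID from the list
```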
From 017e44712ab5e457932197e9c2408c5374ae0348 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Sat, 21 May 2022 21:26:21 +0000
Subject: [PATCH 10/98] update build command

---
 jupyterhub/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index b18f5fe9..891bd45f 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -74,8 +74,8 @@ Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil)
 docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DBUILD_ASTRA=ON" --target sirf .
 
 # build for PSMRTBP2022
- nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/paskino/STIR.git -DSTIR_TAG=bump_parallelproj -DSIRF_TAG=origin/master" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
 ```
+ nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=origin/master -DSIRF_URL=https://github.com/paskino/SIRF.git -DSIRF_TAG=edo_lm-recon -Dparallelproj_TAG=v0.8.0" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
 
 # tag as synerbi/sirf:sirf-core
 docker tag cd1ed7d07d11 synerbi/sirf:sirf-core

From e43b037121be01dba6f63ab3e6640ca404b2ce90 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 24 May 2022 15:24:56 +0000
Subject: [PATCH 11/98] copy stuff at the end

---
 jupyterhub/Dockerfile | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index d448e742..fedd3236 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -2,12 +2,6 @@ ARG BASE_IMAGE=ubuntu:18.04
 # paskino/jupyter datascience-notebook-cuda11
 FROM ${BASE_IMAGE} as base
 
-# https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
-# not documented in https://docs.docker.com/engine/reference/builder/#copy
-# FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
-COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
-
-
 # Make sure the image has the same libraries as the standard SIRF docker image
 # Add to the docker image the appropriate stuff
 user root
@@ -43,6 +37,11 @@ RUN python -m pip install git+https://github.com/ismrmrd/ismrmrd-python-tools.gi
 
 RUN python -m pip install torch==1.7.1 scikit-image
 
+# https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
+# not documented in https://docs.docker.com/engine/reference/builder/#copy
+# FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
+COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
+
 # Switch back to jovyan to avoid accidental container runs as root
 # From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
 # Set environment variables for SIRF

From f5b0dc5dea6b44ac4fb2bc142ea686cc58926d10 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Wed, 25 May 2022 21:12:30 +0000
Subject: [PATCH 12/98] copy SIRF sources

---
 jupyterhub/Dockerfile | 1 +
 1 file changed, 1 insertion(+)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index fedd3236..b39ce123 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -41,6 +41,7 @@ RUN python -m pip install torch==1.7.1 scikit-image
 # not documented in https://docs.docker.com/engine/reference/builder/#copy
 # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
+COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/
 
 # Switch back to jovyan to avoid accidental container runs as root
 # From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
 # Set environment variables for SIRF

From 8c0e86755285d601f592c3aecb6a4ab096eea0ec Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 23 Jun 2022 15:27:20 +0100
Subject: [PATCH 13/98] fix formatting

---
 jupyterhub/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 891bd45f..65b1c7c6 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -72,7 +72,7 @@ Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil)
 
 ```
 docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DBUILD_ASTRA=ON" --target sirf .
-
+```
 # build for PSMRTBP2022
 ```
  nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=origin/master -DSIRF_URL=https://github.com/paskino/SIRF.git -DSIRF_TAG=edo_lm-recon -Dparallelproj_TAG=v0.8.0" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &

From 0125633ca52000a78ba9de2f1f8615a718cea68b Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 7 Jul 2022 13:08:12 +0000
Subject: [PATCH 14/98] wip

---
 jupyterhub/Dockerfile | 3 +++
 jupyterhub/README.md  | 9 ++++++++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index b39ce123..2fd02f7f 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -36,6 +36,7 @@ RUN conda install -c conda-forge -c intel -c astra-toolbox -c ccpi --file requir
 RUN python -m pip install git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
 
 RUN python -m pip install torch==1.7.1 scikit-image
+RUN python -m pip install brainweb
 
 # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
 # not documented in https://docs.docker.com/engine/reference/builder/#copy
@@ -53,6 +54,8 @@ ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python"
 ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL"
 ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/"
 ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF"
+#Suppress output from Gadgetron which gives some problems on notebooks (QUIERO)
+ENV GADGETRON_LOG_MASK ""
 RUN echo $PATH
 
 # switch back to
diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 65b1c7c6..1f8ee302 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -51,6 +51,13 @@ docker tag 5c63287f0aee paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel
 
 Finally we have the base `datascience-notebook` with the `nvidia/cuda:11.5.0-cudnn8-devel-ubuntu18.04` base image.
 
+
+Possible fix for plotting.
+```
+conda update -c conda-forge jupyterlab ipympl
+%matplotlib widget
+```
+
 ### Start building SIRF
 
 Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.5.0-cudnn8-devel-ubuntu18.04` base image.
@@ -75,7 +82,7 @@ docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --
 ```
 # build for PSMRTBP2022
 ```
- nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=origin/master -DSIRF_URL=https://github.com/paskino/SIRF.git -DSIRF_TAG=edo_lm-recon -Dparallelproj_TAG=v0.8.0" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_env.log &
+ nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=master -DBUILD_STIR_EXECUTABLES=ON -DSIRF_URL=https://github.com/SyneRBI/SIRF.git -DSIRF_TAG=lm-recon -Dparallelproj_TAG=v0.8" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_lm.log &
 
 # tag as synerbi/sirf:sirf-core
 docker tag cd1ed7d07d11 synerbi/sirf:sirf-core

From 92a00475e4726b4b710d69368d1b0c1a73922df4 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Mon, 23 Jan 2023 14:23:36 +0000
Subject: [PATCH 15/98] updates jupyterhub for Ubuntu 22.04

---
 CHANGES.md                  |  3 ++
 jupyterhub/Dockerfile       | 23 ++++------
 jupyterhub/README.md        | 90 +++++++++++--------------------------
 jupyterhub/requirements.yml | 13 ++++++
 4 files changed, 51 insertions(+), 78 deletions(-)
 create mode 100644 jupyterhub/requirements.yml

diff --git a/CHANGES.md b/CHANGES.md
index f4b5adc0..1048e328 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -54,6 +54,9 @@
 - fixes documentation
 - use the VMSVGA graphics controller
 - use environment variables in Vagrantfile for easier building
+- jupyterhub updates:
+  - Ubuntu: 22.04 and nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04
+  - added requirements.yml for jupyterhub
 - updated versions:
   - SIRF: v3.4.0
   - CIL: a6062410028c9872c5b355be40b96ed1497fed2a > 22.1.0
diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index 2fd02f7f..73ffe439 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -5,38 +5,33 @@ FROM ${BASE_IMAGE} as base
 # Make sure the image has the same libraries as the standard SIRF docker image
 # Add to the docker image the appropriate stuff
 user root
-COPY build_essential-ubuntu.sh .
+COPY docker/build_essential-ubuntu.sh .
 RUN bash build_essential-ubuntu.sh
 RUN rm build_essential-ubuntu.sh
 
 # Python (build)
-COPY build_python-ubuntu.sh .
+COPY docker/build_python-ubuntu.sh .
 RUN bash build_python-ubuntu.sh
 RUN rm build_python-ubuntu.sh
 
 # Gadgetron
-COPY build_gadgetron-ubuntu.sh .
+COPY docker/build_gadgetron-ubuntu.sh .
 RUN bash build_gadgetron-ubuntu.sh
 RUN rm build_gadgetron-ubuntu.sh
 
 # SIRF external deps
-COPY build_system-ubuntu.sh .
+COPY docker/build_system-ubuntu.sh .
 RUN bash build_system-ubuntu.sh
 RUN rm build_system-ubuntu.sh
 
-# copy tini in /usr/bin because I installed tini in / rather than in the path
-# in the base-notebook, see
-# https://github.com/paskino/docker-stacks/blob/8d4c9922710debc22e229dbcdc91f8fcd613db52/base-notebook/Dockerfile#L22-L27
-RUN cp /tini /usr/bin
-RUN chmod +x /usr/bin/tini
 
 # install requirements skipping CIL ones that are taken care of by conda
-COPY requirements_conda_forge.txt .
-RUN conda install -c conda-forge -c intel -c astra-toolbox -c ccpi --file requirements_conda_forge.txt
-RUN python -m pip install git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
+# COPY requirements_conda_forge.txt .
+# RUN conda install -c conda-forge -c intel -c astra-toolbox -c ccpi --file requirements_conda_forge.txt
 
-RUN python -m pip install torch==1.7.1 scikit-image
-RUN python -m pip install brainweb
+USER root
+COPY jupyterhub/requirements.yml .
+RUN mamba env update -n base -f requirements.yml
 
 # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
 # not documented in https://docs.docker.com/engine/reference/builder/#copy
 # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/
diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 1f8ee302..80a389e9 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -6,50 +6,49 @@ A few mods for use with Ubuntu 18.04 are in the fork https://github.com/paskino/
 However, we require GPU access (for [CIL](https://github.com/TomographicImaging/CIL.git)) so we need one of the NVIDIA docker images https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=cudnn8-devel-ubuntu18.04
 
 The strategy is:
- 1. to modify the `datascience-notebook` to have the `nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04` base image, `paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04`
- 1. build the `synerbi/sirf:sirf-core` image with the `nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04` base image
+ 1. to modify the `datascience-notebook` to have the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image, `paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04`
+ 1. build the `synerbi/sirf:sirf-core` image with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image
 1. build the jupyterhub image from the image at point 1, copy the SIRF `INSTALL` directory from the `synerbi/sirf:sirf-core` (previous step), set the appropriate environmental variable and install CIL via conda
 
-### Create the base image for jupyterhub with NVIDIA runtime on Ubuntu 18.04
+### Create the base image for jupyterhub with NVIDIA runtime on Ubuntu 22.04
 
-Currently the `base-notebook` in [`jupyter/docker-stacks`](`https://github.com/jupyter/docker-stacks`) builds on top of Ubuntu 20.04. The `tini` package is [required](https://github.com/jupyter/docker-stacks/blob/f27d615c5052c3a567835ceba3c21ab5d7b0416a/base-notebook/Dockerfile#L39-L42), but it is not available in Ubuntu 18.04 as apt package.
+Currently the `base-notebook` in [`jupyter/docker-stacks`](`https://github.com/jupyter/docker-stacks`) builds on top of Ubuntu 22.04.
 
-So to be able to use the `nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04` base image we need to modify the `base-notebook` and install `tini` in another way.
-The modifications are available at https://github.com/paskino/docker-stacks/tree/base_image_ubuntu18.04
+To be able to use the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image we need to modify the `base-notebook` up to `datascience-notebook`.
-Below a list of commands that will build the `paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04`
+Below a list of commands that will build the `datascience-notebook` with the NVIDIA cuda base image, which I then tag as `paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04`
 
+#### Clone the docker-stacks repo
+
+```
+git clone git@github.com:jupyter/docker-stacks.git
 ```
-git clone git@github.com:paskino/docker-stacks.git
-cd docker-stacks
-git checkout base_image_ubuntu18.04
-cd ..
 
+#### Build the images
+
+```
 # base notebook
 cd docker-stacks/base-notebook
 # change the base class with the ROOT_CONTAINER argument
-docker build --build-arg ROOT_CONTAINER=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 .
-# tag the created image
-docker tag 4dbe50ddc554 paskino/jupyter:base-notebook-cuda10-cudnn8-devel-ubuntu18.04
+# build and tag
+docker build --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
 
 # minimal notebook
 cd ../minimal-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda10-cudnn8-devel-ubuntu18.04 .
-docker tag 89a140d2318c paskino/jupyter:minimal-notebook-cuda10-cudnn8-devel-ubuntu18.04
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
 
 # scipy-notebook
 cd ../scipy-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda10-cudnn8-devel-ubuntu18.04 .
-docker tag 36ca7783b57d paskino/jupyter:scipy-notebook-cuda10-cudnn8-devel-ubuntu18.04
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
 
 # datascience-notebook
 cd ../datascience-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda10-cudnn8-devel-ubuntu18.04 .
-docker tag 5c63287f0aee paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+popd
+
 ```
 
-Finally we have the base `datascience-notebook` with the `nvidia/cuda:11.5.0-cudnn8-devel-ubuntu18.04` base image.
+Finally we have the base `datascience-notebook` with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image.
 
 
 Possible fix for plotting.
 ```
 conda update -c conda-forge jupyterlab ipympl
 %matplotlib widget
 ```
 
 ### Start building SIRF
 
-Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.5.0-cudnn8-devel-ubuntu18.04` base image.
+Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image.
 
 ```
 git clone git@github.com:SyneRBI/SIRF-SuperBuild.git
 cd SIRF-SuperBuild/docker
 
 # build standard SIRF docker
-docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --target sirf .
+docker build --build-arg BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --target sirf .
 ```
@@ -78,14 +77,11 @@ Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil)
 
 ```
-docker build --build-arg BASE_IMAGE=nvidia/cuda:10.2-cudnn8-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DBUILD_ASTRA=ON" --target sirf .
+ docker build --build-arg BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf -t synerbi/sirf:sirf-core .
 ```
 # build for PSMRTBP2022
 ```
- nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=master -DBUILD_STIR_EXECUTABLES=ON -DSIRF_URL=https://github.com/SyneRBI/SIRF.git -DSIRF_TAG=lm-recon -Dparallelproj_TAG=v0.8" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf . > build_jupyterhub_lm.log &
-
-# tag as synerbi/sirf:sirf-core
-docker tag cd1ed7d07d11 synerbi/sirf:sirf-core
+ nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=master -DBUILD_STIR_EXECUTABLES=ON -DSIRF_URL=https://github.com/SyneRBI/SIRF.git -DSIRF_TAG=lm-recon -Dparallelproj_TAG=v0.8" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf -t synerbi/sirf:psmrtbp2022 . > build_jupyterhub_lm.log &
 ```
 
 ### Putting things together
@@ -95,9 +91,8 @@ To install SIRF we can literally _copy_ the SIRF INSTALL directory to the `datascience-notebook` image and set the required environment variables.
 
 ```
-cd SIRF-SuperBuild/docker
-docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04 -f ../jupyterhub/Dockerfile .
-docker tag 4970647d72ea paskino/sirfcil:service-gpu
+cd SIRF-SuperBuild
+docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 -f jupyterhub/Dockerfile -t paskino/sirfcil:service-gpu .
 ```
 
 ### Testing
@@ -107,36 +102,3 @@ The cloud is set to update the image `paskino/sirfcil:service-gpu`, therefore it
 docker tag 4970647d72ea paskino/sirfcil:service-gpu
 ```
 
-### Work in progress
-#### base image Ubuntu 20.04 NVIDIA
-
-Work in progress
-
-```
-git clone git@github.com:jupyter/docker-stacks.git
-
-# base notebook
-cd docker-stacks/base-notebook
-# change the base class with the ROOT_CONTAINER argument
-docker build --build-arg ROOT_CONTAINER=nvidia/cuda:11.4.2-cudnn8-devel-ubuntu20.04 .
-# tag the created image
-docker tag 4dbe50ddc554 paskino/jupyter:base-notebook-cuda11
-# push to dockerhub
-docker push paskino/jupyter:base-notebook-cuda11
-
-# minimal notebook
-cd ../minimal-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11 .
-docker tag 4bfaa92b9ed6 paskino/jupyter:minimal-notebook-cuda11
-
-# scipy-notebook
-cd ../scipy-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11
-docker tag 9cbadae5917e paskino/jupyter:scipy-notebook-cuda11
-
-# datascience-notebook
-cd ../datascience-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11 .
-```
-
-Build of SIRF on Ubuntu 20.04 [fails](https://github.com/SyneRBI/SIRF-SuperBuild/issues/649)
diff --git a/jupyterhub/requirements.yml b/jupyterhub/requirements.yml
new file mode 100644
index 00000000..53bb9471
--- /dev/null
+++ b/jupyterhub/requirements.yml
@@ -0,0 +1,13 @@
+name: base
+channels:
+  - conda-forge
+  - intel
+  - ccpi
+  - defaults
+dependencies:
+  - scikit-image
+  - pytorch
+  - torchvision
+  - pip:
+    - brainweb # CIL
+    - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
\ No newline at end of file

From 7297b87f1245e68b101a5075425a1add31a868bb Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Mon, 23 Jan 2023 17:10:37 +0000
Subject: [PATCH 16/98] update readme

---
 jupyterhub/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 80a389e9..7e5298fa 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -63,7 +63,7 @@ Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.7.1-cudn
 
 ```
 git clone git@github.com:SyneRBI/SIRF-SuperBuild.git
-cd SIRF-SuperBuild/docker
+cd SIRF-SuperBuild/
 
 # build standard SIRF docker
 docker build --build-arg BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --target sirf .

From 06d4f21a333e5203e2bec8edcd55f39c18e17f51 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Mon, 23 Jan 2023 17:11:00 +0000
Subject: [PATCH 17/98] removed old text

---
 jupyterhub/README.md | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 7e5298fa..b32928dc 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -51,12 +51,6 @@ docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cu
 
 Finally we have the base `datascience-notebook` with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image.
 
-Possible fix for plotting.
-```
-conda update -c conda-forge jupyterlab ipympl
-%matplotlib widget
-```
-
 ### Start building SIRF
 
 Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image.
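> Note (reviewer sketch): PATCHES 15-17 move the Python dependencies into `jupyterhub/requirements.yml` and apply them with `mamba env update -n base`. A quick, hedged check that the packages actually landed in the base environment of the built image (the tag below is the one used in this README; adjust to whatever was built):

```bash
# List the packages requested in jupyterhub/requirements.yml inside the image
docker run --rm paskino/sirfcil:service-gpu mamba list | grep -iE 'scikit-image|pytorch|torchvision|brainweb'
```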
From b611c0e859be6b61d41d34e5b891574764a4a1a8 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 24 Jan 2023 11:01:46 +0000
Subject: [PATCH 18/98] add astra-toolbox

---
 jupyterhub/requirements.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/jupyterhub/requirements.yml b/jupyterhub/requirements.yml
index 53bb9471..6a97099b 100644
--- a/jupyterhub/requirements.yml
+++ b/jupyterhub/requirements.yml
@@ -3,11 +3,15 @@ channels:
   - conda-forge
   - intel
   - ccpi
+  - astra-toolbox
   - defaults
 dependencies:
   - scikit-image
   - pytorch
   - torchvision
+  - cil
+  - astra-toolbox
+  - tigre
   - pip:
     - brainweb # CIL
     - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
\ No newline at end of file

From 52716e421b25b9cd57e03acd828f19cc508da4ea Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Wed, 25 Jan 2023 10:18:03 +0000
Subject: [PATCH 19/98] update to jupyterhub dockerfile

---
 jupyterhub/Dockerfile | 13 ++++++++++---
 jupyterhub/README.md  |  2 +-
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index 73ffe439..441e7257 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -25,9 +25,16 @@ RUN bash build_system-ubuntu.sh
 RUN rm build_system-ubuntu.sh
 
-# install requirements skipping CIL ones that are taken care of by conda
-# COPY requirements_conda_forge.txt .
-# RUN conda install -c conda-forge -c intel -c astra-toolbox -c ccpi --file requirements_conda_forge.txt
+# install requirements from docker/requirements.yml and additional requirements from jupyterhub/requirements.yml
+# and requirements for SIRF exercises
+ARG PYTHON_INSTALL_DIR="/opt/conda"
+ARG PYTHON_EXECUTABLE="miniconda"
+COPY docker/user_service-ubuntu.sh .
+COPY docker/install-sirf-exercises-dep.py .
+RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash user_service-ubuntu.sh
+RUN rm user_service-ubuntu.sh install-sirf-exercises-dep.py
+# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh
+RUN rm -rf /opt/SIRF-Exercises; rm -rf /opt/CIL-Demos
 
 USER root
 COPY jupyterhub/requirements.yml .
diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index b32928dc..78f1ef72 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -86,7 +86,7 @@ To install SIRF we can literally _copy_ the SIRF INSTALL directory to the `datas
 
 ```
 cd SIRF-SuperBuild
-docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 -f jupyterhub/Dockerfile -t paskino/sirfcil:service-gpu .
+docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 -f jupyterhub/Dockerfile -t harbor.stfc.ac.uk/imaging-tomography/test:sirfcil-jupyterhub-gpu .
 ```
 
 ### Testing
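> Note (reviewer sketch): once the jupyterhub image above has been built, a minimal smoke test is to import the SIRF and CIL Python modules inside it. This only assumes the tag used in PATCH 19 and the standard module names; it is not part of the patch series itself.

```bash
# Check that SIRF and CIL import with the environment variables baked into the image
docker run --rm harbor.stfc.ac.uk/imaging-tomography/test:sirfcil-jupyterhub-gpu \
  python -c "import sirf.STIR, cil.framework; print('SIRF and CIL import OK')"
```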
From 6574861bbe8bab3c8679f086c64b9764e9f7e05f Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 26 Jan 2023 09:46:33 +0000
Subject: [PATCH 20/98] update the BASE_IMAGE

---
 jupyterhub/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index 441e7257..3060513b 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -1,4 +1,4 @@
-ARG BASE_IMAGE=ubuntu:18.04
+ARG BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04
 # paskino/jupyter datascience-notebook-cuda11
 FROM ${BASE_IMAGE} as base

From 430b86dca99fb1bd710629679a83f52937e2bbfa Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 9 Mar 2023 20:09:24 +0000
Subject: [PATCH 21/98] install py3.9 and deal with consequences

requires to build Boost and boost python
---
 docker/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/Dockerfile b/docker/Dockerfile
index ae6d42fb..77762d30 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -134,7 +134,7 @@ ARG NUM_PARALLEL_BUILDS="2"
 ARG BUILD_FLAGS="\
   -DCMAKE_BUILD_TYPE=Release\
   -DSTIR_ENABLE_OPENMP=ON -DUSE_SYSTEM_ACE=ON\
-  -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=ON\
+  -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=OFF\
   -DUSE_SYSTEM_FFTW3=ON -DUSE_SYSTEM_HDF5=OFF -DUSE_ITK=ON\
   -DGadgetron_USE_CUDA=OFF\
   -DUSE_SYSTEM_SWIG=ON\

From 5cc13d0e38a747cf1bc05ce801e373d8fd2fb677 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 9 Mar 2023 20:09:45 +0000
Subject: [PATCH 22/98] simplify the dockerfile to reduce layers

---
 jupyterhub/Dockerfile | 42 +++++++++++++++++++++++-------------------
 1 file changed, 23 insertions(+), 19 deletions(-)

diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index 3060513b..ac306679 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -5,36 +5,40 @@ FROM ${BASE_IMAGE} as base
 # Make sure the image has the same libraries as the standard SIRF docker image
 # Add to the docker image the appropriate stuff
 user root
-COPY docker/build_essential-ubuntu.sh .
-RUN bash build_essential-ubuntu.sh
-RUN rm build_essential-ubuntu.sh
+COPY docker/*.sh ./scripts/
+RUN bash ./scripts/build_essential-ubuntu.sh &&\
+    bash ./scripts/build_python-ubuntu.sh &&\
+    bash ./scripts/build_gadgetron-ubuntu.sh &&\
+    bash ./scripts/build_system-ubuntu.sh
 
 # Python (build)
-COPY docker/build_python-ubuntu.sh .
-RUN bash build_python-ubuntu.sh
-RUN rm build_python-ubuntu.sh
+# COPY docker/build_python-ubuntu.sh .
+# RUN bash build_python-ubuntu.sh
+# RUN rm build_python-ubuntu.sh
 
-# Gadgetron
-COPY docker/build_gadgetron-ubuntu.sh .
-RUN bash build_gadgetron-ubuntu.sh
-RUN rm build_gadgetron-ubuntu.sh
+# # Gadgetron
+# COPY docker/build_gadgetron-ubuntu.sh .
+# RUN bash build_gadgetron-ubuntu.sh
+# RUN rm build_gadgetron-ubuntu.sh
 
-# SIRF external deps
-COPY docker/build_system-ubuntu.sh .
-RUN bash build_system-ubuntu.sh
-RUN rm build_system-ubuntu.sh
+# # SIRF external deps
+# COPY docker/build_system-ubuntu.sh .
+# RUN bash build_system-ubuntu.sh
+# RUN rm build_system-ubuntu.sh
 
 # install requirements from docker/requirements.yml and additional requirements from jupyterhub/requirements.yml
 # and requirements for SIRF exercises
 ARG PYTHON_INSTALL_DIR="/opt/conda"
 ARG PYTHON_EXECUTABLE="miniconda"
-COPY docker/user_service-ubuntu.sh .
+# COPY docker/user_service-ubuntu.sh .
+# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh
 COPY docker/install-sirf-exercises-dep.py .
-RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash user_service-ubuntu.sh
-RUN rm user_service-ubuntu.sh install-sirf-exercises-dep.py
-# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh
-RUN rm -rf /opt/SIRF-Exercises; rm -rf /opt/CIL-Demos
+RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash ./scripts/user_service-ubuntu.sh &&\
+    rm install-sirf-exercises-dep.py &&\
+    rm -rf ./scripts &&\
+    rm -rf /opt/SIRF-Exercises&&\
+    rm -rf /opt/CIL-Demos
 
 USER root
 COPY jupyterhub/requirements.yml .
 RUN mamba env update -n base -f requirements.yml

From 173d5c0cb29348398775452b662ab0060967ef95 Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 9 Mar 2023 20:52:08 +0000
Subject: [PATCH 23/98] add boost_python

---
 SuperBuild/External_Boost_configureboost.cmake | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/SuperBuild/External_Boost_configureboost.cmake b/SuperBuild/External_Boost_configureboost.cmake
index 2544d5d1..2436be8e 100644
--- a/SuperBuild/External_Boost_configureboost.cmake
+++ b/SuperBuild/External_Boost_configureboost.cmake
@@ -24,7 +24,7 @@ if(WIN32)
 else()
   message(STATUS "Build dir is : ${BUILD_DIR}")
   execute_process(COMMAND ./bootstrap.sh --prefix=${BOOST_INSTALL_DIR}
-    --with-libraries=system,filesystem,thread,program_options,chrono,date_time,atomic,timer,regex,test,coroutine,context,random
+    --with-libraries=system,filesystem,thread,program_options,chrono,date_time,atomic,timer,regex,test,coroutine,context,random,python
     #--with-libraries=system,thread,program_options,log,math...
     #--without-libraries=atomic...

From 6187f11c3001263eb850dea302253a399840ea7e Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Tue, 14 Mar 2023 21:51:44 +0000
Subject: [PATCH 24/98] several updates

---
 docker/requirements-service.yml   |  2 +-
 docker/requirements.yml           |  8 ++---
 jupyterhub/Dockerfile             | 60 ++++++++++++++-----------------
 jupyterhub/README.md              | 11 ++++--
 jupyterhub/build_docker_stacks.sh | 27 ++++++++++++++
 jupyterhub/requirements.yml       | 10 ++++--
 6 files changed, 74 insertions(+), 44 deletions(-)
 create mode 100644 jupyterhub/build_docker_stacks.sh

diff --git a/docker/requirements-service.yml b/docker/requirements-service.yml
index a6ed7897..866ba6e6 100644
--- a/docker/requirements-service.yml
+++ b/docker/requirements-service.yml
@@ -7,6 +7,6 @@ channels:
 dependencies:
   - jupyterlab
   - jupyter
-  - ipywidgets
+  - ipywidgets<8
   - widgetsnbextension
   - nodejs
diff --git a/docker/requirements.yml b/docker/requirements.yml
index 9f0d69c5..1733fd7d 100644
--- a/docker/requirements.yml
+++ b/docker/requirements.yml
@@ -27,10 +27,10 @@ dependencies:
   - nibabel
   - deprecation
   - nose
-  - ipp
-  - ipp-devel
-  - ipp-include
-  - tigre
+  - ipp # CIL
+  - ipp-devel # CIL
+  - ipp-include # CIL
+  - tigre=2.4 # CIL
   - pip
   - pip:
     - git+https://github.com/data-exchange/dxchange.git # CIL
diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile
index ac306679..8a95806c 100644
--- a/jupyterhub/Dockerfile
+++ b/jupyterhub/Dockerfile
@@ -5,36 +5,40 @@ FROM ${BASE_IMAGE} as base
 # Make sure the image has the same libraries as the standard SIRF docker image
 # Add to the docker image the appropriate stuff
 user root
-COPY docker/*.sh ./scripts/
+COPY --chown=jovyan:users docker/build_essential-ubuntu.sh docker/build_gadgetron-ubuntu.sh docker/build_system-ubuntu.sh docker/install-sirf-exercises-dep.py ./scripts/
 RUN bash ./scripts/build_essential-ubuntu.sh &&\
-
-    bash ./scripts/build_python-ubuntu.sh &&\
     bash ./scripts/build_gadgetron-ubuntu.sh &&\
     bash ./scripts/build_system-ubuntu.sh
 
-# Python (build)
-# COPY docker/build_python-ubuntu.sh .
-# RUN bash build_python-ubuntu.sh
-# RUN rm build_python-ubuntu.sh
-
-# # Gadgetron
-# COPY docker/build_gadgetron-ubuntu.sh .
-# RUN bash build_gadgetron-ubuntu.sh
-# RUN rm build_gadgetron-ubuntu.sh
-
-# # SIRF external deps
-# COPY docker/build_system-ubuntu.sh .
-# RUN bash build_system-ubuntu.sh
-# RUN rm build_system-ubuntu.sh
-
-# install requirements from docker/requirements.yml and additional requirements from jupyterhub/requirements.yml
-# and requirements for SIRF exercises
-ARG PYTHON_INSTALL_DIR="/opt/conda"
-ARG PYTHON_EXECUTABLE="miniconda"
-# COPY docker/user_service-ubuntu.sh .
-# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh
-COPY docker/install-sirf-exercises-dep.py .
-RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash ./scripts/user_service-ubuntu.sh &&\
-    rm install-sirf-exercises-dep.py &&\
-    rm -rf ./scripts &&\
-    rm -rf /opt/SIRF-Exercises&&\
-    rm -rf /opt/CIL-Demos
-
-USER root
-COPY jupyterhub/requirements.yml .
-RUN mamba env update -n base -f requirements.yml
 
 # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage
 # not documented in https://docs.docker.com/engine/reference/builder/#copy
 # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL
 COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/
 
+from base as jup
+# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh
+USER jovyan
+ARG NB_USER="jovyan"
+ARG NB_GROUP="users"
+
+
+COPY --chown=jovyan:users docker/requirements.yml docker/
+COPY --chown=jovyan:users jupyterhub/requirements.yml docker/requirements-service.yml jupyterhub/
+
+
+# RUN mamba env update -n base -f docker/requirements.yml
+# RUN mamba env update -n base -f jupyterhub/requirements.yml
+
+# RUN bash ./scripts/user_service-ubuntu.sh &&\
+RUN mamba env update -n base -f docker/requirements.yml &&\
+    mamba env update -n base -f jupyterhub/requirements.yml &&\
+    git clone https://github.com/SyneRBI/SIRF-Exercises --recursive -b master SIRF-Exercises &&\
+    python scripts/install-sirf-exercises-dep.py SIRF-Exercises/requirements.txt &&\
+    mamba env update --file jupyterhub/requirements-service.yml &&\
+    mamba env update -n base -f jupyterhub/requirements.yml &&\
+    rm -rf docker && rm -rf jupyterhub &&\
+    rm -rf ./scripts
 
 # Switch back to jovyan to avoid accidental container runs as root
 # From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219
 # Set environment variables for SIRF
 USER jovyan
+COPY docker/.bashrc /home/jovyan
 ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH"
 ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:$LD_LIBRARY_PATH"
 ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python"
 ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL"
 ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/"
 ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF"
 #Suppress output from Gadgetron which gives some problems on notebooks (QUIERO)
 ENV GADGETRON_LOG_MASK ""
 RUN echo $PATH
 
 # switch back to
 USER jovyan
diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 78f1ef72..4848071a 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -28,11 +28,15 @@ git clone git@github.com:jupyter/docker-stacks.git
 
 #### Build the images
 
 ```
+pushd ../../docker-stacks/docker-stacks-foundations
+# docker-stacks-foundations
+docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 .
+
 # base notebook
-cd docker-stacks/base-notebook
+cd ../base-notebook
 # change the base class with the ROOT_CONTAINER argument
 # build and tag
-docker build --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg ROOT_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
 
 # minimal notebook
 cd ../minimal-notebook
@@ -46,6 +50,9 @@ docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-
 # datascience-notebook
 cd ../datascience-notebook
 docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+popd
+
 ```
diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh
new file mode 100644
index 00000000..d655c572
--- /dev/null
+++ b/jupyterhub/build_docker_stacks.sh
@@ -0,0 +1,27 @@
+#! /bin/sh
+set -ex
+
+# change the base class with the ROOT_CONTAINER argument
+pushd ../../docker-stacks/docker-stacks-foundation
+# docker-stacks-foundations
+docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 .
+
+# base notebook
+cd ../base-notebook
+# build and tag
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+# minimal notebook
+cd ../minimal-notebook
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+
+# scipy-notebook
+cd ../scipy-notebook
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+# datascience-notebook
+cd ../datascience-notebook
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+
+popd
\ No newline at end of file
diff --git a/jupyterhub/requirements.yml b/jupyterhub/requirements.yml
index 53bb9471..96aa8caf 100644
--- a/jupyterhub/requirements.yml
+++ b/jupyterhub/requirements.yml
@@ -6,12 +6,16 @@ channels:
   - astra-toolbox
   - defaults
 dependencies:
+  - numpy=1.22
   - scikit-image
   - pytorch
   - torchvision
+  - ipywidgets<8
   - astra-toolbox
-  - tigre
+  - tigre=2.4
+  - dxchange
   - pip:
     - brainweb # CIL
+    - cuqipy-cil
     - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
\ No newline at end of file
From a6800a57e3de3ad3e4e243133b74d6e3d391da5b Mon Sep 17 00:00:00 2001
From: Edoardo Pasca
Date: Thu, 23 Nov 2023 10:40:56 +0000
Subject: [PATCH 25/98] mainly updates to instruction on jupyterhub image build

---
 jupyterhub/README.md              |  4 ++--
 jupyterhub/build_docker_stacks.sh | 21 +++++++++++----------
 jupyterhub/requirements.yml       |  2 +-
 3 files changed, 14 insertions(+), 13 deletions(-)

diff --git a/jupyterhub/README.md b/jupyterhub/README.md
index 4848071a..42d75f7b 100644
--- a/jupyterhub/README.md
+++ b/jupyterhub/README.md
@@ -34,9 +34,9 @@ docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cu
 
 # base notebook
 cd ../base-notebook
-# change the base class with the ROOT_CONTAINER argument
+# change the base class with the BASE_CONTAINER argument
 # build and tag
-docker build --build-arg ROOT_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
 
 # minimal notebook
 cd ../minimal-notebook
diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh
index d655c572..171f34b2 100644
--- a/jupyterhub/build_docker_stacks.sh
+++ b/jupyterhub/build_docker_stacks.sh
@@ -1,27 +1,28 @@
-#! /bin/sh
-set -ex
+# /bin/bash
 
-# change the base class with the ROOT_CONTAINER argument
-pushd ../../docker-stacks/docker-stacks-foundation
+set -ex
+pushd ../../docker-stacks/images/docker-stacks-foundation
 # docker-stacks-foundations
-docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 .
 
 # base notebook
 cd ../base-notebook
+# change the base class with the BASE_CONTAINER argument
 # build and tag
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-runtime-ubuntu22.04 .
 
 # minimal notebook
 cd ../minimal-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-runtime-ubuntu22.04 .
 
 # scipy-notebook
 cd ../scipy-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-runtime-ubuntu22.04 .
 
 # datascience-notebook
 cd ../datascience-notebook
-docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 .
+docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-runtime-ubuntu22.04 .
 
-popd
\ No newline at end of file
+popd
+
diff --git a/jupyterhub/requirements.yml b/jupyterhub/requirements.yml
index 96aa8caf..451f6143 100644
--- a/jupyterhub/requirements.yml
+++ b/jupyterhub/requirements.yml
@@ -18,4 +18,4 @@ dependencies:
   - pip:
     - brainweb # CIL
     - cuqipy-cil
-    - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
-
\ No newline at end of file
+    - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools
+
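> Note (reviewer sketch): at this point in the series `jupyterhub/build_docker_stacks.sh` still expects `jupyter/docker-stacks` to be checked out two directories above the working directory (the `pushd ../../docker-stacks/...` at the top of the script); the submodule-based layout only arrives in PATCH 28. A hedged usage sketch, assuming that layout:

```bash
# Clone docker-stacks where the script's relative pushd expects it, then run the chain build
git clone https://github.com/jupyter/docker-stacks ../../docker-stacks
bash jupyterhub/build_docker_stacks.sh
```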
.bashrc -set -ev +set -v # set default installation location INSTALL_DIR="${1:-/opt}" # set default URL/tag From e3abf7e55630326b2ede9b5fde01f0d64cd60ff1 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 27 Nov 2023 15:23:36 +0000 Subject: [PATCH 27/98] auto OMP_NUM_THREADS = cpu_count//2 --- jupyterhub/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 8a95806c..c51dd997 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -55,6 +55,8 @@ ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF" #Suppress output from Gadgetron which gives some problems on notebooks (QUIERO) ENV GADGETRON_LOG_MASK "" RUN echo $PATH +USER root +RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh # switch back to USER jovyan From 5b29dd8875cd512d234ba93359136f102a265e87 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 27 Nov 2023 15:27:16 +0000 Subject: [PATCH 28/98] add docker-stacks submodule, cleanup build --- .gitmodules | 3 +++ jupyterhub/Dockerfile | 15 ++++++++------- jupyterhub/build_docker_stacks.sh | 10 ++++++---- jupyterhub/docker-stacks | 1 + 4 files changed, 18 insertions(+), 11 deletions(-) create mode 100644 .gitmodules mode change 100644 => 100755 jupyterhub/build_docker_stacks.sh create mode 160000 jupyterhub/docker-stacks diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..95ebb758 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "jupyterhub/docker-stacks"] + path = jupyterhub/docker-stacks + url = https://github.com/jupyter/docker-stacks diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index c51dd997..40ef092e 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,10 +1,11 @@ +# syntax=docker/dockerfile:1 ARG BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 # paskino/jupyter datascience-notebook-cuda11 FROM ${BASE_IMAGE} as base # Make sure the image has the same libraries as the standard SIRF docker image # Add to the docker image the appropriate stuff -user root +USER root COPY --chown=jovyan:users docker/build_essential-ubuntu.sh docker/build_gadgetron-ubuntu.sh docker/build_system-ubuntu.sh docker/install-sirf-exercises-dep.py ./scripts/ RUN bash ./scripts/build_essential-ubuntu.sh &&\ bash ./scripts/build_gadgetron-ubuntu.sh &&\ @@ -14,10 +15,10 @@ RUN bash ./scripts/build_essential-ubuntu.sh &&\ # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage # not documented in https://docs.docker.com/engine/reference/builder/#copy # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf -COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL -COPY --from=synerbi/sirf:sirf-core /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ +COPY --from=synerbi/sirf:latest --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL +COPY --from=synerbi/sirf:latest --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ -from base as jup +FROM base AS jup # remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh USER jovyan ARG NB_USER="jovyan" @@ -44,7 +45,7 @@ RUN mamba env update -n base -f docker/requirements.yml &&\ # Switch back to jovyan to avoid accidental container runs as root # From 
https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219 # Set environment variables for SIRF -USER jovyan +USER jovyan COPY docker/.bashrc /home/jovyan ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH" ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:$LD_LIBRARY_PATH" @@ -58,7 +59,7 @@ RUN echo $PATH USER root RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh -# switch back to +# switch back to USER jovyan -#/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:/usr/local/nvidia/lib:/usr/local/nvidia/lib64::/opt/conda/lib \ No newline at end of file +#/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:/usr/local/nvidia/lib:/usr/local/nvidia/lib64::/opt/conda/lib diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh old mode 100644 new mode 100755 index 171f34b2..4a5cac2a --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -1,9 +1,10 @@ -# /bin/bash +#!/usr/bin/env bash +set -exuo pipefail -set -ex -pushd ../../docker-stacks/images/docker-stacks-foundation +git submodule update --init --recursive +pushd $(dirname "$0")/docker-stacks/images/docker-stacks-foundation # docker-stacks-foundations -docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 . +docker build --build-arg PYTHON_VERSION=3.9 --build-arg BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 . # base notebook cd ../base-notebook @@ -26,3 +27,4 @@ docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cu popd +docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:sirf -f jupyterhub/Dockerfile . 
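For orientation, a minimal usage sketch (not part of the patch) of the reworked script, assuming a fresh clone of the SuperBuild repository; the new `docker-stacks` submodule is initialised by the script itself before the chained builds run:

```sh
# Sketch: clone the SuperBuild and run the chained docker-stacks build
git clone https://github.com/SyneRBI/SIRF-SuperBuild.git
cd SIRF-SuperBuild
bash jupyterhub/build_docker_stacks.sh   # runs `git submodule update --init --recursive` itself
```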
diff --git a/jupyterhub/docker-stacks b/jupyterhub/docker-stacks new file mode 160000 index 00000000..d91bb62b --- /dev/null +++ b/jupyterhub/docker-stacks @@ -0,0 +1 @@ +Subproject commit d91bb62be0cb525529e6028d9dfcb53d20775fe5 From c66e745c3a360c81f8036fee413e18ab2e856562 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 28 Nov 2023 17:11:09 +0000 Subject: [PATCH 29/98] better image tags --- jupyterhub/Dockerfile | 3 +-- jupyterhub/build_docker_stacks.sh | 20 +++++++------------- 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 40ef092e..761c4085 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,6 +1,5 @@ # syntax=docker/dockerfile:1 -ARG BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -# paskino/jupyter datascience-notebook-cuda11 +ARG BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 FROM ${BASE_IMAGE} as base # Make sure the image has the same libraries as the standard SIRF docker image diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 4a5cac2a..2e0be393 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -3,28 +3,22 @@ set -exuo pipefail git submodule update --init --recursive pushd $(dirname "$0")/docker-stacks/images/docker-stacks-foundation -# docker-stacks-foundations -docker build --build-arg PYTHON_VERSION=3.9 --build-arg BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 . -# base notebook +docker build --build-arg PYTHON_VERSION=3.9 --build-arg BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation . + cd ../base-notebook -# change the base class with the BASE_CONTAINER argument -# build and tag -docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundation-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-runtime-ubuntu22.04 . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation -t synerbi/jupyter:base . -# minimal notebook cd ../minimal-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-runtime-ubuntu22.04 . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:base -t synerbi/jupyter:minimal . -# scipy-notebook cd ../scipy-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-runtime-ubuntu22.04 . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:minimal -t synerbi/jupyter:scipy . -# datascience-notebook cd ../datascience-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-runtime-ubuntu22.04 . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter:datascience . popd -docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-runtime-ubuntu22.04 -t paskino/jupyter:sirf -f jupyterhub/Dockerfile . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -t synerbi/jupyter:sirf -f jupyterhub/Dockerfile . 
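As a quick sanity check of the shortened tag names introduced here (a sketch, not part of the patch; the exact listing depends on the local Docker setup):

```sh
# List the images produced by build_docker_stacks.sh after this change
docker image ls synerbi/jupyter --format '{{.Repository}}:{{.Tag}}'
# expected tags: foundation, base, minimal, scipy, datascience and sirf
```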
From e4fb34a8282052948c937b17cdcf0cd1e9cced1f Mon Sep 17 00:00:00 2001 From: Edoardo Pasca Date: Wed, 29 Nov 2023 11:58:25 +0000 Subject: [PATCH 30/98] update name of variable --- jupyterhub/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 761c4085..d06168d3 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -FROM ${BASE_IMAGE} as base +FROM ${BASE_CONTAINER} as base # Make sure the image has the same libraries as the standard SIRF docker image # Add to the docker image the appropriate stuff From fbb69b9507dff080b2aebf62efc154a394bf354d Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 28 Nov 2023 21:31:49 +0000 Subject: [PATCH 31/98] safer directory switching --- jupyterhub/build_docker_stacks.sh | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 2e0be393..de278b86 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -1,9 +1,10 @@ #!/usr/bin/env bash set -exuo pipefail +pushd "$(dirname "${BASH_SOURCE[0]}")" git submodule update --init --recursive -pushd $(dirname "$0")/docker-stacks/images/docker-stacks-foundation +cd docker-stacks/images/docker-stacks-foundation docker build --build-arg PYTHON_VERSION=3.9 --build-arg BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation . cd ../base-notebook @@ -12,13 +13,13 @@ docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation -t synerbi/ju cd ../minimal-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:base -t synerbi/jupyter:minimal . - cd ../scipy-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:minimal -t synerbi/jupyter:scipy . cd ../datascience-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter:datascience . -popd - +cd ../../../.. docker build --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -t synerbi/jupyter:sirf -f jupyterhub/Dockerfile . + +popd From f3aebea5637d0a892e436f77a533ecbd2bc136be Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 28 Nov 2023 22:17:41 +0000 Subject: [PATCH 32/98] update jupyterhub README --- jupyterhub/README.md | 127 +++++++++---------------------------------- 1 file changed, 27 insertions(+), 100 deletions(-) diff --git a/jupyterhub/README.md b/jupyterhub/README.md index 42d75f7b..da4c258f 100644 --- a/jupyterhub/README.md +++ b/jupyterhub/README.md @@ -1,105 +1,32 @@ -## Build the Jupyterhub image +## Build the JupyterHub image -To create the image with SIRF and all the other stuff required by jupyterhub we start from the `datascience-notebook` from https://github.com/jupyter/docker-stacks. -A few mods for use with Ubuntu 18.04 are in the fork https://github.com/paskino/docker-stacks/tree/base_image_ubuntu18.04 which is used as base for creating the new `datascience-notebook`. +The image contains SIRF & all dependencies required by jupyterhub. 
-However, we require GPU access (for [CIL](https://github.com/TomographicImaging/CIL.git)) so we need one of the NVIDIA docker images https://hub.docker.com/r/nvidia/cuda/tags?page=1&name=cudnn8-devel-ubuntu18.04 +We use an NVIDIA CUDA Ubuntu 22.04 base image (for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build https://github.com/jupyter/docker-stacks `datascience-notebook` on top, and then install SIRF & its depdendencies. The strategy is: - 1. to modify the `datascience-notebook` to have the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image, `paskino/jupyter:datascience-notebook-cuda10-cudnn8-devel-ubuntu18.04` - 1. build the `synerbi/sirf:sirf-core` image with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image - 1. build the jupyterhub image from the image at point 1, copy the SIRF `INSTALL` directory from the `synerbi/sirf:sirf-core` (previous step), set the appropriate environmental variable and install CIL via conda - - -### Create the base image for jupyterhub with NVIDIA runtime on Ubuntu 22.04 - -Currently the `base-notebook` in [`jupyter/docker-stacks`](`https://github.com/jupyter/docker-stacks`) builds on top of Ubuntu 22.04. - -To be able to use the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image we need to modify the `base-notebook` up to `datascience-notebook`. - -Below a list of commands that will build the `datascience-notebook` with the NVIDIA cuda base image, which I then tag as `paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04` - -#### Clone the docker-stacks repo - -``` -git clone git@github.com:jupyter/docker-stacks.git -``` - -#### Build the images - -``` -pushd ../../docker-stacks/docker-stacks-foundations -# docker-stacks-foundations -docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 . - -# base notebook -cd ../base-notebook -# change the base class with the BASE_CONTAINER argument -# build and tag -docker build --build-arg BASE_CONTAINER=paskino/jupyter:docker-stacks-foundations-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 . - -# minimal notebook -cd ../minimal-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:base-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 . - - -# scipy-notebook -cd ../scipy-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:minimal-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 . - -# datascience-notebook -cd ../datascience-notebook -docker build --build-arg BASE_CONTAINER=paskino/jupyter:scipy-notebook-cuda11-cudnn8-devel-ubuntu22.04 -t paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 . - -popd - -``` - -Finally we have the base `datascience-notebook` with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image. - - -### Start building SIRF - -Build the `sirf` target of the SIRF Dockerfile with the `nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` base image. - -``` -git clone git@github.com:SyneRBI/SIRF-SuperBuild.git -cd SIRF-SuperBuild/ - -# build standard SIRF docker -docker build --build-arg BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --target sirf . 
- -``` - -#### Building CIL - -Please see [here](https://github.com/SyneRBI/SIRF-SuperBuild#building-ccpi-cil) for detailed info on the command below. - - -``` - - docker build --build-arg BASE_IMAGE=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf -t synerbi/sirf:sirf-core . -``` -# build for PSMRTBP2022 -``` - nohup docker build --build-arg BASE_IMAGE=nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 --build-arg PYTHON_INSTALL_DIR=/opt/conda --build-arg EXTRA_BUILD_FLAGS="-DBUILD_CIL=ON -DIPP_LIBRARY=/opt/conda/lib -DIPP_INCLUDE=/opt/conda/include -DSTIR_URL=https://github.com/UCL/STIR.git -DSTIR_TAG=master -DBUILD_STIR_EXECUTABLES=ON -DSIRF_URL=https://github.com/SyneRBI/SIRF.git -DSIRF_TAG=lm-recon -Dparallelproj_TAG=v0.8" --build-arg SIRF_SB_URL="https://github.com/paskino/SIRF-SuperBuild.git" --build-arg SIRF_SB_TAG="jupyterhub_env" --build-arg NUM_PARALLEL_BUILDS=6 --target sirf -t synerbi/sirf:psmrtbp2022 . > build_jupyterhub_lm.log & -``` - -### Putting things together - - - -To install SIRF we can literally _copy_ the SIRF INSTALL directory to the `datascience-notebook` image and set the required environment variables. - -``` -cd SIRF-SuperBuild -docker build --build-arg BASE_IMAGE=paskino/jupyter:datascience-notebook-cuda11-cudnn8-devel-ubuntu22.04 -f jupyterhub/Dockerfile -t harbor.stfc.ac.uk/imaging-tomography/test:sirfcil-jupyterhub-gpu . -``` - -### Testing - -The cloud is set to update the image `paskino/sirfcil:service-gpu`, therefore it is sufficient to tag the image produced in the section above as -``` -docker tag 4970647d72ea paskino/sirfcil:service-gpu -``` +1. Use a recent Ubuntu CuDNN runtime image from https://hub.docker.com/r/nvidia/cuda as base +2. Build https://github.com/jupyter/docker-stacks/tree/main/images/datascience-notebook on top +3. Copy & run the SIRF `docker/build_*.sh` scripts +4. Copy the SIRF installation directories from the `synerbi/sirf:latest` image +5. Install CIL (via `conda`) + +All of this is done by [`build_docker_stacks.sh`](./build_docker_stacks.sh). + +### More info + +https://github.com/jupyter/docker-stacks is used to gradually build up images: + +- `BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` +- `docker-stacks-foundation` -> `synerbi/jupyter:foundation` +- `base-notebook` -> `synerbi/jupyter:base` +- `minimal-notebook` -> `synerbi/jupyter:minimal` +- `scipy-notebook` -> `synerbi/jupyter:scipy` +- `datascience-notebook` -> `synerbi/jupyter:datascience` +- [`Dockerfile`](./Dockerfile) -> `synerbi/jupyter:sirf` + + Copy & run the SIRF `build_{essential,gadgetron,system}.sh` scripts from [`../docker`](../docker) + + Copy `/opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}` directories from the `synerbi/sirf:latest` image + + Install docker/requirements.yml, jupyterhub/requirements.yml, jupyterhub/requirements-service.yml + + Clone & setup https://github.com/SyneRBI/SIRF-Exercises + + Set some environment variables (e.g. 
`PYTHONPATH=/opt/SIRF-SuperBuild/INSTALL/python`, `OMP_NUM_THREADS=$(( cpu_count/2 ))`) From df3536492d179a743944849f89268c41ddf58710 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca Date: Fri, 1 Dec 2023 11:02:29 +0000 Subject: [PATCH 33/98] add SIRF_IMAGE variable --- jupyterhub/Dockerfile | 6 +++--- jupyterhub/build_docker_stacks.sh | 6 +++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index d06168d3..8ed2c682 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -11,11 +11,12 @@ RUN bash ./scripts/build_essential-ubuntu.sh &&\ bash ./scripts/build_system-ubuntu.sh +ARG SIRF_IMAGE="synerbi/sirf:latest" # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage # not documented in https://docs.docker.com/engine/reference/builder/#copy # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf -COPY --from=synerbi/sirf:latest --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL -COPY --from=synerbi/sirf:latest --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ +COPY --from=${SIRF_IMAGE} --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL +COPY --from=${SIRF_IMAGE} --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ FROM base AS jup # remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh @@ -36,7 +37,6 @@ RUN mamba env update -n base -f docker/requirements.yml &&\ mamba env update -n base -f jupyterhub/requirements.yml &&\ git clone https://github.com/SyneRBI/SIRF-Exercises --recursive -b master SIRF-Exercises &&\ python scripts/install-sirf-exercises-dep.py SIRF-Exercises/requirements.txt &&\ - mamba env update --file jupyterhub/requirements-service.yml &&\ mamba env update -n base -f jupyterhub/requirements.yml &&\ rm -rf docker && rm -rf jupyterhub &&\ rm -rf ./scripts diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index de278b86..208b502a 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -1,6 +1,10 @@ #!/usr/bin/env bash set -exuo pipefail +# set default sirf image +#: ${SIRF_IMAGE:=synerbi/sirf:latest} +SIRF_IMAGE="${1:-synerbi/sirf:latest}" + pushd "$(dirname "${BASH_SOURCE[0]}")" git submodule update --init --recursive @@ -20,6 +24,6 @@ cd ../datascience-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter:datascience . cd ../../../.. -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -t synerbi/jupyter:sirf -f jupyterhub/Dockerfile . +docker build --build-arg BASE_CONTAINER=synerbi/jupyter:datascience --build-arg SIRF_IMAGE=${SIRF_IMAGE} -t synerbi/jupyter:sirf -f jupyterhub/Dockerfile . 
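To illustrate the new argument (a sketch only; `synerbi/sirf:release` is just an example tag), the image providing the SIRF install can now be chosen per invocation, defaulting to `synerbi/sirf:latest`:

```sh
# Default: copy the SIRF install tree from synerbi/sirf:latest
bash jupyterhub/build_docker_stacks.sh
# Override: copy it from a different SIRF image instead
bash jupyterhub/build_docker_stacks.sh synerbi/sirf:release
```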
popd From 4164ddcba7a6063d04270894e1b2dfd41281679b Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 1 Dec 2023 12:15:03 +0000 Subject: [PATCH 34/98] make SIRF_IMAGE configurable --- jupyterhub/Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 8ed2c682..a0d4169a 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,5 +1,7 @@ # syntax=docker/dockerfile:1 +ARG SIRF_IMAGE="synerbi/sirf:latest" ARG BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 +FROM ${SIRF_IMAGE} as sirf_image FROM ${BASE_CONTAINER} as base # Make sure the image has the same libraries as the standard SIRF docker image @@ -10,13 +12,11 @@ RUN bash ./scripts/build_essential-ubuntu.sh &&\ bash ./scripts/build_gadgetron-ubuntu.sh &&\ bash ./scripts/build_system-ubuntu.sh - -ARG SIRF_IMAGE="synerbi/sirf:latest" # https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage # not documented in https://docs.docker.com/engine/reference/builder/#copy # FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf -COPY --from=${SIRF_IMAGE} --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL -COPY --from=${SIRF_IMAGE} --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ +COPY --from=sirf_image --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL +COPY --from=sirf_image --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ FROM base AS jup # remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh From d02886e8bbbbc016461055adced5c3c58aee0e87 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 13 Dec 2023 22:14:57 +0000 Subject: [PATCH 35/98] fix some review comments --- CHANGES.md | 14 +++++++------- SuperBuild/External_Boost_configureboost.cmake | 2 +- docker/.bashrc | 2 +- docker/Dockerfile | 2 +- docker/requirements-service.yml | 2 +- docker/user_sirf-ubuntu.sh | 3 +-- 6 files changed, 12 insertions(+), 13 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 1048e328..06868ba3 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -24,8 +24,8 @@ the CTests will be run while building the image. - remove obsolete copying of gadgetron.xml - Installing requirements for SIRF-Exercises uses its environment.yml or requirements.txt depending on settings. -- Build Gadgetron master. Requires new Ubuntu packages: libdcmtk-dev, libpugixml-dev, libgflags-dev, - libssl-dev, libcurl4-openssl-dev, pkg-config, golang, libboost-coroutine-dev, libboost-context-dev, libboost-random-dev. +- Build Gadgetron master. Requires new Ubuntu packages: libdcmtk-dev, libpugixml-dev, libgflags-dev, + libssl-dev, libcurl4-openssl-dev, pkg-config, golang, libboost-coroutine-dev, libboost-context-dev, libboost-random-dev. - Added SuperBuild project dependencies for Gadgetron: range-v3, RocksDB, Date, mrd-storage-server - updated versions: - Gadgetron: 42f11bf14b77b16f1ca5bcfbfa435d5ee8cb22a6 (master) @@ -37,7 +37,7 @@ - CIL: v23.1.0 - CCPi-Regularisation: v22.0.0 - TomoPhantom: v2.0.0 - + ## v3.4.0 - Removed CIL-ASTRA as it has been merged into CIL code base. 
- docker images updates @@ -55,7 +55,7 @@ - use the VMSVGA graphics controller - use environment variables in Vagrantfile for easier building - jupyterhub updates: - - Ubuntu: 22.04 and nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 + - Ubuntu: 22.04 and nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 - added requirements.yml for jupyterhub - updated versions: - SIRF: v3.4.0 @@ -67,7 +67,7 @@ - Boost: 1.78.0 ## v3.3.1 -- VM: +- VM: - "update_VM.sh -s" (i.e. "UPDATE.sh -s") no longer runs configure_gnome.sh. If you have a very old VM, run it manually instead. - Updates to run using docker scripts - installs custom pip and all python prerequisites with pip @@ -79,7 +79,7 @@ - no longer force numpy<=1.20 - CMake: - FindCython allows hints - + ## v3.3.0 - known problems: - VM and jupyterhub scripts need merging various fixes @@ -93,7 +93,7 @@ - docker: - fix problems with CUDA repo keys - minor fixes to scripts for use elsewhere (including preparation for more recent Ubuntu) -- VM: +- VM: - set BUILD_CIL=ON - add CITATION.cff (and remove .zenodo.json) - added numba as dependency in docker files diff --git a/SuperBuild/External_Boost_configureboost.cmake b/SuperBuild/External_Boost_configureboost.cmake index 2436be8e..2544d5d1 100644 --- a/SuperBuild/External_Boost_configureboost.cmake +++ b/SuperBuild/External_Boost_configureboost.cmake @@ -24,7 +24,7 @@ if(WIN32) else() message(STATUS "Build dir is : ${BUILD_DIR}") execute_process(COMMAND ./bootstrap.sh --prefix=${BOOST_INSTALL_DIR} - --with-libraries=system,filesystem,thread,program_options,chrono,date_time,atomic,timer,regex,test,coroutine,context,random,python + --with-libraries=system,filesystem,thread,program_options,chrono,date_time,atomic,timer,regex,test,coroutine,context,random #--with-libraries=system,thread,program_options,log,math... #--without-libraries=atomic... diff --git a/docker/.bashrc b/docker/.bashrc index de6785c7..679ea639 100644 --- a/docker/.bashrc +++ b/docker/.bashrc @@ -26,7 +26,7 @@ export PS1='sirf$ ' # Need to add this as we have built using some of these shared libraries # See https://github.com/SyneRBI/SIRF-SuperBuild/issues/573 -# export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:PYTHON_INSTALL_DIR/lib +[ -f PYTHON_INSTALL_DIR/bin/activate ] && export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:PYTHON_INSTALL_DIR/lib # .local/bin (used by pip for instance) export PATH="${PATH}":~/.local/bin diff --git a/docker/Dockerfile b/docker/Dockerfile index 77762d30..ae6d42fb 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -134,7 +134,7 @@ ARG NUM_PARALLEL_BUILDS="2" ARG BUILD_FLAGS="\ -DCMAKE_BUILD_TYPE=Release\ -DSTIR_ENABLE_OPENMP=ON -DUSE_SYSTEM_ACE=ON\ - -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=OFF\ + -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=ON\ -DUSE_SYSTEM_FFTW3=ON -DUSE_SYSTEM_HDF5=OFF -DUSE_ITK=ON\ -DGadgetron_USE_CUDA=OFF\ -DUSE_SYSTEM_SWIG=ON\ diff --git a/docker/requirements-service.yml b/docker/requirements-service.yml index 866ba6e6..204297ad 100644 --- a/docker/requirements-service.yml +++ b/docker/requirements-service.yml @@ -7,6 +7,6 @@ channels: dependencies: - jupyterlab - jupyter - - ipywidgets<8 + - ipywidgets<8 # vis. https://github.com/TomographicImaging/CIL/pull/1599 - widgetsnbextension - nodejs diff --git a/docker/user_sirf-ubuntu.sh b/docker/user_sirf-ubuntu.sh index ab76a395..c3fd05d9 100755 --- a/docker/user_sirf-ubuntu.sh +++ b/docker/user_sirf-ubuntu.sh @@ -4,7 +4,7 @@ # outside of the "docker build" setting [ -f .bashrc ] && . 
.bashrc -set -v +set -ev # set default installation location INSTALL_DIR="${1:-/opt}" # set default URL/tag @@ -53,4 +53,3 @@ if [ "$REMOVE_BUILD_FILES" = 1 ]; then else echo "Keeping build files" fi - From 5fbdb2dda28727b8ab621b26a9f87f9005eb5562 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 2 Jan 2024 22:26:08 +0000 Subject: [PATCH 36/98] migrate build to docker-stacks --- jupyterhub/Dockerfile | 138 +++++++++++++++++++++++++++--------------- 1 file changed, 88 insertions(+), 50 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index a0d4169a..4fb6be30 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,64 +1,102 @@ # syntax=docker/dockerfile:1 -ARG SIRF_IMAGE="synerbi/sirf:latest" ARG BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 -FROM ${SIRF_IMAGE} as sirf_image FROM ${BASE_CONTAINER} as base -# Make sure the image has the same libraries as the standard SIRF docker image -# Add to the docker image the appropriate stuff USER root -COPY --chown=jovyan:users docker/build_essential-ubuntu.sh docker/build_gadgetron-ubuntu.sh docker/build_system-ubuntu.sh docker/install-sirf-exercises-dep.py ./scripts/ -RUN bash ./scripts/build_essential-ubuntu.sh &&\ - bash ./scripts/build_gadgetron-ubuntu.sh &&\ - bash ./scripts/build_system-ubuntu.sh - -# https://docs.docker.com/develop/develop-images/multistage-build/#use-an-external-image-as-a-stage -# not documented in https://docs.docker.com/engine/reference/builder/#copy -# FROM --from=nginx:latest /etc/nginx/nginx.conf /nginx.conf -COPY --from=sirf_image --link /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL -COPY --from=sirf_image --link /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ - -FROM base AS jup -# remove the SIRF-Exercises and CIL-Demos that get installed by user_service-ubuntu.sh -USER jovyan -ARG NB_USER="jovyan" -ARG NB_GROUP="users" - - -COPY --chown=jovyan:users docker/requirements.yml docker/ -COPY --chown=jovyan:users jupyterhub/requirements.yml docker/requirements-service.yml jupyterhub/ - - -# RUN mamba env update -n base -f docker/requirements.yml -# RUN mamba env update -n base -f jupyterhub/requirements.yml - -# RUN bash ./scripts/user_service-ubuntu.sh &&\ -RUN mamba env update -n base -f docker/requirements.yml &&\ - mamba env update -n base -f jupyterhub/requirements.yml &&\ - git clone https://github.com/SyneRBI/SIRF-Exercises --recursive -b master SIRF-Exercises &&\ - python scripts/install-sirf-exercises-dep.py SIRF-Exercises/requirements.txt &&\ - mamba env update -n base -f jupyterhub/requirements.yml &&\ - rm -rf docker && rm -rf jupyterhub &&\ - rm -rf ./scripts - -# Switch back to jovyan to avoid accidental container runs as root -# From https://github.com/paskino/SIRF-SuperBuild/blob/301c2274621e4729cadbd2a1705d8c4d9e3b7e50/docker/Dockerfile#L212-L219 + +# suppress warnings +ENV DEBIAN_FRONTEND noninteractive +COPY docker/raw-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/raw-ubuntu.sh +#ENV LC_ALL en_GB.UTF-8 +ENV LANG en_GB.UTF-8 +ENV LANGUAGE en_GB:en + +FROM base as build + +COPY docker/update_nvidia_keys.sh /opt/scripts/ +RUN bash /opt/scripts/update_nvidia_keys.sh + +COPY docker/build_essential-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/build_essential-ubuntu.sh + +COPY docker/build_gadgetron-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/build_gadgetron-ubuntu.sh + +COPY docker/build_system-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/build_system-ubuntu.sh +ENV PATH="/opt/cmake/bin:${PATH}" + +# 
ccache +COPY --link docker/devel/.ccache/ /opt/ccache/ +RUN ccache -o cache_dir=/opt/ccache +ENV PATH="/usr/lib/ccache:${PATH}" + +# SIRF-SuperBuild config +ARG SIRF_SB_URL="https://github.com/SyneRBI/SIRF-SuperBuild" +ARG SIRF_SB_TAG="master" +ARG REMOVE_BUILD_FILES=1 +ARG RUN_CTEST=1 +ARG NUM_PARALLEL_BUILDS=" " +ARG BUILD_FLAGS="\ + -DCMAKE_BUILD_TYPE=Release\ + -DSTIR_ENABLE_OPENMP=ON -DUSE_SYSTEM_ACE=ON\ + -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=ON\ + -DUSE_SYSTEM_FFTW3=ON -DUSE_SYSTEM_HDF5=OFF -DUSE_ITK=ON\ + -DUSE_SYSTEM_SWIG=ON\ + -DUSE_NiftyPET=OFF\ + -DBUILD_siemens_to_ismrmrd=ON -DBUILD_pet_rd_tools=ON" +ARG EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=OFF -DBUILD_CIL=OFF" + +# build, install in /opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}, test (if RUN_CTEST) +COPY docker/user_sirf-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/user_sirf-ubuntu.sh \ + && fix-permissions /opt/SIRF-SuperBuild \ + && fix-permissions /opt/ccache + +FROM base as sirf + +# X11 forwarding +RUN apt update -qq && apt install -yq --no-install-recommends \ + libx11-xcb1 \ + && apt clean +RUN mkdir -p /usr/share/X11/xkb +RUN [ -e /usr/bin/X ] || ln -s /usr/bin/Xorg /usr/bin/X + +RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh + +COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ +# RUN sed -i s:PYTHON_INSTALL_DIR:${CONDA_DIR}:g /home/${NB_USER}/.bashrc + +# install /opt/{SIRF-Exercises,CIL-Demos} +COPY docker/user_service-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/user_service-ubuntu.sh \ + && fix-permissions /opt/SIRF-Exercises \ + && fix-permissions /opt/CIL-Demos + +USER ${NB_UID} +COPY --chown=${NB_USER} docker/requirements.yml /opt/scripts/docker-requirements.yaml +RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml + +COPY --chown=${NB_USER} jupyterhub/requirements.yml /opt/scripts/jupyterhub-requirements.yaml +RUN mamba env update -n base -f /opt/scripts/jupyterhub-requirements.yaml + +# install from build +COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL/ +COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ +# TODO: COPY --from=build /usr/local/{bin,lib} ? 
+ # Set environment variables for SIRF -USER jovyan -COPY docker/.bashrc /home/jovyan ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH" ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:$LD_LIBRARY_PATH" +#/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/opt/conda/lib ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python" ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL" -ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/" ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF" -#Suppress output from Gadgetron which gives some problems on notebooks (QUIERO) +#ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/" +# Suppress output from Gadgetron which gives some problems on notebooks (QUIERO) ENV GADGETRON_LOG_MASK "" -RUN echo $PATH -USER root -RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh -# switch back to -USER jovyan +ENV DEBIAN_FRONTEND '' -#/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:/usr/local/nvidia/lib:/usr/local/nvidia/lib64::/opt/conda/lib +# TODO: CMD ["jupyterhub/service.sh"] From e416e34c831459b0586271841f58b1e4e7ba2af5 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 3 Jan 2024 15:50:04 +0000 Subject: [PATCH 37/98] build fixes & ccaching --- jupyterhub/Dockerfile | 21 ++++++++++----------- jupyterhub/build_docker_stacks.sh | 9 ++++++++- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 4fb6be30..9d65fc75 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -46,13 +46,12 @@ ARG BUILD_FLAGS="\ -DUSE_SYSTEM_SWIG=ON\ -DUSE_NiftyPET=OFF\ -DBUILD_siemens_to_ismrmrd=ON -DBUILD_pet_rd_tools=ON" -ARG EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=OFF -DBUILD_CIL=OFF" +ARG EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=ON -DBUILD_CIL=OFF" # build, install in /opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}, test (if RUN_CTEST) COPY docker/user_sirf-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/user_sirf-ubuntu.sh \ - && fix-permissions /opt/SIRF-SuperBuild \ - && fix-permissions /opt/ccache + && fix-permissions /opt/SIRF-SuperBuild /opt/ccache FROM base as sirf @@ -71,15 +70,14 @@ COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ # install /opt/{SIRF-Exercises,CIL-Demos} COPY docker/user_service-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/user_service-ubuntu.sh \ - && fix-permissions /opt/SIRF-Exercises \ - && fix-permissions /opt/CIL-Demos + && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} -USER ${NB_UID} -COPY --chown=${NB_USER} docker/requirements.yml /opt/scripts/docker-requirements.yaml -RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml - -COPY --chown=${NB_USER} jupyterhub/requirements.yml /opt/scripts/jupyterhub-requirements.yaml -RUN mamba env update -n base -f /opt/scripts/jupyterhub-requirements.yaml +COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml +RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ + && fix-permissions "${CONDA_DIR}" /home/${NB_USER} +COPY jupyterhub/requirements.yml /opt/scripts/jupyterhub-requirements.yaml +RUN mamba env update -n base -f /opt/scripts/jupyterhub-requirements.yaml \ + && fix-permissions "${CONDA_DIR}" /home/${NB_USER} # install from build COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ 
/opt/SIRF-SuperBuild/INSTALL/ @@ -97,6 +95,7 @@ ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF" # Suppress output from Gadgetron which gives some problems on notebooks (QUIERO) ENV GADGETRON_LOG_MASK "" +USER ${NB_UID} ENV DEBIAN_FRONTEND '' # TODO: CMD ["jupyterhub/service.sh"] diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 208b502a..47e9c143 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -24,6 +24,13 @@ cd ../datascience-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter:datascience . cd ../../../.. -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:datascience --build-arg SIRF_IMAGE=${SIRF_IMAGE} -t synerbi/jupyter:sirf -f jupyterhub/Dockerfile . +SIRF_BUILD_ARGS=( + --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -f jupyterhub/Dockerfile . + --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=ON -DBUILD_CIL=OFF -DUSE_SYSTEM_Boost=OFF") +# ccache build +docker build -t synerbi/sirf:jupyter-build --target build "${SIRF_BUILD_ARGS[@]}" +docker run --rm -it -v ./docker/devel/.ccache:/opt/ccache synerbi/sirf:jupyter-build echo copied ccache +# full build +docker build -t synerbi/sirf:jupyter "${SIRF_BUILD_ARGS[@]}" popd From 654b178df3e10fad7f1dc760c406947d5269341e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 09:41:24 +0000 Subject: [PATCH 38/98] configurable cmake options --- jupyterhub/Dockerfile | 43 ++++++++++++++++++++++++------- jupyterhub/build_docker_stacks.sh | 5 +++- 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 9d65fc75..1c872ccc 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -38,19 +38,42 @@ ARG SIRF_SB_TAG="master" ARG REMOVE_BUILD_FILES=1 ARG RUN_CTEST=1 ARG NUM_PARALLEL_BUILDS=" " -ARG BUILD_FLAGS="\ - -DCMAKE_BUILD_TYPE=Release\ - -DSTIR_ENABLE_OPENMP=ON -DUSE_SYSTEM_ACE=ON\ - -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=ON\ - -DUSE_SYSTEM_FFTW3=ON -DUSE_SYSTEM_HDF5=OFF -DUSE_ITK=ON\ - -DUSE_SYSTEM_SWIG=ON\ - -DUSE_NiftyPET=OFF\ - -DBUILD_siemens_to_ismrmrd=ON -DBUILD_pet_rd_tools=ON" -ARG EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=ON -DBUILD_CIL=OFF" +# CMake options +ARG CMAKE_BUILD_TYPE="Release" +ARG STIR_ENABLE_OPENMP="ON" +ARG USE_SYSTEM_ACE="ON" +ARG USE_SYSTEM_Armadillo="ON" +ARG USE_SYSTEM_Boost="ON" +ARG USE_SYSTEM_FFTW3="ON" +ARG USE_SYSTEM_HDF5="OFF" +ARG USE_ITK="ON" +ARG USE_SYSTEM_SWIG="ON" +ARG USE_NiftyPET="OFF" +ARG BUILD_siemens_to_ismrmrd="ON" +ARG BUILD_pet_rd_tools="ON" +ARG Gadgetron_USE_CUDA="ON" +ARG BUILD_CIL="OFF" +ARG EXTRA_BUILD_FLAGS="" # build, install in /opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}, test (if RUN_CTEST) COPY docker/user_sirf-ubuntu.sh /opt/scripts/ -RUN bash /opt/scripts/user_sirf-ubuntu.sh \ +RUN BUILD_FLAGS="\ + -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}\ + -DSTIR_ENABLE_OPENMP=${STIR_ENABLE_OPENMP}\ + -DUSE_SYSTEM_ACE=${USE_SYSTEM_ACE}\ + -DUSE_SYSTEM_Armadillo=${USE_SYSTEM_Armadillo}\ + -DUSE_SYSTEM_Boost=${USE_SYSTEM_Boost}\ + -DUSE_SYSTEM_FFTW3=${USE_SYSTEM_FFTW3}\ + -DUSE_SYSTEM_HDF5=${USE_SYSTEM_HDF5}\ + -DUSE_ITK=${USE_ITK}\ + -DUSE_SYSTEM_SWIG=${USE_SYSTEM_SWIG}\ + -DUSE_NiftyPET=${USE_NiftyPET}\ + -DBUILD_siemens_to_ismrmrd=${BUILD_siemens_to_ismrmrd}\ + -DBUILD_pet_rd_tools=${BUILD_pet_rd_tools}\ + -DGadgetron_USE_CUDA=${Gadgetron_USE_CUDA}\ + -DBUILD_CIL=${BUILD_CIL}" \ + EXTRA_BUILD_FLAGS="${EXTRA_BUILD_FLAGS}" \ + bash /opt/scripts/user_sirf-ubuntu.sh \ && 
fix-permissions /opt/SIRF-SuperBuild /opt/ccache FROM base as sirf diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 47e9c143..33c0d6f0 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -26,7 +26,10 @@ docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter cd ../../../.. SIRF_BUILD_ARGS=( --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -f jupyterhub/Dockerfile . - --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_USE_CUDA=ON -DBUILD_CIL=OFF -DUSE_SYSTEM_Boost=OFF") + --build-arg Gadgetron_USE_CUDA=ON + --build-arg BUILD_CIL=OFF + --build-arg USE_SYSTEM_Boost=OFF +) # ccache build docker build -t synerbi/sirf:jupyter-build --target build "${SIRF_BUILD_ARGS[@]}" docker run --rm -it -v ./docker/devel/.ccache:/opt/ccache synerbi/sirf:jupyter-build echo copied ccache From a4491b734b19e8ba39f701f9ddf338fa114aa48e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 17:08:18 +0000 Subject: [PATCH 39/98] fix build & test --- jupyterhub/Dockerfile | 27 ++++++++++++++++----------- jupyterhub/build_docker_stacks.sh | 12 +++++------- jupyterhub/docker-stacks | 2 +- 3 files changed, 22 insertions(+), 19 deletions(-) diff --git a/jupyterhub/Dockerfile b/jupyterhub/Dockerfile index 1c872ccc..e0882527 100644 --- a/jupyterhub/Dockerfile +++ b/jupyterhub/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -ARG BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 +ARG BASE_CONTAINER=quay.io/jupyter/scipy-notebook:latest FROM ${BASE_CONTAINER} as base USER root @@ -23,10 +23,17 @@ RUN bash /opt/scripts/build_essential-ubuntu.sh COPY docker/build_gadgetron-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/build_gadgetron-ubuntu.sh +# SIRF external deps COPY docker/build_system-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/build_system-ubuntu.sh ENV PATH="/opt/cmake/bin:${PATH}" +# SIRF python deps +COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml +# https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments +RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ + && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} + # ccache COPY --link docker/devel/.ccache/ /opt/ccache/ RUN ccache -o cache_dir=/opt/ccache @@ -35,7 +42,7 @@ ENV PATH="/usr/lib/ccache:${PATH}" # SIRF-SuperBuild config ARG SIRF_SB_URL="https://github.com/SyneRBI/SIRF-SuperBuild" ARG SIRF_SB_TAG="master" -ARG REMOVE_BUILD_FILES=1 +ARG REMOVE_BUILD_FILES=0 ARG RUN_CTEST=1 ARG NUM_PARALLEL_BUILDS=" " # CMake options @@ -95,17 +102,13 @@ COPY docker/user_service-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/user_service-ubuntu.sh \ && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} -COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml -RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ - && fix-permissions "${CONDA_DIR}" /home/${NB_USER} -COPY jupyterhub/requirements.yml /opt/scripts/jupyterhub-requirements.yaml -RUN mamba env update -n base -f /opt/scripts/jupyterhub-requirements.yaml \ - && fix-permissions "${CONDA_DIR}" /home/${NB_USER} - # install from build COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL/ -COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ -# TODO: COPY --from=build /usr/local/{bin,lib} ? 
+#COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ +#COPY --from=build --link /opt/conda/ /opt/conda/ +COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml +RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ + && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} # Set environment variables for SIRF ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH" @@ -120,5 +123,7 @@ ENV GADGETRON_LOG_MASK "" USER ${NB_UID} ENV DEBIAN_FRONTEND '' +ENV DOCKER_STACKS_JUPYTER_CMD="notebook" +ENV RESTARTABLE="yes" # TODO: CMD ["jupyterhub/service.sh"] diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 33c0d6f0..9e90067e 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -1,15 +1,11 @@ #!/usr/bin/env bash set -exuo pipefail -# set default sirf image -#: ${SIRF_IMAGE:=synerbi/sirf:latest} -SIRF_IMAGE="${1:-synerbi/sirf:latest}" - pushd "$(dirname "${BASH_SOURCE[0]}")" git submodule update --init --recursive cd docker-stacks/images/docker-stacks-foundation -docker build --build-arg PYTHON_VERSION=3.9 --build-arg BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation . +docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation . cd ../base-notebook docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation -t synerbi/jupyter:base . @@ -28,11 +24,13 @@ SIRF_BUILD_ARGS=( --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -f jupyterhub/Dockerfile . --build-arg Gadgetron_USE_CUDA=ON --build-arg BUILD_CIL=OFF - --build-arg USE_SYSTEM_Boost=OFF + --build-arg USE_SYSTEM_Boost=ON + --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11" ) # ccache build docker build -t synerbi/sirf:jupyter-build --target build "${SIRF_BUILD_ARGS[@]}" -docker run --rm -it -v ./docker/devel/.ccache:/opt/ccache synerbi/sirf:jupyter-build echo copied ccache +sudo rm -r ./docker/devel/.ccache/* +docker run --rm -it -v ./docker/devel/.ccache:/mnt/local synerbi/sirf:jupyter-build bash -c 'cp -r /opt/ccache/* /mnt/local/' # full build docker build -t synerbi/sirf:jupyter "${SIRF_BUILD_ARGS[@]}" diff --git a/jupyterhub/docker-stacks b/jupyterhub/docker-stacks index d91bb62b..fcb20a91 160000 --- a/jupyterhub/docker-stacks +++ b/jupyterhub/docker-stacks @@ -1 +1 @@ -Subproject commit d91bb62be0cb525529e6028d9dfcb53d20775fe5 +Subproject commit fcb20a914ed20e44a96053caf43eef6e12fb4c04 From 5990ce23744a0f39b025aceea578d0a61a50b795 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 17:09:56 +0000 Subject: [PATCH 40/98] split CPU & GPU images --- jupyterhub/build_docker_stacks.sh | 48 +++++++++++++++++++------------ 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/jupyterhub/build_docker_stacks.sh b/jupyterhub/build_docker_stacks.sh index 9e90067e..ed1ab9bc 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/jupyterhub/build_docker_stacks.sh @@ -5,33 +5,43 @@ pushd "$(dirname "${BASH_SOURCE[0]}")" git submodule update --init --recursive cd docker-stacks/images/docker-stacks-foundation -docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation . 
+docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=ubuntu:22.04 -t synerbi/jupyter:foundation-cpu . +docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation-gpu . -cd ../base-notebook -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation -t synerbi/jupyter:base . +for ver in cpu gpu; do + cd ../base-notebook + docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation-${ver} -t synerbi/jupyter:base-${ver} . -cd ../minimal-notebook -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:base -t synerbi/jupyter:minimal . + cd ../minimal-notebook + docker build --build-arg BASE_CONTAINER=synerbi/jupyter:base-${ver} -t synerbi/jupyter:minimal-${ver} . -cd ../scipy-notebook -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:minimal -t synerbi/jupyter:scipy . - -cd ../datascience-notebook -docker build --build-arg BASE_CONTAINER=synerbi/jupyter:scipy -t synerbi/jupyter:datascience . + cd ../scipy-notebook + docker build --build-arg BASE_CONTAINER=synerbi/jupyter:minimal-${ver} -t synerbi/jupyter:scipy-${ver} . +done cd ../../../.. SIRF_BUILD_ARGS=( - --build-arg BASE_CONTAINER=synerbi/jupyter:datascience -f jupyterhub/Dockerfile . - --build-arg Gadgetron_USE_CUDA=ON - --build-arg BUILD_CIL=OFF - --build-arg USE_SYSTEM_Boost=ON + build -f jupyterhub/Dockerfile . --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11" ) -# ccache build -docker build -t synerbi/sirf:jupyter-build --target build "${SIRF_BUILD_ARGS[@]}" +SIRF_CPU_BUILD_ARGS=( + --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-cpu + --build-arg Gadgetron_USE_CUDA=OFF +) +SIRF_GPU_BUILD_ARGS=( + --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-gpu + --build-arg Gadgetron_USE_CUDA=ON +) +# build +docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_CPU_BUILD_ARGS[@]}" --target build -t synerbi/jupyter:sirf-build-cpu +docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_GPU_BUILD_ARGS[@]}" --target build -t synerbi/jupyter:sirf-build-gpu +# install +docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_CPU_BUILD_ARGS[@]}" -t synerbi/sirf:jupyter +docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_GPU_BUILD_ARGS[@]}" -t synerbi/sirf:jupyter-gpu +# ccache sudo rm -r ./docker/devel/.ccache/* -docker run --rm -it -v ./docker/devel/.ccache:/mnt/local synerbi/sirf:jupyter-build bash -c 'cp -r /opt/ccache/* /mnt/local/' -# full build -docker build -t synerbi/sirf:jupyter "${SIRF_BUILD_ARGS[@]}" +for ver in cpu gpu; do + docker run --rm -it -v ./docker/devel/.ccache:/ccache synerbi/jupyter:sirf-build-${ver} bash -c 'cp -r /opt/ccache/* /ccache/' +done popd From 6ebeb790f365e82c4ba19570a87522a70b861df0 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 17:20:09 +0000 Subject: [PATCH 41/98] merge jupyterhub => docker --- .github/workflows/c-cpp.yml | 11 +- .github/workflows/docker_build.yml | 2 - .gitmodules | 4 +- jupyterhub/Dockerfile => Dockerfile | 0 docker/README-old.md | 389 +++++++++++++++++ docker/README.md | 411 ++---------------- {jupyterhub => docker}/build_docker_stacks.sh | 2 +- {jupyterhub => docker}/docker-stacks | 0 jupyterhub/README.md | 32 -- jupyterhub/requirements.yml | 21 - 10 files changed, 428 insertions(+), 444 deletions(-) rename jupyterhub/Dockerfile => Dockerfile (100%) create mode 100644 docker/README-old.md rename {jupyterhub => docker}/build_docker_stacks.sh (98%) rename {jupyterhub => docker}/docker-stacks (100%) delete 
mode 100644 jupyterhub/README.md delete mode 100644 jupyterhub/requirements.yml diff --git a/.github/workflows/c-cpp.yml b/.github/workflows/c-cpp.yml index 5dc6b6c5..52e85e76 100644 --- a/.github/workflows/c-cpp.yml +++ b/.github/workflows/c-cpp.yml @@ -7,7 +7,6 @@ on: - 'docker/Dockerfile' - 'docker/*yml' - '**.md' - - 'jupyterhub/**' - 'VirtualBox/**' - '.github/workflows/*docker*' - 'CITATION.cff' @@ -15,14 +14,12 @@ on: - '.travis.yml' - 'NOTICE.txt' - 'LICENSE.txt' - pull_request: branches: [ master ] paths-ignore: - 'docker/Dockerfile' - 'docker/*yml' - '**.md' - - 'jupyterhub/**' - 'VirtualBox/**' - '.github/workflows/*docker*' - 'CITATION.cff' @@ -30,8 +27,6 @@ on: - '.travis.yml' - 'NOTICE.txt' - 'LICENSE.txt' - - jobs: build: @@ -94,7 +89,7 @@ jobs: esac - name: install_dependencies - run: + run: cd docker; sudo bash raw-ubuntu.sh; sudo bash build_essential-ubuntu.sh; @@ -146,7 +141,7 @@ jobs: cmake -S ${GITHUB_WORKSPACE} ${BUILD_FLAGS} ${EXTRA_BUILD_FLAGS} ${DEVEL_BUILD}; - name: build shell: bash - run: | + run: | cd ${GITHUB_WORKSPACE}/build; source ~/virtualenv/bin/activate; cmake --build . -j 2; @@ -167,5 +162,5 @@ jobs: - name: tests shell: bash - run: + run: bash docker/ctest_sirf.sh diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml index e7fa4c0b..d43031f7 100644 --- a/.github/workflows/docker_build.yml +++ b/.github/workflows/docker_build.yml @@ -5,7 +5,6 @@ on: branches: [ master ] paths-ignore: - '**.md' - - 'jupyterhub/**' - 'VirtualBox/**' - '.github/workflows/c-cpp.yml' - 'CITATION.cff' @@ -18,7 +17,6 @@ on: branches: [ master ] paths-ignore: - '**.md' - - 'jupyterhub/**' - 'VirtualBox/**' - '.github/workflows/c-cpp.yml' - 'CITATION.cff' diff --git a/.gitmodules b/.gitmodules index 95ebb758..b67d9285 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ -[submodule "jupyterhub/docker-stacks"] - path = jupyterhub/docker-stacks +[submodule "docker/docker-stacks"] + path = docker/docker-stacks url = https://github.com/jupyter/docker-stacks diff --git a/jupyterhub/Dockerfile b/Dockerfile similarity index 100% rename from jupyterhub/Dockerfile rename to Dockerfile diff --git a/docker/README-old.md b/docker/README-old.md new file mode 100644 index 00000000..444bbfd9 --- /dev/null +++ b/docker/README-old.md @@ -0,0 +1,389 @@ +# SIRF in Docker + +Docker wrapper for CCP SyneRBI SIRF. + +## TL;DR, I want a Jupyter notebook service NOW +These instructions assume you have a knowledge of Docker and Docker Compose. If you don't it is highly recommended you keep reading ahead to [Introduction](#introduction) and beyond. + +1. Install [docker CE][docker-ce] and [`docker-compose`][docker-compose]. (If you are on a Mac, these are installed when you install [Docker Desktop](https://www.docker.com/products/docker-desktop)). + - (optional) If you are on Linux/CentOS/similar and have a GPU, +install the [NVidia container runtime][NVidia-container-runtime]. Be sure to run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup). +2. Download the SIRF-SuperBuild ([current master](https://github.com/SyneRBI/SIRF-SuperBuild/archive/master.zip), or +[latest release](https://github.com/SyneRBI/SIRF-SuperBuild/releases)) or +``` +git clone https://github.com/SyneRBI/SIRF-SuperBuild.git +``` +and change directory to this folder, `SIRF-SuperBuild/docker`. + +3. 
Optionally pull the pre-built image with `docker pull synerbi/sirf:service` (or `docker pull synerbi/sirf:service-gpu`), otherwise +the next line will build it, resulting in a much smaller download but longer build time. +4. Run `./sirf-compose-server up -d sirf` (or `./sirf-compose-server-gpu up -d sirf`) + - You can use a `--build` flag in this command, or `./sirf-compose-server[-gpu] build` to re-build your image if you have an old version. +5. Open a browser at . +Note that starting the container may take a few seconds the first +time, but will be very quick afterwards. +(Run `docker logs -f sirf` to see the container's progress - +eventually there should be a message stating the notebook has started.) +6. Stop the container (preserving its status) with `docker stop sirf`. +7. Next time, just do `docker start sirf`. + +[docker-ce]: https://docs.docker.com/install/ +[docker-compose]: https://github.com/docker/compose/releases +[NVidia-container-runtime]: https://github.com/nvidia/nvidia-container-runtime#installation +[SIRF-Exercises]: https://github.com/SyneRBI/SIRF-Exercises + +### Important notes: +- The `Jupyter` password is `virtual`. +- The directory is mounted at `/devel` in the docker container +from `./devel` (in this folder) on the host. The container will copy +[SIRF-Exercises] into this folder if not present. This means that +files and notebooks in `./devel` will persist between sessions and +even docker-image upgrades. +- If on Windows, `localhost` probably won't work. +Find out the service IP address using: +``` +docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' sirf +``` +and use the resultant IP instead of `localhost` (e.g.: `172.18.0.2:9999`). + +## Introduction + +Docker is a low-overhead, container-based replacement for virtual machines (VMs). + +This works on Unix-type systems, MacOS and Windows 10, but best on a linux host system due to: + +1. Possibility to get CUDA support within the container +2. `X11` windows displayed natively without needing e.g. a `vnc` server or desktop in the container + +This is probably the easiest way to directly use `SIRF` due to short +installation instructions. + + +## Prerequisites + +- Docker + + The free [Community Edition (CE)][docker-ce] is sufficient + + If you are installing on Linux, you will also have to follow the steps to [enable managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user). + + [`docker-compose`][docker-compose] + + If you are on Linux/CentOS/similar and have a GPU, install the [NVidia container runtime][NVidia-container-runtime]. +- The [`SIRF-SuperBuild` repository](https://github.com/SyneRBI/SIRF-SuperBuild) + + download and unzip or `git clone` this locally + +## Tags + +The docker images are hosted at [hub.docker.com][dockerhub-SIRF]. We upload 2 types of images (see below for more information): +- Command Line Interface (CLI)-only +- "Service" images that will serve `Jupyter` notebooks + +And additionally the Docker Tag can specify a given SuperBuild version. + +To pull directly, use: + +```sh +docker pull synerbi/sirf: +``` + +| CLI-only `` | Service (i.e. 
`Jupyter`) `` | [SuperBuild] branch/tag | +|:--- |:--- |:--- | +| `release` | `release-service` | `` | +| `` | `-service` | `` | +| `latest` | `service` and `service-gpu` | `master` | +| `devel` | `devel-service` | `master` with `cmake -DDEVEL_BUILD=ON` | + +Service images are intended to be run in the background, and expose: + +| Port(s) | Notes | +| --- | --- | +| 9999 | `Jupyter` (in folder `/devel`) | +| 8890-9 | `Jupyter` (in folder `/devel/SIRF-Exercises-<0-9>`) | +| 9002 | `Gadgetron` | + +[dockerhub-SIRF]: https://hub.docker.com/r/synerbi/sirf/ +[SuperBuild]: https://github.com/SyneRBI/SIRF-SuperBuild/ + +### Windows specific notes + +Note that Docker for Windows uses the +[Hyper-V backend][hyper-vbox]. Unfortunately, this conflicts with VirtualBox (last checked start 2021) so you would +have to en/disable Hyper-V and reboot. +You can use the older VirtualBox backend instead by using [Docker Machine]. + +You may want to consult [SIRF on Windows Subsystem for Linux][wiki-wsl] +regarding setting up Xserver/VNCserver and other UNIX-for-Windows-users tips. + +[wiki-wsl]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Bash-on-Ubuntu-on-Windows-10 +[hyper-vbox]: https://docs.docker.com/docker-for-windows/install/#what-to-know-before-you-install +[Docker Machine]: https://docs.docker.com/machine/overview/#whats-the-difference-between-docker-engine-and-docker-machine + +## Glossary + +A brief list of everything important to know for a basic working knowledge of docker. + +- *Base image*: the starting point for building a Docker image + + analogous to a clean OS (in this case `ubuntu:20.04`) +- *Layer*: a single build step + + usually represented by a single `RUN` or `COPY` line in a `Dockerfile` (e.g. `RUN apt-get install cmake`) +- *Image*: a sequence of *layers* (applied on top of a *base image*) + + analogous to a clean OS with `SIRF` installed (in this case *tagged* `synerbi/sirf`) +- *Container*: a sandboxed workspace derived from an *image* + + analogous to a running virtual machine (in this case named `sirf`) + + easily stoppable, restartable, disposable + + can be thought of as end-user-created *layers* which would never be formally part of a redistributable *image* + + each container has its own filesystem, but can share files, network connections, and devices with the host computer + +*Images* are *built* or *pulled*. *Containers* are *created* from *images*: + +- *Build*: typically refers to *pulling* a *base image*, then *building* all the *layers* necessary to form an *image* + + usually one-off +- *Pull*: typically refers to downloading an *image* from the internet (which someone else *built*) + + usually only required when there is no source code available to allow for *building* locally +- *Create*: typically refers to making a *container* from an *image* + + often recreated for a semi-clean slate - especially if data is shared with the host computer so that no data is lost on disposal + +[`docker-compose`][docker-compose] provides a way to create and launch images and containers, specifying port forwarding etc. +`docker-compose` is used to help with creating containers (and even building images). It should be added to your `PATH` or at least have the executable copied to `SIRF-SuperBuild/docker`. + +## Creating and using SIRF containers + +The docker images can be built from source or pulled using `SyneRBI/SIRF-SuperBuild`, and containers created, by following the steps below. 
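
For reference, pulling a pre-built image for a specific entry of the tag table above might look like the following (the tag names here are only examples; check [hub.docker.com][dockerhub-SIRF] for the tags currently published):

```sh
# CLI-only image pinned to a SuperBuild release (example tag)
docker pull synerbi/sirf:v3.1.0
# Jupyter "service" image built from master
docker pull synerbi/sirf:service
```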
+ +**Warnings**: + +When building an image yourself, by default the current `master` branch of the `SIRF-SuperBuild` is used. It might be +safer to specify a tag. This can be done by first setting an environment variable. e.g. in `bash` and similar shells: +```bash +export SIRF_SB_TAG=v3.1.0 +``` + +These instructions will mount the `SIRF-SuperBuild/docker/devel` folder on the host as `/devel` in the docker container. +When using a `service*` image, the container will copy +[SIRF-Exercises] into this folder if not present. This means that +files and notebooks in `/devel` will be persistent between sessions and +even docker-image upgrades. You should therefore remove the contents of +`SIRF-SuperBuild/docker/devel` if you want to really start afresh. + +### Creating a container providing a Linux *CLI* with SIRF +The default "CLI" images provide an Ubuntu environment with the SuperBuild built (see [Tags](#tags)) as a convenient environment. + +#### Using a Linux or MacOS CLI +Build/pull the image: +```bash +# Either: +SIRF-SuperBuild/docker$ docker pull synerbi/sirf +# Or: +SIRF-SuperBuild/docker$ ./sirf-compose build core sirf +``` + +For easier file and window sharing, use the provided script, `sirf-compose`, which calls `docker-compose` but handles the host user's ID and some environment variables. + +We can now create a container. + +```bash +SIRF-SuperBuild/docker$ ./sirf-compose up --no-start sirf +``` + +Here, `--no-start` delays actually starting the container. +We can now use this interactively, by starting the containder with flags `-ai`. +```bash +SIRF-SuperBuild/docker$ docker start -ai sirf +(py2) sirf:~$ gadgetron >> /dev/null & # launch Gadgetron as a "background" process +(py2) sirf:~$ python SIRF-SuperBuild/SIRF/examples/Python/MR/fully_sampled_recon.py # run a SIRF demo +(py2) sirf:~$ exit +``` + +The first line starts the `sirf` docker container. +The second line starts `gadgetron` within the container as a background process (optional, but needed for using Gadgetron, i.e. most SIRF MR functionality). +We can then run an example (or you could start an interactive python session). +We then exit the container (which also stops it). + +#### Using a Windows CLI + +``` +Either: +SIRF-SuperBuild/docker> docker pull synerbi/sirf +Or: +SIRF-SuperBuild/docker> docker-compose build core sirf +``` + +Instead of passing user IDs, Windows requires that +[file sharing is enabled](https://docs.docker.com/docker-for-windows/#shared-drives). Then: + +``` +SIRF-SuperBuild/docker> docker-compose up --no-start sirf +``` + +Using the container works in the same way as above. + +### Creating a container providing a (Linux-based) Jupyter Server with SIRF +The "server" images build upon the CLI images and automatically start a Jupyter service when run. These are convenient if you use Notebooks in your experiments, or are learning and want to run the [SIRF Exercises](https://github.com/SyneRBI/SIRF-Exercises). + +```bash +# Linux without GPU or MacOS: +SIRF-SuperBuild/docker$ ./sirf-compose-server up -d sirf +# Linux with GPU +SIRF-SuperBuild/docker$ ./sirf-compose-server-gpu up -d sirf +# Windows: +SIRF-SuperBuild/docker> sirf-compose-server up -d sirf +``` +(You may with to use the `--build` flag before `-d sirf` on any of the above commands to re-build the image at any point) + +This starts the `sirf` docker container, including `gadgetron` and +`jupyter` within the container as background processes. + +Open your favourite web browser on your host OS, and go to +. 
+If the browser is giving you a connection error, +`docker logs -f sirf` will give you the current status of the server +(there should be an eventual message about Jupyter being started). + +To stop the server and container, run `docker stop sirf`. If you also +want to remove the container, you can use instead `./sirf-compose-server down`, +see below. + +Please note that you cannot start a second `gadgetron` in a `service` container, as you would experience port conflicts. + +If you need a shell for any reason for your `service` container, you can ask the container to run Bash and drop into the shell using: + +``` +docker exec -w /devel -ti sirf /bin/bash +``` + +### sirf-compose information +The `./sirf-compose*` scripts are simple wrappers around `docker-compose`. +(You could check the corresponding `.yml` files, or even edit them to change +names or add mounts etc.) + +- For a service (Jupyter) container: + + `./sirf-compose-server` + + `./sirf-compose-server-gpu` +- For a container hosting 10 Jupyter servers: + + `./sirf-compose-server-multi` +- For a basic interactive container: + + on Linux: `./sirf-compose` + + on Windows: `docker-compose` + +Run any of the above commands without arguments for help. + +For example, to host multiple Jupyter servers in one container, simply: +``` +./sirf-compose-server-multi up -d sirf # start 10 jupyter servers +./sirf-compose-server-multi ps # print out exposed ports +./sirf-compose-server-multi stop # stop and remove the container +``` + + + +### More information on usage + +You can use `docker exec` to execute commands in a container that is already running. +This could be useful for debugging problems with the `Jupyter server` container. For instance +```sh +# check what processes are running in the container +docker exec sirf ps aux +# start an interactive bash session +docker exec -ti sirf /bin/bash +``` +Note that `exec` logs in as `root`. + +You can check which containers are running (or not) via +```sh +docker ps -a +``` +and stop and even remove them +```sh +docker stop sirf +docker rm sirf +``` +Note that `sirf-compose down` both stops and removes. + +If you choose to remove the container, +next time you will start afresh (which might not be desirable of course). +Stopped containers do not use CPU time and only some additional disk-space. However, the images are quite large. +You can check which images you have with +```sh +docker image ls +``` +(Note that this reports the "total" size, not taking into account any overlap between different layers). + +If you decide you no longer need one, you can use +```sh +docker rmi +``` + +## Notes + +- Since SIRF 3.5, by default the build files are removed on the docker image. This can be changed by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables). +If you have built the image while keeping all build files (or re-build them in an existing container), tests of SIRF and other installed packages can be run as follows: +```bash +sudo -Hu jovyan bash --login -c /devel/test.sh +``` +- Currently all `compose` files call the container `sirf`. You could edit the `.yml` file if you +want to run different versions. +- "Cannot connect to display" errors are usually fixed by running `xhost +local:""` on the host linux system. +- Non-linux users (e.g. Windows) will need to set up a desktop and vnc server in order to have a GUI. 
+- On host systems with less than 16GB RAM, you might want to set the number of parallel builds used by cmake when creating +the image to `1` (it currently defaults to `2`) by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables) +before running `compose`. + +### Links + +- [SIRF docker source] +- [Synergistic Image Reconstruction Framework (SIRF) project][SIRF] + + [SIRF wiki] +- [Collaborative Computational Project in Synergistic Reconstruction for Biomedical Imaging (CCP SyneRBI)][CCP SyneRBI] + +[SIRF docker source]: https://github.com/SyneRBI/SIRF-SuperBuild/tree/master/docker +[SIRF SuperBuild on Docker wiki]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Docker +[SIRF]: https://github.com/SyneRBI/SIRF +[SIRF wiki]: https://github.com/SyneRBI/SIRF/wiki +[CCP SyneRBI]: https://www.ccpsynerbi.ac.uk/ + +### Common errors +#### Unknown runtime specified nvidia +Problem: When trying to run `/sirf-compose-server-gpu up -d sirf` I get: +``` +ERROR: for sirf Cannot create container for service sirf: Unknown runtime specified nvidia +``` +Solutions: +- If you are on Linux, did you install the [NVidia container runtime][NVidia-container-runtime] and run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup)? +- If you are not on Linux, docker currently (June 2021) does not yet support GPU access. + + + +Problem: When trying to run `/sirf-compose-server-gpu up -d sirf` I get: +``` +ERROR: The Compose file './docker-compose.srv-gpu.yml' is invalid because: +Unsupported config option for services.gadgetron: 'runtime' +Unsupported config option for services.sirf: 'runtime' +``` +Solution: +The most likely issue is that you have an old version of `docker-compose` (you need 1.19.0 or newer). Update your docker compose as (you may need root permissions). +Note that if you have python 2 and python 3 installed you may need to use `pip` instead of `pip3`, as your docker-compose may be installed in a different python version. + +``` +pip3 uninstall docker-compose +pip3 install docker-compose +``` + +### Building problems related to gpg +When building a new docker image, you could see errors such as +``` +W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2012-cdimage.gpg are ignored as the file is not readable by user '_apt' executing apt-key. +W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2018-archive.gpg are ignored as the file is not readable by user '_apt' executing apt-key. +W: GPG error: http://archive.ubuntu.com/ubuntu jammy InRelease: The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 871920D1991BC93C +E: The repository 'http://archive.ubuntu.com/ubuntu jammy InRelease' is not signed. +``` +or +``` +E: Problem executing scripts DPkg::Post-Invoke 'rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true' +E: Sub-process returned an error code +``` +This likely means that your docker version is too old. Try upgrading it. + + +#### MacOS: Cannot connect to the Docker daemon +``` +% ./sirf-compose-server up -d sirf +Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running? +``` +To the best of our knowledge, this is a confusing error message by Docker. The Mac version does not seem run a daemon. 
Instead, you have to run Docker Desktop before typing any `docker` commands in the terminal (as suggested on https://stackoverflow.com/a/44719239/15030207) diff --git a/docker/README.md b/docker/README.md index c2ecc894..a6710290 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,396 +1,51 @@ -# SIRF in Docker +# SIRF Docker image -Docker wrapper for CCP SyneRBI SIRF. +The image contains SIRF & all dependencies required by JupyterHub. -## TL;DR, I want a Jupyter notebook service NOW -These instructions assume you have a knowledge of Docker and Docker Compose. If you don't it is highly recommended you keep reading ahead to [Introduction](#introduction) and beyond. - -1. Install [docker CE][docker-ce] and [`docker-compose`][docker-compose]. (If you are on a Mac, these are installed when you install [Docker Desktop](https://www.docker.com/products/docker-desktop)). - - (optional) If you are on Linux/CentOS/similar and have a GPU, -install the [NVidia container runtime][NVidia-container-runtime]. Be sure to run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup). -2. Download the SIRF-SuperBuild ([current master](https://github.com/SyneRBI/SIRF-SuperBuild/archive/master.zip), or -[latest release](https://github.com/SyneRBI/SIRF-SuperBuild/releases)) or -``` -git clone https://github.com/SyneRBI/SIRF-SuperBuild.git -``` -and change directory to this folder, `SIRF-SuperBuild/docker`. - -3. Optionally pull the pre-built image with `docker pull synerbi/sirf:service` (or `docker pull synerbi/sirf:service-gpu`), otherwise -the next line will build it, resulting in a much smaller download but longer build time. -4. Run `./sirf-compose-server up -d sirf` (or `./sirf-compose-server-gpu up -d sirf`) - - You can use a `--build` flag in this command, or `./sirf-compose-server[-gpu] build` to re-build your image if you have an old version. -5. Open a browser at . -Note that starting the container may take a few seconds the first -time, but will be very quick afterwards. -(Run `docker logs -f sirf` to see the container's progress - -eventually there should be a message stating the notebook has started.) -6. Stop the container (preserving its status) with `docker stop sirf`. -7. Next time, just do `docker start sirf`. - -[docker-ce]: https://docs.docker.com/install/ -[docker-compose]: https://github.com/docker/compose/releases -[NVidia-container-runtime]: https://github.com/nvidia/nvidia-container-runtime#installation -[SIRF-Exercises]: https://github.com/SyneRBI/SIRF-Exercises - -### Important notes: -- The `Jupyter` password is `virtual`. -- The directory is mounted at `/devel` in the docker container -from `./devel` (in this folder) on the host. The container will copy -[SIRF-Exercises] into this folder if not present. This means that -files and notebooks in `./devel` will persist between sessions and -even docker-image upgrades. -- If on Windows, `localhost` probably won't work. -Find out the service IP address using: -``` -docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' sirf -``` -and use the resultant IP instead of `localhost` (e.g.: `172.18.0.2:9999`). - -## Introduction - -Docker is a low-overhead, container-based replacement for virtual machines (VMs). - -This works on Unix-type systems, MacOS and Windows 10, but best on a linux host system due to: - -1. Possibility to get CUDA support within the container -2. `X11` windows displayed natively without needing e.g. 
a `vnc` server or desktop in the container - -This is probably the easiest way to directly use `SIRF` due to short -installation instructions. - - -## Prerequisites - -- Docker - + The free [Community Edition (CE)][docker-ce] is sufficient - + If you are installing on Linux, you will also have to follow the steps to [enable managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user). - + [`docker-compose`][docker-compose] - + If you are on Linux/CentOS/similar and have a GPU, install the [NVidia container runtime][NVidia-container-runtime]. -- The [`SIRF-SuperBuild` repository](https://github.com/SyneRBI/SIRF-SuperBuild) - + download and unzip or `git clone` this locally - -## Tags - -The docker images are hosted at [hub.docker.com][dockerhub-SIRF]. We upload 2 types of images (see below for more information): -- Command Line Interface (CLI)-only -- "Service" images that will serve `Jupyter` notebooks - -And additionally the Docker Tag can specify a given SuperBuild version. - -To pull directly, use: +## Usage ```sh -docker pull synerbi/sirf: +# CPU version +docker run --rm -it -p 8888:8888 synerbi/sirf:jupyter +# GPU version +docker run --rm -it -p 8888:8888 --gpus all synerbi/sirf:jupyter-gpu ``` -| CLI-only `` | Service (i.e. `Jupyter`) `` | [SuperBuild] branch/tag | -|:--- |:--- |:--- | -| `release` | `release-service` | `` | -| `` | `-service` | `` | -| `latest` | `service` and `service-gpu` | `master` | -| `devel` | `devel-service` | `master` with `cmake -DDEVEL_BUILD=ON` | - -Service images are intended to be run in the background, and expose: - -| Port(s) | Notes | -| --- | --- | -| 9999 | `Jupyter` (in folder `/devel`) | -| 8890-9 | `Jupyter` (in folder `/devel/SIRF-Exercises-<0-9>`) | -| 9002 | `Gadgetron` | - -[dockerhub-SIRF]: https://hub.docker.com/r/synerbi/sirf/ -[SuperBuild]: https://github.com/SyneRBI/SIRF-SuperBuild/ - -### Windows specific notes - -Note that Docker for Windows uses either WSL or Hyper-V backend (the latter might be incompatible with VirtualBox), -see its [documentation for more information][docker-windows]. - -You may want to consult [SIRF on Windows Subsystem for Linux][wiki-wsl] -regarding setting up Xserver/VNCserver and other UNIX-for-Windows-users tips. - -[wiki-wsl]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Bash-on-Ubuntu-on-Windows-10 -[docker-windows]: https://docs.docker.com/docker-for-windows/install/ - -## Glossary +To make the container user same as host user (useful when sharing folders), use `--user` and `--group-add`: -A brief list of everything important to know for a basic working knowledge of docker. - -- *Base image*: the starting point for building a Docker image - + analogous to a clean OS (in this case `ubuntu:20.04`) -- *Layer*: a single build step - + usually represented by a single `RUN` or `COPY` line in a `Dockerfile` (e.g. 
`RUN apt-get install cmake`) -- *Image*: a sequence of *layers* (applied on top of a *base image*) - + analogous to a clean OS with `SIRF` installed (in this case *tagged* `synerbi/sirf`) -- *Container*: a sandboxed workspace derived from an *image* - + analogous to a running virtual machine (in this case named `sirf`) - + easily stoppable, restartable, disposable - + can be thought of as end-user-created *layers* which would never be formally part of a redistributable *image* - + each container has its own filesystem, but can share files, network connections, and devices with the host computer - -*Images* are *built* or *pulled*. *Containers* are *created* from *images*: - -- *Build*: typically refers to *pulling* a *base image*, then *building* all the *layers* necessary to form an *image* - + usually one-off -- *Pull*: typically refers to downloading an *image* from the internet (which someone else *built*) - + usually only required when there is no source code available to allow for *building* locally -- *Create*: typically refers to making a *container* from an *image* - + often recreated for a semi-clean slate - especially if data is shared with the host computer so that no data is lost on disposal - -[`docker-compose`][docker-compose] provides a way to create and launch images and containers, specifying port forwarding etc. -`docker-compose` is used to help with creating containers (and even building images). It should be added to your `PATH` or at least have the executable copied to `SIRF-SuperBuild/docker`. - -## Creating and using SIRF containers - -The docker images can be built from source or pulled using `SyneRBI/SIRF-SuperBuild`, and containers created, by following the steps below. - -**Warnings**: - -When building an image yourself, by default the current `master` branch of the `SIRF-SuperBuild` is used. It might be -safer to specify a tag. This can be done by first setting an environment variable. e.g. in `bash` and similar shells: -```bash -export SIRF_SB_TAG=v3.1.0 -``` - -These instructions will mount the `SIRF-SuperBuild/docker/devel` folder on the host as `/devel` in the docker container. -When using a `service*` image, the container will copy -[SIRF-Exercises] into this folder if not present. This means that -files and notebooks in `/devel` will be persistent between sessions and -even docker-image upgrades. You should therefore remove the contents of -`SIRF-SuperBuild/docker/devel` if you want to really start afresh. - -### Creating a container providing a Linux *CLI* with SIRF -The default "CLI" images provide an Ubuntu environment with the SuperBuild built (see [Tags](#tags)) as a convenient environment. - -#### Using a Linux or MacOS CLI -Build/pull the image: -```bash -# Either: -SIRF-SuperBuild/docker$ docker pull synerbi/sirf -# Or: -SIRF-SuperBuild/docker$ ./sirf-compose build core sirf -``` - -For easier file and window sharing, use the provided script, `sirf-compose`, which calls `docker-compose` but handles the host user's ID and some environment variables. - -We can now create a container. - -```bash -SIRF-SuperBuild/docker$ ./sirf-compose up --no-start sirf -``` - -Here, `--no-start` delays actually starting the container. -We can now use this interactively, by starting the containder with flags `-ai`. 
-```bash -SIRF-SuperBuild/docker$ docker start -ai sirf -(py2) sirf:~$ gadgetron >> /dev/null & # launch Gadgetron as a "background" process -(py2) sirf:~$ python SIRF-SuperBuild/SIRF/examples/Python/MR/fully_sampled_recon.py # run a SIRF demo -(py2) sirf:~$ exit -``` - -The first line starts the `sirf` docker container. -The second line starts `gadgetron` within the container as a background process (optional, but needed for using Gadgetron, i.e. most SIRF MR functionality). -We can then run an example (or you could start an interactive python session). -We then exit the container (which also stops it). - -#### Using a Windows CLI - -``` -Either: -SIRF-SuperBuild/docker> docker pull synerbi/sirf -Or: -SIRF-SuperBuild/docker> docker-compose build core sirf -``` - -Instead of passing user IDs, Windows requires that -[file sharing is enabled](https://docs.docker.com/docker-for-windows/#shared-drives). Then: - -``` -SIRF-SuperBuild/docker> docker-compose up --no-start sirf -``` - -Using the container works in the same way as above. - -### Creating a container providing a (Linux-based) Jupyter Server with SIRF -The "server" images build upon the CLI images and automatically start a Jupyter service when run. These are convenient if you use Notebooks in your experiments, or are learning and want to run the [SIRF Exercises](https://github.com/SyneRBI/SIRF-Exercises). - -```bash -# Linux without GPU or MacOS: -SIRF-SuperBuild/docker$ ./sirf-compose-server up -d sirf -# Linux with GPU -SIRF-SuperBuild/docker$ ./sirf-compose-server-gpu up -d sirf -# Windows: -SIRF-SuperBuild/docker> sirf-compose-server up -d sirf -``` -(You may with to use the `--build` flag before `-d sirf` on any of the above commands to re-build the image at any point) - -This starts the `sirf` docker container, including `gadgetron` and -`jupyter` within the container as background processes. - -Open your favourite web browser on your host OS, and go to -. -If the browser is giving you a connection error, -`docker logs -f sirf` will give you the current status of the server -(there should be an eventual message about Jupyter being started). - -To stop the server and container, run `docker stop sirf`. If you also -want to remove the container, you can use instead `./sirf-compose-server down`, -see below. - -Please note that you cannot start a second `gadgetron` in a `service` container, as you would experience port conflicts. - -If you need a shell for any reason for your `service` container, you can ask the container to run Bash and drop into the shell using: - -``` -docker exec -w /devel -ti sirf /bin/bash -``` - -### sirf-compose information -The `./sirf-compose*` scripts are simple wrappers around `docker-compose`. -(You could check the corresponding `.yml` files, or even edit them to change -names or add mounts etc.) - -- For a service (Jupyter) container: - + `./sirf-compose-server` - + `./sirf-compose-server-gpu` -- For a container hosting 10 Jupyter servers: - + `./sirf-compose-server-multi` -- For a basic interactive container: - + on Linux: `./sirf-compose` - + on Windows: `docker-compose` - -Run any of the above commands without arguments for help. 
- -For example, to host multiple Jupyter servers in one container, simply: -``` -./sirf-compose-server-multi up -d sirf # start 10 jupyter servers -./sirf-compose-server-multi ps # print out exposed ports -./sirf-compose-server-multi stop # stop and remove the container -``` - - - -### More information on usage - -You can use `docker exec` to execute commands in a container that is already running. -This could be useful for debugging problems with the `Jupyter server` container. For instance -```sh -# check what processes are running in the container -docker exec sirf ps aux -# start an interactive bash session -docker exec -ti sirf /bin/bash -``` -Note that `exec` logs in as `root`. - -You can check which containers are running (or not) via -```sh -docker ps -a -``` -and stop and even remove them -```sh -docker stop sirf -docker rm sirf -``` -Note that `sirf-compose down` both stops and removes. - -If you choose to remove the container, -next time you will start afresh (which might not be desirable of course). -Stopped containers do not use CPU time and only some additional disk-space. However, the images are quite large. -You can check which images you have with -```sh -docker image ls -``` -(Note that this reports the "total" size, not taking into account any overlap between different layers). - -If you decide you no longer need one, you can use ```sh -docker rmi +docker run --rm -it -p 8888:8888 --user $(id -u) --group-add users -v ./docker/devel:/home/jovyan/work synerbi/sirf:jupyter ``` -## Notes +More config: https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#user-related-configurations -- Since SIRF 3.5, by default the build files are removed on the docker image. This can be changed by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables). -If you have built the image while keeping all build files (or re-build them in an existing container), tests of SIRF and other installed packages can be run (on the docker image) as follows: -```bash -sudo -Hu jovyan bash --login -c /devel/test.sh -``` -- Currently all `compose` files call the container `sirf`. You could edit the `.yml` file if you -want to run different versions. -- "Cannot connect to display" errors are usually fixed by running `xhost +local:""` on the host linux system. -- Non-linux users (e.g. Windows) will need to set up a desktop and vnc server in order to have a GUI. -- On host systems with less than 16GB RAM, you might want to set the number of parallel builds used by cmake when creating -the image to `1` (it currently defaults to `2`) by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables) -before running `compose`. +## Build the image -### Links +We use an NVIDIA CUDA Ubuntu 22.04 base image (for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build https://github.com/jupyter/docker-stacks `datascience-notebook` on top, and then install SIRF & its depdendencies. 
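
A minimal sketch of driving this build by hand, assuming the `docker-stacks` submodule has been initialised and that [`build_docker_stacks.sh`](./build_docker_stacks.sh) is run from the repository root (adjust the working directory and any script arguments to your checkout):

```sh
git submodule update --init --recursive   # fetch the docker/docker-stacks submodule
bash docker/build_docker_stacks.sh        # build the synerbi/jupyter:* chain, then the SIRF image on top
```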
-- [SIRF docker source] -- [Synergistic Image Reconstruction Framework (SIRF) project][SIRF] - + [SIRF wiki] -- [Collaborative Computational Project in Synergistic Reconstruction for Biomedical Imaging (CCP SyneRBI)][CCP SyneRBI] +The strategy is: -[SIRF docker source]: https://github.com/SyneRBI/SIRF-SuperBuild/tree/master/docker -[SIRF SuperBuild on Docker wiki]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Docker -[SIRF]: https://github.com/SyneRBI/SIRF -[SIRF wiki]: https://github.com/SyneRBI/SIRF/wiki -[CCP SyneRBI]: https://www.ccpsynerbi.ac.uk/ +1. Use a recent Ubuntu CuDNN runtime image from https://hub.docker.com/r/nvidia/cuda as base +2. Build https://github.com/jupyter/docker-stacks/tree/main/images/datascience-notebook on top +3. Copy & run the SIRF `docker/build_*.sh` scripts +4. Copy the SIRF installation directories from the `synerbi/sirf:latest` image +5. Install CIL (via `conda`) -### Common errors -#### Unknown runtime specified nvidia -Problem: When trying to run `/sirf-compose-server-gpu up -d sirf` I get: -``` -ERROR: for sirf Cannot create container for service sirf: Unknown runtime specified nvidia -``` -Solutions: -- If you are on Linux, did you install the [NVidia container runtime][NVidia-container-runtime] and run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup)? -- If you are not on Linux, docker currently (June 2021) does not yet support GPU access. +All of this is done by [`build_docker_stacks.sh`](./build_docker_stacks.sh). +### More info +https://github.com/jupyter/docker-stacks is used to gradually build up images: -Problem: When trying to run `./sirf-compose-server-gpu up -d sirf` I get: -``` -ERROR: The Compose file './docker-compose.srv-gpu.yml' is invalid because: -Unsupported config option for services.gadgetron: 'runtime' -Unsupported config option for services.sirf: 'runtime' -``` -Solution: -The most likely issue is that you have an old version of `docker-compose` (you need 1.19.0 or newer). Update your docker compose as (you may need root permissions). -Note that if you have python 2 and python 3 installed you may need to use `pip` instead of `pip3`, as your docker-compose may be installed in a different python version. - -``` -pip3 uninstall docker-compose -pip3 install docker-compose -``` -#### docker build fails with "unknown flag" -If you see an error likely -``` -Error response from daemon: dockerfile parse error line 58: Unknown flag: link -``` -Your docker might default to an older version (this happens on Ubuntu 20.04 for instance). It might be resolved by doing -```bash -export DOCKER_BUILDKIT=1 -``` -(or the equivalent for your shell). If not, try to upgrade your docker daemon. - -#### Building problems related to gpg -When building a new docker image, you could see errors such as -``` -W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2012-cdimage.gpg are ignored as the file is not readable by user '_apt' executing apt-key. -W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2018-archive.gpg are ignored as the file is not readable by user '_apt' executing apt-key. -W: GPG error: http://archive.ubuntu.com/ubuntu jammy InRelease: The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 871920D1991BC93C -E: The repository 'http://archive.ubuntu.com/ubuntu jammy InRelease' is not signed. 
-``` -or -``` -E: Problem executing scripts DPkg::Post-Invoke 'rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true' -E: Sub-process returned an error code -``` -This likely means that your docker version is too old. Try upgrading it. - - -#### MacOS: Cannot connect to the Docker daemon -``` -% ./sirf-compose-server up -d sirf -Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running? -``` -To the best of our knowledge, this is a confusing error message by Docker. The Mac version does not seem run a daemon. Instead, you have to run Docker Desktop before typing any `docker` commands in the terminal (as suggested on https://stackoverflow.com/a/44719239/15030207) +- `BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` +- `docker-stacks-foundation` -> `synerbi/jupyter:foundation` +- `base-notebook` -> `synerbi/jupyter:base` +- `minimal-notebook` -> `synerbi/jupyter:minimal` +- `scipy-notebook` -> `synerbi/jupyter:scipy` +- `datascience-notebook` -> `synerbi/jupyter:datascience` +- [`Dockerfile`](./Dockerfile) -> `synerbi/jupyter:sirf` + + Copy & run the SIRF `build_{essential,gadgetron,system}.sh` scripts from [`../docker`](../docker) + + Copy `/opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}` directories from the `synerbi/sirf:latest` image + + Install docker/requirements.yml + + Clone & setup https://github.com/SyneRBI/SIRF-Exercises + + Set some environment variables (e.g. `PYTHONPATH=/opt/SIRF-SuperBuild/INSTALL/python`, `OMP_NUM_THREADS=$(( cpu_count/2 ))`) diff --git a/jupyterhub/build_docker_stacks.sh b/docker/build_docker_stacks.sh similarity index 98% rename from jupyterhub/build_docker_stacks.sh rename to docker/build_docker_stacks.sh index ed1ab9bc..c1fd9f94 100755 --- a/jupyterhub/build_docker_stacks.sh +++ b/docker/build_docker_stacks.sh @@ -21,7 +21,7 @@ done cd ../../../.. SIRF_BUILD_ARGS=( - build -f jupyterhub/Dockerfile . + build . --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11" ) SIRF_CPU_BUILD_ARGS=( diff --git a/jupyterhub/docker-stacks b/docker/docker-stacks similarity index 100% rename from jupyterhub/docker-stacks rename to docker/docker-stacks diff --git a/jupyterhub/README.md b/jupyterhub/README.md deleted file mode 100644 index da4c258f..00000000 --- a/jupyterhub/README.md +++ /dev/null @@ -1,32 +0,0 @@ -## Build the JupyterHub image - -The image contains SIRF & all dependencies required by jupyterhub. - -We use an NVIDIA CUDA Ubuntu 22.04 base image (for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build https://github.com/jupyter/docker-stacks `datascience-notebook` on top, and then install SIRF & its depdendencies. - -The strategy is: - -1. Use a recent Ubuntu CuDNN runtime image from https://hub.docker.com/r/nvidia/cuda as base -2. Build https://github.com/jupyter/docker-stacks/tree/main/images/datascience-notebook on top -3. Copy & run the SIRF `docker/build_*.sh` scripts -4. Copy the SIRF installation directories from the `synerbi/sirf:latest` image -5. Install CIL (via `conda`) - -All of this is done by [`build_docker_stacks.sh`](./build_docker_stacks.sh). 
- -### More info - -https://github.com/jupyter/docker-stacks is used to gradually build up images: - -- `BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` -- `docker-stacks-foundation` -> `synerbi/jupyter:foundation` -- `base-notebook` -> `synerbi/jupyter:base` -- `minimal-notebook` -> `synerbi/jupyter:minimal` -- `scipy-notebook` -> `synerbi/jupyter:scipy` -- `datascience-notebook` -> `synerbi/jupyter:datascience` -- [`Dockerfile`](./Dockerfile) -> `synerbi/jupyter:sirf` - + Copy & run the SIRF `build_{essential,gadgetron,system}.sh` scripts from [`../docker`](../docker) - + Copy `/opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}` directories from the `synerbi/sirf:latest` image - + Install docker/requirements.yml, jupyterhub/requirements.yml, jupyterhub/requirements-service.yml - + Clone & setup https://github.com/SyneRBI/SIRF-Exercises - + Set some environment variables (e.g. `PYTHONPATH=/opt/SIRF-SuperBuild/INSTALL/python`, `OMP_NUM_THREADS=$(( cpu_count/2 ))`) diff --git a/jupyterhub/requirements.yml b/jupyterhub/requirements.yml deleted file mode 100644 index 451f6143..00000000 --- a/jupyterhub/requirements.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: base -channels: - - conda-forge - - intel - - ccpi - - astra-toolbox - - defaults -dependencies: - - numpy=1.22 - - scikit-image - - pytorch - - torchvision - - ipywidgets<8 - - astra-toolbox - - tigre=2.4 - - dxchange - - pip: - - brainweb # CIL - - cuqipy-cil - - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools - From 44ddc587c6fafc0d53a109d47b7d4fac1e4be526 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 17:48:19 +0000 Subject: [PATCH 42/98] mamba install cmake --- Dockerfile | 1 - docker/build_essential-ubuntu.sh | 14 ++++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index e0882527..1903808e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,7 +26,6 @@ RUN bash /opt/scripts/build_gadgetron-ubuntu.sh # SIRF external deps COPY docker/build_system-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/build_system-ubuntu.sh -ENV PATH="/opt/cmake/bin:${PATH}" # SIRF python deps COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml diff --git a/docker/build_essential-ubuntu.sh b/docker/build_essential-ubuntu.sh index 1c6234b1..521d2ea1 100755 --- a/docker/build_essential-ubuntu.sh +++ b/docker/build_essential-ubuntu.sh @@ -20,10 +20,16 @@ apt-get clean pushd $INSTALL_DIR # CMake -curl -o cmake.tgz -L https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-x86_64.tar.gz -tar xzf cmake.tgz && rm cmake.tgz -ln -s cmake-*x86_64 cmake || true -export PATH="$PWD/cmake/bin:$PATH" +if test -n "$(command -v mamba)" -a -n "$(command -v fix-permissions)"; then + mamba install -y cmake + mamba clean --all -f -y + fix-permissions "${CONDA_DIR}" /home/${NB_USER} +else + curl -o cmake.tgz -L https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-x86_64.tar.gz + tar xzf cmake.tgz && rm cmake.tgz + ln -s cmake-*x86_64 cmake || true + export PATH="$PWD/cmake/bin:$PATH" +fi # ccache mkdir -p bin From 813dcd8ba055320ea57d720363eec743cb37ee80 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 17:50:03 +0000 Subject: [PATCH 43/98] CIL-Demos and CIL-Exercises in workdir --- Dockerfile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 1903808e..ce012eba 100644 --- a/Dockerfile +++ b/Dockerfile @@ -96,15 +96,16 @@ RUN echo 
"export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; pri COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ # RUN sed -i s:PYTHON_INSTALL_DIR:${CONDA_DIR}:g /home/${NB_USER}/.bashrc -# install /opt/{SIRF-Exercises,CIL-Demos} +# install {SIRF-Exercises,CIL-Demos} COPY docker/user_service-ubuntu.sh /opt/scripts/ -RUN bash /opt/scripts/user_service-ubuntu.sh \ - && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} +RUN INSTALL_DIR=. bash /opt/scripts/user_service-ubuntu.sh \ + && fix-permissions SIRF-Exercises CIL-Demos "${CONDA_DIR}" /home/${NB_USER} # install from build COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL/ #COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ #COPY --from=build --link /opt/conda/ /opt/conda/ +# SIRF python deps COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} From e5ef7008958746774de2023a398aeb04618d5d83 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 20:02:52 +0000 Subject: [PATCH 44/98] kill old docker --- docker-compose.yml | 63 ++++ docker/Dockerfile | 228 --------------- docker/README-old.md | 389 ------------------------- docker/build_docker_stacks.sh | 2 + docker/docker-compose.devel.yml | 25 +- docker/docker-compose.gpu.yml | 36 +++ docker/docker-compose.nix.yml | 9 +- docker/docker-compose.srv-gpu.yml | 25 -- docker/docker-compose.srv-multi.yml | 13 - docker/docker-compose.srv.yml | 20 -- docker/docker-compose.yml | 58 ---- docker/sirf-compose | 5 - docker/sirf-compose-devel | 3 - docker/sirf-compose-devel-server | 3 - docker/sirf-compose-devel-server-gpu | 3 - docker/sirf-compose-devel-server-multi | 3 - docker/sirf-compose-server | 3 - docker/sirf-compose-server-gpu | 3 - docker/sirf-compose-server-multi | 3 - docker/sirf-compose-server-multi.bat | 2 - docker/sirf-compose-server.bat | 2 - 21 files changed, 124 insertions(+), 774 deletions(-) create mode 100644 docker-compose.yml delete mode 100644 docker/Dockerfile delete mode 100644 docker/README-old.md create mode 100644 docker/docker-compose.gpu.yml delete mode 100644 docker/docker-compose.srv-gpu.yml delete mode 100644 docker/docker-compose.srv-multi.yml delete mode 100644 docker/docker-compose.srv.yml delete mode 100644 docker/docker-compose.yml delete mode 100755 docker/sirf-compose delete mode 100755 docker/sirf-compose-devel delete mode 100755 docker/sirf-compose-devel-server delete mode 100755 docker/sirf-compose-devel-server-gpu delete mode 100755 docker/sirf-compose-devel-server-multi delete mode 100755 docker/sirf-compose-server delete mode 100755 docker/sirf-compose-server-gpu delete mode 100755 docker/sirf-compose-server-multi delete mode 100644 docker/sirf-compose-server-multi.bat delete mode 100755 docker/sirf-compose-server.bat diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..a3489aa1 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,63 @@ +version: '3.2' +services: + foundation: + image: synerbi/jupyter:foundation-cpu + build: + context: docker/docker-stacks/images/docker-stacks-foundation + args: + ROOT_CONTAINER: ubuntu:22.04 + PYTHON_VERSION: 3.9 + base: + image: synerbi/jupyter:base-cpu + build: + context: docker/docker-stacks/images/base-notebook + args: 
{BASE_CONTAINER: synerbi/jupyter:foundation-cpu} + minimal: + image: synerbi/jupyter:minimal-cpu + build: + context: docker/docker-stacks/images/minimal-notebook + args: {BASE_CONTAINER: synerbi/jupyter:base-cpu} + scipy: + image: synerbi/jupyter:scipy-cpu + build: + context: docker/docker-stacks/images/scipy-notebook + args: {BASE_CONTAINER: synerbi/jupyter:minimal-cpu} + sirf-build: + image: synerbi/jupyter:sirf-build-cpu + build: + context: . + target: build + args: + BASE_CONTAINER: synerbi/jupyter:scipy-cpu + Gadgetron_USE_CUDA: "OFF" + EXTRA_BUILD_FLAGS: > + -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b + -DISMRMRD_TAG=v1.13.7 + -Dsiemens_to_ismrmrd_TAG=v1.2.11 + sirf: + container_name: sirf # for scaling, comment this out https://github.com/docker/compose/issues/3729 + image: synerbi/sirf:jupyter + build: + context: . + args: + BASE_CONTAINER: synerbi/jupyter:scipy-cpu + Gadgetron_USE_CUDA: "OFF" + EXTRA_BUILD_FLAGS: > + -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b + -DISMRMRD_TAG=v1.13.7 + -Dsiemens_to_ismrmrd_TAG=v1.2.11 + cache_from: [synerbi/jupyter:sirf-build-cpu] + stdin_open: true + tty: true + user: ${USER_ID:-1000} + group_add: [users] + volumes: [./devel:/home/jovyan/work] + restart: unless-stopped + environment: {GADGETRON_RELAY_HOST: 0.0.0.0} + ports: + - "9002:9002" # gadgetron + - "9999:8888" # jupyter + # for scaling, use this instead for random port assignment + # (https://github.com/docker/compose/issues/2260): + #- "9000-9099:9002" + #- "8800-8899:8888" diff --git a/docker/Dockerfile b/docker/Dockerfile deleted file mode 100644 index ae6d42fb..00000000 --- a/docker/Dockerfile +++ /dev/null @@ -1,228 +0,0 @@ -# syntax=docker/dockerfile:1.4 -ARG BASE_IMAGE=ubuntu:22.04 -FROM ${BASE_IMAGE} as base - -LABEL \ - author.name="Casper da Costa-Luis, Kris Thielemans, Edoardo Pasca" \ - author.email=imaging@caspersci.uk.to \ - maintainer.email=ccppetmr@stfc.ac.uk \ - maintainer.url=https://www.ccpsynerbi.ac.uk/ \ - source.url=https://github.com/SyneRBI/SIRF-SuperBuild/ \ - licence="MPLv2.0 (https://www.mozilla.org/en-GB/MPL/2.0/)" \ - description="CCP SyneRBI Ubuntu" - -USER root -ENV DEBIAN_FRONTEND noninteractive - -COPY update_nvidia_keys.sh . -RUN bash update_nvidia_keys.sh && rm update_nvidia_keys.sh - -COPY raw-ubuntu.sh . -RUN bash raw-ubuntu.sh && rm raw-ubuntu.sh - -# Set locale, suppress warnings -#ENV LC_ALL en_GB.UTF-8 -ENV LANG en_GB.UTF-8 -ENV LANGUAGE en_GB:en - -FROM base as core - -COPY build_essential-ubuntu.sh . -RUN bash build_essential-ubuntu.sh && rm build_essential-ubuntu.sh - -# Python (build) -COPY build_python-ubuntu.sh . -RUN bash build_python-ubuntu.sh && rm build_python-ubuntu.sh - -# Gadgetron -COPY build_gadgetron-ubuntu.sh . -RUN bash build_gadgetron-ubuntu.sh && rm build_gadgetron-ubuntu.sh - -# SIRF external deps -COPY build_system-ubuntu.sh . 
-RUN bash build_system-ubuntu.sh && rm build_system-ubuntu.sh - -# X11 forwarding -RUN apt-get update -qq && apt-get install -yq --no-install-recommends \ - libx11-xcb1 \ - && apt-get clean -RUN mkdir -p /usr/share/X11/xkb -RUN [ -e /usr/bin/X ] || ln -s /usr/bin/Xorg /usr/bin/X - -RUN apt-get update -qq && apt-get install -yq --no-install-recommends \ - gosu \ - && apt-get clean - -ENV DEBIAN_FRONTEND '' -WORKDIR /home-away -COPY --link entrypoint.sh /usr/local/bin/ -ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] -CMD ["/bin/bash"] - -FROM core as sirf -LABEL description="CCP SyneRBI SIRF" - -# create user jovyan (jupyterhub seems to require this name) -ARG NB_USER="jovyan" -ARG NB_GROUP="users" -# some crazy number, hopefully not conflicting -ARG NB_UID="22222" -# standard guid for users -ARG NB_GID="100" -ARG HOME="/home/${NB_USER}" -ARG PYTHON_INSTALL_DIR="/opt/conda" -ARG PYTHON_EXECUTABLE="miniconda" - -ENV DEBIAN_FRONTEND noninteractive - -# make /opt writeable for all users -RUN chmod go+rwx /opt - -# copy to $WORKDIR (i.e. /home-away) as this is where entrypoint.sh will pick them up -COPY --link .bashrc .profile ./ -RUN sed -i s:PYTHON_INSTALL_DIR:${PYTHON_INSTALL_DIR}:g .bashrc - -# Create NB_GROUP and NB_USER -RUN (addgroup --gid ${NB_GID} "${NB_GROUP}" || true) && \ - (useradd -l -m -s /bin/bash -N -u "${NB_UID}" -g "${NB_GID}" "${NB_USER}" || true) -# TODO add into sudo ? (probably not for safety) - -WORKDIR ${HOME} - -# copy to home of NB_USER -COPY --link .bashrc .profile ./ -RUN chmod 644 .bashrc .profile && chown -R "${NB_USER}":"${NB_GROUP}" "${HOME}" -# update bashrc with the location of PYTHON_INSTALL_DIR -RUN sed -i s:PYTHON_INSTALL_DIR:${PYTHON_INSTALL_DIR}:g .bashrc - -# run user scripts as NB_USER -USER ${NB_USER}:${NB_GROUP} - -# Python (virtualenv or conda) -COPY requirements.txt requirements.yml user_python-ubuntu.sh ./ -RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash user_python-ubuntu.sh && \ - rm requirements.txt requirements.yml user_python-ubuntu.sh - - -# ccache -USER root -COPY --link devel/.ccache/ /opt/ccache/ -# give everyone write permisions -RUN chmod -R go+w /opt/ccache - - -# SIRF -USER "${NB_USER}" - -RUN ccache -o cache_dir=/opt/ccache - -COPY user_sirf-ubuntu.sh . 
- -# SIRF-SuperBuild version -ARG SIRF_SB_URL="https://github.com/SyneRBI/SIRF-SuperBuild" -ARG SIRF_SB_TAG="master" - -# set if we want to remove the build files -ARG REMOVE_BUILD_FILES=1 -# set if we want to run the tests during build -ARG RUN_CTEST=1 - -# speeding up the cmake build -# (implementation note: changing this in your environment will invalidate the docker cache sadly) -ARG NUM_PARALLEL_BUILDS="2" - -ARG BUILD_FLAGS="\ - -DCMAKE_BUILD_TYPE=Release\ - -DSTIR_ENABLE_OPENMP=ON -DUSE_SYSTEM_ACE=ON\ - -DUSE_SYSTEM_Armadillo=ON -DUSE_SYSTEM_Boost=ON\ - -DUSE_SYSTEM_FFTW3=ON -DUSE_SYSTEM_HDF5=OFF -DUSE_ITK=ON\ - -DGadgetron_USE_CUDA=OFF\ - -DUSE_SYSTEM_SWIG=ON\ - -DUSE_NiftyPET=OFF\ - -DBUILD_siemens_to_ismrmrd=ON -DBUILD_pet_rd_tools=ON" -ARG EXTRA_BUILD_FLAGS="-DBUILD_CIL=OFF" - -# build it, run ctest if required, and give everyone write and execute permission -RUN bash user_sirf-ubuntu.sh \ - && rm user_sirf-ubuntu.sh \ - && chmod -R go+rwX /opt/SIRF-SuperBuild/INSTALL - - -ENV DEBIAN_FRONTEND '' - -# go back to root (entrypoint.sh will switch users) -USER root -CMD ["/bin/bash"] - -FROM sirf as service - -ENV DEBIAN_FRONTEND noninteractive - -ARG NB_USER="jovyan" -ARG NB_GROUP="users" -ARG HOME="/home/${NB_USER}" - -USER "${NB_USER}" - -COPY requirements-service.txt requirements-service.yml requirements.yml user_service-ubuntu.sh install-sirf-exercises-dep.py ./ -RUN PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE} PYTHON_INSTALL_DIR=${PYTHON_INSTALL_DIR} bash user_service-ubuntu.sh && \ - rm requirements-service.txt requirements-service.yml requirements.yml user_service-ubuntu.sh install-sirf-exercises-dep.py - -USER root -COPY --link download_data.sh /usr/local/bin/ - -COPY --link service.sh /usr/local/bin/ -ENV DEBIAN_FRONTEND '' -CMD ["/usr/local/bin/service.sh"] - -# note: remain root (entrypoint.sh will switch users) - -FROM service as multi -COPY --link service.multi.sh /usr/local/bin/ - -FROM sirf as jupyterhub - -ENV DEBIAN_FRONTEND noninteractive - -ARG NB_USER="jovyan" -ARG NB_GROUP="users" -ARG HOME="/home/${NB_USER}" - -USER root - -# Install all OS dependencies for notebook server that starts but lacks all -# features (e.g., download as all possible file formats) -ENV DEBIAN_FRONTEND noninteractive -RUN apt-get update --yes -qq && \ - apt-get install --yes --no-install-recommends -qq \ - wget \ - ca-certificates \ - sudo \ - tini \ - locales \ - fonts-liberation \ - run-one \ - && apt-get clean && rm -rf /var/lib/apt/lists/* - -# remove some sudo permissions -RUN echo "auth requisite pam_deny.so" >> /etc/pam.d/su && \ - sed -i.bak -e 's/^%admin/#%admin/' /etc/sudoers && \ - sed -i.bak -e 's/^%sudo/#%sudo/' /etc/sudoers - -## Add Tini (note: no tini apt-package yet in Ubuntu 18.04) -## but now commented out as we've moved to more recent Ubuntu -#ENV TINI_VERSION v0.19.0 -#ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /tini -#RUN chmod +x /tini - -# TODO install jupyterhub requirements - -USER "${NB_USER}" - -ENTRYPOINT ["/tini", "-g", "--"] -# TODO some stuff here -#CMD ["/usr/local/bin/service-jupyterhub.sh"] - -# TODO set some env variables (or do it in a compose file) - -ENV DEBIAN_FRONTEND '' diff --git a/docker/README-old.md b/docker/README-old.md deleted file mode 100644 index 444bbfd9..00000000 --- a/docker/README-old.md +++ /dev/null @@ -1,389 +0,0 @@ -# SIRF in Docker - -Docker wrapper for CCP SyneRBI SIRF. - -## TL;DR, I want a Jupyter notebook service NOW -These instructions assume you have a knowledge of Docker and Docker Compose. 
If you don't it is highly recommended you keep reading ahead to [Introduction](#introduction) and beyond. - -1. Install [docker CE][docker-ce] and [`docker-compose`][docker-compose]. (If you are on a Mac, these are installed when you install [Docker Desktop](https://www.docker.com/products/docker-desktop)). - - (optional) If you are on Linux/CentOS/similar and have a GPU, -install the [NVidia container runtime][NVidia-container-runtime]. Be sure to run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup). -2. Download the SIRF-SuperBuild ([current master](https://github.com/SyneRBI/SIRF-SuperBuild/archive/master.zip), or -[latest release](https://github.com/SyneRBI/SIRF-SuperBuild/releases)) or -``` -git clone https://github.com/SyneRBI/SIRF-SuperBuild.git -``` -and change directory to this folder, `SIRF-SuperBuild/docker`. - -3. Optionally pull the pre-built image with `docker pull synerbi/sirf:service` (or `docker pull synerbi/sirf:service-gpu`), otherwise -the next line will build it, resulting in a much smaller download but longer build time. -4. Run `./sirf-compose-server up -d sirf` (or `./sirf-compose-server-gpu up -d sirf`) - - You can use a `--build` flag in this command, or `./sirf-compose-server[-gpu] build` to re-build your image if you have an old version. -5. Open a browser at . -Note that starting the container may take a few seconds the first -time, but will be very quick afterwards. -(Run `docker logs -f sirf` to see the container's progress - -eventually there should be a message stating the notebook has started.) -6. Stop the container (preserving its status) with `docker stop sirf`. -7. Next time, just do `docker start sirf`. - -[docker-ce]: https://docs.docker.com/install/ -[docker-compose]: https://github.com/docker/compose/releases -[NVidia-container-runtime]: https://github.com/nvidia/nvidia-container-runtime#installation -[SIRF-Exercises]: https://github.com/SyneRBI/SIRF-Exercises - -### Important notes: -- The `Jupyter` password is `virtual`. -- The directory is mounted at `/devel` in the docker container -from `./devel` (in this folder) on the host. The container will copy -[SIRF-Exercises] into this folder if not present. This means that -files and notebooks in `./devel` will persist between sessions and -even docker-image upgrades. -- If on Windows, `localhost` probably won't work. -Find out the service IP address using: -``` -docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' sirf -``` -and use the resultant IP instead of `localhost` (e.g.: `172.18.0.2:9999`). - -## Introduction - -Docker is a low-overhead, container-based replacement for virtual machines (VMs). - -This works on Unix-type systems, MacOS and Windows 10, but best on a linux host system due to: - -1. Possibility to get CUDA support within the container -2. `X11` windows displayed natively without needing e.g. a `vnc` server or desktop in the container - -This is probably the easiest way to directly use `SIRF` due to short -installation instructions. - - -## Prerequisites - -- Docker - + The free [Community Edition (CE)][docker-ce] is sufficient - + If you are installing on Linux, you will also have to follow the steps to [enable managing docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user). - + [`docker-compose`][docker-compose] - + If you are on Linux/CentOS/similar and have a GPU, install the [NVidia container runtime][NVidia-container-runtime]. 
-- The [`SIRF-SuperBuild` repository](https://github.com/SyneRBI/SIRF-SuperBuild) - + download and unzip or `git clone` this locally - -## Tags - -The docker images are hosted at [hub.docker.com][dockerhub-SIRF]. We upload 2 types of images (see below for more information): -- Command Line Interface (CLI)-only -- "Service" images that will serve `Jupyter` notebooks - -And additionally the Docker Tag can specify a given SuperBuild version. - -To pull directly, use: - -```sh -docker pull synerbi/sirf: -``` - -| CLI-only `` | Service (i.e. `Jupyter`) `` | [SuperBuild] branch/tag | -|:--- |:--- |:--- | -| `release` | `release-service` | `` | -| `` | `-service` | `` | -| `latest` | `service` and `service-gpu` | `master` | -| `devel` | `devel-service` | `master` with `cmake -DDEVEL_BUILD=ON` | - -Service images are intended to be run in the background, and expose: - -| Port(s) | Notes | -| --- | --- | -| 9999 | `Jupyter` (in folder `/devel`) | -| 8890-9 | `Jupyter` (in folder `/devel/SIRF-Exercises-<0-9>`) | -| 9002 | `Gadgetron` | - -[dockerhub-SIRF]: https://hub.docker.com/r/synerbi/sirf/ -[SuperBuild]: https://github.com/SyneRBI/SIRF-SuperBuild/ - -### Windows specific notes - -Note that Docker for Windows uses the -[Hyper-V backend][hyper-vbox]. Unfortunately, this conflicts with VirtualBox (last checked start 2021) so you would -have to en/disable Hyper-V and reboot. -You can use the older VirtualBox backend instead by using [Docker Machine]. - -You may want to consult [SIRF on Windows Subsystem for Linux][wiki-wsl] -regarding setting up Xserver/VNCserver and other UNIX-for-Windows-users tips. - -[wiki-wsl]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Bash-on-Ubuntu-on-Windows-10 -[hyper-vbox]: https://docs.docker.com/docker-for-windows/install/#what-to-know-before-you-install -[Docker Machine]: https://docs.docker.com/machine/overview/#whats-the-difference-between-docker-engine-and-docker-machine - -## Glossary - -A brief list of everything important to know for a basic working knowledge of docker. - -- *Base image*: the starting point for building a Docker image - + analogous to a clean OS (in this case `ubuntu:20.04`) -- *Layer*: a single build step - + usually represented by a single `RUN` or `COPY` line in a `Dockerfile` (e.g. `RUN apt-get install cmake`) -- *Image*: a sequence of *layers* (applied on top of a *base image*) - + analogous to a clean OS with `SIRF` installed (in this case *tagged* `synerbi/sirf`) -- *Container*: a sandboxed workspace derived from an *image* - + analogous to a running virtual machine (in this case named `sirf`) - + easily stoppable, restartable, disposable - + can be thought of as end-user-created *layers* which would never be formally part of a redistributable *image* - + each container has its own filesystem, but can share files, network connections, and devices with the host computer - -*Images* are *built* or *pulled*. 
*Containers* are *created* from *images*:
-
-- *Build*: typically refers to *pulling* a *base image*, then *building* all the *layers* necessary to form an *image*
-  + usually one-off
-- *Pull*: typically refers to downloading an *image* from the internet (which someone else *built*)
-  + usually only required when there is no source code available to allow for *building* locally
-- *Create*: typically refers to making a *container* from an *image*
-  + often recreated for a semi-clean slate - especially if data is shared with the host computer so that no data is lost on disposal
-
-[`docker-compose`][docker-compose] provides a way to create and launch images and containers, specifying port forwarding etc.
-`docker-compose` is used to help with creating containers (and even building images). It should be added to your `PATH`, or at least have its executable copied to `SIRF-SuperBuild/docker`.
-
-## Creating and using SIRF containers
-
-The docker images can be built from source using `SyneRBI/SIRF-SuperBuild`, or pulled; containers can then be created by following the steps below.
-
-**Warnings**:
-
-When building an image yourself, by default the current `master` branch of the `SIRF-SuperBuild` is used. It might be
-safer to specify a tag. This can be done by first setting an environment variable, e.g. in `bash` and similar shells:
-```bash
-export SIRF_SB_TAG=v3.1.0
-```
-
-These instructions will mount the `SIRF-SuperBuild/docker/devel` folder on the host as `/devel` in the docker container.
-When using a `service*` image, the container will copy
-[SIRF-Exercises] into this folder if not present. This means that
-files and notebooks in `/devel` will persist between sessions and
-even docker-image upgrades. You should therefore remove the contents of
-`SIRF-SuperBuild/docker/devel` if you want to really start afresh.
-
-### Creating a container providing a Linux *CLI* with SIRF
-The default "CLI" images provide a convenient Ubuntu environment with the SuperBuild already built (see [Tags](#tags)).
-
-#### Using a Linux or MacOS CLI
-Build/pull the image:
-```bash
-# Either:
-SIRF-SuperBuild/docker$ docker pull synerbi/sirf
-# Or:
-SIRF-SuperBuild/docker$ ./sirf-compose build core sirf
-```
-
-For easier file and window sharing, use the provided script, `sirf-compose`, which calls `docker-compose` but handles the host user's ID and some environment variables.
-
-We can now create a container:
-
-```bash
-SIRF-SuperBuild/docker$ ./sirf-compose up --no-start sirf
-```
-
-Here, `--no-start` delays actually starting the container.
-We can now use this interactively by starting the container with the flags `-ai`:
-```bash
-SIRF-SuperBuild/docker$ docker start -ai sirf
-(py2) sirf:~$ gadgetron >> /dev/null & # launch Gadgetron as a "background" process
-(py2) sirf:~$ python SIRF-SuperBuild/SIRF/examples/Python/MR/fully_sampled_recon.py # run a SIRF demo
-(py2) sirf:~$ exit
-```
-
-The first line starts the `sirf` docker container.
-The second line starts `gadgetron` within the container as a background process (optional, but needed for using Gadgetron, i.e. most SIRF MR functionality).
-We can then run an example (or you could start an interactive python session).
-We then exit the container (which also stops it).
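
For reference, `sirf-compose` and the other `sirf-compose*` scripts (described further below) are thin wrappers around `docker-compose`. A minimal sketch of what such a wrapper does is shown here; the shipped scripts may differ in detail, so check `SIRF-SuperBuild/docker/sirf-compose*` for the exact versions:

```bash
#!/usr/bin/env bash
# Sketch of a sirf-compose-style wrapper: export the host user's IDs so the
# container user matches the host user, then forward all arguments to
# docker-compose together with the relevant compose files.
cd "$(dirname "$0")"
export USER_ID=$(id -u)
export GROUP_ID=$(id -g)
docker-compose -f docker-compose.yml -f docker-compose.nix.yml "$@"
```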
-
-#### Using a Windows CLI
-
-```
-Either:
-SIRF-SuperBuild/docker> docker pull synerbi/sirf
-Or:
-SIRF-SuperBuild/docker> docker-compose build core sirf
-```
-
-Instead of passing user IDs, Windows requires that
-[file sharing is enabled](https://docs.docker.com/docker-for-windows/#shared-drives). Then:
-
-```
-SIRF-SuperBuild/docker> docker-compose up --no-start sirf
-```
-
-Using the container works in the same way as above.
-
-### Creating a container providing a (Linux-based) Jupyter Server with SIRF
-The "service" images build upon the CLI images and automatically start a Jupyter server when run. These are convenient if you use notebooks in your experiments, or are learning and want to run the [SIRF Exercises](https://github.com/SyneRBI/SIRF-Exercises).
-
-```bash
-# Linux without GPU or MacOS:
-SIRF-SuperBuild/docker$ ./sirf-compose-server up -d sirf
-# Linux with GPU
-SIRF-SuperBuild/docker$ ./sirf-compose-server-gpu up -d sirf
-# Windows:
-SIRF-SuperBuild/docker> sirf-compose-server up -d sirf
-```
-(You may wish to use the `--build` flag before `-d sirf` on any of the above commands to re-build the image at any point.)
-
-This starts the `sirf` docker container, including `gadgetron` and
-`jupyter` within the container as background processes.
-
-Open your favourite web browser on your host OS, and go to
-http://localhost:9999.
-If the browser is giving you a connection error,
-`docker logs -f sirf` will give you the current status of the server
-(there should be an eventual message about Jupyter being started).
-
-To stop the server and container, run `docker stop sirf`. If you also
-want to remove the container, you can instead use `./sirf-compose-server down`;
-see below.
-
-Please note that you cannot start a second `gadgetron` in a `service` container, as you would experience port conflicts.
-
-If you need a shell in your `service` container for any reason, you can ask the container to run Bash and drop into the shell using:
-
-```
-docker exec -w /devel -ti sirf /bin/bash
-```
-
-### sirf-compose information
-The `./sirf-compose*` scripts are simple wrappers around `docker-compose`.
-(You could check the corresponding `.yml` files, or even edit them to change
-names or add mounts etc.)
-
-- For a service (Jupyter) container:
-  + `./sirf-compose-server`
-  + `./sirf-compose-server-gpu`
-- For a container hosting 10 Jupyter servers:
-  + `./sirf-compose-server-multi`
-- For a basic interactive container:
-  + on Linux: `./sirf-compose`
-  + on Windows: `docker-compose`
-
-Run any of the above commands without arguments for help.
-
-For example, to host multiple Jupyter servers in one container, simply:
-```
-./sirf-compose-server-multi up -d sirf # start 10 jupyter servers
-./sirf-compose-server-multi ps # print out exposed ports
-./sirf-compose-server-multi stop # stop the container (use `down` to also remove it)
-```
-
-
-
-### More information on usage
-
-You can use `docker exec` to execute commands in a container that is already running.
-This could be useful for debugging problems with the `Jupyter server` container. For instance:
-```sh
-# check what processes are running in the container
-docker exec sirf ps aux
-# start an interactive bash session
-docker exec -ti sirf /bin/bash
-```
-Note that `exec` logs in as `root`.
-
-You can check which containers are running (or not) via
-```sh
-docker ps -a
-```
-and stop and even remove them
-```sh
-docker stop sirf
-docker rm sirf
-```
-Note that `sirf-compose down` both stops and removes.
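
If you only stopped (rather than removed) the container, a later session can simply restart it and follow the log until the Jupyter start-up message appears, for example:

```sh
docker start sirf     # re-use the existing container (and everything in ./devel)
docker logs -f sirf   # wait for the message saying the notebook has started
```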
-
-If you choose to remove the container,
-next time you will start afresh (which might not be desirable of course).
-Stopped containers do not use CPU time and only take up some additional disk space. However, the images are quite large.
-You can check which images you have with
-```sh
-docker image ls
-```
-(Note that this reports the "total" size, not taking into account any overlap between different layers).
-
-If you decide you no longer need one, you can use
-```sh
-docker rmi <image>
-```
-
-## Notes
-
-- Since SIRF 3.5, by default the build files are removed in the docker image. This can be changed by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables).
-If you have built the image while keeping all build files (or re-built them in an existing container), tests of SIRF and other installed packages can be run as follows:
-```bash
-sudo -Hu jovyan bash --login -c /devel/test.sh
-```
-- Currently all `compose` files call the container `sirf`. You could edit the `.yml` file if you
-want to run different versions.
-- "Cannot connect to display" errors are usually fixed by running `xhost +local:""` on the host Linux system.
-- Non-Linux users (e.g. Windows) will need to set up a desktop and VNC server in order to have a GUI.
-- On host systems with less than 16GB RAM, you might want to set the number of parallel builds used by cmake when creating
-the image to `1` (it currently defaults to `2`) by [setting an environment variable](./DocForDevelopers.md#Useful-environment-variables)
-before running `compose`.
-
-### Links
-
-- [SIRF docker source]
-- [Synergistic Image Reconstruction Framework (SIRF) project][SIRF]
-  + [SIRF wiki]
-- [Collaborative Computational Project in Synergistic Reconstruction for Biomedical Imaging (CCP SyneRBI)][CCP SyneRBI]
-
-[SIRF docker source]: https://github.com/SyneRBI/SIRF-SuperBuild/tree/master/docker
-[SIRF SuperBuild on Docker wiki]: https://github.com/SyneRBI/SIRF/wiki/SIRF-SuperBuild-on-Docker
-[SIRF]: https://github.com/SyneRBI/SIRF
-[SIRF wiki]: https://github.com/SyneRBI/SIRF/wiki
-[CCP SyneRBI]: https://www.ccpsynerbi.ac.uk/
-
-### Common errors
-#### Unknown runtime specified nvidia
-Problem: When trying to run `./sirf-compose-server-gpu up -d sirf` I get:
-```
-ERROR: for sirf Cannot create container for service sirf: Unknown runtime specified nvidia
-```
-Solutions:
-- If you are on Linux, did you install the [NVidia container runtime][NVidia-container-runtime] and run the [Engine Setup](https://github.com/nvidia/nvidia-container-runtime#docker-engine-setup)?
-- If you are not on Linux, docker currently (June 2021) does not yet support GPU access.
-
-
-
-Problem: When trying to run `./sirf-compose-server-gpu up -d sirf` I get:
-```
-ERROR: The Compose file './docker-compose.srv-gpu.yml' is invalid because:
-Unsupported config option for services.gadgetron: 'runtime'
-Unsupported config option for services.sirf: 'runtime'
-```
-Solution:
-The most likely issue is that you have an old version of `docker-compose` (you need 1.19.0 or newer). Update your `docker-compose` as shown below (you may need root permissions).
-Note that if you have python 2 and python 3 installed you may need to use `pip` instead of `pip3`, as your docker-compose may be installed in a different python version.
- -``` -pip3 uninstall docker-compose -pip3 install docker-compose -``` - -### Building problems related to gpg -When building a new docker image, you could see errors such as -``` -W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2012-cdimage.gpg are ignored as the file is not readable by user '_apt' executing apt-key. -W: http://archive.ubuntu.com/ubuntu/dists/jammy/InRelease: The key(s) in the keyring /etc/apt/trusted.gpg.d/ubuntu-keyring-2018-archive.gpg are ignored as the file is not readable by user '_apt' executing apt-key. -W: GPG error: http://archive.ubuntu.com/ubuntu jammy InRelease: The following signatures couldn't be verified because the public key is not available: NO_PUBKEY 871920D1991BC93C -E: The repository 'http://archive.ubuntu.com/ubuntu jammy InRelease' is not signed. -``` -or -``` -E: Problem executing scripts DPkg::Post-Invoke 'rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true' -E: Sub-process returned an error code -``` -This likely means that your docker version is too old. Try upgrading it. - - -#### MacOS: Cannot connect to the Docker daemon -``` -% ./sirf-compose-server up -d sirf -Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running? -``` -To the best of our knowledge, this is a confusing error message by Docker. The Mac version does not seem run a daemon. Instead, you have to run Docker Desktop before typing any `docker` commands in the terminal (as suggested on https://stackoverflow.com/a/44719239/15030207) diff --git a/docker/build_docker_stacks.sh b/docker/build_docker_stacks.sh index c1fd9f94..5dcd3829 100755 --- a/docker/build_docker_stacks.sh +++ b/docker/build_docker_stacks.sh @@ -1,4 +1,6 @@ #!/usr/bin/env bash +# Release images: synerbi/sirf:jupyter, synerbi/sirf:jupyter-gpu +# Also creates intermediate (temp) images: synerbi/jupyter set -exuo pipefail pushd "$(dirname "${BASH_SOURCE[0]}")" diff --git a/docker/docker-compose.devel.yml b/docker/docker-compose.devel.yml index b0d4c5f2..f8fef85e 100644 --- a/docker/docker-compose.devel.yml +++ b/docker/docker-compose.devel.yml @@ -1,10 +1,23 @@ -version: '2.3' +version: '3.2' services: + sirf-build: + image: synerbi/jupyter:sirf-build-devel-cpu + build: + args: + EXTRA_BUILD_FLAGS: > + -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b + -DISMRMRD_TAG=v1.13.7 + -Dsiemens_to_ismrmrd_TAG=v1.2.11 + -DDEVEL_BUILD=ON sirf: - image: synerbi/sirf:devel + container_name: sirf # for scaling, comment this out https://github.com/docker/compose/issues/3729 + image: synerbi/sirf:jupyter-devel build: + context: . 
args: - EXTRA_BUILD_FLAGS: "-DDEVEL_BUILD=ON -DBUILD_CIL=OFF" - cache_from: - - synerbi/sirf:core - - synerbi/sirf:devel + EXTRA_BUILD_FLAGS: > + -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b + -DISMRMRD_TAG=v1.13.7 + -Dsiemens_to_ismrmrd_TAG=v1.2.11 + -DDEVEL_BUILD=ON + cache_from: [synerbi/jupyter:sirf-build-devel-cpu] diff --git a/docker/docker-compose.gpu.yml b/docker/docker-compose.gpu.yml new file mode 100644 index 00000000..f003a67a --- /dev/null +++ b/docker/docker-compose.gpu.yml @@ -0,0 +1,36 @@ + +version: '3.2' +services: + foundation: + image: synerbi/jupyter:foundation-gpu + build: + args: + ROOT_CONTAINER: nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 + PYTHON_VERSION: 3.9 + base: + image: synerbi/jupyter:base-gpu + build: + args: {BASE_CONTAINER: synerbi/jupyter:foundation-gpu} + minimal: + image: synerbi/jupyter:minimal-gpu + build: + args: {BASE_CONTAINER: synerbi/jupyter:base-gpu} + scipy: + image: synerbi/jupyter:scipy-gpu + build: + args: {BASE_CONTAINER: synerbi/jupyter:minimal-gpu} + sirf-build: + image: synerbi/jupyter:sirf-build-gpu + build: + target: build + args: + BASE_CONTAINER: synerbi/jupyter:scipy-gpu + Gadgetron_USE_CUDA: "ON" + sirf: + deploy: {resources: {reservations: {devices: [{driver: nvidia, count: all, capabilities: [gpu]}]}}} + image: synerbi/sirf:jupyter-gpu + build: + args: + BASE_CONTAINER: synerbi/jupyter:scipy-gpu + Gadgetron_USE_CUDA: "ON" + cache_from: [synerbi/jupyter:sirf-build-gpu] diff --git a/docker/docker-compose.nix.yml b/docker/docker-compose.nix.yml index 26c2bb3a..0d479e34 100644 --- a/docker/docker-compose.nix.yml +++ b/docker/docker-compose.nix.yml @@ -1,8 +1,7 @@ -version: '2.3' +version: '3.2' services: sirf: - environment: - - DISPLAY + environment: [DISPLAY] volumes: - - /tmp/.X11-unix:/tmp/.X11-unix - - /tmp/.docker.xauth:/tmp/.docker.xauth + - /tmp/.X11-unix:/tmp/.X11-unix + - /tmp/.docker.xauth:/tmp/.docker.xauth diff --git a/docker/docker-compose.srv-gpu.yml b/docker/docker-compose.srv-gpu.yml deleted file mode 100644 index dbb4cb9d..00000000 --- a/docker/docker-compose.srv-gpu.yml +++ /dev/null @@ -1,25 +0,0 @@ -version: '2.3' -services: - sirf: - runtime: nvidia - image: synerbi/sirf:service-gpu - build: - target: service - args: - BASE_IMAGE: nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 - EXTRA_BUILD_FLAGS: "-DBUILD_CIL=OFF" - cache_from: - - synerbi/sirf:core-gpu - - synerbi/sirf:service-gpu - core: - image: synerbi/sirf:core-gpu - build: - args: - BASE_IMAGE: nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 - cache_from: - - ubuntu:22.04 - - nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04 - - synerbi/sirf:core-gpu - gadgetron: - image: gadgetron/ubuntu_2004_cuda90_cudnn7:latest - runtime: nvidia diff --git a/docker/docker-compose.srv-multi.yml b/docker/docker-compose.srv-multi.yml deleted file mode 100644 index b66bb1ed..00000000 --- a/docker/docker-compose.srv-multi.yml +++ /dev/null @@ -1,13 +0,0 @@ -version: '2.3' -services: - sirf: - image: synerbi/sirf:service-multi - build: - target: multi - cache_from: - - synerbi/sirf:core - - synerbi/sirf:latest - - synerbi/sirf:service - - synerbi/sirf:service-multi - ports: - - "8890-8899:8890-8899" diff --git a/docker/docker-compose.srv.yml b/docker/docker-compose.srv.yml deleted file mode 100644 index 06700ceb..00000000 --- a/docker/docker-compose.srv.yml +++ /dev/null @@ -1,20 +0,0 @@ -version: '2.3' -services: - sirf: - image: synerbi/sirf:service - restart: unless-stopped - environment: - GADGETRON_RELAY_HOST: 0.0.0.0 - build: - target: service - cache_from: - - 
synerbi/sirf:core - - synerbi/sirf:latest - - synerbi/sirf:service - ports: - - "9002:9002" # gadgetron - - "9999:8888" # jupyter - # for scaling, use this instead for random port assignment - # (https://github.com/docker/compose/issues/2260): - #- "9000-9099:9002" - #- "8800-8899:8888" diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml deleted file mode 100644 index ba1f3d15..00000000 --- a/docker/docker-compose.yml +++ /dev/null @@ -1,58 +0,0 @@ -version: '2.3' -services: - sirf: - container_name: sirf # for scaling, comment this out https://github.com/docker/compose/issues/3729 - image: synerbi/sirf:latest - environment: - # mainUser: ${USER:-sirfuser} - mainUser: sirfuser - GROUP_ID: ${GROUP_ID:-1000} - USER_ID: ${USER_ID:-1000} - build: - context: . - target: sirf - args: - SIRF_SB_URL: ${SIRF_SB_URL:-https://github.com/SyneRBI/SIRF-SuperBuild} - SIRF_SB_TAG: ${SIRF_SB_TAG:-master} - NUM_PARALLEL_BUILDS: ${NUM_PARALLEL_BUILDS:-2} - REMOVE_BUILD_FILES: ${REMOVE_BUILD_FILES:-1} - RUN_CTEST: ${RUN_CTEST:-1} - cache_from: - - synerbi/sirf:core - - synerbi/sirf:latest - stdin_open: true - tty: true - # cap_add: - # - NET_ADMIN - #TODO: uncomment for external gadgetron - # network_mode: "host" # host only works on linux: https://docs.docker.com/network/host/ - # depends_on: - # - gadgetron - volumes: - - ./devel:/devel - core: - image: synerbi/sirf:core - build: - context: . - target: core - cache_from: - - ubuntu:22.04 - - synerbi/sirf:core - args: - EXTRA_BUILD_FLAGS: "-DBUILD_CIL=OFF" - BASE_IMAGE: "ubuntu:22.04" - gadgetron: - container_name: gadgetron - image: gadgetron/ubuntu_2004_no_cuda:latest - restart: unless-stopped - environment: - GADGETRON_RELAY_HOST: 0.0.0.0 - ports: - - "9002:9002" # gadgetron - - "9080:9080" # ReST API - - "8002:8002" # CloudBus relay - - "9888:8888" # ? 
https://github.com/gadgetron/gadgetron/wiki/How-to-set-up-Gadgetron-cloud-in-the-LAN-environment - - "18002:18002" # ReST API - - "9001:9001" # supervisord web interface - volumes: - - ./devel/gadgetron_data:/tmp/gadgetron_data diff --git a/docker/sirf-compose b/docker/sirf-compose deleted file mode 100755 index ef52c88f..00000000 --- a/docker/sirf-compose +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -export GROUP_ID=$(id -g) -export USER_ID=$(id -u) -docker-compose -f docker-compose.yml -f docker-compose.nix.yml "$@" diff --git a/docker/sirf-compose-devel b/docker/sirf-compose-devel deleted file mode 100755 index 5621c998..00000000 --- a/docker/sirf-compose-devel +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose -f docker-compose.devel.yml "$@" diff --git a/docker/sirf-compose-devel-server b/docker/sirf-compose-devel-server deleted file mode 100755 index 25ac9fe1..00000000 --- a/docker/sirf-compose-devel-server +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose-devel -f docker-compose.srv.yml "$@" diff --git a/docker/sirf-compose-devel-server-gpu b/docker/sirf-compose-devel-server-gpu deleted file mode 100755 index 3c077bd2..00000000 --- a/docker/sirf-compose-devel-server-gpu +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose-devel-server -f docker-compose.srv-gpu.yml "$@" diff --git a/docker/sirf-compose-devel-server-multi b/docker/sirf-compose-devel-server-multi deleted file mode 100755 index 55a48da0..00000000 --- a/docker/sirf-compose-devel-server-multi +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose-devel-server -f docker-compose.srv-multi.yml "$@" diff --git a/docker/sirf-compose-server b/docker/sirf-compose-server deleted file mode 100755 index fa13f5a3..00000000 --- a/docker/sirf-compose-server +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose -f docker-compose.srv.yml "$@" diff --git a/docker/sirf-compose-server-gpu b/docker/sirf-compose-server-gpu deleted file mode 100755 index 3f6ffc40..00000000 --- a/docker/sirf-compose-server-gpu +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose-server -f docker-compose.srv-gpu.yml "$@" diff --git a/docker/sirf-compose-server-multi b/docker/sirf-compose-server-multi deleted file mode 100755 index 097991e1..00000000 --- a/docker/sirf-compose-server-multi +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash -cd "$(dirname $0)" -./sirf-compose-server -f docker-compose.srv-multi.yml "$@" diff --git a/docker/sirf-compose-server-multi.bat b/docker/sirf-compose-server-multi.bat deleted file mode 100644 index 71d4a2b3..00000000 --- a/docker/sirf-compose-server-multi.bat +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -docker-compose -f docker-compose.yml -f docker-compose.srv.yml -f docker-compose.srv-multi.yml %* diff --git a/docker/sirf-compose-server.bat b/docker/sirf-compose-server.bat deleted file mode 100755 index ac91b901..00000000 --- a/docker/sirf-compose-server.bat +++ /dev/null @@ -1,2 +0,0 @@ -@echo off -docker-compose -f docker-compose.yml -f docker-compose.srv.yml %* From 94b162b65bf333d38baad70d613082f2687d002d Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 21:25:33 +0000 Subject: [PATCH 45/98] remove entrypoint, fix cmd --- Dockerfile | 7 +-- docker/entrypoint.sh | 47 -------------------- docker/service.multi.sh | 33 -------------- docker/service.sh | 69 
------------------------------ docker/start-gadgetron-notebook.sh | 58 +++++++++++++++++++++++++ 5 files changed, 62 insertions(+), 152 deletions(-) delete mode 100755 docker/entrypoint.sh delete mode 100755 docker/service.multi.sh delete mode 100755 docker/service.sh create mode 100755 docker/start-gadgetron-notebook.sh diff --git a/Dockerfile b/Dockerfile index ce012eba..f73e619c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -98,8 +98,8 @@ COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ # install {SIRF-Exercises,CIL-Demos} COPY docker/user_service-ubuntu.sh /opt/scripts/ -RUN INSTALL_DIR=. bash /opt/scripts/user_service-ubuntu.sh \ - && fix-permissions SIRF-Exercises CIL-Demos "${CONDA_DIR}" /home/${NB_USER} +RUN bash /opt/scripts/user_service-ubuntu.sh \ + && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} # install from build COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL/ @@ -126,4 +126,5 @@ ENV DEBIAN_FRONTEND '' ENV DOCKER_STACKS_JUPYTER_CMD="notebook" ENV RESTARTABLE="yes" -# TODO: CMD ["jupyterhub/service.sh"] +COPY --link --chown=${NB_USER} docker/start-gadgetron-notebook.sh /opt/scripts/ +CMD ["/opt/scripts/start-gadgetron-notebook.sh"] diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh deleted file mode 100755 index 34e0f730..00000000 --- a/docker/entrypoint.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -# This script is expected to be run as root -# at container start-up time - -# Add local user -# Either use runtime USER_ID:GROUP_ID or fallback 1000:1000 - -USER_ID=${USER_ID:-1000} -GROUP_ID=${GROUP_ID:-1000} -mainUser=${mainUser:-sirfuser} -OLD_HOME=/home-away -export HOME=/home/$mainUser - -if [ -d $HOME ]; then - cd $HOME - exec gosu $mainUser "$@" -fi - -# We need to copy files from /home-away to new home and create the user - -# copy files -[ -d $OLD_HOME ] && cp -r $OLD_HOME $HOME -cd $HOME - -echo "Creating $mainUser:$USER_ID:$GROUP_ID" -# groupadd -g $GROUP_ID -o -f $mainUser -addgroup --quiet --system --gid "$GROUP_ID" "$mainUser" -# useradd --shell /bin/bash -u $USER_ID -o -c "" -M -d $HOME \ -# -g $mainUser -G sudo $mainUser \ -# -p $(echo somepassword | openssl passwd -1 -stdin) -adduser --quiet --system --shell /bin/bash \ - --no-create-home --home /home/"$mainUser" \ - --gid "$GROUP_ID" --uid "$USER_ID" "$mainUser" -addgroup "$mainUser" users - -echo "$mainUser ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers.d/"$mainUser" - -for i in "$HOME"; do - if [ -d "$i" ]; then - echo "Updating file ownership for $i" - chown -R "$USER_ID":"$GROUP_ID" "$i" - fi -done - -echo "Switching to $mainUser and executing $@" -exec gosu $mainUser "$@" diff --git a/docker/service.multi.sh b/docker/service.multi.sh deleted file mode 100755 index 5206b6d0..00000000 --- a/docker/service.multi.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -## Usage: -# service.multi.sh [ []] -# -# Arguments: -# : [default: 10] -# : [default: 8890] -## - -[ -f .bashrc ] && . 
.bashrc -this=$(dirname "${BASH_SOURCE[0]}") -NUM_INST="${1:-10}" -MIN_PORT="${2:-8890}" -INSTALL_DIR=/devel - -stop_service() -{ - echo "stopping jobs" - for i in $(jobs -p); do kill -n 15 $i; done 2>/dev/null - exit 0 -} - -for JUPYTER_PORT in `seq $MIN_PORT $[MIN_PORT + NUM_INST - 1]`; do - [ -d $INSTALL_DIR/SIRF-Exercises-$JUPYTER_PORT ] \ - || cp -r $INSTALL_DIR/SIRF-Exercises $INSTALL_DIR/SIRF-Exercises-$JUPYTER_PORT - jupyter lab --ip 0.0.0.0 --port $JUPYTER_PORT \ - --no-browser --notebook-dir $INSTALL_DIR/SIRF-Exercises-$JUPYTER_PORT & -done - -trap "stop_service" SIGTERM -trap "stop_service" SIGINT -wait diff --git a/docker/service.sh b/docker/service.sh deleted file mode 100755 index 4b59a6a9..00000000 --- a/docker/service.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env bash - -## Usage: -# service.sh [ []] -# -# Arguments: -# : [default: 0] -# : [default: 8888] -## - -[ -f .bashrc ] && . .bashrc -this=$(dirname "${BASH_SOURCE[0]}") -DEBUG="${1:-0}" -JUPYTER_PORT="${2:-8888}" - -stop_service() -{ - echo "stopping jobs" - for i in $(jobs -p); do kill -n 15 $i; done 2>/dev/null - - if [ "$DEBUG" != 0 ]; then - if [ -f ~/gadgetron.log ]; then - echo "----------- Last 70 lines of ~/gadgetron.log" - tail -n 70 ~/gadgetron.log - fi - fi - - exit 0 -} - -pushd $SIRF_PATH/../.. - -echo "start gadgetron" -[ -f ./INSTALL/bin/gadgetron ] \ - && ./INSTALL/bin/gadgetron >& ~/gadgetron.log& - -echo "make sure the SIRF-Exercises and CIL-Demos are in the expected location (/devel in the container)" -cd /devel -for notebooks in SIRF-Exercises CIL-Demos -do - [ -d ${notebooks} ] || cp -a $SIRF_PATH/../../../${notebooks} . -done - -# link SIRF-Contrib into it -if [ ! -r SIRF-contrib ]; then - echo "Creating link to SIRF-contrib" - ln -s "$SIRF_INSTALL_PATH"/python/sirf/contrib SIRF-contrib -fi - -echo "start jupyter" -if [ ! -f ~/.jupyter/jupyter_server_config.py ]; then - mkdir -p ~/.jupyter/ - echo "c.ServerApp.password = u'sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c'" \ - > ~/.jupyter/jupyter_server_config.py -fi - -# serve a master notebook -jupyter notebook --ip 0.0.0.0 --port $JUPYTER_PORT --no-browser \ - --notebook-dir /devel/ & - -# serve 10 notebooks -[ -f $this/service.multi.sh ] \ - && $this/service.multi.sh 10 $[JUPYTER_PORT + 2] & - -popd - -trap "stop_service" SIGTERM -trap "stop_service" SIGINT -wait diff --git a/docker/start-gadgetron-notebook.sh b/docker/start-gadgetron-notebook.sh new file mode 100755 index 00000000..7515f89e --- /dev/null +++ b/docker/start-gadgetron-notebook.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash +_help(){ +cat </dev/null + + if test "$DEBUG" != 0; then + if test -f ~/gadgetron.log; then + echo "----------- Last 70 lines of ~/gadgetron.log" + tail -n 70 ~/gadgetron.log + fi + fi + + exit 0 +} + +pushd $SIRF_PATH/../.. + +echo "start gadgetron" +[ -f ./INSTALL/bin/gadgetron ] \ + && ./INSTALL/bin/gadgetron >& ~/gadgetron.log& + +echo "make sure the SIRF-Exercises and CIL-Demos are in the expected location (~/work in the container)" +cd ~/work +for notebooks in SIRF-Exercises CIL-Demos; do + test -d ${notebooks} || cp -a $SIRF_PATH/../../../${notebooks} . +done + +echo "link SIRF-Contrib into ~/work" +if test ! 
-r SIRF-contrib; then + echo "Creating link to SIRF-contrib" + ln -s "$SIRF_INSTALL_PATH"/python/sirf/contrib SIRF-contrib +fi + +echo "start jupyter" +test -w /etc/jupyter/jupyter_server_config.py \ + && echo "c.ServerApp.password = u'sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c'" \ + >> /etc/jupyter/jupyter_server_config.py +start-notebook.py "$@" + +popd + +trap "stop_service" EXIT INT TERM +wait From 5a11768347679fbcbb159c85bb116aceb7e3c0e3 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 4 Jan 2024 22:38:36 +0000 Subject: [PATCH 46/98] ninja build, fix CIL gpu/cpu/implicit deps --- Dockerfile | 9 +++++++-- docker-compose.yml | 2 ++ docker/build_docker_stacks.sh | 2 ++ docker/build_essential-ubuntu.sh | 33 ++++++++++++++------------------ docker/docker-compose.gpu.yml | 2 ++ docker/requirements.yml | 24 +++++++---------------- 6 files changed, 34 insertions(+), 38 deletions(-) diff --git a/Dockerfile b/Dockerfile index f73e619c..9f9f89f5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,9 +28,14 @@ COPY docker/build_system-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/build_system-ubuntu.sh # SIRF python deps +ARG BUILD_GPU=0 COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml # https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments -RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ +RUN if test "$BUILD_GPU" != 0; then \ + echo " - tigre" >> /opt/scripts/docker-requirements.yaml; \ + echo " - astra-toolbox" >> /opt/scripts/docker-requirements.yaml; \ + fi \ + && mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} # ccache @@ -63,7 +68,7 @@ ARG EXTRA_BUILD_FLAGS="" # build, install in /opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}, test (if RUN_CTEST) COPY docker/user_sirf-ubuntu.sh /opt/scripts/ -RUN BUILD_FLAGS="\ +RUN BUILD_FLAGS="-G Ninja\ -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}\ -DSTIR_ENABLE_OPENMP=${STIR_ENABLE_OPENMP}\ -DUSE_SYSTEM_ACE=${USE_SYSTEM_ACE}\ diff --git a/docker-compose.yml b/docker-compose.yml index a3489aa1..5116ab77 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -30,6 +30,7 @@ services: args: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" + BUILD_GPU: 0 EXTRA_BUILD_FLAGS: > -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 @@ -42,6 +43,7 @@ services: args: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" + BUILD_GPU: 0 EXTRA_BUILD_FLAGS: > -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 diff --git a/docker/build_docker_stacks.sh b/docker/build_docker_stacks.sh index 5dcd3829..67f72cf1 100755 --- a/docker/build_docker_stacks.sh +++ b/docker/build_docker_stacks.sh @@ -29,10 +29,12 @@ SIRF_BUILD_ARGS=( SIRF_CPU_BUILD_ARGS=( --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-cpu --build-arg Gadgetron_USE_CUDA=OFF + --build-arg BUILD_GPU=0 ) SIRF_GPU_BUILD_ARGS=( --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-gpu --build-arg Gadgetron_USE_CUDA=ON + --build-arg BUILD_GPU=1 ) # build docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_CPU_BUILD_ARGS[@]}" --target build -t synerbi/jupyter:sirf-build-cpu diff --git a/docker/build_essential-ubuntu.sh b/docker/build_essential-ubuntu.sh index 521d2ea1..dbde1646 100755 --- a/docker/build_essential-ubuntu.sh +++ b/docker/build_essential-ubuntu.sh @@ -5,31 +5,26 @@ INSTALL_DIR="${1:-/opt}" apt-get update -qq apt-get install -yq curl apt-get 
install -yq --no-install-recommends \ - bash-completion \ - build-essential \ - git \ - g++ \ - gcc \ - man \ - make \ - ccache \ - sudo \ + bash-completion \ + build-essential \ + git \ + g++ \ + gcc \ + man \ + cmake \ + ninja-build \ + ccache \ + sudo \ unzip apt-get clean pushd $INSTALL_DIR # CMake -if test -n "$(command -v mamba)" -a -n "$(command -v fix-permissions)"; then - mamba install -y cmake - mamba clean --all -f -y - fix-permissions "${CONDA_DIR}" /home/${NB_USER} -else - curl -o cmake.tgz -L https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-x86_64.tar.gz - tar xzf cmake.tgz && rm cmake.tgz - ln -s cmake-*x86_64 cmake || true - export PATH="$PWD/cmake/bin:$PATH" -fi +#curl -o cmake.tgz -L https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-x86_64.tar.gz +#tar xzf cmake.tgz && rm cmake.tgz +#ln -s cmake-*x86_64 cmake || true +#export PATH="$PWD/cmake/bin:$PATH" # ccache mkdir -p bin diff --git a/docker/docker-compose.gpu.yml b/docker/docker-compose.gpu.yml index f003a67a..e7445fd4 100644 --- a/docker/docker-compose.gpu.yml +++ b/docker/docker-compose.gpu.yml @@ -25,6 +25,7 @@ services: target: build args: BASE_CONTAINER: synerbi/jupyter:scipy-gpu + BUILD_GPU: 1 Gadgetron_USE_CUDA: "ON" sirf: deploy: {resources: {reservations: {devices: [{driver: nvidia, count: all, capabilities: [gpu]}]}}} @@ -32,5 +33,6 @@ services: build: args: BASE_CONTAINER: synerbi/jupyter:scipy-gpu + BUILD_GPU: 1 Gadgetron_USE_CUDA: "ON" cache_from: [synerbi/jupyter:sirf-build-gpu] diff --git a/docker/requirements.yml b/docker/requirements.yml index 1733fd7d..7ff25401 100644 --- a/docker/requirements.yml +++ b/docker/requirements.yml @@ -1,37 +1,27 @@ name: base channels: - conda-forge - - intel - - ccpi + - intel # cil + - ccpi # cil - defaults dependencies: - - mamba - setuptools - wheel - pytest - pytest-cov - - numpy # CIL - scipy - docopt - matplotlib - - Cython # CIL - - h5py # CIL - - Pillow # CIL - - wget # CIL - - six # CIL - - olefile # CIL - - opencv # CIL - - numba # CIL - pandas - tifffile - nibabel - deprecation - nose - - ipp # CIL - - ipp-devel # CIL - - ipp-include # CIL - - tigre=2.4 # CIL - pip + - cil + - ccpi-regulariser # cil + # - tigre # cil (GPU) + # - astra-toolbox # cil (GPU) - pip: - - git+https://github.com/data-exchange/dxchange.git # CIL + - git+https://github.com/data-exchange/dxchange.git # cil - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools From 05b277c921a3bf755bb51577e88ca0317b1235d6 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 08:28:01 +0000 Subject: [PATCH 47/98] common build & runtime deps --- Dockerfile | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9f9f89f5..521b6966 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,14 +12,6 @@ RUN bash /opt/scripts/raw-ubuntu.sh ENV LANG en_GB.UTF-8 ENV LANGUAGE en_GB:en -FROM base as build - -COPY docker/update_nvidia_keys.sh /opt/scripts/ -RUN bash /opt/scripts/update_nvidia_keys.sh - -COPY docker/build_essential-ubuntu.sh /opt/scripts/ -RUN bash /opt/scripts/build_essential-ubuntu.sh - COPY docker/build_gadgetron-ubuntu.sh /opt/scripts/ RUN bash /opt/scripts/build_gadgetron-ubuntu.sh @@ -38,6 +30,14 @@ RUN if test "$BUILD_GPU" != 0; then \ && mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} +FROM base as build + +COPY 
docker/update_nvidia_keys.sh /opt/scripts/ +RUN bash /opt/scripts/update_nvidia_keys.sh + +COPY docker/build_essential-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/build_essential-ubuntu.sh + # ccache COPY --link docker/devel/.ccache/ /opt/ccache/ RUN ccache -o cache_dir=/opt/ccache @@ -101,19 +101,15 @@ RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; pri COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ # RUN sed -i s:PYTHON_INSTALL_DIR:${CONDA_DIR}:g /home/${NB_USER}/.bashrc -# install {SIRF-Exercises,CIL-Demos} -COPY docker/user_service-ubuntu.sh /opt/scripts/ -RUN bash /opt/scripts/user_service-ubuntu.sh \ - && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} - # install from build COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/SIRF-SuperBuild/INSTALL/ #COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/sources/SIRF/ /opt/SIRF-SuperBuild/sources/SIRF/ #COPY --from=build --link /opt/conda/ /opt/conda/ -# SIRF python deps -COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml -RUN mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ - && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} + +# install {SIRF-Exercises,CIL-Demos} +COPY docker/user_service-ubuntu.sh /opt/scripts/ +RUN bash /opt/scripts/user_service-ubuntu.sh \ + && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} # Set environment variables for SIRF ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH" From ffc3ba681ccbaa06e0c5f71a514d12ca6a7662b5 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 08:30:43 +0000 Subject: [PATCH 48/98] minor docker improvements --- Dockerfile | 6 +++--- docker-compose.yml | 10 +++++++++- docker/.dockerignore | 1 + docker/{build_docker_stacks.sh => make.sh} | 2 +- 4 files changed, 14 insertions(+), 5 deletions(-) rename docker/{build_docker_stacks.sh => make.sh} (96%) mode change 100755 => 100644 diff --git a/Dockerfile b/Dockerfile index 521b6966..d1a839c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -92,9 +92,9 @@ FROM base as sirf # X11 forwarding RUN apt update -qq && apt install -yq --no-install-recommends \ libx11-xcb1 \ - && apt clean -RUN mkdir -p /usr/share/X11/xkb -RUN [ -e /usr/bin/X ] || ln -s /usr/bin/Xorg /usr/bin/X + && apt clean \ + && mkdir -p /usr/share/X11/xkb \ + && test -e /usr/bin/X || ln -s /usr/bin/Xorg /usr/bin/X RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh diff --git a/docker-compose.yml b/docker-compose.yml index 5116ab77..1d50c649 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,7 @@ version: '3.2' services: foundation: + command: echo nop image: synerbi/jupyter:foundation-cpu build: context: docker/docker-stacks/images/docker-stacks-foundation @@ -8,21 +9,25 @@ services: ROOT_CONTAINER: ubuntu:22.04 PYTHON_VERSION: 3.9 base: + command: echo nop image: synerbi/jupyter:base-cpu build: context: docker/docker-stacks/images/base-notebook args: {BASE_CONTAINER: synerbi/jupyter:foundation-cpu} minimal: + command: echo nop image: synerbi/jupyter:minimal-cpu build: context: docker/docker-stacks/images/minimal-notebook args: {BASE_CONTAINER: synerbi/jupyter:base-cpu} scipy: + command: echo nop image: synerbi/jupyter:scipy-cpu build: context: docker/docker-stacks/images/scipy-notebook args: 
{BASE_CONTAINER: synerbi/jupyter:minimal-cpu} sirf-build: + command: echo nop image: synerbi/jupyter:sirf-build-cpu build: context: . @@ -55,7 +60,10 @@ services: group_add: [users] volumes: [./devel:/home/jovyan/work] restart: unless-stopped - environment: {GADGETRON_RELAY_HOST: 0.0.0.0} + environment: + GADGETRON_RELAY_HOST: 0.0.0.0 + USE_HTTPS: "yes" + PASSWORD: ${SIRFPASS:-virtual} ports: - "9002:9002" # gadgetron - "9999:8888" # jupyter diff --git a/docker/.dockerignore b/docker/.dockerignore index e00cb649..ef3dbb1e 100644 --- a/docker/.dockerignore +++ b/docker/.dockerignore @@ -1,3 +1,4 @@ devel/** !devel/.ccache devel/.ccache/.gitignore +docker-stacks/** diff --git a/docker/build_docker_stacks.sh b/docker/make.sh old mode 100755 new mode 100644 similarity index 96% rename from docker/build_docker_stacks.sh rename to docker/make.sh index 67f72cf1..b095d7b4 --- a/docker/build_docker_stacks.sh +++ b/docker/make.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Release images: synerbi/sirf:jupyter, synerbi/sirf:jupyter-gpu +# Creates images: synerbi/sirf:jupyter, synerbi/sirf:jupyter-gpu # Also creates intermediate (temp) images: synerbi/jupyter set -exuo pipefail From 61a25b8215f96f3fc4ce68cb43d73bac873670ef Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 09:16:05 +0000 Subject: [PATCH 49/98] local build --- Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index d1a839c1..e2e9d89f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -44,8 +44,9 @@ RUN ccache -o cache_dir=/opt/ccache ENV PATH="/usr/lib/ccache:${PATH}" # SIRF-SuperBuild config -ARG SIRF_SB_URL="https://github.com/SyneRBI/SIRF-SuperBuild" -ARG SIRF_SB_TAG="master" +COPY ./.git /opt/SIRF-SuperBuild.git +ARG SIRF_SB_URL="file:///opt/SIRF-SuperBuild.git" +ARG SIRF_SB_TAG="HEAD" ARG REMOVE_BUILD_FILES=0 ARG RUN_CTEST=1 ARG NUM_PARALLEL_BUILDS=" " From f2428613e13a6927ca5ba6f335d12eb00545e236 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 09:27:52 +0000 Subject: [PATCH 50/98] revert SIRF_TAG v3.5.0 <= ignore-acq --- version_config.cmake | 66 ++++++++++++++++++++++---------------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/version_config.cmake b/version_config.cmake index 29c9c47e..471d97bf 100644 --- a/version_config.cmake +++ b/version_config.cmake @@ -36,19 +36,19 @@ set(Boost_MD5 e193e5089060ed6ce5145c8eb05e67e3) ## Armadillo set(Armadillo_URL https://downloads.sourceforge.net/project/arma/armadillo-9.800.2.tar.xz) -set(Armadillo_MD5 c2fa488ea069b9972363ebad16e51ab5 ) +set(Armadillo_MD5 c2fa488ea069b9972363ebad16e51ab5) ## FFTW3 if(WIN32) # Just use precompiled version # TODO would prefer the next zip file but for KT using an ftp URL times-out (firewall?) 
- set(FFTW3_URL https://github.com/SyneRBI/assets/releases/download/latest/fftw-3.3.5-dll64.zip ) - set(FFTW3_MD5 cb3c5ad19a89864f036e7a2dd5be168c ) - #set(FFTW3_URL https://s3.amazonaws.com/install-gadgetron-vs2013/Dependencies/FFTW/zip/FFTW3.zip ) - #set(FFTW3_MD5 a42eac92d9ad06d7c53fb82b09df2b6e ) + set(FFTW3_URL https://github.com/SyneRBI/assets/releases/download/latest/fftw-3.3.5-dll64.zip) + set(FFTW3_MD5 cb3c5ad19a89864f036e7a2dd5be168c) + #set(FFTW3_URL https://s3.amazonaws.com/install-gadgetron-vs2013/Dependencies/FFTW/zip/FFTW3.zip) + #set(FFTW3_MD5 a42eac92d9ad06d7c53fb82b09df2b6e) else(WIN32) - set(FFTW3_URL http://www.fftw.org/fftw-3.3.5.tar.gz ) - set(FFTW3_MD5 6cc08a3b9c7ee06fdd5b9eb02e06f569 ) + set(FFTW3_URL http://www.fftw.org/fftw-3.3.5.tar.gz) + set(FFTW3_MD5 6cc08a3b9c7ee06fdd5b9eb02e06f569) endif(WIN32) set(FFTW3double_URL ${FFTW3_URL}) @@ -79,21 +79,21 @@ endif() ## SWIG set (SWIG_REQUIRED_VERSION 2) if (WIN32) - set(SWIG_URL http://downloads.sourceforge.net/swig/swigwin-4.0.2.zip ) - set(SWIG_MD5 009926b512aee9318546bdd4c7eab6f9 ) + set(SWIG_URL http://downloads.sourceforge.net/swig/swigwin-4.0.2.zip) + set(SWIG_MD5 009926b512aee9318546bdd4c7eab6f9) else(WIN32) - set(SWIG_URL http://downloads.sourceforge.net/swig/swig-4.0.2.tar.gz ) - set(SWIG_MD5 7c3e46cb5af2b469722cafa0d91e127b ) + set(SWIG_URL http://downloads.sourceforge.net/swig/swig-4.0.2.tar.gz) + set(SWIG_MD5 7c3e46cb5af2b469722cafa0d91e127b) endif(WIN32) option(DEVEL_BUILD "Use current versions of major packages" OFF) ## Googletest -set(GTest_URL https://github.com/google/googletest ) +set(GTest_URL https://github.com/google/googletest) set(GTest_TAG release-1.12.1) ## glog -set(DEFAULT_glog_URL https://github.com/google/glog ) +set(DEFAULT_glog_URL https://github.com/google/glog) set(DEFAULT_glog_TAG v0.6.0) ## ITK @@ -101,13 +101,13 @@ set(DEFAULT_ITK_URL https://github.com/InsightSoftwareConsortium/ITK.git) set(DEFAULT_ITK_TAG v5.2.1) ## NIFTYREG -set(DEFAULT_NIFTYREG_URL https://github.com/KCL-BMEIS/niftyreg.git ) +set(DEFAULT_NIFTYREG_URL https://github.com/KCL-BMEIS/niftyreg.git) set(DEFAULT_NIFTYREG_TAG 8ad2f11507ddedb09ed74a9bd97377b70532ee75) set(NIFTYREG_REQUIRED_VERSION 1.5.68) ## ISMRMRD set(ISMRMRD_REQUIRED_VERSION "1.11.1") -set(DEFAULT_ISMRMRD_URL https://github.com/ismrmrd/ismrmrd ) +set(DEFAULT_ISMRMRD_URL https://github.com/ismrmrd/ismrmrd) set(DEFAULT_ISMRMRD_TAG v1.13.7) ## siemens_to_ismrmrd @@ -115,46 +115,46 @@ set(DEFAULT_siemens_to_ismrmrd_URL https://github.com/ismrmrd/siemens_to_ismrmrd set(DEFAULT_siemens_to_ismrmrd_TAG v1.2.11) ## Gadgetron -set(DEFAULT_Gadgetron_URL https://github.com/gadgetron/gadgetron ) +set(DEFAULT_Gadgetron_URL https://github.com/gadgetron/gadgetron) set(DEFAULT_Gadgetron_TAG 6202fb7352a14fb82817b57a97d928c988eb0f4b) ## ASTRA -set(DEFAULT_astra-toolbox_URL https://github.com/astra-toolbox/astra-toolbox ) +set(DEFAULT_astra-toolbox_URL https://github.com/astra-toolbox/astra-toolbox) set(DEFAULT_astra-toolbox_TAG origin/master) ## TomoPhantom -set(DEFAULT_TomoPhantom_URL https://github.com/dkazanc/TomoPhantom ) +set(DEFAULT_TomoPhantom_URL https://github.com/dkazanc/TomoPhantom) set(DEFAULT_TomoPhantom_TAG v2.0.0) ## NiftyPET -set(DEFAULT_NiftyPET_URL https://github.com/pjmark/NIPET ) +set(DEFAULT_NiftyPET_URL https://github.com/pjmark/NIPET) set(DEFAULT_NiftyPET_TAG 70b97da0a4eea9445e34831f7393947a37bc77e7) ## parallelproj -set(DEFAULT_parallelproj_URL https://github.com/gschramm/parallelproj ) +set(DEFAULT_parallelproj_URL 
https://github.com/gschramm/parallelproj) set(DEFAULT_parallelproj_TAG v1.7.3) ## STIR -set(DEFAULT_STIR_URL https://github.com/UCL/STIR ) +set(DEFAULT_STIR_URL https://github.com/UCL/STIR) set(DEFAULT_STIR_TAG rel_5.2.0) ## SIRF -set(DEFAULT_SIRF_URL https://github.com/SyneRBI/SIRF ) +set(DEFAULT_SIRF_URL https://github.com/SyneRBI/SIRF) set(DEFAULT_SIRF_TAG f17f050d86c6e2b249f48a43795986a7aa7a54ea) # pre 3.6.0 ## pet-rd-tools -set(DEFAULT_pet_rd_tools_URL https://github.com/UCL/pet-rd-tools ) +set(DEFAULT_pet_rd_tools_URL https://github.com/UCL/pet-rd-tools) set(DEFAULT_pet_rd_tools_TAG v2.0.1) ## SIRF-Contribs -set(DEFAULT_SIRF-Contribs_URL https://github.com/SyneRBI/SIRF-Contribs ) -set(DEFAULT_SIRF-Contribs_TAG origin/master ) +set(DEFAULT_SIRF-Contribs_URL https://github.com/SyneRBI/SIRF-Contribs) +set(DEFAULT_SIRF-Contribs_TAG origin/master) ## SPM -set(DEFAULT_SPM_URL https://github.com/spm/SPM12.git ) +set(DEFAULT_SPM_URL https://github.com/spm/SPM12.git) set(DEFAULT_SPM_TAG r7771) -set(DEFAULT_JSON_URL https://github.com/nlohmann/json.git ) +set(DEFAULT_JSON_URL https://github.com/nlohmann/json.git) set(DEFAULT_JSON_TAG v3.10.4) # CCPi CIL @@ -205,24 +205,24 @@ if (DEVEL_BUILD) set (DEFAULT_SIRF_TAG origin/master) ## STIR - set(DEFAULT_STIR_URL https://github.com/UCL/STIR ) + set(DEFAULT_STIR_URL https://github.com/UCL/STIR) set(DEFAULT_STIR_TAG origin/master) ## siemens_to_ismrmrd - # set(DEFAULT_siemens_to_ismrmrd_URL https://github.com/ismrmrd/siemens_to_ismrmrd ) + # set(DEFAULT_siemens_to_ismrmrd_URL https://github.com/ismrmrd/siemens_to_ismrmrd) # set(DEFAULT_siemens_to_ismrmrd_TAG b87759e49e53dab4939147eb52b7a0e6465f3d04) ## pet-rd-tools - # set(DEFAULT_pet_rd_tools_URL https://github.com/UCL/pet-rd-tools ) + # set(DEFAULT_pet_rd_tools_URL https://github.com/UCL/pet-rd-tools) # set(DEFAULT_pet_rd_tools_TAG origin/master) # CCPi CIL set(DEFAULT_CIL_URL https://github.com/TomographicImaging/CIL.git) - set(DEFAULT_CIL_TAG origin/master ) + set(DEFAULT_CIL_TAG origin/master) # Gadgetron # set(DEFAULT_Gadgetron_TAG origin/master) - + # ismrmrd # set(DEFAULT_ISMRMRD_TAG origin/master) @@ -233,7 +233,7 @@ endif() # these can be overridden by the user SET(SIRF_URL ${DEFAULT_SIRF_URL} CACHE STRING ON) SET(SIRF_TAG ${DEFAULT_SIRF_TAG} CACHE STRING ON) - + SET(STIR_TAG ${DEFAULT_STIR_TAG} CACHE STRING ON) SET(STIR_URL ${DEFAULT_STIR_URL} CACHE STRING ON) set(STIR_REQUIRED_VERSION "5.1.0") From 0a424da9cc3cce01250ce9403cae3c35c2597fd5 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 11:06:51 +0000 Subject: [PATCH 51/98] misc docker tidy & fixes --- Dockerfile | 28 ++++++++----------- docker-compose.yml | 4 +-- docker/requirements-service.txt | 5 ---- docker/requirements-service.yml | 12 -------- docker/requirements.yml | 2 +- docker/start-gadgetron-notebook.sh | 18 ++++++------ .../{user_service-ubuntu.sh => user_demos.sh} | 21 ++------------ 7 files changed, 24 insertions(+), 66 deletions(-) delete mode 100644 docker/requirements-service.txt delete mode 100644 docker/requirements-service.yml rename docker/{user_service-ubuntu.sh => user_demos.sh} (73%) diff --git a/Dockerfile b/Dockerfile index e2e9d89f..8291d818 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,13 +21,12 @@ RUN bash /opt/scripts/build_system-ubuntu.sh # SIRF python deps ARG BUILD_GPU=0 -COPY docker/requirements.yml /opt/scripts/docker-requirements.yaml +COPY docker/requirements.yml /opt/scripts/ # https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments RUN 
if test "$BUILD_GPU" != 0; then \ - echo " - tigre" >> /opt/scripts/docker-requirements.yaml; \ - echo " - astra-toolbox" >> /opt/scripts/docker-requirements.yaml; \ + sed -ri 's/^(\s*)#\s*(- \S+.*#.*GPU.*)$/\1\2/' /opt/scripts/requirements.yml; \ fi \ - && mamba env update -n base -f /opt/scripts/docker-requirements.yaml \ + && mamba env update -n base -f /opt/scripts/requirements.yml \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} FROM base as build @@ -108,25 +107,20 @@ COPY --from=build --link --chown=${NB_USER} /opt/SIRF-SuperBuild/INSTALL/ /opt/S #COPY --from=build --link /opt/conda/ /opt/conda/ # install {SIRF-Exercises,CIL-Demos} -COPY docker/user_service-ubuntu.sh /opt/scripts/ -RUN bash /opt/scripts/user_service-ubuntu.sh \ +COPY docker/user_demos.sh /opt/scripts/ +RUN bash /opt/scripts/user_demos.sh \ && fix-permissions /opt/SIRF-Exercises /opt/CIL-Demos "${CONDA_DIR}" /home/${NB_USER} -# Set environment variables for SIRF -ENV PATH "/opt/conda/bin:/opt/SIRF-SuperBuild/INSTALL/bin:$PATH" -ENV LD_LIBRARY_PATH "/opt/SIRF-SuperBuild/INSTALL/lib:/opt/SIRF-SuperBuild/INSTALL/lib64:$LD_LIBRARY_PATH" -#/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/opt/conda/lib -ENV PYTHONPATH "/opt/SIRF-SuperBuild/INSTALL/python" -ENV SIRF_INSTALL_PATH "/opt/SIRF-SuperBuild/INSTALL" -ENV SIRF_PATH "/opt/SIRF-SuperBuild/sources/SIRF" -#ENV SIRF_EXERCISES_DATA_PATH "/mnt/materials/SIRF/Fully3D/SIRF/" -# Suppress output from Gadgetron which gives some problems on notebooks (QUIERO) -ENV GADGETRON_LOG_MASK "" - +# docker-stacks notebook USER ${NB_UID} ENV DEBIAN_FRONTEND '' ENV DOCKER_STACKS_JUPYTER_CMD="notebook" ENV RESTARTABLE="yes" +#ENV USE_HTTPS="yes" +# gadgetron +EXPOSE 9002 +ENV GADGETRON_RELAY_HOST="0.0.0.0" +# run gadgetron in the background before start-notebook.py COPY --link --chown=${NB_USER} docker/start-gadgetron-notebook.sh /opt/scripts/ CMD ["/opt/scripts/start-gadgetron-notebook.sh"] diff --git a/docker-compose.yml b/docker-compose.yml index 1d50c649..d78b3e1b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -58,11 +58,9 @@ services: tty: true user: ${USER_ID:-1000} group_add: [users] - volumes: [./devel:/home/jovyan/work] + volumes: [./docker/devel:/home/jovyan/work] restart: unless-stopped environment: - GADGETRON_RELAY_HOST: 0.0.0.0 - USE_HTTPS: "yes" PASSWORD: ${SIRFPASS:-virtual} ports: - "9002:9002" # gadgetron diff --git a/docker/requirements-service.txt b/docker/requirements-service.txt deleted file mode 100644 index ebc6daa2..00000000 --- a/docker/requirements-service.txt +++ /dev/null @@ -1,5 +0,0 @@ -jupyterlab -jupyter -ipywidgets -widgetsnbextension -nodejs diff --git a/docker/requirements-service.yml b/docker/requirements-service.yml deleted file mode 100644 index 204297ad..00000000 --- a/docker/requirements-service.yml +++ /dev/null @@ -1,12 +0,0 @@ -name: base -channels: - - conda-forge - - intel - - ccpi - - defaults -dependencies: - - jupyterlab - - jupyter - - ipywidgets<8 # vis. 
https://github.com/TomographicImaging/CIL/pull/1599 - - widgetsnbextension - - nodejs diff --git a/docker/requirements.yml b/docker/requirements.yml index 7ff25401..123f41ce 100644 --- a/docker/requirements.yml +++ b/docker/requirements.yml @@ -18,7 +18,7 @@ dependencies: - deprecation - nose - pip - - cil + - cil # cil - ccpi-regulariser # cil # - tigre # cil (GPU) # - astra-toolbox # cil (GPU) diff --git a/docker/start-gadgetron-notebook.sh b/docker/start-gadgetron-notebook.sh index 7515f89e..4cd657d7 100755 --- a/docker/start-gadgetron-notebook.sh +++ b/docker/start-gadgetron-notebook.sh @@ -28,14 +28,13 @@ stop_service(){ exit 0 } -pushd $SIRF_PATH/../.. - echo "start gadgetron" -[ -f ./INSTALL/bin/gadgetron ] \ - && ./INSTALL/bin/gadgetron >& ~/gadgetron.log& +pushd $SIRF_PATH/../.. +test -x ./INSTALL/bin/gadgetron && ./INSTALL/bin/gadgetron >& ~/gadgetron.log& +popd echo "make sure the SIRF-Exercises and CIL-Demos are in the expected location (~/work in the container)" -cd ~/work +pushd ~/work for notebooks in SIRF-Exercises CIL-Demos; do test -d ${notebooks} || cp -a $SIRF_PATH/../../../${notebooks} . done @@ -45,14 +44,13 @@ if test ! -r SIRF-contrib; then echo "Creating link to SIRF-contrib" ln -s "$SIRF_INSTALL_PATH"/python/sirf/contrib SIRF-contrib fi +popd echo "start jupyter" -test -w /etc/jupyter/jupyter_server_config.py \ - && echo "c.ServerApp.password = u'sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c'" \ - >> /etc/jupyter/jupyter_server_config.py +#test -w /etc/jupyter/jupyter_server_config.py \ +# && echo "c.ServerApp.password = u'sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c'" \ +# >> /etc/jupyter/jupyter_server_config.py start-notebook.py "$@" -popd - trap "stop_service" EXIT INT TERM wait diff --git a/docker/user_service-ubuntu.sh b/docker/user_demos.sh similarity index 73% rename from docker/user_service-ubuntu.sh rename to docker/user_demos.sh index 3f5f2952..1fdf3759 100755 --- a/docker/user_service-ubuntu.sh +++ b/docker/user_demos.sh @@ -2,7 +2,7 @@ [ -f .bashrc ] && . .bashrc set -ev INSTALL_DIR="${1:-/opt}" -if [ -n "${PYTHON_EXECUTABLE}" ]; then +if [ -n "${PYTHON_EXECUTABLE}" ]; then PYTHON=$PYTHON_EXECUTABLE else PYTHON='miniconda' @@ -11,21 +11,6 @@ fi # SIRF-Exercises git clone https://github.com/SyneRBI/SIRF-Exercises --recursive -b master $INSTALL_DIR/SIRF-Exercises -if [ "$PYTHON" = "miniconda" ]; then - if [ -f requirements-service.yml ]; then - # installs the required packages in the environment with requirements-service.yml. - # Notice that these requirements TEMPORARILY contains also the packages for SIRF-Exercises - mamba env update --file requirements-service.yml - fi - mamba clean -y --all - -# Python (runtime) -else - if [ -f requirements-service.txt ]; then - ${PYTHON} -m pip install -U -r requirements-service.txt - fi -fi - #install SIRF-Exercises requirements cd $INSTALL_DIR/SIRF-Exercises if [ "$PYTHON" = "miniconda" ]; then @@ -36,7 +21,7 @@ if [ "$PYTHON" = "miniconda" ]; then cat requirements.txt # installing the requirements.txt with mamba requires some cleaning of the requirements.txt # Also the requirements.txt contains some packages that are not found on conda-forge, i.e. brainweb - # Therefore, these need to be installed by pip. + # Therefore, these need to be installed by pip. 
# This is handled by the install-sirf-exercises-dep.py script python ~/install-sirf-exercises-dep.py requirements.txt else @@ -58,7 +43,7 @@ git config --global filter.nbstripout.extrakeys ' metadata.language_info.pygments_lexer metadata.language_info.version' #install nbstripout in the SIRF-Exercises repo -cd $INSTALL_DIR/SIRF-Exercises +cd $INSTALL_DIR/SIRF-Exercises nbstripout --install # jupyter labextension install @jupyter-widgets/jupyterlab-manager From 39d2a6450fcd9b5531bdae42375ec27e78b8e78e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 11:15:37 +0000 Subject: [PATCH 52/98] strict priority --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index 8291d818..4322c9d6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,9 +23,12 @@ RUN bash /opt/scripts/build_system-ubuntu.sh ARG BUILD_GPU=0 COPY docker/requirements.yml /opt/scripts/ # https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments +# https://github.com/TomographicImaging/CIL/blob/master/Dockerfile RUN if test "$BUILD_GPU" != 0; then \ sed -ri 's/^(\s*)#\s*(- \S+.*#.*GPU.*)$/\1\2/' /opt/scripts/requirements.yml; \ fi \ + && conda config --env --set channel_priority strict \ + && for ch in defaults ccpi intel conda-forge; do conda config --env --add channels $ch; done \ && mamba env update -n base -f /opt/scripts/requirements.yml \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} From 758edf78c157d978f3b44473f6acbf45367a21df Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 12:39:15 +0000 Subject: [PATCH 53/98] fix password & mounts --- docker-compose.yml | 5 ++--- docker/start-gadgetron-notebook.sh | 14 +++++++------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d78b3e1b..fce4eb8c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,7 +27,8 @@ services: context: docker/docker-stacks/images/scipy-notebook args: {BASE_CONTAINER: synerbi/jupyter:minimal-cpu} sirf-build: - command: echo nop + volumes: [./docker/devel/.ccache:/ccache] + command: "bash -c 'cp -r /opt/ccache/* /ccache/'" image: synerbi/jupyter:sirf-build-cpu build: context: . @@ -60,8 +61,6 @@ services: group_add: [users] volumes: [./docker/devel:/home/jovyan/work] restart: unless-stopped - environment: - PASSWORD: ${SIRFPASS:-virtual} ports: - "9002:9002" # gadgetron - "9999:8888" # jupyter diff --git a/docker/start-gadgetron-notebook.sh b/docker/start-gadgetron-notebook.sh index 4cd657d7..0db0d3ae 100755 --- a/docker/start-gadgetron-notebook.sh +++ b/docker/start-gadgetron-notebook.sh @@ -28,29 +28,29 @@ stop_service(){ exit 0 } +SB_PATH=$(dirname "$(dirname "$SIRF_PATH")") echo "start gadgetron" -pushd $SIRF_PATH/../.. +pushd "${SB_PATH}" test -x ./INSTALL/bin/gadgetron && ./INSTALL/bin/gadgetron >& ~/gadgetron.log& popd echo "make sure the SIRF-Exercises and CIL-Demos are in the expected location (~/work in the container)" pushd ~/work for notebooks in SIRF-Exercises CIL-Demos; do - test -d ${notebooks} || cp -a $SIRF_PATH/../../../${notebooks} . + test -d ${notebooks} || cp -a "${SB_PATH}/../${notebooks}" . done echo "link SIRF-Contrib into ~/work" if test ! 
-r SIRF-contrib; then echo "Creating link to SIRF-contrib" - ln -s "$SIRF_INSTALL_PATH"/python/sirf/contrib SIRF-contrib + ln -s "${SIRF_INSTALL_PATH}/python/sirf/contrib" SIRF-contrib fi popd echo "start jupyter" -#test -w /etc/jupyter/jupyter_server_config.py \ -# && echo "c.ServerApp.password = u'sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c'" \ -# >> /etc/jupyter/jupyter_server_config.py -start-notebook.py "$@" +start-notebook.py \ + --PasswordIdentityProvider.hashed_password='sha1:cbf03843d2bb:8729d2fbec60cacf6485758752789cd9989e756c' \ + "$@" trap "stop_service" EXIT INT TERM wait From e08091e0d543046f58b80e1fa00d4bfa2a7cac0a Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 17:03:50 +0000 Subject: [PATCH 54/98] some fixes and docs --- docker/README.md | 19 ++++++++++++++++--- docker/devel/requirements-test.txt | 2 -- docker/devel/shared.txt | 4 ++-- docker/devel/test.sh | 5 +---- docker/start-gadgetron-notebook.sh | 5 +++-- 5 files changed, 22 insertions(+), 13 deletions(-) delete mode 100644 docker/devel/requirements-test.txt diff --git a/docker/README.md b/docker/README.md index a6710290..1ad1e39f 100644 --- a/docker/README.md +++ b/docker/README.md @@ -4,21 +4,34 @@ The image contains SIRF & all dependencies required by JupyterHub. ## Usage +### Docker + ```sh # CPU version -docker run --rm -it -p 8888:8888 synerbi/sirf:jupyter +docker run --rm -it -p 9999:8888 synerbi/sirf:jupyter # GPU version -docker run --rm -it -p 8888:8888 --gpus all synerbi/sirf:jupyter-gpu +docker run --rm -it -p 9999:8888 --gpus all synerbi/sirf:jupyter-gpu ``` To make the container user same as host user (useful when sharing folders), use `--user` and `--group-add`: ```sh -docker run --rm -it -p 8888:8888 --user $(id -u) --group-add users -v ./docker/devel:/home/jovyan/work synerbi/sirf:jupyter +docker run --rm -it -p 9999:8888 --user $(id -u) --group-add users -v ./docker/devel:/home/jovyan/work synerbi/sirf:jupyter ``` +The Jupyter notebook should be accessible at . + More config: https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#user-related-configurations +### Docker Compose + +```sh +# CPU version +docker compose up sirf +# GPU version +docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml up sirf +``` + ## Build the image We use an NVIDIA CUDA Ubuntu 22.04 base image (for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build https://github.com/jupyter/docker-stacks `datascience-notebook` on top, and then install SIRF & its depdendencies. diff --git a/docker/devel/requirements-test.txt b/docker/devel/requirements-test.txt deleted file mode 100644 index a6c88f59..00000000 --- a/docker/devel/requirements-test.txt +++ /dev/null @@ -1,2 +0,0 @@ -nose -coverage diff --git a/docker/devel/shared.txt b/docker/devel/shared.txt index 5e17e9a8..5b1c151e 100644 --- a/docker/devel/shared.txt +++ b/docker/devel/shared.txt @@ -4,9 +4,9 @@ Host: `SIRF-SuperBuild/docker/devel` -Docker container (synerbi/sirf:latest): +Docker container (synerbi/sirf:jupyter): -`/devel` +`/home/jovyan/work` This means you can use editors & tools on the host machine with files in the container and vice versa. diff --git a/docker/devel/test.sh b/docker/devel/test.sh index 3db21af9..034687f8 100755 --- a/docker/devel/test.sh +++ b/docker/devel/test.sh @@ -8,11 +8,8 @@ ## DEBUG="${1:-0}" -this=$(dirname "${BASH_SOURCE[0]}") -pip install -U -r "$this"/requirements-test.txt - -pushd $SIRF_PATH/../.. 
+pushd $(dirname "$(dirname "$SIRF_PATH")") # start gadgetron [ -f ./INSTALL/bin/gadgetron ] && ./INSTALL/bin/gadgetron >& gadgetron.log& diff --git a/docker/start-gadgetron-notebook.sh b/docker/start-gadgetron-notebook.sh index 0db0d3ae..3486d158 100755 --- a/docker/start-gadgetron-notebook.sh +++ b/docker/start-gadgetron-notebook.sh @@ -36,9 +36,10 @@ popd echo "make sure the SIRF-Exercises and CIL-Demos are in the expected location (~/work in the container)" pushd ~/work -for notebooks in SIRF-Exercises CIL-Demos; do - test -d ${notebooks} || cp -a "${SB_PATH}/../${notebooks}" . +for repo in SIRF-Exercises CIL-Demos; do + test -d ${repo} || cp -dR "${SB_PATH}/../${repo}" . done +./SIRF-Exercises/scripts/download_data.sh echo "link SIRF-Contrib into ~/work" if test ! -r SIRF-contrib; then From b34f1bdf7de7caaa433b08750f238d78deb96241 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 20:05:20 +0000 Subject: [PATCH 55/98] fix UID, slight tidy --- docker-compose.yml | 4 +++- docker/user_demos.sh | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index fce4eb8c..8c600366 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -57,7 +57,9 @@ services: cache_from: [synerbi/jupyter:sirf-build-cpu] stdin_open: true tty: true - user: ${USER_ID:-1000} + # Probably requires `export USER_ID UID` to work. + # Alternatively replace with the output of `id -u`. + user: ${USER_ID:-${UID:-${USER:-1000}}} group_add: [users] volumes: [./docker/devel:/home/jovyan/work] restart: unless-stopped diff --git a/docker/user_demos.sh b/docker/user_demos.sh index 1fdf3759..d283c53d 100755 --- a/docker/user_demos.sh +++ b/docker/user_demos.sh @@ -9,7 +9,7 @@ else fi # SIRF-Exercises -git clone https://github.com/SyneRBI/SIRF-Exercises --recursive -b master $INSTALL_DIR/SIRF-Exercises +git clone https://github.com/SyneRBI/SIRF-Exercises --recursive $INSTALL_DIR/SIRF-Exercises #install SIRF-Exercises requirements cd $INSTALL_DIR/SIRF-Exercises @@ -48,6 +48,6 @@ nbstripout --install # jupyter labextension install @jupyter-widgets/jupyterlab-manager # CIL-Demos -git clone https://github.com/TomographicImaging/CIL-Demos.git --recursive -b main $INSTALL_DIR/CIL-Demos +git clone https://github.com/TomographicImaging/CIL-Demos --recursive $INSTALL_DIR/CIL-Demos cd $INSTALL_DIR/CIL-Demos nbstripout --install From d49c5b79867729073e2594b66b586cad9ee53eee Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 20:33:21 +0000 Subject: [PATCH 56/98] fix widgets --- Dockerfile | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 4322c9d6..020f4b7b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,12 +24,16 @@ ARG BUILD_GPU=0 COPY docker/requirements.yml /opt/scripts/ # https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments # https://github.com/TomographicImaging/CIL/blob/master/Dockerfile +# Also fix labextension @jupyter-widgets/jupyterlab-manager +# by updating jupyterlab_widgets ipywidgets ipympl RUN if test "$BUILD_GPU" != 0; then \ sed -ri 's/^(\s*)#\s*(- \S+.*#.*GPU.*)$/\1\2/' /opt/scripts/requirements.yml; \ fi \ && conda config --env --set channel_priority strict \ && for ch in defaults ccpi intel conda-forge; do conda config --env --add channels $ch; done \ && mamba env update -n base -f /opt/scripts/requirements.yml \ + && mamba uninstall -y jupyterlab_widgets \ + && mamba install -y jupyterlab_widgets ipywidgets ipympl \ && mamba clean 
--all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} FROM base as build @@ -117,7 +121,7 @@ RUN bash /opt/scripts/user_demos.sh \ # docker-stacks notebook USER ${NB_UID} ENV DEBIAN_FRONTEND '' -ENV DOCKER_STACKS_JUPYTER_CMD="notebook" +#ENV DOCKER_STACKS_JUPYTER_CMD="notebook" ENV RESTARTABLE="yes" #ENV USE_HTTPS="yes" # gadgetron From 2a418db3af1df8c19ae973c1f80d972190c6041c Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 20:39:15 +0000 Subject: [PATCH 57/98] simplify build script --- docker/docker-compose-build.sh | 24 ++++++++++++++++ docker/make.sh | 51 ---------------------------------- 2 files changed, 24 insertions(+), 51 deletions(-) create mode 100755 docker/docker-compose-build.sh delete mode 100644 docker/make.sh diff --git a/docker/docker-compose-build.sh b/docker/docker-compose-build.sh new file mode 100755 index 00000000..4ad9119d --- /dev/null +++ b/docker/docker-compose-build.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +# Creates images: synerbi/sirf:jupyter, synerbi/sirf:jupyter-gpu +# Also creates intermediate (temp) images: synerbi/jupyter +set -exuo pipefail + +DCC_CPU=docker compose +DCC_GPU=docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml + +pushd "$(dirname "${BASH_SOURCE[0]}")" +git submodule update --init --recursive + +# build ccache +$DCC_CPU "$@" build sirf-build +$DCC_GPU "$@" build sirf-build +# build +$DCC_CPU "$@" build +$DCC_GPU "$@" build +# copy ccache +sudo rm -r ./docker/devel/.ccache/* +export USER_ID UID +$DCC_CPU "$@" up sirf-build +$DCC_GPU "$@" up sirf-build + +popd diff --git a/docker/make.sh b/docker/make.sh deleted file mode 100644 index b095d7b4..00000000 --- a/docker/make.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash -# Creates images: synerbi/sirf:jupyter, synerbi/sirf:jupyter-gpu -# Also creates intermediate (temp) images: synerbi/jupyter -set -exuo pipefail - -pushd "$(dirname "${BASH_SOURCE[0]}")" -git submodule update --init --recursive - -cd docker-stacks/images/docker-stacks-foundation -docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=ubuntu:22.04 -t synerbi/jupyter:foundation-cpu . -docker build --build-arg PYTHON_VERSION=3.9 --build-arg ROOT_CONTAINER=nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 -t synerbi/jupyter:foundation-gpu . - -for ver in cpu gpu; do - cd ../base-notebook - docker build --build-arg BASE_CONTAINER=synerbi/jupyter:foundation-${ver} -t synerbi/jupyter:base-${ver} . - - cd ../minimal-notebook - docker build --build-arg BASE_CONTAINER=synerbi/jupyter:base-${ver} -t synerbi/jupyter:minimal-${ver} . - - cd ../scipy-notebook - docker build --build-arg BASE_CONTAINER=synerbi/jupyter:minimal-${ver} -t synerbi/jupyter:scipy-${ver} . -done - -cd ../../../.. -SIRF_BUILD_ARGS=( - build . 
- --build-arg EXTRA_BUILD_FLAGS="-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11" -) -SIRF_CPU_BUILD_ARGS=( - --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-cpu - --build-arg Gadgetron_USE_CUDA=OFF - --build-arg BUILD_GPU=0 -) -SIRF_GPU_BUILD_ARGS=( - --build-arg BASE_CONTAINER=synerbi/jupyter:scipy-gpu - --build-arg Gadgetron_USE_CUDA=ON - --build-arg BUILD_GPU=1 -) -# build -docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_CPU_BUILD_ARGS[@]}" --target build -t synerbi/jupyter:sirf-build-cpu -docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_GPU_BUILD_ARGS[@]}" --target build -t synerbi/jupyter:sirf-build-gpu -# install -docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_CPU_BUILD_ARGS[@]}" -t synerbi/sirf:jupyter -docker "${SIRF_BUILD_ARGS[@]}" "${SIRF_GPU_BUILD_ARGS[@]}" -t synerbi/sirf:jupyter-gpu -# ccache -sudo rm -r ./docker/devel/.ccache/* -for ver in cpu gpu; do - docker run --rm -it -v ./docker/devel/.ccache:/ccache synerbi/jupyter:sirf-build-${ver} bash -c 'cp -r /opt/ccache/* /ccache/' -done - -popd From 3cd942b345816b96ecdbd30b827b0f1ec101092c Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 5 Jan 2024 22:29:19 +0000 Subject: [PATCH 58/98] compose.sh script options --- docker/compose.sh | 59 ++++++++++++++++++++++++++++++++++ docker/docker-compose-build.sh | 24 -------------- 2 files changed, 59 insertions(+), 24 deletions(-) create mode 100755 docker/compose.sh delete mode 100755 docker/docker-compose-build.sh diff --git a/docker/compose.sh b/docker/compose.sh new file mode 100755 index 00000000..0089a390 --- /dev/null +++ b/docker/compose.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +set -euo pipefail + +build_cpu=1 +build_gpu=1 +run_cpu=0 +run_gpu=0 +while getopts :hCGcg option; do + case "${option}" in + h) cat < Date: Mon, 8 Jan 2024 10:18:47 +0000 Subject: [PATCH 59/98] demos: remove unneeded GPU deps --- docker/user_demos.sh | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docker/user_demos.sh b/docker/user_demos.sh index d283c53d..00e822ea 100755 --- a/docker/user_demos.sh +++ b/docker/user_demos.sh @@ -15,7 +15,14 @@ git clone https://github.com/SyneRBI/SIRF-Exercises --recursive $INSTALL_DIR/SIR cd $INSTALL_DIR/SIRF-Exercises if [ "$PYTHON" = "miniconda" ]; then if [ -f environment.yml ]; then - mamba env update --file environment.yml + if test "${BUILD_GPU:-0}" != 0; then + # uncomment GPU deps + sed -r 's/^(\s*)#\s*(- \S+.*#.*GPU.*)$/\1\2/' environment.yml > environment-sirf.yml + else + # delete GPU deps + sed -r -e '/^\s*- (astra-toolbox|tigre).*/d' -e '/^\s*- \S+.*#.*GPU/d' environment.yml > environment-sirf.yml + fi + mamba env update --file environment-sirf.yml else if [ -f requirements.txt ]; then cat requirements.txt From 560558b128fc9f701ce0d4d7ee43a8fca097fd7e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 11:03:39 +0000 Subject: [PATCH 60/98] fix GPU build --- docker/compose.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/compose.sh b/docker/compose.sh index 0089a390..b01dc183 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -35,7 +35,7 @@ echo "build args: $@" DCC_CPU="docker compose" DCC_GPU="docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml" -pushd "$(dirname "${BASH_SOURCE[0]}")" +pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive echo build ccache From d43d5181b7146e82ae4981c06edf695a901ff19b Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 
2024 12:11:37 +0000 Subject: [PATCH 61/98] fix ipywidgets again --- Dockerfile | 4 ---- docker/requirements.yml | 7 +++++++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 020f4b7b..bd7ded0a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,16 +24,12 @@ ARG BUILD_GPU=0 COPY docker/requirements.yml /opt/scripts/ # https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#conda-environments # https://github.com/TomographicImaging/CIL/blob/master/Dockerfile -# Also fix labextension @jupyter-widgets/jupyterlab-manager -# by updating jupyterlab_widgets ipywidgets ipympl RUN if test "$BUILD_GPU" != 0; then \ sed -ri 's/^(\s*)#\s*(- \S+.*#.*GPU.*)$/\1\2/' /opt/scripts/requirements.yml; \ fi \ && conda config --env --set channel_priority strict \ && for ch in defaults ccpi intel conda-forge; do conda config --env --add channels $ch; done \ && mamba env update -n base -f /opt/scripts/requirements.yml \ - && mamba uninstall -y jupyterlab_widgets \ - && mamba install -y jupyterlab_widgets ipywidgets ipympl \ && mamba clean --all -f -y && fix-permissions "${CONDA_DIR}" /home/${NB_USER} FROM base as build diff --git a/docker/requirements.yml b/docker/requirements.yml index 123f41ce..838b1f86 100644 --- a/docker/requirements.yml +++ b/docker/requirements.yml @@ -25,3 +25,10 @@ dependencies: - pip: - git+https://github.com/data-exchange/dxchange.git # cil - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools + # TODO: labextension @jupyter-widgets/jupyterlab-manager (jupyterlab_widgets ipywidgets ipympl) + # is broken because `conda-forge::cil` downgrades `ipywidgets<8` -> breaks `%matplotlib widgets`. + # - temp fix: install upgraded versions via pip (not conda) + # - proper fix: remove after https://github.com/TomographicImaging/CIL/issues/1600 + - jupyterlab_widgets + - ipywidgets>=8 + - ipympl From 01ed0a0f4b33dfd3e0c3777414cfc693335a19db Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 18:44:17 +0000 Subject: [PATCH 62/98] cleanup started containers --- docker/compose.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/compose.sh b/docker/compose.sh index b01dc183..00fcb54c 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -49,11 +49,11 @@ test $build_gpu = 1 && $DCC_GPU build "$@" echo copy ccache test $build_cpu$build_gpu = 11 && sudo rm -r ./docker/devel/.ccache/* export USER_ID UID -test $build_cpu = 1 && $DCC_CPU up sirf-build -test $build_gpu = 1 && $DCC_GPU up sirf-build +test $build_cpu = 1 && $DCC_CPU up sirf-build && $DCC_CPU down sirf-build +test $build_gpu = 1 && $DCC_GPU up sirf-build && $DCC_GPU down sirf-build echo start -test $run_cpu = 1 && $DCC_CPU up -d sirf -test $run_gpu = 1 && $DCC_GPU up -d sirf +test $run_cpu = 1 && $DCC_CPU up -d sirf && $DCC_CPU down sirf +test $run_gpu = 1 && $DCC_GPU up -d sirf && $DCC_GPU down sirf popd From ff3f04c0a0e6c9b62cbe766fccc451d2db9bc396 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 19:19:22 +0000 Subject: [PATCH 63/98] CI: docker build CPU & GPU --- .github/workflows/docker.yml | 45 +++++ .github/workflows/docker_build.yml | 303 ----------------------------- 2 files changed, 45 insertions(+), 303 deletions(-) create mode 100644 .github/workflows/docker.yml delete mode 100644 .github/workflows/docker_build.yml diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 00000000..cce454f0 --- /dev/null +++ 
b/.github/workflows/docker.yml @@ -0,0 +1,45 @@ +name: docker +on: + pull_request: + branches: [master] + paths-ignore: + - '**.md' + - 'VirtualBox/**' + - '.github/workflows/c-cpp.yml' + - 'CITATION.cff' + - '.mailmap' + push: + branches: [master] + paths-ignore: + - '**.md' + - 'VirtualBox/**' + - '.github/workflows/c-cpp.yml' + - 'CITATION.cff' + - '.mailmap' +defaults: + run: + shell: bash -l {0} +jobs: + build-test-push: + runs-on: ubuntu-latest + strategy: + matrix: + type: [gpu, cpu] + steps: + - uses: actions/checkout@v3 + - run: sudo .github/workflows/install_docker.sh + - run: sudo .github/workflows/GHA_increase_disk_space.sh + - name: cache + uses: actions/cache@v3 + with: + key: ccache-${{ matrix.type }} + path: docker/devel/.ccache + - id: build + name: build + run: | + ./docker/compose.sh ${{ matrix.type == 'gpu' && '-C' || '-G' }} + echo "image=synerbi/sirf:jupyter${{ matrix.type == 'gpu' && '-gpu' || '' }}" >> $GITHUB_OUTPUT + - name: test CIL + run: > + docker run --rm -v ./.github/workflows:/gh --user $(id -u) --group-add users + ${{ steps.build.outputs.image }} /gh/test_cil.sh diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml deleted file mode 100644 index d43031f7..00000000 --- a/.github/workflows/docker_build.yml +++ /dev/null @@ -1,303 +0,0 @@ -name: Build all of the sirf docker images - -on: - pull_request: - branches: [ master ] - paths-ignore: - - '**.md' - - 'VirtualBox/**' - - '.github/workflows/c-cpp.yml' - - 'CITATION.cff' - - '.mailmap' - - '.travis.yml' - - 'NOTICE.txt' - - 'LICENSE.txt' - - push: - branches: [ master ] - paths-ignore: - - '**.md' - - 'VirtualBox/**' - - '.github/workflows/c-cpp.yml' - - 'CITATION.cff' - - '.mailmap' - - '.travis.yml' - - 'NOTICE.txt' - - 'LICENSE.txt' - -jobs: - build-core: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Install docker and pre-reqs and clean some disk-space - shell: bash -l {0} - run: | - sudo .github/workflows/install_docker.sh - sudo .github/workflows/GHA_increase_disk_space.sh - - - name: Build docker core image - # Builds docker image from Docker file. - shell: bash -l {0} - run: | - docker-compose -f docker/docker-compose.yml build --pull core - - - name : docker save core - # https://docs.docker.com/engine/reference/commandline/save/ - shell: bash -l {0} - run: | - #docker save -o core.tar synerbi/sirf:core - docker image ls - docker save synerbi/sirf:core | gzip > core.tar.gz - ls -l - - - name: Upload artifact of the core image. - uses: actions/upload-artifact@v3.1.2 - with: - name: sirf-core - path: core.tar.gz - - - build-core-gpu: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Install docker and pre-reqs and clean some disk-space - shell: bash -l {0} - run: | - sudo .github/workflows/install_docker.sh - sudo .github/workflows/GHA_increase_disk_space.sh - - - name: Build docker core-gpu image - # Builds docker image from Docker file. - shell: bash -l {0} - run: | - docker-compose -f docker/docker-compose.yml -f docker/docker-compose.srv-gpu.yml build --pull core - - - name : docker save core-gpu - # https://docs.docker.com/engine/reference/commandline/save/ - shell: bash -l {0} - run: | - docker image ls - docker save synerbi/sirf:core-gpu | gzip > core-gpu.tar.gz - ls -l - - - name: Upload artifact of the core image. 
- uses: actions/upload-artifact@v3.1.2 - with: - name: sirf-core-gpu - path: core-gpu.tar.gz - - build-devel-and-latest: - runs-on: ubuntu-latest - needs: build-core - strategy: - matrix: - tag: ['latest', 'devel'] - steps: - - uses: actions/checkout@v3 - - name: Install docker and pre-reqs and clean some disk-space - shell: bash -l {0} - run: | - sudo .github/workflows/install_docker.sh - sudo .github/workflows/GHA_increase_disk_space.sh - - - name: Download artifact of core image - uses: actions/download-artifact@v3 - with: - name: sirf-core - path: /tmp - - - name: load core - shell: bash -l {0} - run: | - docker load --input /tmp/core.tar.gz - - - name: Build docker image - # Builds docker image from Docker file. - shell: bash -l {0} - run: | - set -ex; - compose_command="docker-compose -f docker/docker-compose.yml" - if [[ ${{ matrix.tag }} == 'devel' ]]; then compose_command="$compose_command -f docker/docker-compose.devel.yml"; fi - # $compose_command pull core - $compose_command build --pull sirf - echo "compose_command=$compose_command" >> $GITHUB_ENV - - - name : docker save image - # https://docs.docker.com/engine/reference/commandline/save/ - shell: bash -l {0} - run: | - echo "info on layers and their sizes from docker history:" - sirf_image=`docker image ls | grep ${{ matrix.tag }} | awk '/sirf/ { print $3}'` - docker history "$sirf_image" - docker save synerbi/sirf:${{ matrix.tag }} | gzip > ${{ matrix.tag }}.tar.gz - - - name: Upload artifact of image. - uses: actions/upload-artifact@v3.1.2 - with: - name: sirf-${{ matrix.tag }} - path: ${{ matrix.tag }}.tar.gz - - build-service-gpu: - runs-on: ubuntu-latest - needs: build-core-gpu - steps: - - uses: actions/checkout@v3 - - name: Install docker and pre-reqs and clean some disk-space - shell: bash -l {0} - run: | - sudo .github/workflows/install_docker.sh - sudo .github/workflows/GHA_increase_disk_space.sh - - - name: Download artifact of core image - uses: actions/download-artifact@v3 - with: - name: sirf-core-gpu - path: /tmp - - - name: load core-gpu - shell: bash -l {0} - run: | - docker load --input /tmp/core-gpu.tar.gz - - - name: Build docker service-gpu image - # Builds docker image from Docker file. - shell: bash -l {0} - run: | - set -ex; - # docker-compose -f docker/docker-compose.yml -f docker/docker-compose.srv-gpu.yml pull core - docker-compose -f docker/docker-compose.yml -f docker/docker-compose.srv-gpu.yml build --pull sirf - - # Temporarily disabling the save and upload of the image as it is too big. - # see https://github.com/SyneRBI/SIRF-SuperBuild/issues/770 - - # - name : docker save image - # # https://docs.docker.com/engine/reference/commandline/save/ - # shell: bash -l {0} - # run: | - # # first we must delete image that is no longer needed, to save space: - # docker rmi synerbi/sirf:core-gpu - # yes | docker system prune - # docker image ls - # docker save synerbi/sirf:service-gpu | gzip > service-gpu.tar.gz - - # - name: Upload artifact of image. 
- # uses: actions/upload-artifact@v3.1.2 - # with: - # name: sirf-service-gpu - # path: service-gpu.tar.gz - - - - build-service-images: - runs-on: ubuntu-latest - needs: build-devel-and-latest - strategy: - matrix: - tag: ['service', 'devel-service'] - steps: - - uses: actions/checkout@v3 - - name: Install docker and pre-reqs and clean some disk-space - shell: bash -l {0} - run: | - sudo .github/workflows/install_docker.sh - sudo .github/workflows/GHA_increase_disk_space.sh - - - name: setup variables - shell: bash -l {0} - run: | - compose_command="docker-compose -f docker/docker-compose.yml" - - if [[ ${{ matrix.tag }} == 'service' ]]; - then - echo "prereq_image_name="latest"" >> $GITHUB_ENV - compose_command="$compose_command -f docker/docker-compose.srv.yml" - fi - if [[ ${{ matrix.tag }} == 'devel-service' ]] - then - echo "prereq_image_name="devel"" >> $GITHUB_ENV - compose_command="$compose_command -f docker/docker-compose.devel.yml -f docker/docker-compose.srv.yml" - fi - echo "compose_command=$compose_command" >> $GITHUB_ENV - - - name: Download artifact of image - uses: actions/download-artifact@v3 - with: - name: sirf-${{ env.prereq_image_name }} - path: /tmp - - - name: load prerequisite image - shell: bash -l {0} - run: | - docker load --input /tmp/${{ env.prereq_image_name }}.tar.gz - - - name: Build docker image - # Builds docker image from Docker file. - shell: bash -l {0} - run: | - set -ex; - # ${{ env.compose_command }} pull sirf - ${{ env.compose_command }} build --pull sirf - - - name: Test CIL - shell: bash -l {0} - run: | - set -ex; - docker run --rm -v ./.github/workflows:/gh synerbi/sirf:service /gh/test_cil.sh - - - name : docker save image - # https://docs.docker.com/engine/reference/commandline/save/ - shell: bash -l {0} - run: | - # first we must delete image that is no longer needed, to save space: - docker rmi synerbi/sirf:${{ env.prereq_image_name }} - # get rid of everything over 2 days old - docker system prune -a --filter "until=2d" - docker image ls - docker save synerbi/sirf:service | gzip > ${{ matrix.tag }}.tar.gz - - - name: Upload artifact of image. - uses: actions/upload-artifact@v3.1.2 - with: - name: sirf-${{ matrix.tag }} - path: ${{ matrix.tag }}.tar.gz - - # Docker images are not currently uploaded with this action. - # Disable it for now as it would just waste time. - # upload-images: - # runs-on: ubuntu-latest - # needs: [build-service-images, build-service-gpu] - # strategy: - # matrix: - # tag: ['core', 'latest', 'devel', 'service', 'devel-service', 'core-gpu', 'service-gpu'] - # steps: - # - uses: actions/checkout@v3 - # - name: Install docker and pre-reqs and clean some disk-space - # shell: bash -l {0} - # run: | - # sudo .github/workflows/install_docker.sh - # sudo .github/workflows/GHA_increase_disk_space.sh - - # - name: Download artifact of image - # uses: actions/download-artifact@v3 - # with: - # name: sirf-${{ matrix.tag }} - # path: /tmp - - # - name: load image - # shell: bash -l {0} - # run: | - # docker load --input /tmp/${{ matrix.tag }}.tar.gz - - # - name: Upload docker image - # # Builds docker image from Docker file. 
- # shell: bash -l {0} - # run: | - # # TODO requirement to - # docker image ls - # echo "Here we would upload image with name: ${{ matrix.tag }}" - - - # TODO: publish to come later From a4ec488f2e9f340271bbfd72c665d3bb91999ec1 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 21:01:52 +0000 Subject: [PATCH 64/98] CI: fix & speedup build --- .github/workflows/docker.yml | 2 -- docker/compose.sh | 6 ++++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index cce454f0..279daf75 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -27,8 +27,6 @@ jobs: type: [gpu, cpu] steps: - uses: actions/checkout@v3 - - run: sudo .github/workflows/install_docker.sh - - run: sudo .github/workflows/GHA_increase_disk_space.sh - name: cache uses: actions/cache@v3 with: diff --git a/docker/compose.sh b/docker/compose.sh index 00fcb54c..fc3e21fb 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -38,6 +38,12 @@ DCC_GPU="docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml" pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive +echo build base stack +for image in foundation base minimal scipy; do + test $build_cpu = 1 && $DCC_CPU build "$@" $image + test $build_gpu = 1 && $DCC_GPU build "$@" $image +done + echo build ccache test $build_cpu = 1 && $DCC_CPU build "$@" sirf-build test $build_gpu = 1 && $DCC_GPU build "$@" sirf-build From 1911bfd876bfb9f1dec2ca4911ab029c4c05e7a4 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 21:15:17 +0000 Subject: [PATCH 65/98] CI: drop travis --- .github/workflows/c-cpp.yml | 2 - .travis.yml | 207 ---------------------------- CHANGES.md | 6 + CONTRIBUTING.md | 3 +- README.md | 30 ++-- docker/requirements-travis.txt | 2 - docker/requirements_conda_forge.txt | 29 ---- 7 files changed, 22 insertions(+), 257 deletions(-) delete mode 100644 .travis.yml delete mode 100644 docker/requirements-travis.txt delete mode 100644 docker/requirements_conda_forge.txt diff --git a/.github/workflows/c-cpp.yml b/.github/workflows/c-cpp.yml index 52e85e76..ae03aa87 100644 --- a/.github/workflows/c-cpp.yml +++ b/.github/workflows/c-cpp.yml @@ -11,7 +11,6 @@ on: - '.github/workflows/*docker*' - 'CITATION.cff' - '.mailmap' - - '.travis.yml' - 'NOTICE.txt' - 'LICENSE.txt' pull_request: @@ -24,7 +23,6 @@ on: - '.github/workflows/*docker*' - 'CITATION.cff' - '.mailmap' - - '.travis.yml' - 'NOTICE.txt' - 'LICENSE.txt' jobs: diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1a120374..00000000 --- a/.travis.yml +++ /dev/null @@ -1,207 +0,0 @@ -# Note: with `language: cpp`, `cache: ccache` works -# but `cache: pip` and `python:` is ignored -os: -- linux -language: cpp -dist: focal # Ubuntu 20.04 LTS -cache: # cache C/C++/pip (shared between builds) -- ccache -- pip -stages: -- docker-core -- name: docker - if: NOT type = pull_request - -# Compilation dependencies -addons: - -jobs: - include: - # docker - - os: linux - stage: docker-core - name: docker +CORE - env: DOCKER_BUILD=CORE - services: - - docker - addons: &docker_addons - apt: - packages: - before_install: &docker_before_install - - | - # login required early to increase pull rate limits - if [[ "$TRAVIS_SECURE_ENV_VARS" == true ]]; then - echo "$DOCKER_PWD" | docker login -u $DOCKER_USR --password-stdin - fi - # custom runtime not supported on travis - - sed -i '/runtime/d' docker/docker-compose.srv-gpu.yml - install: 
&docker_install - - cd docker - - if [[ -z "$GROUP_ID" ]]; then export GROUP_ID=$(id -g); fi - - if [[ -z "$USER_ID" ]]; then export USER_ID=$(id -u); fi - - export DCC="docker-compose -p "travis_${TRAVIS_JOB_NUMBER/./_}" -f $PWD/docker-compose.yml" - - if [[ "$DOCKER_BUILD" == *"DEVEL"* ]]; then export DCC="$DCC -f $PWD/docker-compose.devel.yml"; fi - - if [[ "$DOCKER_BUILD" == *"SERVICE"* ]]; then export DCC="$DCC -f $PWD/docker-compose.srv.yml"; fi - - if [[ "$DOCKER_BUILD" == *"GPU"* ]]; then export DCC="$DCC -f $PWD/docker-compose.srv-gpu.yml"; fi - # pull previous base image for its layer cache - - $DCC pull core - # rebuild base image (using above docker cache) - - | - export DOCKER_BUILDKIT=1 - if [[ "$DOCKER_BUILD" == *"CORE"* ]]; then ( set -ev - $DCC build --pull core - ); else ( set -ev - # rebuild sirf image (with travis' ccache) - rm -rf devel/.ccache - if [[ -n "$DOCKER_RECREATE_CCACHE" || ! -d ~/.ccache ]]; then - mkdir devel/.ccache - sudo rm -rf ~/.ccache - else - sudo chown -R $USER:$(id -g) ~/.ccache - mv ~/.ccache devel/ - fi - $DCC build sirf - # extract updated ccache - # sudo rm -rf devel/.ccache/* - $DCC run --rm sirf /bin/bash -c 'rm -rf /devel/.ccache/* && cp -a /opt/ccache/* /devel/.ccache/' - # replace travis' ccache with the built images's - mv devel/.ccache ~ - ); fi - script: &docker_script - - | - # run tests within the container - # Disabled tests as we no longer have ctest files in the container - true - # TODO: also add CORE tests! - # TODO: GPU tests are failing even with nvidia run-time https://github.com/SyneRBI/SIRF-SuperBuild/issues/553 - # if [[ "$DOCKER_BUILD" != *"CORE"* && "$DOCKER_BUILD" != *"GPU"* ]]; then - # Need to run as jovyan to be able to write to build directory (needed by ctest) - # $DCC run --rm -u jovyan --entrypoint /bin/bash sirf --login -c /devel/test.sh 1 - # fi - after_success: &docker_after_success - - | - # push images - if [[ "$TRAVIS_SECURE_ENV_VARS" == true ]]; then ( set -ev - dpush() { - # create and push alias $1 -> $2 - if [[ "$1" != "$2" ]]; then - docker tag synerbi/sirf:$1 synerbi/sirf:$2 - fi - docker push synerbi/sirf:$2 - } - if [[ -n "$TRAVIS_TAG" ]]; then ( set -ev - # tag & push - case "$DOCKER_BUILD" in - CORE*) - ;; - LATEST) - dpush latest latest - dpush latest "$TRAVIS_TAG" - dpush latest release - ;; - DEVEL) - ;; - DEVEL_SERVICE) - ;; - SERVICE) - dpush service service - dpush service "$TRAVIS_TAG"-service - dpush service release-service - ;; - SERVICE_GPU) - dpush service-gpu service-gpu - dpush service-gpu "$TRAVIS_TAG"-service-gpu - dpush service-gpu release-service-gpu - ;; - *) - exit 1 - esac - ); elif [[ "$TRAVIS_BRANCH" == master ]]; then ( set -ev - # tag & push - case "$DOCKER_BUILD" in - CORE) - dpush core core - ;; - CORE_GPU) - dpush core-gpu core-gpu - ;; - LATEST) - dpush latest latest - ;; - DEVEL) - dpush devel devel - ;; - DEVEL_SERVICE) - dpush service devel-service - ;; - SERVICE) - dpush service service - ;; - SERVICE_GPU) - dpush service-gpu service-gpu - ;; - *) - exit 1 - esac - ); fi - ); fi - - os: linux - name: docker +CORE +GPU - env: DOCKER_BUILD=CORE_GPU - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success - - os: linux - stage: docker - name: docker +LATEST - env: DOCKER_BUILD=LATEST - services: - - docker - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success - - os: 
linux - name: docker +SERVICE - env: DOCKER_BUILD=SERVICE - services: - - docker - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success - - os: linux - name: docker +DEVEL - env: DOCKER_BUILD=DEVEL - if: branch = master - services: - - docker - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success - - os: linux - name: docker +DEVEL +SERVICE - env: DOCKER_BUILD=DEVEL_SERVICE - services: - - docker - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success - - os: linux - name: docker +SERVICE +GPU - env: DOCKER_BUILD=SERVICE_GPU - services: - - docker - addons: *docker_addons - before_install: *docker_before_install - install: *docker_install - script: *docker_script - after_success: *docker_after_success diff --git a/CHANGES.md b/CHANGES.md index 06868ba3..4553f877 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -2,6 +2,12 @@ ## v3.6.0 - build the STIR native Python interface by default (STIR_BUILD_SWIG_PYTHON=ON). You can still switch this off. +- docker image updates: + - uses `docker-stacks` + - CPU: ubuntu:22.04 + - GPU: nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 + - added requirements.yml +- Drop Travis - updated versions: - SIRF: v3.6.0 - STIR: v6.0.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5e5b582e..9878bcc6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -45,10 +45,9 @@ Please by mindful about the resources used by our Continuous Integration (CI) wo - Use specific keywords in the body of the last commit that you push to prevent CI being run: - `[ci skip]` skips all CI runs (e.g. when you only change documentation, or when your update isn't ready yet) - `[actions skip]` does not run GitHub Actions, see [here](https://github.blog/changelog/2021-02-08-github-actions-skip-pull-request-and-push-workflows-with-skip-ci/).a - - `[travis skip]` does not run Travis-CI, see [here](https://docs.travis-ci.com/user/customizing-the-build/#skipping-a-build). 8. After acceptance of your PR, go home with a nice warm feeling. -Suggested reading: +Suggested reading: https://help.github.com/articles/fork-a-repo/, https://git-scm.com/book/en/v2/GitHub-Contributing-to-a-Project or https://guides.github.com/activities/forking/. ### A note on copyright dates and notices (and licenses) diff --git a/README.md b/README.md index c571b6d7..c3ad5328 100644 --- a/README.md +++ b/README.md @@ -63,7 +63,7 @@ mkdir ~/devel ``` ### Install CMake -If you do not have CMake >= 3.10, install it first. You can probably use a package manager on your OS. Alternatively, you can do that either by following the official instructions ([download link](https://cmake.org/download/)) or running your own shell sript to do so (see an example [here](https://github.com/SyneRBI/SyneRBI_VM/blob/master/scripts/INSTALL_CMake.sh)). +If you do not have CMake >= 3.10, install it first. You can probably use a package manager on your OS. Alternatively, you can do that either by following the official instructions ([download link](https://cmake.org/download/)) or running your own shell sript to do so (see an example [here](https://github.com/SyneRBI/SyneRBI_VM/blob/master/scripts/INSTALL_CMake.sh)). If you use a CMake installer, you will be asked to read and accept CMake's license. 
If you answered the last question during the CMake installation with yes, then you should use @@ -74,7 +74,7 @@ Note that the above `PATH` statements won't work if you are running csh. The equ ```csh set path = ( /usr/local/cmake/bin $path ) ``` -NOTE: change `/usr/local/` to your chosen installation path, if different. +NOTE: change `/usr/local/` to your chosen installation path, if different. You might want to add the `PATH` line to your start-up file e.g. `.profile`, `.bashrc` or `.cshrc`. @@ -278,7 +278,7 @@ In this case, you would then need to ensure that `PYTHONPATH` and `MATLABPATH` a ### Package specific information -For default versions built, see [version_config.cmake](version_config.cmake) and +For default versions built, see [version_config.cmake](version_config.cmake) and [below on how to change them](#Building-with-specific-versions-of-dependencies). The SuperBuild allows building many packages and sets dependencies correctly. However, the @@ -339,18 +339,18 @@ cmake ../SIRF-SuperBuild -DDISABLE_GIT_CHECKOUT_SIRF=ON -DSIRF_SOURCE_DIR=~/wher 2. Gadgetron's [FindMKL.cmake](https://github.com/gadgetron/gadgetron/blob/master/cmake/FindMKL.cmake#L23) will try to look for MKL libraries in `/opt/intel` on Unix/Apple and in `C:/Program Files (x86)/Intel/Composer XE` in Windows. Make sure that this is the location of the library or pass the vatiable `MKLROOT_PATH` (Unix/Apple) or set the environment variable `MKLROOT_PATH` on Windows. 3. Configure the SuperBuild to pass `Gadgetron_USE_MKL=ON`. -Notice that other packages may look for a blas implementation issuing CMake's [`find_package(BLAS)`](https://github.com/Kitware/CMake/blob/master/Modules/FindBLAS.cmake#L142L148). This will look for MKL taking hint directories from the environment variable `LD_LIBRARY_PATH`, `DYLD_LIBRARY_PATH` and `LIB`, on Unix, Apple and Windows respectively. +Notice that other packages may look for a blas implementation issuing CMake's [`find_package(BLAS)`](https://github.com/Kitware/CMake/blob/master/Modules/FindBLAS.cmake#L142L148). This will look for MKL taking hint directories from the environment variable `LD_LIBRARY_PATH`, `DYLD_LIBRARY_PATH` and `LIB`, on Unix, Apple and Windows respectively. ### Building CCPi CIL -It is possible to build the [CCPi Core Imaging Library CIL](https://www.ccpi.ac.uk/CIL) as part of the SuperBuild. The functionality of `CIL` can be expanded by plugins. Currently available: [`CCPi-Regularisation`](https://github.com/vais-ral/CCPi-Regularisation-Toolkit), [`TomoPhantom`](https://github.com/dkazanc/TomoPhantom) [ASTRA-toolbox](https://github.com/astra-toolbox/astra-toolbox) and [`TIGRE`](https://github.com/CERN/TIGRE)). +It is possible to build the [CCPi Core Imaging Library CIL](https://www.ccpi.ac.uk/CIL) as part of the SuperBuild. The functionality of `CIL` can be expanded by plugins. Currently available: [`CCPi-Regularisation`](https://github.com/vais-ral/CCPi-Regularisation-Toolkit), [`TomoPhantom`](https://github.com/dkazanc/TomoPhantom) [ASTRA-toolbox](https://github.com/astra-toolbox/astra-toolbox) and [`TIGRE`](https://github.com/CERN/TIGRE)). 
-There is one mandatory flag and 2 optional: +There is one mandatory flag and 2 optional: - `BUILD_CIL=ON`, will build `CIL` and all the following plugins: `CCPi-Regularisation` and `TomoPhantom`; default `OFF` - - Optional `IPP_LIBRARY=` and `IPP_INCLUDE=` if you want to build CIL with [IPP](https://www.intel.com/content/www/us/en/developer/tools/oneapi/ipp.html#gs.dnfk5r) support for optimised [FBP/FDK](https://github.com/TomographicImaging/CIL#dependency) . - - Optional `BUILD_ASTRA=ON`, if you want to use CIL for CT reconstruction with the ASTRA-toolbox engine. Default `OFF` + - Optional `IPP_LIBRARY=` and `IPP_INCLUDE=` if you want to build CIL with [IPP](https://www.intel.com/content/www/us/en/developer/tools/oneapi/ipp.html#gs.dnfk5r) support for optimised [FBP/FDK](https://github.com/TomographicImaging/CIL#dependency) . + - Optional `BUILD_ASTRA=ON`, if you want to use CIL for CT reconstruction with the ASTRA-toolbox engine. Default `OFF` ### Passing CMAKE arguments to specific projects @@ -358,7 +358,7 @@ You may want to change the CMAKE arguments used to build some of the projects. Y ```sh cmake ../SIRF-SuperBuild -D${proj}_EXTRA_CMAKE_ARGS:STRING="-Dflag1:BOOL=ON;-Dflag2:STRING=\"your_string\"" -``` +``` All the flags from the following projects can be set using this technique: @@ -370,13 +370,13 @@ All the flags from the following projects can be set using this technique: - NIFTYREG - NiftyPET - CCPi-Regularisation-Toolkit -- TomoPhantom +- TomoPhantom As an example, the following changes some Gadgetron and NiftyReg flags ```sh cmake ../SIRF-SuperBuild -DGadgetron_EXTRA_CMAKE_ARGS:STRING="-DBUILD_PYTHON_SUPPORT:BOOL=ON;" -DNIFTYREG_EXTRA_CMAKE_ARGS:STRING="-DCUDA_FAST_MATH:BOOL=OFF;-DCMAKE_VERBOSE_MAKEFILE:BOOL=OFF" -``` +``` ### Building with CUDA Some dependencies like Gadgetron, NiftyPET and Parallelproj require building parts of their code base with CUDA. It has been found that version [10.1 update 1](https://github.com/gadgetron/gadgetron/issues/792#issuecomment-786481256) works, but following updates of 10.1 and 10.2 do not build Gadgetron. It is reported that version CUDA toolkit version 11 works. We have not tested lower versions of the toolkit yet. @@ -397,8 +397,10 @@ By the way, if you build with `USE_SYSTEM_FFTW3=OFF` (the default except on Wind ### Armadillo issues CMake does come with `FindArmadillo.cmake` but it currently (at least up to CMake 3.12) has no variable to specify its location at all. This implies that when using `USE_SYSTEM_ARMADILLO=On`, you have to install armadillo in a system location, unless some extra work is done. See [this post on stackoverflow](https://stackoverflow.com/questions/35304513/cmake-find-armadillo-library-installed-in-a-custom-location) for some suggestions, which we haven't tried. 
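As an illustration of the CIL-related options from the "Building CCPi CIL" section above, a configure line might look like the sketch below. This is not prescriptive: the IPP paths are placeholders and depend on where Intel IPP is installed on your system.

```sh
# Sketch only: enable CIL with the ASTRA backend and IPP-accelerated FBP/FDK.
# Replace the IPP paths with the location of your own IPP installation.
cmake ../SIRF-SuperBuild \
  -DBUILD_CIL=ON \
  -DBUILD_ASTRA=ON \
  -DIPP_LIBRARY=/path/to/ipp/lib \
  -DIPP_INCLUDE=/path/to/ipp/include
```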
-[CI-badge]: https://travis-ci.org/SyneRBI/SIRF-SuperBuild.svg?branch=master -[CI-link]: https://travis-ci.org/SyneRBI/SIRF-SuperBuild +[gh-action-badge]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/c-cpp.yml/badge.svg +[gh-action-link]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/c-cpp.yml +[CI-badge]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/docker.yml/badge.svg +[CI-link]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/docker.yml [style-badge]: https://api.codacy.com/project/badge/Grade/eefea1a2f11148fabd9a4ec9b822701f [style-link]: https://www.codacy.com/gh/SyneRBI/SIRF-SuperBuild?utm_source=github.com&utm_medium=referral&utm_content=SyneRBI/SIRF-SuperBuild&utm_campaign=Badge_Grade [docker-badge]: https://img.shields.io/docker/pulls/synerbi/sirf.svg @@ -406,5 +408,3 @@ CMake does come with `FindArmadillo.cmake` but it currently (at least up to CMak [install-badge]: https://img.shields.io/badge/dynamic/json.svg?label=users&uri=https%3A//raw.githubusercontent.com/ccp-petmr-codebot/github-stats/SyneRBI/SIRF-SuperBuild/SyneRBI_SIRF_SuperBuild.json&query=total&colorB=8000f0&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAD0ElEQVR4nMSXS2ic5RfGf5lL/pPMZP7JjB0ba53axkvRegGhpFaxVkqjOxetoi6UUjG4ELdVFy50I6gLtfUCFilUiwheilZXIm1FiZSCpYmYRppoUpvRJONMOpmJnOT54DBtxk4CmQPDl/d85z3vc55zeb%2BEWJr0AE8uxcFSAWwE7mwkgE7gqkYCWAGkGwkgBVQaCSACFBsJwPaXgGijAJhMAonlAvAc0OHWlv8pB8BS8hLQtFhAtaRFdD%2BodQz4BngbuEm624BZ4MYFfFwUcD0MdAGjeqKojf4Jx8B1wAhwwwI%2BLuqYegBkgePAKq2Tot9AtEln744Bay7XaS0Are4wk6uBPmCl1gkByDsGMrLx%2B1L61Q3gBeBTt7axe9I5C1LgGcjIxo/nvcC%2BhQ6J1ACwWREmlWeLfNBVeELR5x0AA/czcKXzE6SjSQV6SQbiwLdAt9bNMu5zFZ5REXoAU6F5u4TTjcgfAvI78AuwVrprge%2BB1R7Aw0Leq7VV82mgX3%2BbXAGcd1G0J0Ik18foSoVply4MFFyA64ABpSUI5HEFNndW5NEo2c9LPNQT5fkS7IxBdqDCXRUY6wpxtgKZKGS/KNH6QJTV09D3P1hzqkznvRl2b2%2Bj8P4Qr8%2BGyB6ZoXlbhGwRfjA//RXuaIKJtSFOV6DD/BwuseX%2BKE8VYHcLZJuOZ3hr1zjbDqb5%2BAK0hGFm7xTd6yMM3hNj2HQxyO88zyMfpjkwDfEoFF6bZOPdGTZvbeOfE6McWgVju3Ls2J/ioyLEm6HwTp7udWGGt8YYsn3lWSq9OXr2p%2BbPCkHZWPgAeFr0GC1PAJ%2Bokls17ez5tWz2aRi9uCnOm70reKMzwgHR/6VsrPI3yHaD3h0FtgCvyGY7cMi64DfgXSnN6Bn1/IgrrE4Vk8lfug/ix/IcPFWkmCuzB/i/WhLZrtTwGmI%2B0gv6hjwqm6%2BA260I97g7/QRwq4omkBngGgfobytAa71ZOJcrz%2B1JCEBONsMKol0tbHIEeEyMomJ%2BuXoOmHIHMO50Z4FNYgod0uFG8bQuprTYCfZ0K/JAXgU%2Bc4Dm5FKTcEDtFki/qBvUekLRJsUGmg1Jl4JfgfuAM85PoYrZBQFUy0/69D7pGEi5uyCQtGPOANyifq8ptUZxIFbZz4pWXBGG3fVaUb6DFMyomA//l/PLYaCg/AUyrmj93mmNVl8777nOWRKAavlT94L/uLCauV7v6pLFABhXf09WgboZGFsOAEG%2BR53uDzEwuhwA0HQbdOszOrzuf1LCiwRg7fedi/icfj/W6%2BjfAAAA///cZAAN8LSlZAAAAABJRU5ErkJggg%3D%3D [zenodo-badge]: https://zenodo.org/badge/DOI/10.5281/zenodo.4408776.svg [zenodo-link]: https://doi.org/10.5281/zenodo.4408776 -[gh-action-badge]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/c-cpp.yml/badge.svg -[gh-action-link]: https://github.com/SyneRBI/SIRF-SuperBuild/actions/workflows/c-cpp.yml/ diff --git a/docker/requirements-travis.txt b/docker/requirements-travis.txt deleted file mode 100644 index 1fd93417..00000000 --- a/docker/requirements-travis.txt +++ /dev/null @@ -1,2 +0,0 @@ -coveralls -codecov diff --git a/docker/requirements_conda_forge.txt b/docker/requirements_conda_forge.txt deleted file mode 100644 index 0e5829dd..00000000 --- a/docker/requirements_conda_forge.txt +++ /dev/null @@ -1,29 +0,0 @@ -# cil -# cil-astra -# ccpi-regulariser -ipp -ipp-devel -ipp-include -tigre -# tomophantom=1.4.10 -python-wget -matplotlib<3.5 # CIL -Cython # CIL -numpy=1.20 # CIL -scipy # CIL -h5py # CIL -Pillow # 
CIL -wget # CIL -six # CIL -olefile # CIL -pywavelets # CIL -olefile>=0.46 # CIL -dxchange # CIL -h5py # CIL -tqdm # CIL -numba # CIL -nose -docopt -nibabel -deprecation -nose From 466b6fba9714cb7149b393fda99fcc147649f91b Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Mon, 8 Jan 2024 21:56:14 +0000 Subject: [PATCH 66/98] avoid ccache bloat --- .github/workflows/docker.yml | 2 +- docker/compose.sh | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 279daf75..44d6ece2 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -35,7 +35,7 @@ jobs: - id: build name: build run: | - ./docker/compose.sh ${{ matrix.type == 'gpu' && '-C' || '-G' }} + ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} echo "image=synerbi/sirf:jupyter${{ matrix.type == 'gpu' && '-gpu' || '' }}" >> $GITHUB_OUTPUT - name: test CIL run: > diff --git a/docker/compose.sh b/docker/compose.sh index fc3e21fb..22a5d661 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -5,7 +5,8 @@ build_cpu=1 build_gpu=1 run_cpu=0 run_gpu=0 -while getopts :hCGcg option; do +regen_ccache=0 +while getopts :hCGcgr option; do case "${option}" in h) cat < Date: Mon, 8 Jan 2024 22:03:13 +0000 Subject: [PATCH 67/98] fix test return status --- .github/workflows/test_cil.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test_cil.sh b/.github/workflows/test_cil.sh index d837c8dd..eb4ee00a 100755 --- a/.github/workflows/test_cil.sh +++ b/.github/workflows/test_cil.sh @@ -11,4 +11,6 @@ done $SIRF_PATH/../../INSTALL/bin/gadgetron >& ~/gadgetron.log& /opt/conda/bin/python -m unittest discover -v ./cil_sirf_test -k GradientPET -k BlockDataContainer -for i in $(jobs -p); do kill -n 15 $i; done 2>/dev/null # kill gadgetron +ret=$? 
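+# the unittest exit code is saved in $ret above so that the gadgetron clean-up
+# below cannot mask a test failure; it is re-raised by `exit $ret` at the end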
+for i in $(jobs -p); do kill -n 15 $i || : ; done 2>/dev/null # kill gadgetron +exit $ret From f41979c9ac4a4120a2a89ebd9615578740636cc6 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 9 Jan 2024 19:36:28 +0000 Subject: [PATCH 68/98] CI: fix empty ccache --- docker/compose.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/compose.sh b/docker/compose.sh index 22a5d661..a7aba91e 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -59,7 +59,7 @@ test $build_cpu = 1 && $DCC_CPU build "$@" test $build_gpu = 1 && $DCC_GPU build "$@" echo copy ccache -test $regen_ccache = 1 && sudo rm -r ./docker/devel/.ccache/* +test $regen_ccache = 1 && sudo rm -rf ./docker/devel/.ccache/* export USER_ID UID test $build_cpu = 1 && $DCC_CPU up sirf-build && $DCC_CPU down sirf-build test $build_gpu = 1 && $DCC_GPU up sirf-build && $DCC_GPU down sirf-build From 42f80ae751c0af982f5434536ccd06e60246dfb5 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 9 Jan 2024 21:27:08 +0000 Subject: [PATCH 69/98] docker tags & pushing --- .github/workflows/docker.yml | 61 ++++++++++++++++++++++++++++++++++-- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 44d6ece2..bcaef01e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -10,6 +10,7 @@ on: - '.mailmap' push: branches: [master] + tags: ['v**'] paths-ignore: - '**.md' - 'VirtualBox/**' @@ -26,7 +27,39 @@ jobs: matrix: type: [gpu, cpu] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.pull_request.head.sha || github.ref }} # fix SHA + - uses: docker/metadata-action@v5 + id: meta + with: + images: | + synerbi/sirf + ghcr.io/${{ github.repository }} + flavor: | + latest=auto + suffix=${{ matrix.type == 'gpu' && 'gpu,onlatest=true' || '' }} + tags: | + type=edge + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{major}} + labels: | + org.opencontainers.image.licenses=Apache-2.0${{ matrix.type == 'gpu' && ' AND BSD-3-Clause AND GPL-3.0' || '' }} + - uses: docker/login-action@v3 + if: github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags') + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/login-action@v3 + if: github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags') + with: + username: ${{ secrets.DOCKER_USR }} + password: ${{ secrets.DOCKER_PWD }} + - name: increase disk space + run: ./.github/workflows/GHA_increase_disk_space.sh - name: cache uses: actions/cache@v3 with: @@ -35,9 +68,33 @@ jobs: - id: build name: build run: | + suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} + # pull base image + base_image=ghcr.io/${{ github.repository }}:scipy$suffix + docker pull $base_image || : + # pull sirf image + docker pull ghcr.io/${{ github.repository }}:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : + # rebuild sirf image ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} - echo "image=synerbi/sirf:jupyter${{ matrix.type == 'gpu' && '-gpu' || '' }}" >> $GITHUB_OUTPUT + image=synerbi/sirf:jupyter$suffix + echo "image=$image" >> "$GITHUB_OUTPUT" + # tag potentially newer base image + docker tag synerbi/sirf:scipy$suffix $base_image + echo "base_image=$base_image" >> "$GITHUB_OUTPUT" + # make a dummy Dockerfile to use with build-push-action + context=$(mktemp -d) + echo "FROM $image" >> 
"$context/Dockerfile" + echo "context=$context" >> "$GITHUB_OUTPUT" - name: test CIL run: > docker run --rm -v ./.github/workflows:/gh --user $(id -u) --group-add users ${{ steps.build.outputs.image }} /gh/test_cil.sh + - uses: docker/build-push-action@v5 + with: + context: ${{ steps.build.outputs.context }} + push: ${{ github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags') }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + - if: github.ref == 'refs/heads/master' + name: push base image + run: docker push ${{ steps.build.outputs.base_image }} From 44a9f5a110cb9f52d5c0832e82905454c023686e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 9 Jan 2024 21:40:16 +0000 Subject: [PATCH 70/98] better ccache --- .github/workflows/docker.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index bcaef01e..d287cfcf 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -36,7 +36,7 @@ jobs: with: images: | synerbi/sirf - ghcr.io/${{ github.repository }} + ghcr.io/synerbi/sirf flavor: | latest=auto suffix=${{ matrix.type == 'gpu' && 'gpu,onlatest=true' || '' }} @@ -63,17 +63,22 @@ jobs: - name: cache uses: actions/cache@v3 with: - key: ccache-${{ matrix.type }} + key: ccache-docker-${{ matrix.type }}-${{ github.ref_name }}-${{ github.run_id }} + restore-keys: | + ccache-docker-${{ matrix.type }}-${{ github.ref_name }} + ccache-docker-${{ matrix.type }} + ccache-docker + ccache path: docker/devel/.ccache - id: build name: build run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} # pull base image - base_image=ghcr.io/${{ github.repository }}:scipy$suffix + base_image=ghcr.io/synerbi/sirf:scipy$suffix docker pull $base_image || : # pull sirf image - docker pull ghcr.io/${{ github.repository }}:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : + docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : # rebuild sirf image ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} image=synerbi/sirf:jupyter$suffix From 39de615a8f6e804c455e6bced89ad78da6aa2264 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 10 Jan 2024 07:16:09 +0000 Subject: [PATCH 71/98] minor fixes --- .github/workflows/docker.yml | 4 ++-- docker/compose.sh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index d287cfcf..25d0a07e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -75,7 +75,7 @@ jobs: run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} # pull base image - base_image=ghcr.io/synerbi/sirf:scipy$suffix + base_image=ghcr.io/synerbi/sirf:core$suffix docker pull $base_image || : # pull sirf image docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : @@ -84,7 +84,7 @@ jobs: image=synerbi/sirf:jupyter$suffix echo "image=$image" >> "$GITHUB_OUTPUT" # tag potentially newer base image - docker tag synerbi/sirf:scipy$suffix $base_image + docker tag synerbi/jupyter:scipy-${{ matrix.type }} $base_image echo "base_image=$base_image" >> "$GITHUB_OUTPUT" # make a dummy Dockerfile to use with build-push-action context=$(mktemp -d) diff --git a/docker/compose.sh b/docker/compose.sh index a7aba91e..1b2df6a5 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -33,7 +33,7 @@ done # remove processed options shift $((OPTIND-1)) -test $build_cpu$build_gpu 
= 11 && regen_ccache = 1 +test $build_cpu$build_gpu = 11 && regen_ccache=1 echo "build_cpu: $build_cpu, build_gpu: $build_gpu, regen ccache: $regen_ccache" echo "run_cpu: $run_cpu, run_gpu: $run_gpu" echo "build args: $@" From d7142440811aa29e7324e7b2642d6fdaee65b486 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 10 Jan 2024 08:15:06 +0000 Subject: [PATCH 72/98] cron weekly & master devel builds --- .github/workflows/docker.yml | 94 ++++++++++++++++++++++++++++++++---- docker-compose.yml | 2 + 2 files changed, 87 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 25d0a07e..56ecf205 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -17,11 +17,13 @@ on: - '.github/workflows/c-cpp.yml' - 'CITATION.cff' - '.mailmap' + schedule: [{cron: '37 13 * * SUN'}] # Sunday at 13:37 defaults: run: shell: bash -l {0} jobs: - build-test-push: + build: + if: github.event_name != 'schedule' runs-on: ubuntu-latest strategy: matrix: @@ -74,18 +76,18 @@ jobs: name: build run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} - # pull base image - base_image=ghcr.io/synerbi/sirf:core$suffix - docker pull $base_image || : + # pull core image + core_image=ghcr.io/synerbi/sirf:core$suffix + docker pull $core_image || : # pull sirf image docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : # rebuild sirf image ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} image=synerbi/sirf:jupyter$suffix echo "image=$image" >> "$GITHUB_OUTPUT" - # tag potentially newer base image - docker tag synerbi/jupyter:scipy-${{ matrix.type }} $base_image - echo "base_image=$base_image" >> "$GITHUB_OUTPUT" + # tag potentially newer core image + docker tag synerbi/jupyter:scipy-${{ matrix.type }} $core_image + echo "core_image=$core_image" >> "$GITHUB_OUTPUT" # make a dummy Dockerfile to use with build-push-action context=$(mktemp -d) echo "FROM $image" >> "$context/Dockerfile" @@ -101,5 +103,79 @@ jobs: tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - if: github.ref == 'refs/heads/master' - name: push base image - run: docker push ${{ steps.build.outputs.base_image }} + name: push core image + run: docker push ${{ steps.build.outputs.core_image }} + devel: + if: github.event_name == 'schedule' || github.ref == 'refs/heads/master' + runs-on: ubuntu-latest + strategy: + matrix: + type: [gpu, cpu] + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.pull_request.head.sha || github.ref }} # fix SHA + - uses: docker/metadata-action@v5 + id: meta + with: + images: | + synerbi/sirf + ghcr.io/synerbi/sirf + flavor: | + latest=false + suffix=${{ matrix.type == 'gpu' && 'gpu,onlatest=true' || '' }} + tags: devel + labels: | + org.opencontainers.image.licenses=Apache-2.0${{ matrix.type == 'gpu' && ' AND BSD-3-Clause AND GPL-3.0' || '' }} + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USR }} + password: ${{ secrets.DOCKER_PWD }} + - name: increase disk space + run: ./.github/workflows/GHA_increase_disk_space.sh + - name: cache + uses: actions/cache@v3 + with: + key: ccache-docker-${{ matrix.type }}-${{ github.ref_name }}-${{ github.run_id }} + restore-keys: | + ccache-docker-${{ matrix.type }}-${{ github.ref_name }} + ccache-docker-${{ matrix.type }} + ccache-docker + ccache + 
path: docker/devel/.ccache + - id: build + name: build + run: | + suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} + core_image=ghcr.io/synerbi/sirf:core$suffix + # rebuild sirf image + ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} \ + -- --build-arg EXTRA_BUILD_FLAGS="-DDEVEL_BUILD=ON -Dsiemens_to_ismrmrd_TAG=origin/master" + # TODO: remove *_TAG after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 + image=synerbi/sirf:jupyter$suffix + echo "image=$image" >> "$GITHUB_OUTPUT" + # tag potentially newer core image + docker tag synerbi/jupyter:scipy-${{ matrix.type }} $core_image + echo "core_image=$core_image" >> "$GITHUB_OUTPUT" + # make a dummy Dockerfile to use with build-push-action + context=$(mktemp -d) + echo "FROM $image" >> "$context/Dockerfile" + echo "context=$context" >> "$GITHUB_OUTPUT" + - name: test CIL + run: > + docker run --rm -v ./.github/workflows:/gh --user $(id -u) --group-add users + ${{ steps.build.outputs.image }} /gh/test_cil.sh + - uses: docker/build-push-action@v5 + with: + context: ${{ steps.build.outputs.context }} + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + - name: push core image + run: docker push ${{ steps.build.outputs.core_image }} diff --git a/docker-compose.yml b/docker-compose.yml index 8c600366..6628e5db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,7 @@ services: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" BUILD_GPU: 0 + # TODO: remove after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 EXTRA_BUILD_FLAGS: > -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 @@ -50,6 +51,7 @@ services: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" BUILD_GPU: 0 + # TODO: remove after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 EXTRA_BUILD_FLAGS: > -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 From e7fa0240dfec812e1c1aff47c177832385b88327 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 18 Jan 2024 10:01:13 +0000 Subject: [PATCH 73/98] drop #573 work-around --- docker/.bashrc | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docker/.bashrc b/docker/.bashrc index 679ea639..bf3b27cc 100644 --- a/docker/.bashrc +++ b/docker/.bashrc @@ -24,10 +24,6 @@ export PS1='sirf$ ' [ -f /opt/SIRF-SuperBuild/INSTALL/bin/env_sirf.sh ] && \ . /opt/SIRF-SuperBuild/INSTALL/bin/env_sirf.sh -# Need to add this as we have built using some of these shared libraries -# See https://github.com/SyneRBI/SIRF-SuperBuild/issues/573 -[ -f PYTHON_INSTALL_DIR/bin/activate ] && export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:PYTHON_INSTALL_DIR/lib - # .local/bin (used by pip for instance) export PATH="${PATH}":~/.local/bin From f76e9b7a5ca1b8df2030b9dd0208eff7b0f8d703 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 18 Jan 2024 10:10:17 +0000 Subject: [PATCH 74/98] expose SIRF_DOWNLOAD_DATA_ARGS --- docker-compose.yml | 3 +++ docker/start-gadgetron-notebook.sh | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6628e5db..4a21c175 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -63,6 +63,9 @@ services: # Alternatively replace with the output of `id -u`. 
user: ${USER_ID:-${UID:-${USER:-1000}}} group_add: [users] + environment: + # CLI arguments for https://github.com/SyneRBI/SIRF-Exercises/blob/master/scripts/download_data.sh + SIRF_DOWNLOAD_DATA_ARGS: '' volumes: [./docker/devel:/home/jovyan/work] restart: unless-stopped ports: diff --git a/docker/start-gadgetron-notebook.sh b/docker/start-gadgetron-notebook.sh index 3486d158..e16df235 100755 --- a/docker/start-gadgetron-notebook.sh +++ b/docker/start-gadgetron-notebook.sh @@ -39,7 +39,7 @@ pushd ~/work for repo in SIRF-Exercises CIL-Demos; do test -d ${repo} || cp -dR "${SB_PATH}/../${repo}" . done -./SIRF-Exercises/scripts/download_data.sh +./SIRF-Exercises/scripts/download_data.sh ${SIRF_DOWNLOAD_DATA_ARGS:-} echo "link SIRF-Contrib into ~/work" if test ! -r SIRF-contrib; then From 13e70674fc812e621ba7d08ee81e1c301145c164 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 18 Jan 2024 20:50:07 +0000 Subject: [PATCH 75/98] compose: -U to disable cache update --- docker/compose.sh | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docker/compose.sh b/docker/compose.sh index 1b2df6a5..c563331f 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -5,6 +5,7 @@ build_cpu=1 build_gpu=1 run_cpu=0 run_gpu=0 +update_ccache=1 regen_ccache=0 while getopts :hCGcgr option; do case "${option}" in @@ -17,8 +18,9 @@ Usage: $0 [-hCGcg] [-- [build options]] -G: disable GPU build -c: start CPU container -g: start GPU container + -U: disable updating docker/devel/.ccache -r: regenerate (rather than append to) docker/devel/.ccache - (default true iff neither -C nor -G are specified) + (true if neither -C nor -G are specified) build options: passed to 'docker compose build' EOF exit 0 ;; @@ -26,6 +28,7 @@ EOF G) build_gpu=0 ;; c) run_cpu=1 ;; g) run_gpu=1 ;; + U) update_ccache=0 ;; r) regen_ccache=1 ;; *) ;; esac @@ -34,7 +37,7 @@ done shift $((OPTIND-1)) test $build_cpu$build_gpu = 11 && regen_ccache=1 -echo "build_cpu: $build_cpu, build_gpu: $build_gpu, regen ccache: $regen_ccache" +echo "build_cpu: $build_cpu, build_gpu: $build_gpu, update ccache: $update_ccache, regen ccache: $regen_ccache" echo "run_cpu: $run_cpu, run_gpu: $run_gpu" echo "build args: $@" @@ -59,10 +62,10 @@ test $build_cpu = 1 && $DCC_CPU build "$@" test $build_gpu = 1 && $DCC_GPU build "$@" echo copy ccache -test $regen_ccache = 1 && sudo rm -rf ./docker/devel/.ccache/* +test $update_ccache$regen_ccache = 11 && sudo rm -rf ./docker/devel/.ccache/* export USER_ID UID -test $build_cpu = 1 && $DCC_CPU up sirf-build && $DCC_CPU down sirf-build -test $build_gpu = 1 && $DCC_GPU up sirf-build && $DCC_GPU down sirf-build +test $build_cpu = 1 && $DCC_CPU up sirf-build && test $update_ccache = 1 && $DCC_CPU down sirf-build +test $build_gpu = 1 && $DCC_GPU up sirf-build && test $update_ccache = 1 && $DCC_GPU down sirf-build echo start test $run_cpu = 1 && $DCC_CPU up -d sirf && $DCC_CPU down sirf From 7813706fc7b0ff1cfbb916d651e1f7c0a13cd77e Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 23 Jan 2024 13:57:38 +0000 Subject: [PATCH 76/98] misc fixes & tidy --- .github/workflows/docker.yml | 4 +- docker-compose.yml | 2 + docker/compose.sh | 104 +++++++++++++++++--------------- docker/docker-compose.devel.yml | 2 + 4 files changed, 62 insertions(+), 50 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 56ecf205..846ffd3a 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -82,7 +82,7 @@ jobs: # pull sirf image docker pull 
ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : # rebuild sirf image - ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} + ./docker/compose.sh -bR${{ matrix.type == 'gpu' && 'g' || 'c' }} image=synerbi/sirf:jupyter$suffix echo "image=$image" >> "$GITHUB_OUTPUT" # tag potentially newer core image @@ -155,7 +155,7 @@ jobs: suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} core_image=ghcr.io/synerbi/sirf:core$suffix # rebuild sirf image - ./docker/compose.sh -r ${{ matrix.type == 'gpu' && '-C' || '-G' }} \ + ./docker/compose.sh -bR${{ matrix.type == 'gpu' && 'g' || 'c' }} \ -- --build-arg EXTRA_BUILD_FLAGS="-DDEVEL_BUILD=ON -Dsiemens_to_ismrmrd_TAG=origin/master" # TODO: remove *_TAG after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 image=synerbi/sirf:jupyter$suffix diff --git a/docker-compose.yml b/docker-compose.yml index 4a21c175..7c31eba9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,8 @@ services: context: docker/docker-stacks/images/scipy-notebook args: {BASE_CONTAINER: synerbi/jupyter:minimal-cpu} sirf-build: + # convenience volume & command to copy image::/opt/ccache to localhost::./docker/devel/.ccache + # (executed in ./docker/compose.sh by `docker compose up sirf-build`) volumes: [./docker/devel/.ccache:/ccache] command: "bash -c 'cp -r /opt/ccache/* /ccache/'" image: synerbi/jupyter:sirf-build-cpu diff --git a/docker/compose.sh b/docker/compose.sh index c563331f..bebd39f3 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -1,45 +1,49 @@ #!/usr/bin/env bash set -euo pipefail -build_cpu=1 -build_gpu=1 -run_cpu=0 -run_gpu=0 -update_ccache=1 -regen_ccache=0 -while getopts :hCGcgr option; do - case "${option}" in - h) cat <&2 "WARNING: neither -b nor -r specified" +test $cpu$gpu = 00 && echo >&2 "WARNING: neither -c nor -g specified" +test $build$cpu$gpu = 111 && regen_ccache=1 # force rebuild ccache +echo "cpu: $cpu, gpu: $gpu, update ccache: $update_ccache, regen ccache: $regen_ccache" +echo "docker compose options: $@" DCC_CPU="docker compose" DCC_GPU="docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml" @@ -47,28 +51,32 @@ DCC_GPU="docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml" pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive -echo build base stack -for image in foundation base minimal scipy; do - test $build_cpu = 1 && $DCC_CPU build "$@" $image - test $build_gpu = 1 && $DCC_GPU build "$@" $image -done +if test $build = 1; then + echo build base stack + for image in foundation base minimal scipy; do + test $cpu = 1 && $DCC_CPU build "$@" $image + test $gpu = 1 && $DCC_GPU build "$@" $image + done -echo build ccache -test $build_cpu = 1 && $DCC_CPU build "$@" sirf-build -test $build_gpu = 1 && $DCC_GPU build "$@" sirf-build + echo build ccache + test $cpu = 1 && $DCC_CPU build "$@" sirf-build + test $gpu = 1 && $DCC_GPU build "$@" sirf-build -echo build -test $build_cpu = 1 && $DCC_CPU build "$@" -test $build_gpu = 1 && $DCC_GPU build "$@" + echo build + test $cpu = 1 && $DCC_CPU build "$@" + test $gpu = 1 && $DCC_GPU build "$@" -echo copy ccache -test $update_ccache$regen_ccache = 11 && sudo rm -rf ./docker/devel/.ccache/* -export USER_ID UID -test $build_cpu = 1 && $DCC_CPU up sirf-build && test $update_ccache = 1 && $DCC_CPU down sirf-build -test $build_gpu = 1 && $DCC_GPU up sirf-build && test $update_ccache = 1 && $DCC_GPU down sirf-build + echo copy ccache + test $update_ccache$regen_ccache = 11 && sudo rm 
-rf ./docker/devel/.ccache/* + export USER_ID UID + test $cpu = 1 && $DCC_CPU up --no-build sirf-build && test $update_ccache = 1 && $DCC_CPU down sirf-build + test $gpu = 1 && $DCC_GPU up --no-build sirf-build && test $update_ccache = 1 && $DCC_GPU down sirf-build +fi -echo start -test $run_cpu = 1 && $DCC_CPU up -d sirf && $DCC_CPU down sirf -test $run_gpu = 1 && $DCC_GPU up -d sirf && $DCC_GPU down sirf +if test $run = 1; then + echo start + test $cpu = 1 && $DCC_CPU up --no-build -d "$@" sirf && $DCC_CPU down sirf + test $gpu = 1 && $DCC_GPU up --no-build -d "$@" sirf && $DCC_GPU down sirf +fi popd diff --git a/docker/docker-compose.devel.yml b/docker/docker-compose.devel.yml index f8fef85e..898c263b 100644 --- a/docker/docker-compose.devel.yml +++ b/docker/docker-compose.devel.yml @@ -21,3 +21,5 @@ services: -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON cache_from: [synerbi/jupyter:sirf-build-devel-cpu] + cap_add: [SYS_PTRACE] + security_opt: [seccomp=unconfined] From 8864a08a0394c53695aa41814271bcebe776443a Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 26 Jan 2024 13:52:03 +0000 Subject: [PATCH 77/98] devel build with CIL --- docker/compose.sh | 18 ++++++++++++------ docker/docker-compose.devel.yml | 2 ++ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/docker/compose.sh b/docker/compose.sh index bebd39f3..76c79cba 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -11,6 +11,7 @@ Options: -h: print this help -b: build -r: run + -d: use development (main/master) repo branches -c: enable CPU -g: enable GPU -U: disable updating docker/devel/.ccache @@ -21,15 +22,17 @@ EOF build=0 run=0 +devel=0 cpu=0 gpu=0 update_ccache=1 regen_ccache=0 -while getopts :hbrcgUR option; do +while getopts :hbrdcgUR option; do case "${option}" in h) print_help; exit 0 ;; b) build=1 ;; r) run=1 ;; + d) devel=1 ;; c) cpu=1 ;; g) gpu=1 ;; U) update_ccache=0 ;; @@ -42,11 +45,13 @@ shift $((OPTIND-1)) # remove processed options test $build$run = 00 && echo >&2 "WARNING: neither -b nor -r specified" test $cpu$gpu = 00 && echo >&2 "WARNING: neither -c nor -g specified" test $build$cpu$gpu = 111 && regen_ccache=1 # force rebuild ccache -echo "cpu: $cpu, gpu: $gpu, update ccache: $update_ccache, regen ccache: $regen_ccache" +echo "cpu: $cpu, gpu: $gpu, update ccache: $update_ccache, regen ccache: $regen_ccache, devel: $devel" echo "docker compose options: $@" -DCC_CPU="docker compose" -DCC_GPU="docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml" +DCC_CPU="docker compose -f docker-compose.yml" +DCC_GPU="$DCC_CPU -f docker/docker-compose.gpu.yml" +test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel.yml" +test $devel$gpu = 11 && echo >&2 "WARNING: devel gpu not supported" pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive @@ -75,8 +80,9 @@ fi if test $run = 1; then echo start - test $cpu = 1 && $DCC_CPU up --no-build -d "$@" sirf && $DCC_CPU down sirf - test $gpu = 1 && $DCC_GPU up --no-build -d "$@" sirf && $DCC_GPU down sirf + export USER_ID UID + test $cpu = 1 && $DCC_CPU up --no-build "$@" sirf; $DCC_CPU down sirf + test $gpu = 1 && $DCC_GPU up --no-build "$@" sirf; $DCC_GPU down sirf fi popd diff --git a/docker/docker-compose.devel.yml b/docker/docker-compose.devel.yml index 898c263b..546bc3c8 100644 --- a/docker/docker-compose.devel.yml +++ b/docker/docker-compose.devel.yml @@ -9,6 +9,7 @@ services: -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON + -DBUILD_CIL=ON sirf: 
container_name: sirf # for scaling, comment this out https://github.com/docker/compose/issues/3729 image: synerbi/sirf:jupyter-devel @@ -20,6 +21,7 @@ services: -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON + -DBUILD_CIL=ON cache_from: [synerbi/jupyter:sirf-build-devel-cpu] cap_add: [SYS_PTRACE] security_opt: [seccomp=unconfined] From a4ca9c5831adb6b8437eceab308b6f0813a6dab8 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 26 Jan 2024 16:23:38 +0000 Subject: [PATCH 78/98] compose binary & devel-gpu --- docker/compose.sh | 11 ++++++++--- docker/docker-compose.devel-gpu.yml | 8 ++++++++ 2 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 docker/docker-compose.devel-gpu.yml diff --git a/docker/compose.sh b/docker/compose.sh index 76c79cba..1a41ffd4 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -48,10 +48,15 @@ test $build$cpu$gpu = 111 && regen_ccache=1 # force rebuild ccache echo "cpu: $cpu, gpu: $gpu, update ccache: $update_ccache, regen ccache: $regen_ccache, devel: $devel" echo "docker compose options: $@" -DCC_CPU="docker compose -f docker-compose.yml" -DCC_GPU="$DCC_CPU -f docker/docker-compose.gpu.yml" +# compose binary +DCC="${DCC:-docker compose}" +which docker-compose && DCC=$(which docker-compose) +# CPU config +DCC_CPU="$DCC -f docker-compose.yml" test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel.yml" -test $devel$gpu = 11 && echo >&2 "WARNING: devel gpu not supported" +# GPU config +DCC_GPU="$DCC_CPU -f docker/docker-compose.gpu.yml" +test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel-gpu.yml" pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive diff --git a/docker/docker-compose.devel-gpu.yml b/docker/docker-compose.devel-gpu.yml new file mode 100644 index 00000000..4b02aebf --- /dev/null +++ b/docker/docker-compose.devel-gpu.yml @@ -0,0 +1,8 @@ +version: '3.2' +services: + sirf-build: + image: synerbi/jupyter:sirf-build-devel-gpu + sirf: + image: synerbi/sirf:jupyter-devel-gpu + build: + cache_from: [synerbi/jupyter:sirf-build-devel-gpu] From 1ebf418ef9e6d84063cbd5d21530911afbd56e47 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Fri, 26 Jan 2024 17:03:25 +0000 Subject: [PATCH 79/98] auto-gen help --- docker/compose.sh | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/docker/compose.sh b/docker/compose.sh index 1a41ffd4..27820228 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -8,15 +8,7 @@ Also creates intermediate (temp) images: synerbi/jupyter Usage: $0 [options] [-- [docker compose options]] Options: - -h: print this help - -b: build - -r: run - -d: use development (main/master) repo branches - -c: enable CPU - -g: enable GPU - -U: disable updating docker/devel/.ccache - -R: regenerate (rather than append to) docker/devel/.ccache - (always true if both -c and -g are specified) +$(sed -rn 's/^\s*(\w)\) .*;; # (.*)$/ -\1: \2/p' "$0") EOF } @@ -29,14 +21,14 @@ update_ccache=1 regen_ccache=0 while getopts :hbrdcgUR option; do case "${option}" in - h) print_help; exit 0 ;; - b) build=1 ;; - r) run=1 ;; - d) devel=1 ;; - c) cpu=1 ;; - g) gpu=1 ;; - U) update_ccache=0 ;; - R) regen_ccache=1 ;; + h) print_help; exit 0 ;; # print this help + b) build=1 ;; # build + r) run=1 ;; # run + d) devel=1 ;; # use development (main/master) repo branches + c) cpu=1 ;; # enable CPU + g) gpu=1 ;; # enable GPU + U) update_ccache=0 ;; # disable updating docker/devel/.ccache + R) regen_ccache=1 ;; # 
regenerate (rather than append to) docker/devel/.ccache (always true if both -c and -g are specified) *) ;; esac done From 724c91bf05e21e3ca485c048f691559574a7dc1c Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Thu, 1 Feb 2024 09:59:49 +0000 Subject: [PATCH 80/98] fix GPU devel --- docker/compose.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/compose.sh b/docker/compose.sh index 27820228..741f9f1d 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -48,7 +48,7 @@ DCC_CPU="$DCC -f docker-compose.yml" test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel.yml" # GPU config DCC_GPU="$DCC_CPU -f docker/docker-compose.gpu.yml" -test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel-gpu.yml" +test $devel = 1 && DCC_GPU+=" -f docker/docker-compose.devel-gpu.yml" pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" git submodule update --init --recursive From 03c3206139b690b1f3f84fc35f3278026e22e6c2 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Wed, 14 Feb 2024 11:12:30 +0000 Subject: [PATCH 81/98] added some docs in Readme --- README.md | 44 +++++++++++++++++++++++++++++++++++++++----- 1 file changed, 39 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index c3ad5328..6b5baa88 100644 --- a/README.md +++ b/README.md @@ -17,8 +17,9 @@ describes how to build via CMake. ## Table of Contents -1. [Dependencies](#Dependencies) -2. [Generic instructions](#Generic-instructions) +1. [Running SIRF on Docker](#Running-SIRF-on-Docker) +2. [Dependencies](#Dependencies) +3. [Generic instructions](#Generic-instructions) 1. [Create the SuperBuild directory](#Create-the-SuperBuild-directory) 2. [Install CMake](#Install-CMake) 3. [Clone the SIRF-SuperBuild project](#Clone-the-SIRF-SuperBuild-project) @@ -28,11 +29,11 @@ describes how to build via CMake. 7. [Open a terminal and start Gadgetron](#Open-a-terminal-and-start-Gadgetron) 8. [Testing](#Testing) 9. [Running examples](#Running-examples) -3. [OS specific information](#OS-specific-information) +4. [OS specific information](#OS-specific-information) 1. [Installation instructions for Ubuntu](#Ubuntu-install) 2. [Installation instructions for Mac OS](#OSX-install) 3. [Installation instructions for Docker](#Docker-install) -4. [Advanced installation](#Advanced-installation) +5. [Advanced installation](#Advanced-installation) 1. [Optional libraries](optional-libraries) 2. [use a different compiler than the system default](use-a-different-compiler-than-the-system-default) 3. [Compiling against your own packages](#Compiling-packages) @@ -43,7 +44,40 @@ describes how to build via CMake. 8. [Building CCPi CIL](#Building-CCPi-CIL) 9. [Passing CMAKE arguments to specific projects](#Passing-CMAKE-arguments-to-specific-projects) 10. [Building with CUDA](#Building-with-CUDA) -5. [Notes](#Notes) +6. [Notes](#Notes) + +## Running SIRF on Docker + +The easiest way to run SIRF is to use Docker. We provide a script which will build a docker image with SIRF and all dependencies pre-installed. You can run it with the `docker/compose.sh` command +that accepts the following flags, which can be checked [here](https://github.com/SyneRBI/SIRF-SuperBuild/blob/c21a2a45591550a6e257fc6f3dc343294b2c3127/docker/compose.sh#L24-L31). Additional parameters are available in the +[`docker-compose.yml`](https://github.com/paskino/SIRF-SuperBuild/blob/jupyterhub_env/docker-compose.yml) file. 
+ + +``` + h) print_help; exit 0 ;; # print this help + b) build=1 ;; # build + r) run=1 ;; # run + d) devel=1 ;; # use development (main/master) repo branches + c) cpu=1 ;; # enable CPU + g) gpu=1 ;; # enable GPU + U) update_ccache=0 ;; # disable updating docker/devel/.ccache + R) regen_ccache=1 ;; # regenerate (rather than append to) docker/devel/.ccache (always true if both -c and -g are specified) +``` + +The following example command will build the development branches of SIRF and dependencies, it will build the GPU code, +it specifies some version of dependencies and will not run the tests after the build is complete. +You can pass additional build parameters to `docker/compose.sh` with `--build-arg`. The actual parameters that can be +passed can be found in the `Dockerfile`. + + +```bash + +docker/compose.sh -dgb -- --build-arg EXTRA_BUILD_FLAGS='-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON -DBUILD_CIL=ON -DCCPi-Regularisation-Toolkit_TAG=origin/master' --build-arg RUN_CTEST=0 + +``` +where `-dgb` tells to `b`uild the `g`pu and `d`evelopment branches. To run an image you would use the flags `-dgr`. + +Notice that a fairly recent version of docker is required. Install it following the instructions [here](https://docs.docker.com/engine/install/ubuntu/). ## Dependencies From 188f8c14972e449cf147938a51f03beb8e97f9ef Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:27:24 +0000 Subject: [PATCH 82/98] update to python 3.10 and do not fail if Gadgetron tests cannot be run --- SuperBuild/External_Gadgetron.cmake | 7 ++++--- docker-compose.yml | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/SuperBuild/External_Gadgetron.cmake b/SuperBuild/External_Gadgetron.cmake index 906f32ea..b00e8156 100644 --- a/SuperBuild/External_Gadgetron.cmake +++ b/SuperBuild/External_Gadgetron.cmake @@ -138,11 +138,12 @@ endif() # Gadgetron only adds tests if (GTEST_FOUND AND ARMADILLO_FOUND) if (BUILD_TESTING_${proj}) if(CMAKE_VERSION VERSION_LESS "3.23") - message(FATAL_ERROR "You need at least CMake 3.23 to build Gadgetron tests.") - endif() - add_test(NAME ${proj}_TESTS + message(WARNING "You need at least CMake 3.23 to build Gadgetron tests. 
Skipping.") + else() + add_test(NAME ${proj}_TESTS COMMAND ${CMAKE_CTEST_COMMAND} -C $ --output-on-failure WORKING_DIRECTORY ${${proj}_BINARY_DIR}/test) + endif() endif() else() diff --git a/docker-compose.yml b/docker-compose.yml index 7c31eba9..d9e9ad51 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: context: docker/docker-stacks/images/docker-stacks-foundation args: ROOT_CONTAINER: ubuntu:22.04 - PYTHON_VERSION: 3.9 + PYTHON_VERSION: 3.10 base: command: echo nop image: synerbi/jupyter:base-cpu From dfc9e20a97e977a09f3b44341bc1a16d6e16951b Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:57:38 +0000 Subject: [PATCH 83/98] install dxchange via conda --- docker/requirements.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/requirements.yml b/docker/requirements.yml index 838b1f86..f359d32c 100644 --- a/docker/requirements.yml +++ b/docker/requirements.yml @@ -20,10 +20,10 @@ dependencies: - pip - cil # cil - ccpi-regulariser # cil + - dxchange # cil # - tigre # cil (GPU) # - astra-toolbox # cil (GPU) - pip: - - git+https://github.com/data-exchange/dxchange.git # cil - git+https://github.com/ismrmrd/ismrmrd-python-tools.git@master#egg=ismrmrd-python-tools # TODO: labextension @jupyter-widgets/jupyterlab-manager (jupyterlab_widgets ipywidgets ipympl) # is broken because `conda-forge::cil` downgrades `ipywidgets<8` -> breaks `%matplotlib widgets`. From 0b9339357986bc5a1926418cf2edc9ec023fd081 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Wed, 14 Feb 2024 17:27:50 +0000 Subject: [PATCH 84/98] temporarily disable CIL test 11 --- SuperBuild/External_CIL.cmake | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/SuperBuild/External_CIL.cmake b/SuperBuild/External_CIL.cmake index af028cdd..bd517458 100644 --- a/SuperBuild/External_CIL.cmake +++ b/SuperBuild/External_CIL.cmake @@ -116,9 +116,9 @@ if(NOT ( DEFINED "USE_SYSTEM_${externalProjName}" AND "${USE_SYSTEM_${externalPr add_test(NAME CIL_FRAMEWORK_TESTS_9 COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_Operator.py WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) - add_test(NAME CIL_FRAMEWORK_TESTS_11 - COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_quality_measures.py - WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) + #add_test(NAME CIL_FRAMEWORK_TESTS_11 + # COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_quality_measures.py + # WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) add_test(NAME CIL_FRAMEWORK_TESTS_12 COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_smoothMixedL21Norm.py WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) From 56a0db1b7df79f8a7eb9d43edc6ec00c6d9a4187 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:48:48 +0000 Subject: [PATCH 85/98] reenable tests as fixed in CIL --- SuperBuild/External_CIL.cmake | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/SuperBuild/External_CIL.cmake b/SuperBuild/External_CIL.cmake index bd517458..af028cdd 100644 --- a/SuperBuild/External_CIL.cmake +++ b/SuperBuild/External_CIL.cmake @@ -116,9 +116,9 @@ if(NOT ( DEFINED "USE_SYSTEM_${externalProjName}" AND "${USE_SYSTEM_${externalPr add_test(NAME CIL_FRAMEWORK_TESTS_9 COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_Operator.py WORKING_DIRECTORY 
${${proj}_SOURCE_DIR}/Wrappers/Python/test) - #add_test(NAME CIL_FRAMEWORK_TESTS_11 - # COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_quality_measures.py - # WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) + add_test(NAME CIL_FRAMEWORK_TESTS_11 + COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_quality_measures.py + WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) add_test(NAME CIL_FRAMEWORK_TESTS_12 COMMAND ${PYTHON_EXECUTABLE} -m unittest discover -p test_smoothMixedL21Norm.py WORKING_DIRECTORY ${${proj}_SOURCE_DIR}/Wrappers/Python/test) From 056f186e64e321f29ea173b15ba2bab208b965ad Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Thu, 15 Feb 2024 14:58:01 +0000 Subject: [PATCH 86/98] set python version to 3.10 as string --- docker-compose.yml | 4 ++-- docker/docker-compose.gpu.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index d9e9ad51..c1aedea6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: context: docker/docker-stacks/images/docker-stacks-foundation args: ROOT_CONTAINER: ubuntu:22.04 - PYTHON_VERSION: 3.10 + PYTHON_VERSION: "3.10" base: command: echo nop image: synerbi/jupyter:base-cpu @@ -68,7 +68,7 @@ services: environment: # CLI arguments for https://github.com/SyneRBI/SIRF-Exercises/blob/master/scripts/download_data.sh SIRF_DOWNLOAD_DATA_ARGS: '' - volumes: [./docker/devel:/home/jovyan/work] + volumes: [/home/ubuntu/devel:/home/jovyan/work] restart: unless-stopped ports: - "9002:9002" # gadgetron diff --git a/docker/docker-compose.gpu.yml b/docker/docker-compose.gpu.yml index e7445fd4..e7c8c4ce 100644 --- a/docker/docker-compose.gpu.yml +++ b/docker/docker-compose.gpu.yml @@ -6,7 +6,7 @@ services: build: args: ROOT_CONTAINER: nvidia/cuda:11.7.1-cudnn8-runtime-ubuntu22.04 - PYTHON_VERSION: 3.9 + PYTHON_VERSION: "3.10" base: image: synerbi/jupyter:base-gpu build: From 6823d86bebd10ee4d709f5a903714d7105fbd639 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Thu, 15 Feb 2024 15:03:59 +0000 Subject: [PATCH 87/98] test CIL PR --- version_config.cmake | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/version_config.cmake b/version_config.cmake index 471d97bf..fc715daf 100644 --- a/version_config.cmake +++ b/version_config.cmake @@ -174,19 +174,19 @@ set(DEFAULT_ACE_URL https://github.com/paskino/libace-conda) set(DEFAULT_ACE_TAG v6.5.9) # range-v3 -set(DEFAULT_range-v3_URL https://github.com/ericniebler/range-v3.git ) +set(DEFAULT_range-v3_URL https://github.com/ericniebler/range-v3.git) set(DEFAULT_range-v3_TAG 0.12.0) -set(DEFAULT_RocksDB_URL https://github.com/facebook/rocksdb.git ) +set(DEFAULT_RocksDB_URL https://github.com/facebook/rocksdb.git) set(DEFAULT_RocksDB_TAG v6.26.0) set(DEFAULT_mrd-storage-server_URL https://github.com/ismrmrd/mrd-storage-server.git) set(DEFAULT_mrd-storage-server_TAG origin/main) -set(DEFAULT_Date_URL https://github.com/HowardHinnant/date.git ) +set(DEFAULT_Date_URL https://github.com/HowardHinnant/date.git) set(DEFAULT_Date_TAG master) -set(DEFAULT_pugixml_URL https://github.com/zeux/pugixml.git ) +set(DEFAULT_pugixml_URL https://github.com/zeux/pugixml.git) set(DEFAULT_pugixml_TAG v1.13) # works only for Linux From 25f6a147d0bc4dc3e4e4e782eec13ab5104b8710 Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Thu, 15 Feb 2024 15:17:02 +0000 
Subject: [PATCH 88/98] revert to default volume mount --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index c1aedea6..736935db 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -68,7 +68,7 @@ services: environment: # CLI arguments for https://github.com/SyneRBI/SIRF-Exercises/blob/master/scripts/download_data.sh SIRF_DOWNLOAD_DATA_ARGS: '' - volumes: [/home/ubuntu/devel:/home/jovyan/work] + volumes: [./docker/devel:/home/jovyan/work] restart: unless-stopped ports: - "9002:9002" # gadgetron From 4a93cac531c038919de35144f26825867d5ce26b Mon Sep 17 00:00:00 2001 From: Edoardo Pasca <14138589+paskino@users.noreply.github.com> Date: Thu, 15 Feb 2024 16:42:50 +0000 Subject: [PATCH 89/98] [ci skip] update readme --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 6b5baa88..e236b48e 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ Notice that a fairly recent version of docker is required. Install it following ## Dependencies -The SuperBuild depends on CMake >= 3.10. +The SuperBuild depends on CMake >= 3.16 on Linux and 3.20 on Windows. If you are building Gadgetron there are a series of [additional dependencies](https://github.com/gadgetron/gadgetron/wiki/List-of-Dependencies), which must be met. @@ -97,9 +97,9 @@ mkdir ~/devel ``` ### Install CMake -If you do not have CMake >= 3.10, install it first. You can probably use a package manager on your OS. Alternatively, you can do that either by following the official instructions ([download link](https://cmake.org/download/)) or running your own shell sript to do so (see an example [here](https://github.com/SyneRBI/SyneRBI_VM/blob/master/scripts/INSTALL_CMake.sh)). +If you do not have CMake >= 3.16 on Linux and 3.20 on Windows, install it first. We suggest to follow the [official CMake instructions](https://cmake.org/download/). -If you use a CMake installer, you will be asked to read and accept CMake's license. If you answered the last question during the CMake installation with yes, then you should use +If you use a CMake installer, you will be asked to read and accept CMake's license. If you answered the last question during the CMake installation with yes, then on Linux you should use ``` export PATH=/usr/local/cmake/bin:$PATH @@ -131,11 +131,11 @@ cd ~/devel/build cmake ../SIRF-SuperBuild ``` You can of course use the GUI version of CMake (called `cmake-gui` on Linux/OSX), or the -terminal version `ccmake` to check and set various variables. See the [CMake tutorial on how to run CMake](https://cmake.org/runningcmake/). +terminal version `ccmake` to check and set various variables. See the [CMake tutorial on how to run CMake](https://cmake.org/cmake/help/latest/guide/tutorial/index.html). By default, this will select stable configurations of the various projects. See [the advanced installation section](#advanced-installation). -*Important*: it is currently not recommended to build both MATLAB and Python support, see [below](#matlab-and-python-conflicts). +*Important*: we have currently dropped the support for MATLAB. It is currently not recommended to build both MATLAB and Python support, see [below](#matlab-and-python-conflicts). Then use your build environment to build and install the project. 
On Linux/OSX etc, you would normally use ```bash From 7e2573eaa8cc6a95fe3614b81ce47e0e7c01184b Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 20 Feb 2024 16:50:29 +0000 Subject: [PATCH 90/98] cleanup post-rebase --- .github/workflows/docker.yml | 3 +-- README.md | 16 +++++++--------- docker-compose.yml | 10 ---------- docker/docker-compose.devel.yml | 6 ------ version_config.cmake | 24 ++++++++++++------------ 5 files changed, 20 insertions(+), 39 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 846ffd3a..c36c64ff 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -156,8 +156,7 @@ jobs: core_image=ghcr.io/synerbi/sirf:core$suffix # rebuild sirf image ./docker/compose.sh -bR${{ matrix.type == 'gpu' && 'g' || 'c' }} \ - -- --build-arg EXTRA_BUILD_FLAGS="-DDEVEL_BUILD=ON -Dsiemens_to_ismrmrd_TAG=origin/master" - # TODO: remove *_TAG after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 + -- --build-arg EXTRA_BUILD_FLAGS="-DDEVEL_BUILD=ON" image=synerbi/sirf:jupyter$suffix echo "image=$image" >> "$GITHUB_OUTPUT" # tag potentially newer core image diff --git a/README.md b/README.md index e236b48e..e5def1f5 100644 --- a/README.md +++ b/README.md @@ -48,8 +48,8 @@ describes how to build via CMake. ## Running SIRF on Docker -The easiest way to run SIRF is to use Docker. We provide a script which will build a docker image with SIRF and all dependencies pre-installed. You can run it with the `docker/compose.sh` command -that accepts the following flags, which can be checked [here](https://github.com/SyneRBI/SIRF-SuperBuild/blob/c21a2a45591550a6e257fc6f3dc343294b2c3127/docker/compose.sh#L24-L31). Additional parameters are available in the +The easiest way to run SIRF is to use Docker. We provide a script which will build a docker image with SIRF and all dependencies pre-installed. You can run it with the `docker/compose.sh` command +that accepts the following flags, which can be checked [here](https://github.com/SyneRBI/SIRF-SuperBuild/blob/c21a2a45591550a6e257fc6f3dc343294b2c3127/docker/compose.sh#L24-L31). Additional parameters are available in the [`docker-compose.yml`](https://github.com/paskino/SIRF-SuperBuild/blob/jupyterhub_env/docker-compose.yml) file. @@ -64,17 +64,15 @@ that accepts the following flags, which can be checked [here](https://github.com R) regen_ccache=1 ;; # regenerate (rather than append to) docker/devel/.ccache (always true if both -c and -g are specified) ``` -The following example command will build the development branches of SIRF and dependencies, it will build the GPU code, -it specifies some version of dependencies and will not run the tests after the build is complete. -You can pass additional build parameters to `docker/compose.sh` with `--build-arg`. The actual parameters that can be +The following example command will build the development branches of SIRF and dependencies, it will build the GPU code, +it specifies some version of dependencies and will not run the tests after the build is complete. +You can pass additional build parameters to `docker/compose.sh` with `--build-arg`. The actual parameters that can be passed can be found in the `Dockerfile`. 
- ```bash - -docker/compose.sh -dgb -- --build-arg EXTRA_BUILD_FLAGS='-DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b -DISMRMRD_TAG=v1.13.7 -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON -DBUILD_CIL=ON -DCCPi-Regularisation-Toolkit_TAG=origin/master' --build-arg RUN_CTEST=0 - +docker/compose.sh -dgb -- --build-arg EXTRA_BUILD_FLAGS='-DDEVEL_BUILD=ON -DBUILD_CIL=ON -DCCPi-Regularisation-Toolkit_TAG=origin/master' --build-arg RUN_CTEST=0 ``` + where `-dgb` tells to `b`uild the `g`pu and `d`evelopment branches. To run an image you would use the flags `-dgr`. Notice that a fairly recent version of docker is required. Install it following the instructions [here](https://docs.docker.com/engine/install/ubuntu/). diff --git a/docker-compose.yml b/docker-compose.yml index 736935db..04d8dd52 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -39,11 +39,6 @@ services: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" BUILD_GPU: 0 - # TODO: remove after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 - EXTRA_BUILD_FLAGS: > - -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b - -DISMRMRD_TAG=v1.13.7 - -Dsiemens_to_ismrmrd_TAG=v1.2.11 sirf: container_name: sirf # for scaling, comment this out https://github.com/docker/compose/issues/3729 image: synerbi/sirf:jupyter @@ -53,11 +48,6 @@ services: BASE_CONTAINER: synerbi/jupyter:scipy-cpu Gadgetron_USE_CUDA: "OFF" BUILD_GPU: 0 - # TODO: remove after https://github.com/SyneRBI/SIRF-SuperBuild/issues/851 - EXTRA_BUILD_FLAGS: > - -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b - -DISMRMRD_TAG=v1.13.7 - -Dsiemens_to_ismrmrd_TAG=v1.2.11 cache_from: [synerbi/jupyter:sirf-build-cpu] stdin_open: true tty: true diff --git a/docker/docker-compose.devel.yml b/docker/docker-compose.devel.yml index 546bc3c8..c52bd042 100644 --- a/docker/docker-compose.devel.yml +++ b/docker/docker-compose.devel.yml @@ -5,9 +5,6 @@ services: build: args: EXTRA_BUILD_FLAGS: > - -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b - -DISMRMRD_TAG=v1.13.7 - -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON -DBUILD_CIL=ON sirf: @@ -17,9 +14,6 @@ services: context: . 
args: EXTRA_BUILD_FLAGS: > - -DGadgetron_TAG=6202fb7352a14fb82817b57a97d928c988eb0f4b - -DISMRMRD_TAG=v1.13.7 - -Dsiemens_to_ismrmrd_TAG=v1.2.11 -DDEVEL_BUILD=ON -DBUILD_CIL=ON cache_from: [synerbi/jupyter:sirf-build-devel-cpu] diff --git a/version_config.cmake b/version_config.cmake index fc715daf..94173008 100644 --- a/version_config.cmake +++ b/version_config.cmake @@ -97,11 +97,11 @@ set(DEFAULT_glog_URL https://github.com/google/glog) set(DEFAULT_glog_TAG v0.6.0) ## ITK -set(DEFAULT_ITK_URL https://github.com/InsightSoftwareConsortium/ITK.git) +set(DEFAULT_ITK_URL https://github.com/InsightSoftwareConsortium/ITK) set(DEFAULT_ITK_TAG v5.2.1) ## NIFTYREG -set(DEFAULT_NIFTYREG_URL https://github.com/KCL-BMEIS/niftyreg.git) +set(DEFAULT_NIFTYREG_URL https://github.com/KCL-BMEIS/niftyreg) set(DEFAULT_NIFTYREG_TAG 8ad2f11507ddedb09ed74a9bd97377b70532ee75) set(NIFTYREG_REQUIRED_VERSION 1.5.68) @@ -151,18 +151,18 @@ set(DEFAULT_SIRF-Contribs_URL https://github.com/SyneRBI/SIRF-Contribs) set(DEFAULT_SIRF-Contribs_TAG origin/master) ## SPM -set(DEFAULT_SPM_URL https://github.com/spm/SPM12.git) +set(DEFAULT_SPM_URL https://github.com/spm/SPM12) set(DEFAULT_SPM_TAG r7771) -set(DEFAULT_JSON_URL https://github.com/nlohmann/json.git) +set(DEFAULT_JSON_URL https://github.com/nlohmann/json) set(DEFAULT_JSON_TAG v3.10.4) # CCPi CIL # minimum supported version of CIL supported is > 22.1.0 or from commit a6062410028c9872c5b355be40b96ed1497fed2a -set(DEFAULT_CIL_URL https://github.com/TomographicImaging/CIL.git) +set(DEFAULT_CIL_URL https://github.com/TomographicImaging/CIL) set(DEFAULT_CIL_TAG db5a2a6cd3bddfbbf53e65f0549ac206096e5b44) # 13 Feb 2024 -set(DEFAULT_CCPi-Regularisation-Toolkit_URL https://github.com/vais-ral/CCPi-Regularisation-Toolkit.git) +set(DEFAULT_CCPi-Regularisation-Toolkit_URL https://github.com/vais-ral/CCPi-Regularisation-Toolkit) set(DEFAULT_CCPi-Regularisation-Toolkit_TAG "71f8d304d804b54d378f0ed05539f01aaaf13758") # CERN ROOT @@ -174,19 +174,19 @@ set(DEFAULT_ACE_URL https://github.com/paskino/libace-conda) set(DEFAULT_ACE_TAG v6.5.9) # range-v3 -set(DEFAULT_range-v3_URL https://github.com/ericniebler/range-v3.git) +set(DEFAULT_range-v3_URL https://github.com/ericniebler/range-v3) set(DEFAULT_range-v3_TAG 0.12.0) -set(DEFAULT_RocksDB_URL https://github.com/facebook/rocksdb.git) +set(DEFAULT_RocksDB_URL https://github.com/facebook/rocksdb) set(DEFAULT_RocksDB_TAG v6.26.0) -set(DEFAULT_mrd-storage-server_URL https://github.com/ismrmrd/mrd-storage-server.git) +set(DEFAULT_mrd-storage-server_URL https://github.com/ismrmrd/mrd-storage-server) set(DEFAULT_mrd-storage-server_TAG origin/main) -set(DEFAULT_Date_URL https://github.com/HowardHinnant/date.git) +set(DEFAULT_Date_URL https://github.com/HowardHinnant/date) set(DEFAULT_Date_TAG master) -set(DEFAULT_pugixml_URL https://github.com/zeux/pugixml.git) +set(DEFAULT_pugixml_URL https://github.com/zeux/pugixml) set(DEFAULT_pugixml_TAG v1.13) # works only for Linux @@ -217,7 +217,7 @@ if (DEVEL_BUILD) # set(DEFAULT_pet_rd_tools_TAG origin/master) # CCPi CIL - set(DEFAULT_CIL_URL https://github.com/TomographicImaging/CIL.git) + set(DEFAULT_CIL_URL https://github.com/TomographicImaging/CIL) set(DEFAULT_CIL_TAG origin/master) # Gadgetron From d8a524902d8cc1d537c67175f6f7ddf0a3ea646f Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 20 Feb 2024 17:11:50 +0000 Subject: [PATCH 91/98] customisable OMP_NUM_THREADS --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 
bd7ded0a..04cfd69b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -99,7 +99,7 @@ RUN apt update -qq && apt install -yq --no-install-recommends \ && mkdir -p /usr/share/X11/xkb \ && test -e /usr/bin/X || ln -s /usr/bin/Xorg /usr/bin/X -RUN echo "export OMP_NUM_THREADS=\$(python -c 'import multiprocessing as mc; print(mc.cpu_count() // 2)')" > /usr/local/bin/before-notebook.d/omp_num_threads.sh +RUN echo 'test -z "$OMP_NUM_THREADS" && export OMP_NUM_THREADS=$(python -c "import multiprocessing as mc; print(mc.cpu_count() // 2)")' > /usr/local/bin/before-notebook.d/omp_num_threads.sh COPY --chown=${NB_USER} --chmod=644 --link docker/.bashrc /home/${NB_USER}/ # RUN sed -i s:PYTHON_INSTALL_DIR:${CONDA_DIR}:g /home/${NB_USER}/.bashrc From 7cb37343c0ac55e6d67dad65765a0c4951dea94d Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 20 Feb 2024 17:18:42 +0000 Subject: [PATCH 92/98] fix executable permissions --- docker/update_nvidia_keys.sh | 0 docker/user_python-ubuntu.sh | 0 2 files changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 docker/update_nvidia_keys.sh mode change 100644 => 100755 docker/user_python-ubuntu.sh diff --git a/docker/update_nvidia_keys.sh b/docker/update_nvidia_keys.sh old mode 100644 new mode 100755 diff --git a/docker/user_python-ubuntu.sh b/docker/user_python-ubuntu.sh old mode 100644 new mode 100755 From ea73e0585d70f1fe642fb4c191b04ca7dab583e1 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 20 Feb 2024 18:39:18 +0000 Subject: [PATCH 93/98] CI: optimise devel build --- .github/workflows/docker.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c36c64ff..49f62d8a 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -153,10 +153,13 @@ jobs: name: build run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} + # pull core image core_image=ghcr.io/synerbi/sirf:core$suffix + docker pull $core_image || : + # pull sirf image + docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : # rebuild sirf image - ./docker/compose.sh -bR${{ matrix.type == 'gpu' && 'g' || 'c' }} \ - -- --build-arg EXTRA_BUILD_FLAGS="-DDEVEL_BUILD=ON" + ./docker/compose.sh -dbR${{ matrix.type == 'gpu' && 'g' || 'c' }} image=synerbi/sirf:jupyter$suffix echo "image=$image" >> "$GITHUB_OUTPUT" # tag potentially newer core image From ac7f72f74fc420afb16acaa66beafde1be7ee9c2 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Tue, 20 Feb 2024 20:26:41 +0000 Subject: [PATCH 94/98] docker docs --- README.md | 53 ++++++++++++++++------------ docker/README.md | 92 ++++++++++++++++++++++++++++++++---------------- 2 files changed, 92 insertions(+), 53 deletions(-) diff --git a/README.md b/README.md index e5def1f5..b6ec9372 100644 --- a/README.md +++ b/README.md @@ -48,34 +48,43 @@ describes how to build via CMake. ## Running SIRF on Docker -The easiest way to run SIRF is to use Docker. We provide a script which will build a docker image with SIRF and all dependencies pre-installed. You can run it with the `docker/compose.sh` command -that accepts the following flags, which can be checked [here](https://github.com/SyneRBI/SIRF-SuperBuild/blob/c21a2a45591550a6e257fc6f3dc343294b2c3127/docker/compose.sh#L24-L31). Additional parameters are available in the -[`docker-compose.yml`](https://github.com/paskino/SIRF-SuperBuild/blob/jupyterhub_env/docker-compose.yml) file. 
+The easiest way to run [SIRF](https://github.com/SyneRBI/SIRF) & all its dependencies is to use Docker. +1. [Install the latest docker version](https://docs.docker.com/engine/install/) +2. (optional) For GPU support (NVIDIA CUDA on Linux or Windows Subsystem for Linux 2 only) + - [Install NVIDIA drivers](https://developer.nvidia.com/cuda-downloads) + - [Install NVIDIA container toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) -``` - h) print_help; exit 0 ;; # print this help - b) build=1 ;; # build - r) run=1 ;; # run - d) devel=1 ;; # use development (main/master) repo branches - c) cpu=1 ;; # enable CPU - g) gpu=1 ;; # enable GPU - U) update_ccache=0 ;; # disable updating docker/devel/.ccache - R) regen_ccache=1 ;; # regenerate (rather than append to) docker/devel/.ccache (always true if both -c and -g are specified) +```sh +# CPU version +docker run --rm -it -p 9999:8888 ghcr.io/synerbi/sirf:jupyter +# GPU version +docker run --rm -it -p 9999:8888 --gpus all ghcr.io/synerbi/sirf:jupyter-gpu ``` -The following example command will build the development branches of SIRF and dependencies, it will build the GPU code, -it specifies some version of dependencies and will not run the tests after the build is complete. -You can pass additional build parameters to `docker/compose.sh` with `--build-arg`. The actual parameters that can be -passed can be found in the `Dockerfile`. +The Jupyter notebook should be accessible at . -```bash -docker/compose.sh -dgb -- --build-arg EXTRA_BUILD_FLAGS='-DDEVEL_BUILD=ON -DBUILD_CIL=ON -DCCPi-Regularisation-Toolkit_TAG=origin/master' --build-arg RUN_CTEST=0 -``` +> [!WARNING] +> To sync the container user & host user permissions (useful when sharing folders), use `--user` and `--group-add`. +> +> ```sh +> docker run --rm -it -p 9999:8888 --user $(id -u) --group-add users \ +> -v ./docker/devel:/home/jovyan/work \ +> ghcr.io/synerbi/sirf:jupyter +> ``` + +More config: . -where `-dgb` tells to `b`uild the `g`pu and `d`evelopment branches. To run an image you would use the flags `-dgr`. +> [!TIP] +> To pass arguments to [`SIRF-Exercises/scripts/download_data.sh`](https://github.com/SyneRBI/SIRF-Exercises/blob/master/scripts/download_data.sh), use the docker environment variable `SIRF_DOWNLOAD_DATA_ARGS`. +> +> ```sh +> docker run --rm -it -p 9999:8888 --user $(id -u) --group-add users \ +> -v /mnt/data:/share -e SIRF_DOWNLOAD_DATA_ARGS="-pm -D /share" \ +> ghcr.io/synerbi/sirf:jupyter +> ``` -Notice that a fairly recent version of docker is required. Install it following the instructions [here](https://docs.docker.com/engine/install/ubuntu/). +For building docker containers, see [`docker/README.md`](docker/README.md) ## Dependencies @@ -113,7 +122,7 @@ You might want to add the `PATH` line to your start-up file e.g. `.profile`, `.b ### Clone the SIRF-SuperBuild project ```bash cd ~/devel -git clone https://github.com/SyneRBI/SIRF-SuperBuild.git +git clone https://github.com/SyneRBI/SIRF-SuperBuild ``` ### Build and Install diff --git a/docker/README.md b/docker/README.md index 1ad1e39f..fce015ee 100644 --- a/docker/README.md +++ b/docker/README.md @@ -4,51 +4,81 @@ The image contains SIRF & all dependencies required by JupyterHub. ## Usage -### Docker +> [!WARNING] +> The easiest way to run [SIRF](https://github.com/SyneRBI/SIRF) & all its dependencies is to use Docker. See [../README.md](../README.md#running-sirf-on-docker) instead. 
-```sh +### Extending + +You can build custom images on top of the SIRF ones, likely needing to switch between `root` and default user to install packages: + +```Dockerfile # CPU version -docker run --rm -it -p 9999:8888 synerbi/sirf:jupyter +# FROM synerbi/sirf:jupyter # GPU version -docker run --rm -it -p 9999:8888 --gpus all synerbi/sirf:jupyter-gpu +FROM synerbi/sirf:jupyter-gpu +USER root +RUN mamba install pytorch && fix-permissions "${CONDA_DIR}" /home/${NB_USER} +USER ${NB_UID} ``` -To make the container user same as host user (useful when sharing folders), use `--user` and `--group-add`: +> [!TIP] +> More config: -```sh -docker run --rm -it -p 9999:8888 --user $(id -u) --group-add users -v ./docker/devel:/home/jovyan/work synerbi/sirf:jupyter -``` +## Build -The Jupyter notebook should be accessible at . +To build and/or run with advanced config, it's recommended to use [Docker Compose](https://docs.docker.com/compose/). -More config: https://jupyter-docker-stacks.readthedocs.io/en/latest/using/common.html#user-related-configurations +We use an Ubuntu 22.04 base image (optionally with CUDA GPU support for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build `datascience-notebook` on top, and then install SIRF & its depdendencies. -### Docker Compose +The strategy is: -```sh -# CPU version -docker compose up sirf -# GPU version -docker compose -f docker-compose.yml -f docker/docker-compose.gpu.yml up sirf -``` +1. Use either `ubuntu:latest` or a recent Ubuntu CuDNN runtime image from as base +2. Build on top +3. Copy & run the SIRF `docker/build_*.sh` scripts +4. Clone the SIRF-SuperBuild & run `cmake` +5. Copy some example notebooks & startup scripts -## Build the image +### Docker Compose -We use an NVIDIA CUDA Ubuntu 22.04 base image (for [CIL](https://github.com/TomographicImaging/CIL) GPU features), build https://github.com/jupyter/docker-stacks `datascience-notebook` on top, and then install SIRF & its depdendencies. +All of this is done by [`compose.sh`](./compose.sh). + +1. [Install the latest docker version](https://docs.docker.com/engine/install/) +2. Clone this repository and run the [`docker/compose.sh`](docker/compose.sh) script + + ```bash + git clone https://github.com/SyneRBI/SIRF-SuperBuild + ./SIRF-SuperBuild/docker/compose.sh -h # prints help + ``` + +> [!TIP] +> For example, to `-b`uild the `-d`evelopment (`master`) branches of SIRF and its dependencies, including `-g`pu support and skipping tests: +> +> ```bash +> compose.sh -bdg -- --build-arg RUN_CTEST=0 +> ``` +> +> Then to `-r`un the container: +> +> ```bash +> compose.sh -rdg +> ``` -The strategy is: +### More info -1. Use a recent Ubuntu CuDNN runtime image from https://hub.docker.com/r/nvidia/cuda as base -2. Build https://github.com/jupyter/docker-stacks/tree/main/images/datascience-notebook on top -3. Copy & run the SIRF `docker/build_*.sh` scripts -4. Copy the SIRF installation directories from the `synerbi/sirf:latest` image -5. Install CIL (via `conda`) +> [!TIP] +> +> ```bash +> compose.sh -h # prints help +> ``` -All of this is done by [`build_docker_stacks.sh`](./build_docker_stacks.sh). +CMake build arguments (e.g. 
for dependency version config) are (in increasing order of precedence) found in:
 
-### More info
+- [`../version_config.cmake`](../version_config.cmake)
+- [`../Dockerfile`](../Dockerfile)
+- docker-compose.*.yml files
+- `compose.sh -- --build-arg` arguments
 
-https://github.com/jupyter/docker-stacks is used to gradually build up images:
+<https://github.com/jupyter/docker-stacks> is used to gradually build up images:
 
 - `BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04`
 - `docker-stacks-foundation` -> `synerbi/jupyter:foundation`
@@ -57,8 +87,8 @@ https://github.com/jupyter/docker-stacks is used to gradually build up images:
   - `scipy-notebook` -> `synerbi/jupyter:scipy`
   - `datascience-notebook` -> `synerbi/jupyter:datascience`
 - [`Dockerfile`](./Dockerfile) -> `synerbi/jupyter:sirf`
-  + Copy & run the SIRF `build_{essential,gadgetron,system}.sh` scripts from [`../docker`](../docker)
+  + Copy & run the SIRF `build_{gadgetron,system}.sh` scripts
   + Copy `/opt/SIRF-SuperBuild/{INSTALL,sources/SIRF}` directories from the `synerbi/sirf:latest` image
-  + Install docker/requirements.yml
-  + Clone & setup https://github.com/SyneRBI/SIRF-Exercises
+  + Install [`requirements.yml`](requirements.yml)
+  + Clone & setup <https://github.com/SyneRBI/SIRF-Exercises> &
   + Set some environment variables (e.g. `PYTHONPATH=/opt/SIRF-SuperBuild/INSTALL/python`, `OMP_NUM_THREADS=$(( cpu_count/2 ))`)

From 04e266df41785ddfb8c959d6fab66e047d7ee2fc Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Tue, 20 Feb 2024 20:27:07 +0000
Subject: [PATCH 95/98] compose.sh: better help output

---
 docker/compose.sh | 24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/docker/compose.sh b/docker/compose.sh
index 741f9f1d..40484655 100755
--- a/docker/compose.sh
+++ b/docker/compose.sh
@@ -9,9 +9,15 @@ Also creates intermediate (temp) images: synerbi/jupyter
 
 Usage: $0 [options] [-- [docker compose options]]
 Options:
 $(sed -rn 's/^\s*(\w)\) .*;; # (.*)$/  -\1: \2/p' "$0")
+Docker Compose Options:
+  Forwarded to 'docker compose build/run', e.g.:
+  $0 -b -- --build-arg BUILD_CIL=ON --build-arg EXTRA_BUILD_FLAGS="-DCIL_TAG=origin/master"
+Detected config files:
+  Dockerfile docker-compose.yml $(echo docker/docker-compose.*.yml)
 EOF
 }
+verbose=0
 build=0
 run=0
 devel=0
@@ -19,9 +25,10 @@ cpu=0
 gpu=0
 update_ccache=1
 regen_ccache=0
-while getopts :hbrdcgUR option; do
+while getopts :hvbrdcgUR option; do
   case "${option}" in
   h) print_help; exit 0 ;; # print this help
+  v) verbose=1 ;; # verbose
   b) build=1 ;; # build
   r) run=1 ;; # run
   d) devel=1 ;; # use development (main/master) repo branches
@@ -34,15 +41,13 @@ while getopts :hbrdcgUR option; do
 done
 shift $((OPTIND-1)) # remove processed options
 
-test $build$run = 00 && echo >&2 "WARNING: neither -b nor -r specified"
-test $cpu$gpu = 00 && echo >&2 "WARNING: neither -c nor -g specified"
+test $verbose = 1 && set -x
 test $build$cpu$gpu = 111 && regen_ccache=1 # force rebuild ccache
 echo "cpu: $cpu, gpu: $gpu, update ccache: $update_ccache, regen ccache: $regen_ccache, devel: $devel"
-echo "docker compose options: $@"
 
 # compose binary
 DCC="${DCC:-docker compose}"
-which docker-compose && DCC=$(which docker-compose)
+which docker-compose > /dev/null 2>&1 && DCC=$(which docker-compose)
 # CPU config
 DCC_CPU="$DCC -f docker-compose.yml"
 test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel.yml"
@@ -50,7 +55,12 @@ test $devel = 1 && DCC_CPU+=" -f docker/docker-compose.devel.yml"
 DCC_GPU="$DCC_CPU -f docker/docker-compose.gpu.yml"
 test $devel = 1 && DCC_GPU+=" -f docker/docker-compose.devel-gpu.yml"
 
-pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")"
+test $cpu$gpu = 00 && echo >&2 "WARNING: neither -c nor -g specified"
+test $cpu = 1 && echo "docker compose command: $DCC_CPU"
+test $gpu = 1 && echo "docker compose command: $DCC_GPU"
+echo "docker compose options: $@"
+
+pushd "$(dirname "$(dirname "${BASH_SOURCE[0]}")")" >> /dev/null
 git submodule update --init --recursive
 
 if test $build = 1; then
@@ -82,4 +92,4 @@ if test $run = 1; then
   test $gpu = 1 && $DCC_GPU up --no-build "$@" sirf; $DCC_GPU down sirf
 fi
 
-popd
+popd >> /dev/null

From ca0e4c388a036494d27e87059cba1c22ae6fd7c5 Mon Sep 17 00:00:00 2001
From: Casper da Costa-Luis
Date: Wed, 21 Feb 2024 01:41:02 +0000
Subject: [PATCH 96/98] merge dev docs

---
 docker/DocForDevelopers.md | 75 --------------------------------------
 docker/README.md           | 31 ++++++++++++++++
 2 files changed, 31 insertions(+), 75 deletions(-)
 delete mode 100644 docker/DocForDevelopers.md

diff --git a/docker/DocForDevelopers.md b/docker/DocForDevelopers.md
deleted file mode 100644
index d639d4e8..00000000
--- a/docker/DocForDevelopers.md
+++ /dev/null
@@ -1,75 +0,0 @@
-# Extra information on the SIRF Docker set-up for developers
-
-## Useful environment variables
-You can determine which version of the `SIRF-SuperBuild` is built in the docker image, in `bash` and similar shells:
-```bash
-export SIRF_SB_TAG=v3.1.0
-```
-You can use a `git` hash as well of course. You can also set where to `clone` from:
-```bash
-export SIRF_SB_URL=https://github.com/KrisThielemans/SIRF-SuperBuild
-```
-If you have many cores, you can speed-up the build by saying
-```bash
-export NUM_PARALLEL_BUILDS=9
-```
-This will be passed to `cmake --build -j`.
-
-By default, most build files will be removed while building the docker image to save some space. If you are a developer, you can set
-the environment variable `REMOVE_BUILD_FILES` to 0 before building the image to prevent this.
-
-By default, the CTests are normally run while building the docker image. Note that this takes some time. You can switch this off by setting the environment variable `RUN_CTEST=0` before building the image.
-Note however that if you have set `REMOVE_BUILD_FILES=1`, you cannot run the CTests on an existing image (unless you rebuild from source of course).
-
-Of course, if you are using `bash`, you can specify any of these for a specific run, e.g.
-```
-NUM_PARALLEL_BUILDS=9 ./sirf-compose build sirf
-```
-
-
-## `ccache`
-
-`ccache` is used in the container to speed up rebuilding images from scratch.
-The cache is pulled from the host machine via the `devel/.ccache` folder.
-After building a container, you can optionally replace the cache on the host with the updated one from the container:
-
-```bash
-SIRF-SuperBuild/docker$ sudo rm -rf devel/.ccache/*
-SIRF-SuperBuild/docker$ ./sirf-compose run --rm sirf \
-    /bin/bash -c 'cp -a /opt/ccache/* /devel/.ccache/'
-```
-This way, the cache will be used when you update SIRF in the container, or when you build another container.
-
-Note that this case is different from the "normal" `ccache` of your host. (If you are only doing SIRF development, you could decide to copy that to
-`SIRF-SuperBuild/docker/devel/.ccache` but we will leave that up to you).
-
-## Some information on how the set-up works
-
-Clearly, you should read the Docker and `docker-compose` documentation. The following might help to understand the specifics used in this repo.
-
-`Dockerfile` defines a sequences of images (base, `core`, `sirf`, `service`) that build on top of each-other. In various places, the file uses `ARG` variables can be overriden at build-time, see https://docs.docker.com/engine/reference/builder/#arg. These are set in the `docker-compose*.yml`.
-
-We use the [`ENTRYPOINT` mechanism](https://docs.docker.com/engine/reference/builder/#entrypoint), setting the command that will be executed at container start-up to [entrypoint.sh](entrypoint.sh) which is passed a `CMD`, specified in the `Dockerfile` (`/bin/bash` for all except `service` which uses [service.sh](service.sh). (Both the `ENTRYPOINT` and `CMD` be overriden by passing relevant options to `docker run`).
-
-The very first time the container is run, `entrypoint.sh` creates the `sirfuser` user, copies files in correct places, and takes care of file permissions (see below).
-
-### Why the jovyan user?
-The `NB_USER` (by default called `jovyan`) is a convention used by JupyterHub. It launches jupyter
-notebooks under this user. Due to permission problems, it should not have `root` access.
-We build all files as this user.
-
-### File permissions
-
-Quoting from https://blog.gougousis.net/file-permissions-the-painful-side-of-docker/.
-
-    Let me remind you here that file permissions on bind mounts are shared between the host and the containers...Whenever we create a file on host using a user with UID x, this file will have x as owner UID inside the container.
-
-(Note that this is for Linux. On Mac, Docker uses NFS and apparently this doesn't give UID problems, and Windows uses file sharing).
-
-We handle this by creating the container `sirfuser` with the same `UID:GID` as the user who executes `sirf-compose*` (by passing `USER_ID` and `GROUP_ID` as environment variables), and execute processes in the container as `sirfuser`. (Note that often Docker containers run processes as `root`). Unfortunately, this means that `entrypoint.sh` also has to `chown` various files.
-
-As `chown` can take quite some time when the container is created, we try to minimise this by adding both `jovyan` and `sirfuser` to the `users` group, and giving "group" `rw` access to the files created by `jovyan`. This way, `entrypoint.sh` needs to `chown` (or `chmod`) only the `sirfuser` home-directory and a few others.
-
-## `sirf-compose*`
-
-This is a sequence of convenience scripts that essentially calls `docker-compose` with relevant `.yml` files. These specify parameters for building the images and running the containers. Note that a `.yml` file can specify different "services", e.g. `sirf`, `core`, `gadgetron`. (There is an unfortunate name-clash with SIRF "service" images (which use the `sirf` "service"). Sorry about that.)
diff --git a/docker/README.md b/docker/README.md
index fce015ee..ca05eb20 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -71,6 +71,8 @@ All of this is done by [`compose.sh`](./compose.sh).
 > compose.sh -h # prints help
 > ```
 
+#### Useful `--build-arg`s
+
 CMake build arguments (e.g. for dependency version config) are (in increasing order of precedence) found in:
 
 - [`../version_config.cmake`](../version_config.cmake)
@@ -78,6 +80,35 @@ CMake build arguments (e.g. for dependency version config) are (in increasing or
 - docker-compose.*.yml files
 - `compose.sh -- --build-arg` arguments
 
+Useful `--build-arg`s:
+
+You can determine which version of the `SIRF-SuperBuild` is built in the docker image:
+
+```bash
+compose.sh -b -- --build-arg SIRF_SB_TAG=<tag>
+```
+
+By default, the CTests are run while building the docker image. Note that this takes a few minutes.
+You can switch this off by setting `--build-arg RUN_CTEST=0` before building the image. + +#### `ccache` + +`ccache` is used in the container to speed up rebuilding images from scratch. +The cache is pulled from the host machine via the `devel/.ccache` folder. + +Building (`compose.sh -b`) automatically updates the cache. + +To disable updating the cache, `-b`uild with `-U`. + +To regenerate the cache, remove it and then `-b`uild with `-R`. + +This way, the cache will be used when you update SIRF in the container, or when you build another container. + +Note that this cache is different from the "normal" `ccache` of your host. (If you are only doing SIRF development, you could decide to copy that to +`SIRF-SuperBuild/docker/devel/.ccache` but we will leave that up to you). + +#### `docker-stacks` + is used to gradually build up images: - `BASE_CONTAINER=nvidia/cuda:11.7.1-cudnn8-devel-ubuntu22.04` From 69b09635cf1ff00be0af6e89e36f52d2668257ed Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 21 Feb 2024 01:41:24 +0000 Subject: [PATCH 97/98] fix compose.sh -U --- docker/compose.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/compose.sh b/docker/compose.sh index 40484655..96517f0e 100755 --- a/docker/compose.sh +++ b/docker/compose.sh @@ -81,8 +81,8 @@ if test $build = 1; then echo copy ccache test $update_ccache$regen_ccache = 11 && sudo rm -rf ./docker/devel/.ccache/* export USER_ID UID - test $cpu = 1 && $DCC_CPU up --no-build sirf-build && test $update_ccache = 1 && $DCC_CPU down sirf-build - test $gpu = 1 && $DCC_GPU up --no-build sirf-build && test $update_ccache = 1 && $DCC_GPU down sirf-build + test $cpu$update_ccache = 11 && $DCC_CPU up --no-build sirf-build && $DCC_CPU down sirf-build + test $gpu$update_ccache = 11 && $DCC_GPU up --no-build sirf-build && $DCC_GPU down sirf-build fi if test $run = 1; then From aa0478be4ed24a282cf5f26efdc3334882d1bca0 Mon Sep 17 00:00:00 2001 From: Casper da Costa-Luis Date: Wed, 21 Feb 2024 15:17:54 +0000 Subject: [PATCH 98/98] CI: separate pull/build, don't cache devel --- .github/workflows/docker.yml | 40 +++++++++++++++--------------------- 1 file changed, 17 insertions(+), 23 deletions(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 49f62d8a..a6b0bfe1 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -72,8 +72,8 @@ jobs: ccache-docker ccache path: docker/devel/.ccache - - id: build - name: build + - id: pull + name: pull run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} # pull core image @@ -81,13 +81,17 @@ jobs: docker pull $core_image || : # pull sirf image docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : + echo "suffix=$suffix" >> "$GITHUB_OUTPUT" + echo "core_image=$core_image" >> "$GITHUB_OUTPUT" + - id: build + name: build + run: | # rebuild sirf image ./docker/compose.sh -bR${{ matrix.type == 'gpu' && 'g' || 'c' }} - image=synerbi/sirf:jupyter$suffix + image=synerbi/sirf:jupyter${{ steps.pull.outputs.suffix }} echo "image=$image" >> "$GITHUB_OUTPUT" # tag potentially newer core image - docker tag synerbi/jupyter:scipy-${{ matrix.type }} $core_image - echo "core_image=$core_image" >> "$GITHUB_OUTPUT" + docker tag synerbi/jupyter:scipy-${{ matrix.type }} ${{ steps.pull.outputs.core_image }} # make a dummy Dockerfile to use with build-push-action context=$(mktemp -d) echo "FROM $image" >> "$context/Dockerfile" @@ -149,8 +153,8 @@ jobs: ccache-docker ccache path: docker/devel/.ccache - - 
id: build - name: build + - id: pull + name: pull run: | suffix=${{ matrix.type == 'gpu' && '-gpu' || '' }} # pull core image @@ -158,26 +162,16 @@ jobs: docker pull $core_image || : # pull sirf image docker pull ghcr.io/synerbi/sirf:${{ fromJSON(steps.meta.outputs.json).tags[0] }} || : + echo "suffix=$suffix" >> "$GITHUB_OUTPUT" + echo "core_image=$core_image" >> "$GITHUB_OUTPUT" + - id: build + name: build + run: | # rebuild sirf image ./docker/compose.sh -dbR${{ matrix.type == 'gpu' && 'g' || 'c' }} - image=synerbi/sirf:jupyter$suffix + image=synerbi/sirf:jupyter${{ steps.pull.outputs.suffix }} echo "image=$image" >> "$GITHUB_OUTPUT" - # tag potentially newer core image - docker tag synerbi/jupyter:scipy-${{ matrix.type }} $core_image - echo "core_image=$core_image" >> "$GITHUB_OUTPUT" - # make a dummy Dockerfile to use with build-push-action - context=$(mktemp -d) - echo "FROM $image" >> "$context/Dockerfile" - echo "context=$context" >> "$GITHUB_OUTPUT" - name: test CIL run: > docker run --rm -v ./.github/workflows:/gh --user $(id -u) --group-add users ${{ steps.build.outputs.image }} /gh/test_cil.sh - - uses: docker/build-push-action@v5 - with: - context: ${{ steps.build.outputs.context }} - push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - - name: push core image - run: docker push ${{ steps.build.outputs.core_image }}
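
For reference, the `compose.sh` options touched by the last few patches (`-v`, `-U`, `-R`, and forwarding of `--build-arg`s after `--`) combine as in the sketch below. These are illustrative invocations only, not part of the patches, using the flags defined in `docker/compose.sh` and the build args mentioned in `docker/README.md`; `<tag>` is a placeholder.

```bash
# Build the CPU images; the host-side ccache in docker/devel/.ccache is updated by default
./docker/compose.sh -b -c

# Build the GPU images without updating the host ccache (-U), skipping CTest via a forwarded build arg
./docker/compose.sh -b -g -U -- --build-arg RUN_CTEST=0

# Regenerate the ccache from scratch (-R) with verbose tracing (-v), pinning the SuperBuild version
./docker/compose.sh -v -b -c -R -- --build-arg SIRF_SB_TAG=<tag>  # <tag> = SIRF-SuperBuild tag/commit (placeholder)

# Run the freshly built CPU images ('sirf' service)
./docker/compose.sh -r -c
```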