Skip to content

Commit

Permalink
Merge pull request #171 from neutrons/add_readthedocs_badge
Browse files Browse the repository at this point in the history
Adapt to Docker Compose v2
  • Loading branch information
jmborr authored Aug 7, 2024
2 parents a45c070 + b521b97 commit c59e7a3
Show file tree
Hide file tree
Showing 7 changed files with 63 additions and 36 deletions.
24 changes: 24 additions & 0 deletions .github/pull_request_template.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Description of the changes


Check all that apply:
- [ ] updated documentation
- [ ] Source added/refactored
- [ ] Added unit tests
- [ ] Added integration tests
- [ ] (If applicable) Verified that manual tests requiring the /SNS and /HFIR filesystems pass without fail

**References:**
- Links to IBM EWM items:
- Links to related issues or pull requests:

# Manual test for the reviewer
(Instructions for testing here)

# Check list for the reviewer
- [ ] best software practices
- [ ] clearly named variables (better to be verbose in variable names)
- [ ] code comments explaining the intent of code blocks
- [ ] All the tests are passing
- [ ] The documentation is up to date
- [ ] code comments added when explaining intent
4 changes: 2 additions & 2 deletions .github/workflows/systemtests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
conda activate webmon
make all
- name: Stand up docker containers
run: docker-compose up --build -d
run: docker compose up --build -d
env:
DJANGO_SETTINGS_MODULE: reporting.reporting_app.settings.envtest
LDAP_SERVER_URI: .
Expand All @@ -49,4 +49,4 @@ jobs:
LDAP_DOMAIN_COMPONENT: .
LIVE_PLOT_SECRET_KEY: secretKey
- name: Stand down docker containers
run: docker-compose down
run: docker compose down
16 changes: 8 additions & 8 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ REPORT_DB_INIT=/opt/conda/lib/python$(PYTHON_VERSION)/site-packages/reporting/fi
# command to run docker compose. change this to be what you have installed
# this can be overridden on the command line
# DOCKER_COMPOSE="docker compose" make startdev
DOCKER_COMPOSE ?= docker-compose
DOCKER_COMPOSE ?= "docker compose"


help:
Expand Down Expand Up @@ -127,19 +127,19 @@ nginx/nginx.crt nginx/nginx.key:
openssl req -x509 -out nginx/nginx.crt -keyout nginx/nginx.key -newkey rsa:2048 -nodes -sha256 --config nginx/san.cnf

localdev/up: ## create images and start containers for local development. Doesn't update python wheels, though.
docker-compose --file docker-compose.yml up --build
docker compose --file docker-compose.yml up --build

localdev/dbup: ## dbdumpfile=database_dump_file.sql DATABASE_PASS=$(dotenv get DATABASE_PASS) make localdev/dbup
if ! test -f "${dbdumpfile}"; then echo "dbdumpfile does not exists" && false; fi
if test -z "${DATABASE_PASS}"; then echo "DATABASE_PASS undefined" && false; fi
docker-compose --file docker-compose.yml stop
docker-compose --file docker-compose.yml down --volumes
docker compose --file docker-compose.yml stop
docker compose --file docker-compose.yml down --volumes
sleep 2s
docker-compose --file docker-compose.yml up --detach db
docker compose --file docker-compose.yml up --detach db
sleep 10s # give time for the database service to be ready
docker exec -i data_workflow_db_1 /bin/bash -c "pg_restore -d workflow -U workflow" < ${dbdumpfile} | true # continue even if returned errors
docker exec -i data_workflow_db_1 /bin/bash -c "psql -d workflow -U workflow -c \"ALTER ROLE workflow WITH PASSWORD '${DATABASE_PASS}';\""
LOAD_INITIAL_DATA="false" docker-compose --file docker-compose.yml up --build
docker exec -i data_workflow-db-1 /bin/bash -c "pg_restore -d workflow -U workflow" < ${dbdumpfile} | true # continue even if returned errors
docker exec -i data_workflow-db-1 /bin/bash -c "psql -d workflow -U workflow -c \"ALTER ROLE workflow WITH PASSWORD '${DATABASE_PASS}';\""
LOAD_INITIAL_DATA="false" docker compose --file docker-compose.yml up --build

clean: wheel/clean ## delete the SNS data and all the python wheels
rm -f SNSdata.tar.gz
Expand Down
5 changes: 4 additions & 1 deletion README.rst
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
.. image:: https://readthedocs.org/projects/data-workflow/badge/?version=latest
:target: https://data-workflow.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://github.com/neutrons/data_workflow/actions/workflows/ci.yml/badge.svg?branch=next
:alt: CI
:target: https://github.com/neutrons/data_workflow/actions/workflows/ci.yml?query=branch:next
Expand All @@ -19,7 +22,7 @@ Dependencies:
* `MySQLdb <https://sourceforge.net/projects/mysql-python/>`_ if using MySQL
* `psycopg2 <https://www.psycopg.org/>`_ if using PostgreSQL

It consists of 3 applications (Workflow Manager, Web Monitor, and DASMON Listener) which are deployed via docker-compose.
It consists of 3 applications (Workflow Manager, Web Monitor, and DASMON Listener) which are deployed via docker compose.

Workflow Manager
----------------
Expand Down
6 changes: 3 additions & 3 deletions docs/developer/instruction/build.rst
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ The system tests are run via `.github/workflow/systemtests.yml <https://github.co
.. code-block:: shell
make all # wheels and test data
LDAP_SERVER_URI=. LDAP_DOMAIN_COMPONENT=. DJANGO_SETTINGS_MODULE=reporting.reporting_app.settings.envtest docker-compose up --build
LDAP_SERVER_URI=. LDAP_DOMAIN_COMPONENT=. DJANGO_SETTINGS_MODULE=reporting.reporting_app.settings.envtest docker compose up --build
Wait for a time for everything to get up and running.
This is normally noted by seeing a collection of worker threads starting.
Expand Down Expand Up @@ -131,7 +131,7 @@ Stopping and deleting the running containers as well as deleting the images and d

.. code-block:: shell
docker-compose down --volumes
docker compose down --volumes
this command will delete the database. Omit ``--volumes`` if preservation of the database is desired.

Expand All @@ -149,7 +149,7 @@ If necessary, delete all existing wheels with ``make wheel/clean``
Rebuild the Images
++++++++++++++++++
Run ``make localdev/up``. This ``make`` target builds the services
with command ``docker-compose up --build`` using settings in ``docker-compose.yml``.
with command ``docker compose up --build`` using settings in ``docker-compose.yml``.

More information on docker commands for this project can be found :doc:`here <docker>`.

Expand Down
20 changes: 10 additions & 10 deletions docs/developer/instruction/docker.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,23 @@ Docker information
:maxdepth: 2

.. note::
This document is updated, however, it may be good to read the ``docker-compose`` and ``Dockerfile.*`` in the repository themselves for the most up-to-date information.
This document is updated, however, it may be good to read the ``docker compose`` and ``Dockerfile.*`` in the repository themselves for the most up-to-date information.

This guide assumes that ``docker`` and `docker-compose`_ are present on your system.
This guide assumes that ``docker`` and `docker compose`_ are present on your system.

Starting and Stopping
---------------------

While docker can be used to start each individual container separately, using ``docker-compose up --build`` is the preferred method because it starts all services in the correct order.
While docker can be used to start each individual container separately, using ``docker compose up --build`` is the preferred method because it starts all services in the correct order.
Pressing ``ctrl-c`` will cleanly shutdown interactive docker.
Pressing ``ctrl-c`` multiple times will kill the running images and leave docker in a somewhat funny state that likely requires running ``docker-compose down`` before starting again
Pressing ``ctrl-c`` multiple times will kill the running images and leave docker in a somewhat funny state that likely requires running ``docker compose down`` before starting again
An additional flag ``-d`` can be supplied to run docker in detached mode.

.. note::
Use ``docker-compose --file <filename>`` to select a different configuration
Use ``docker compose --file <filename>`` to select a different configuration

To start a single image, supply its name as an additional argument to ``docker-compose up``.
To stop all images, including in detached mode, run ``docker-compose down``.
To start a single image, supply its name as an additional argument to ``docker compose up``.
To stop all images, including in detached mode, run ``docker compose down``.

Cleaning docker
---------------
Expand All @@ -46,15 +46,15 @@ Misc
* Add option ``--build`` to force rebuild the container if the local changes are not reflected in the container.
* Add option ``--force-recreate`` to recreate all images if ``--build`` does not work.
* If all fails (e.g. the local changes are not showing up in the runtime instances):
* stop the instance with ``docker-compose down``.
* stop the instance with ``docker compose down``.
* prune caches of images, container and volumes.
* restart the instance with ``docker-compose up -d --build --force-recreate``.
* restart the instance with ``docker compose up -d --build --force-recreate``.

2. If you cannot find web-monitor at ``localhost``, it is possible that the standard http port 80 is used by another application. Here are two possible solutions:

* Stop the service running at port 80 and restart the instance.
* Modify `the port of nginx`_ in the docker compose file to use a different port (e.g. change to ``81:80``).

.. _docker: https://www.docker.com/
.. _docker-compose: https://docs.docker.com/compose/
.. _docker compose: https://docs.docker.com/compose/
.. _the port of nginx: https://github.com/neutrons/data_workflow/blob/next/docker-compose.yml
24 changes: 12 additions & 12 deletions tests/test_ReductionSetupPageView.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,21 +31,21 @@ def logged_in_client(self, next, username, password):
def prepareEnvironmentForReductionScriptGeneration(self):
os.system(
"""
docker exec data_workflow_autoreducer_1 mkdir -p /SNS/ARCS/IPTS-123/nexus
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/IPTS-123/nexus/ARCS_100.nxs.h5
docker exec data_workflow_autoreducer_1 mkdir -p /SNS/ARCS/shared/autoreduce/vanadium_files
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS_default.py
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS.py
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS.py.template
docker exec -i data_workflow_autoreducer_1 bash -c 'echo "#!/usr/bin/env python3\n# this is a template\ndef init():\nprint(5)\n" > /SNS/ARCS/shared/autoreduce/reduce_ARCS.py.template'
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/shared/autoreduce/ARCS_2X1_grouping.xml
docker exec data_workflow_autoreducer_1 touch /SNS/ARCS/shared/autoreduce/vanadium_files/test_van201562.nxs
docker exec data_workflow-autoreducer-1 mkdir -p /SNS/ARCS/IPTS-123/nexus
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/IPTS-123/nexus/ARCS_100.nxs.h5
docker exec data_workflow-autoreducer-1 mkdir -p /SNS/ARCS/shared/autoreduce/vanadium_files
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS_default.py
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS.py
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/shared/autoreduce/reduce_ARCS.py.template
docker exec -i data_workflow-autoreducer-1 bash -c 'echo "#!/usr/bin/env python3\n# this is a template\ndef init():\nprint(5)\n" > /SNS/ARCS/shared/autoreduce/reduce_ARCS.py.template'
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/shared/autoreduce/ARCS_2X1_grouping.xml
docker exec data_workflow-autoreducer-1 touch /SNS/ARCS/shared/autoreduce/vanadium_files/test_van201562.nxs
""" # noqa: E501
)

def getReductionScriptContents(self):
return subprocess.check_output(
"docker exec data_workflow_autoreducer_1 cat /SNS/ARCS/shared/autoreduce/reduce_ARCS.py", shell=True
"docker exec data_workflow-autoreducer-1 cat /SNS/ARCS/shared/autoreduce/reduce_ARCS.py", shell=True
).decode()

def initReductionGroup(self, conn, cursor):
Expand Down Expand Up @@ -101,7 +101,7 @@ def getReductionData(self, instrument_scientist_client):
def testReduction(self, instrument_scientist_client):
# backup reduce_ARCS.py
os.system(
"""docker exec -i data_workflow_autoreducer_1 bash -c \
"""docker exec -i data_workflow-autoreducer-1 bash -c \
'cp /SNS/ARCS/shared/autoreduce/reduce_ARCS.py /tmp/reduce_ARCS.py'"""
)
self.prepareEnvironmentForReductionScriptGeneration()
Expand All @@ -127,6 +127,6 @@ def testReduction(self, instrument_scientist_client):

# return reduce_ARCS.py back to starting state
os.system(
"""docker exec -i data_workflow_autoreducer_1 bash -c \
"""docker exec -i data_workflow-autoreducer-1 bash -c \
'cp /tmp/reduce_ARCS.py /SNS/ARCS/shared/autoreduce/reduce_ARCS.py'"""
)

0 comments on commit c59e7a3

Please sign in to comment.