diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 8cd2a082d..d84d38c28 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 2.3.3
+current_version = 2.4.0
 commit = True
 tag = False
 tag_name = {new_version}
@@ -30,11 +30,11 @@ search = {current_version}
 replace = {new_version}
 
 [bumpversion:file:RELEASE.txt]
-search = {current_version} 2024-05-29T12:13:00Z
+search = {current_version} 2024-06-04T17:41:34Z
 replace = {new_version} {utcnow:%Y-%m-%dT%H:%M:%SZ}
 
 [bumpversion:part:releaseTime]
-values = 2024-05-29T12:13:00Z
+values = 2024-06-04T17:41:34Z
 
 [bumpversion:file(version):birdhouse/components/canarie-api/docker_configuration.py.template]
 search = 'version': '{current_version}'
diff --git a/.github/labeler.yml b/.github/labeler.yml
index aecc60cfc..377ad298b 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -23,6 +23,7 @@ ci/deployment:
   - Vagrantfile
   - birdhouse/**/*.include.sh
   - birdhouse/pavics-compose.sh
+  - birdhouse/birdhouse-compose.sh
 
 ci/tests:
   - "**/test*/**/*"  # eg: optional-components/test...
diff --git a/CHANGES.md b/CHANGES.md
index b0050708a..b735e3253 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -17,6 +17,108 @@
 
 [//]: # (list changes here, using '-' for each new entry, remove this when items are added)
 
+[2.4.0](https://github.com/bird-house/birdhouse-deploy/tree/2.4.0) (2024-06-04)
+------------------------------------------------------------------------------------------------------------------
+
+## Changes
+- Rename variables, constants and files from PAVICS to Birdhouse
+
+  For historical reasons the name PAVICS was used in variable names, constants and filenames in this repo to refer
+  to the software stack in general. This was because, for a long time, the PAVICS deployment of this stack was the
+  only one that was being used in production. However, now that multiple deployments of this software exist in
+  production (that are not named PAVICS), we remove unnecessary references to PAVICS in order to reduce confusion
+  for maintainers and developers who may not be aware of the historical reasons for the PAVICS name.
+
+  This update makes the following changes:
+
+  * The string ``PAVICS`` in environment variables, constant values, and file names has been changed to
+    ``BIRDHOUSE`` (case has been preserved where possible).
+    * For example:
+      * ``PAVICS_FQDN`` -> ``BIRDHOUSE_FQDN``
+      * ``pavics-compose.sh`` -> ``birdhouse-compose.sh``
+      * ``THREDDS_DATASET_LOCATION_ON_CONTAINER='/pavics-ncml'`` -> ``THREDDS_DATASET_LOCATION_ON_CONTAINER='/birdhouse-ncml'``
+  * Comment strings and documentation that refer to the software stack as ``PAVICS`` have been changed to use
+    ``Birdhouse``.
+  * Recreated the ``pavics-compose.sh`` script that runs ``birdhouse-compose.sh`` in backwards compatible mode.
+    * Backwards compatible mode means that variables in ``env.local`` that contain the string ``PAVICS`` will be used
+      to set the equivalent variable that contains ``BIRDHOUSE``. For example, the ``PAVICS_FQDN`` variable set in
+      the ``env.local`` file will be used to set the value of ``BIRDHOUSE_FQDN``.
+  * Removed unused variables:
+    * `CMIP5_THREDDS_ROOT`
+
+- Create a new CLI entrypoint in ``bin/birdhouse`` that can be used to invoke ``pavics-compose.sh`` or
+  ``birdhouse-compose.sh`` from one convenient location. This script also includes some useful options and provides
+  a generic entrypoint to the stack that can be extended in the future.
In the future, users should treat this + entrypoint as the only stable CLI for interacting with the Birdhouse software. + +### Migration Guide + + - Update ``env.local`` file to replace all variables that contain ``PAVICS`` with ``BIRDHOUSE``. + Variable names have also been updated to ensure that they start with the prefix ``BIRDHOUSE_``. + * see [`env.local.example`](./birdhouse/env.local.example) to see new variable names + * see the ``BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES`` variable (defined in [`default.env`](./birdhouse/default.env)) for a + full list of changed environment variable names. + - Update any external scripts that access the old variable names directly to use the updated variable names. + - Update any external scripts that access any of the following files to use the new file name: + + | old file name | new file name | + |-------------------------|----------------------------| + | pavics-compose.sh | birdhouse-compose.sh | + | PAVICS-deploy.logrotate | birdhouse-deploy.logrotate | + | configure-pavics.sh | configure-birdhouse.sh | + | trigger-pavicscrawler | trigger-birdhousecrawler | + + - Update any external scripts that called ``pavics-compose.sh`` or ``read-configs.include.sh`` to use the CLI + entrypoint in ``bin/birdhouse`` instead. + - The following default values have changed. If your deployment was using the old default value, update your + ``env.local`` file to explicitly set the old default values. + + | old variable name | new variable name | old default value | new default value | + |--------------------------------------------|--------------------------------------|-------------------------|:---------------------------| + | POSTGRES_PAVICS_USERNAME | BIRDHOUSE_POSTGRES_USERNAME | postgres-pavics | postgres-birdhouse | + | THREDDS_DATASET_LOCATION_ON_CONTAINER | (no change) | /pavics-ncml | /birdhouse-ncml | + | THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER | (no change) | /pavics-data | /birdhouse-data | + | (hardcoded) | BIRDHOUSE_POSTGRES_DB | pavics | birdhouse | + | PAVICS_LOG_DIR | BIRDHOUSE_LOG_DIR | /var/log/PAVICS | /var/log/birdhouse | + | (hardcoded) | GRAFANA_DEFAULT_PROVIDER_FOLDER | Local-PAVICS | Local-Birdhouse | + | (hardcoded) | GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID | local-pavics | local-birdhouse | + | (hardcoded) | GRAFANA_PROMETHEUS_DATASOURCE_UUID | local_pavics_prometheus | local_birdhouse_prometheus | + + Note that the `PAVICS_LOG_DIR` variable was actually hardcoded as `/var/log/PAVICS` in some scripts. If + `PAVICS_LOG_DIR` was set to anything other than `/var/log/PAVICS` you'll end up with inconsistent log outputs as + previously some logs would have been sent to `PAVICS_LOG_DIR` and others to `/var/log/PAVICS`. We recommend merging + these two log files. Going forward, all logs will be sent to `BIRDHOUSE_LOG_DIR`. + + - Update any jupyter notebooks that make use of the `PAVICS_HOST_URL` environment variable to use the new + `BIRDHOUSE_HOST_URL` instead. + - Set the ``BIRDHOUSE_POSTGRES_DB`` variable to ``pavics`` in the ``env.local`` file. This value was previously + hardcoded to the string ``pavics`` so to maintain backwards compatibility with any existing databases this should be + kept the same. If you do want to update to the new database name, you will need to rename the existing database. 
+ For example, the following will update the existing database named ``pavics`` to ``birdhouse`` (assuming the old + default values for the postgres username): + + ```shell + docker exec -it postgres psql -U postgres-pavics -d postgres -c 'ALTER DATABASE pavics RENAME TO birdhouse' + ``` + + You can then update the ``env.local`` file to the new variable name and restart the stack + - Set the ``BIRDHOUSE_POSTGRES_USER`` variable to ``postgres-pavics`` in the ``env.local`` file if you would like to + preserve the old default value. If you would like to change the value of ``BIRDHOUSE_POSTGRES_USER`` then also + update the name for any running postgres instances. For example, the following will update the user named + ``postgres-pavics`` to ``postgres-birdhouse``: + + ```shell + docker exec -it postgres psql -U postgres-pavics -d postgres -c 'CREATE USER "tmpsuperuser" WITH SUPERUSER' + docker exec -it postgres psql -U tmpsuperuser -d postgres -c 'ALTER ROLE "postgres-pavics" RENAME TO "postgres-birdhouse"' + docker exec -it postgres psql -U tmpsuperuser -d postgres -c 'ALTER ROLE "postgres-birdhouse" WITH PASSWORD '\''postgres-qwerty'\' + docker exec -it postgres psql -U postgres-birdhouse -d postgres -c 'DROP ROLE "tmpsuperuser"' + ``` + + Note that the ``postgres-qwerty`` value is meant just for illustration, you should replace this with the value of + the ``BIRDHOUSE_POSTGRES_PASSWORD`` variable. + Note that you'll need to do the same for the ``stac-db`` service as well (assuming that you weren't previously + overriding the ``STAC_POSTGRES_USER`` with a custom value). + [2.3.3](https://github.com/bird-house/birdhouse-deploy/tree/2.3.3) (2024-05-29) ------------------------------------------------------------------------------------------------------------------ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 4ebb316b9..3f0d7bdbc 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -3,13 +3,13 @@ Contributing Contributions are welcome, and they are greatly appreciated! Every little bit helps, and credit will always be given. -Please read the PAVICS `Developer Documentation`_ to get started. +Please read the Birdhouse `Developer Documentation`_ to get started. .. _Developer Documentation: https://birdhouse-deploy.readthedocs.io/en/latest/ Policy ------ -Since PAVICS is used in production by multiple organizations, this deployment repository also has a policy regarding contributions. +Since Birdhouse is used in production by multiple organizations, this deployment repository also has a policy regarding contributions. Policy objectives ~~~~~~~~~~~~~~~~~~~~~ @@ -74,7 +74,7 @@ Policy rules 10. If patches or contributions are made directly in the production fork, they must also be ported back and approved in the main branch (no code that does not exist in the main branch should exist in a production fork). -11. The main branch will contain the official versions of PAVICS that will evolve according to semantic versioning. +11. The main branch will contain the official versions of Birdhouse that will evolve according to semantic versioning. These versions should be used by the organizations. 12. If contributions are made directly in a production fork (point 10), a tagged version should use the last common one @@ -84,8 +84,8 @@ Policy rules The tag ``2.1.9`` cannot be applied because this version could possibly exists in the main branch. A tag looking like ``2.1.8.orgXrev1`` would be preferred. 
-PAVICS multi organization git repository management +Birdhouse multi organization git repository management ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. image:: https://raw.githubusercontent.com/bird-house/birdhouse-deploy/master/docs/source/images/multi_organizations_management.jpg - :alt: PAVICS multi organization git repository management + :alt: Birdhouse multi organization git repository management diff --git a/Makefile b/Makefile index b137969c5..9cbbff73d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ # Generic variables override SHELL := bash override APP_NAME := birdhouse-deploy -override APP_VERSION := 2.3.3 +override APP_VERSION := 2.4.0 # utility to remove comments after value of an option variable override clean_opt = $(shell echo "$(1)" | $(_SED) -r -e "s/[ '$'\t'']+$$//g") @@ -213,15 +213,15 @@ version: ## Display project version ### Execution Targets ### -SCRIPT ?= birdhouse/pavics-compose.sh ## Script to run the stack +SCRIPT ?= bin/birdhouse ## Script to run the stack SCRIPT := $(call clean_opt,$(SCRIPT)) .PHONY: start start: ## Start the stack with current env.local definitions @-$(MSG_I) "Starting $(APP_NAME) stack..." - @$(SHELL) $(SCRIPT) up -d + @$(SHELL) $(SCRIPT) compose up -d .PHONY: stop stop: ## Stop the running stack @-$(MSG_I) "Stopping $(APP_NAME) stack..." - @$(SHELL) $(SCRIPT) stop + @$(SHELL) $(SCRIPT) compose stop diff --git a/README.rst b/README.rst index c55ac3b5d..1024eb7db 100644 --- a/README.rst +++ b/README.rst @@ -18,13 +18,13 @@ for a full-fledged production platform. * - citation - | |citation| -.. |commits-since| image:: https://img.shields.io/github/commits-since/bird-house/birdhouse-deploy/2.3.3.svg +.. |commits-since| image:: https://img.shields.io/github/commits-since/bird-house/birdhouse-deploy/2.4.0.svg :alt: Commits since latest release - :target: https://github.com/bird-house/birdhouse-deploy/compare/2.3.3...master + :target: https://github.com/bird-house/birdhouse-deploy/compare/2.4.0...master -.. |latest-version| image:: https://img.shields.io/badge/tag-2.3.3-blue.svg?style=flat +.. |latest-version| image:: https://img.shields.io/badge/tag-2.4.0-blue.svg?style=flat :alt: Latest Tag - :target: https://github.com/bird-house/birdhouse-deploy/tree/2.3.3 + :target: https://github.com/bird-house/birdhouse-deploy/tree/2.4.0 .. |readthedocs| image:: https://readthedocs.org/projects/birdhouse-deploy/badge/?version=latest :alt: ReadTheDocs Build Status (latest version) @@ -41,11 +41,9 @@ for a full-fledged production platform. .. end-badges -PAVICS +Birdhouse ------ -Power Analytics and Visualization for Climate Science - Powered by Birdhouse and other ESGF software - For GitHub navigation, see the following README pages: * `README for general deployment `_ diff --git a/RELEASE.txt b/RELEASE.txt index 2c502a2e5..e01e22df5 100644 --- a/RELEASE.txt +++ b/RELEASE.txt @@ -1 +1 @@ -2.3.3 2024-05-29T12:13:00Z +2.4.0 2024-06-04T17:41:34Z diff --git a/Vagrantfile b/Vagrantfile index 161db7106..dad43df51 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -24,8 +24,8 @@ Vagrant.configure("2") do |config| # Fix /vagrant shared folders (together with vagrant-vbguest) for Centos 7. 
config.vm.synced_folder ".", "/vagrant", type: "virtualbox" - # bridge networking to get real DNS name on local network, PAVICS does not - # seems to work with numerical IP address for PAVICS_FQDN + # bridge networking to get real DNS name on local network, Birdhouse does not + # seems to work with numerical IP address for BIRDHOUSE_FQDN if settings.has_key?('hostip') if settings.has_key?('network_bridge') config.vm.network "public_network", ip: settings['hostip'], bridge: settings['network_bridge'] diff --git a/bin/birdhouse b/bin/birdhouse new file mode 100755 index 000000000..ba5654be7 --- /dev/null +++ b/bin/birdhouse @@ -0,0 +1,224 @@ +#!/usr/bin/env sh + +THIS_FILE="$(readlink -f "$0" || realpath "$0")" +THIS_DIR="$(dirname "${THIS_FILE}")" +COMPOSE_DIR="$(dirname "${THIS_DIR}")/birdhouse" + +export BIRDHOUSE_COMPOSE="${BIRDHOUSE_COMPOSE:-"${COMPOSE_DIR}/birdhouse-compose.sh"}" +export __BIRDHOUSE_SUPPORTED_INTERFACE=True + +USAGE="USAGE: $0 [-h|--help] [-b|--backwards-compatible] [-e|--env-file local-env-file] {info|compose|configs}" +HELP="$USAGE + +Manage the Birdhouse software stack. + +Commands: + info Print build information + compose Run a \"docker compose\" command for the Birdhouse project + configs Print a command that can be used to load configuration settings as environment variables + +Options: + -h, --help Print this message and exit + -b, --backwards-compatible Run in backwards compatible mode + -e, --env-file string Override the local environment file, default is ${COMPOSE_DIR}/env.local +" + +CONFIGS_USAGE="USAGE: $0 configs [-h|--help] [-d|--default] {[-p|--print-config-command] | [-c|--command command]}" +CONFIGS_HELP="$CONFIGS_USAGE + +Load or execute commands in the Birdhouse configuration environment. + +Options: + -d, --default Only load/print a command for the default configuration settings, not those specified by the local environment file + -p, --print-config-command Print a command that can be used to load configuration settings as environment variables + -c, --command string Execute the given command after loading configuration settings + -q, --quiet Suppress stdout when loading configuration settings for the '--command' option. + +Example Usage: + + $ ${0} configs -c 'echo \${BIRDHOUSE_FQDN}' + example.com # This is the value of BIRDHOUSE_FQDN as determined by the current configuration settings + $ ${0} configs -p + . /path/to/configs/file/to/source && read_configs + $ eval \$(${0} configs) + $ echo \${BIRDHOUSE_FQDN} + example.com # This is the value of BIRDHOUSE_FQDN as determined by the current configuration settings +" + +READ_CONFIGS_CMD=read_configs + +# Print a command that can be used to load configuration settings as environment variables. +# Modifies the command based on whether the options --backwards-compatible and --env-file are set in order +# to respect these settings if this command is run `eval` later on. +# Tries to set the BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED and BIRDHOUSE_LOCAL_ENV environment variables back to their +# original setting after the command is run with `eval`. +# Known issue: The original setting isn't restored if this script is called with an inline environment variable: +# $ export $BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED=False +# $ BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED=True birdhouse configs -p +# ... 
+# $ echo $BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED +# True +print_config_command() { + configs_cmd_prefix="export __BIRDHOUSE_SUPPORTED_INTERFACE=True ;" + configs_cmd_suffix="unset __BIRDHOUSE_SUPPORTED_INTERFACE ;" + if [ "${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED+set}" = 'set' ]; then + configs_cmd_prefix="${configs_cmd_prefix} export BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED='${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED}' ;" + fi + if [ "${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED_PREV+set}" = 'set' ]; then + configs_cmd_suffix="${configs_cmd_suffix} export BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED='${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED_PREV}' ;" + elif [ "${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED_UNSET}" = 'True' ]; then + configs_cmd_suffix="${configs_cmd_suffix} unset BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED ;" + fi + + if [ "${BIRDHOUSE_LOCAL_ENV+set}" = 'set' ]; then + configs_cmd_prefix="${configs_cmd_prefix} export BIRDHOUSE_LOCAL_ENV='${BIRDHOUSE_LOCAL_ENV}' ;" + fi + if [ "${BIRDHOUSE_LOCAL_ENV_PREV+set}" = 'set' ]; then + configs_cmd_suffix="${configs_cmd_suffix} export BIRDHOUSE_LOCAL_ENV='${BIRDHOUSE_LOCAL_ENV_PREV}' ;" + elif [ "${BIRDHOUSE_LOCAL_ENV_UNSET}" = 'True' ]; then + configs_cmd_suffix="${configs_cmd_suffix} unset BIRDHOUSE_LOCAL_ENV ;" + fi + + echo "${configs_cmd_prefix} . ${COMPOSE_DIR}/read-configs.include.sh; ${READ_CONFIGS_CMD} ; ${configs_cmd_suffix}" +} + +# Support multiple short flags together (ex: -abc instead of -a -b -c) +# The first argument is the parse function to call once the multiple short flags have been parsed +# The rest of the arguments are the rest arguments to be processed. +parse_multiple_short_flags() { + parse_func=$1 + shift + new_flags="${1%%=*}" + arg_value="${1#*=}" + [ "${arg_value}" = "$1" ] && unset arg_value + new_flags="$(echo "${new_flags#-*}" | sed 's/[a-z]/ -&/g')" + shift + if [ "${arg_value+set}" = 'set' ]; then + # shellcheck disable=SC2086 + ${parse_func} ${new_flags} "${arg_value}" "$@" + else + # shellcheck disable=SC2086 + ${parse_func} ${new_flags} "$@" + fi +} + +# Parse arguments and options for the configs subcommand +parse_configs_args() { + case "$1" in + -d|--default) + READ_CONFIGS_CMD=read_basic_configs_only + shift + parse_configs_args "$@" + ;; + -q|--quiet) + CONFIGS_QUIET=True + shift + parse_configs_args "$@" + ;; + -p|--print-config-command) + # Cannot be called with the --command argument as well + [ "${CONFIGS_CMD+set}" = 'set' ] && parse_configs_args 'invalid arg that triggers usage message' + CONFIGS_PRINT=True + shift + parse_configs_args "$@" + ;; + -c=*|--command=*) + arg_value="${1#*=}" + shift + parse_configs_args -c "${arg_value}" "$@" + ;; + -c|--command) + # Cannot be called with the --print-config-command option as well + [ "${CONFIGS_PRINT}" = 'True' ] && parse_configs_args 'invalid arg that triggers usage message' + shift + CONFIGS_CMD="$1" + shift + parse_configs_args "$@" + ;; + -h|--help) + echo "${CONFIGS_HELP}" | more + ;; + -??*) + parse_multiple_short_flags parse_configs_args "$@" + ;; + '') + # Print the configuration settings command or execute the specified + # command once all other options have been parsed + if [ "${CONFIGS_PRINT}" = 'True' ]; then + print_config_command + elif [ "${CONFIGS_CMD+set}" = 'set' ]; then + if [ "${CONFIGS_QUIET}" = "True" ]; then + eval "$(print_config_command)" > /dev/null + else + eval "$(print_config_command)" + fi + exec /usr/bin/env sh -c "${CONFIGS_CMD}" + else + parse_configs_args 'invalid arg that triggers usage message' + fi + ;; + *) + >&2 echo "$CONFIGS_USAGE" 
+ exit 1 + ;; + esac +} + +# Parse arguments and options +parse_args() { + case "$1" in + -b|--backwards-compatible) + shift + if [ "${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED+set}" = 'set' ]; then + BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED_PREV="${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED}" + else + BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED_UNSET="True" + fi + export BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED="True" # The argument here takes precedence over the env variable + parse_args "$@" + ;; + -e=*|--env-file=*) + arg_value="${1#*=}" + shift + parse_args -e "${arg_value}" "$@" + ;; + -e|--env-file) + shift + if [ "${BIRDHOUSE_LOCAL_ENV+set}" = 'set' ]; then + BIRDHOUSE_LOCAL_ENV_PREV="${BIRDHOUSE_LOCAL_ENV}" + else + BIRDHOUSE_LOCAL_ENV_UNSET="True" + fi + BIRDHOUSE_LOCAL_ENV=$(realpath "$1") # The argument here takes precedence over the env variable + export BIRDHOUSE_LOCAL_ENV + shift + parse_args "$@" + ;; + info) + shift + "${BIRDHOUSE_COMPOSE}" info "$@" + exit $? + ;; + compose) + shift + "${BIRDHOUSE_COMPOSE}" "$@" + exit $? + ;; + configs) + shift + parse_configs_args "$@" + ;; + -h|--help) + echo "$HELP" + ;; + -??*) + parse_multiple_short_flags parse_configs_args "$@" + ;; + *) + >&2 echo "$USAGE" + exit 1 + ;; + esac +} + +parse_args "$@" diff --git a/birdhouse/README.rst b/birdhouse/README.rst index d9f53f5fe..d79d30254 100644 --- a/birdhouse/README.rst +++ b/birdhouse/README.rst @@ -12,7 +12,7 @@ Requirements * Hostname of Docker host must exist on the network. Must use bridge networking if Docker host is a Virtual Machine. -* User running ``pavics-compose.sh`` below must not be ``root`` but a regular user +* User running ``BIRDHOUSE_COMPOSE`` below must not be ``root`` but a regular user belonging to the ``docker`` group. * Install latest docker-ce and docker-compose for the chosen distro (not the @@ -26,6 +26,47 @@ Requirements will need to access your hostname at port 80 and 443 in order to verify and provide the SSL certificate. +Command Line Interface (CLI) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The command line interface for interacting with the Birdhouse software can be found in +`bin/birdhouse `_ (:download:`download > ~/.profile + +.. warning:: + It is no longer recommended to call scripts other than + `bin/birdhouse `_ (:download:`download `_ (:download:`download `) wrapper script must be used. -This script will source the ``env.local`` file, apply the appropriate variable substitutions on all the configuration files -".template", and run ``docker-compose`` with all the command line arguments given to `pavics-compose.sh `_ (:download:`download `). +To run ``docker-compose`` for Birdhouse, the `bin/birdhouse `_ (:download:`download `_ (:download:`download `) for more details on what can go into the ``env.local`` file. If the file `env.local` is somewhere else, symlink it here, next to `docker-compose.yml `_ (:download:`download `) because many scripts assume this location. To follow infrastructure-as-code, it is encouraged to source control the above -`env.local` file and any override needed to customized this PAVICS deployment +`env.local` file and any override needed to customized this Birdhouse deployment for your organization. For an example of possible override, see how the `emu service `_ (:download:`download `) (`README `_) can be optionally added to the deployment via the `override mechanism `_. Ouranos specific override can be found in this `birdhouse-deploy-ouranos `_ repo. @@ -71,9 +112,10 @@ Suggested deployment layout: .. 
code-block:: ├── birdhouse-deploy/ # this repo + │   ├── bin/ + │   │   ├── birdhouse │   ├── birdhouse/ │   │   ├── env.local # relative symlink to env.local.real below - │   │   ├── pavics-compose.sh │   │   ├── (...) ├── private-config/ # your private config and override: sibling level of this repo │   ├── docker-compose-extra.yml @@ -83,9 +125,9 @@ Suggested deployment layout: The automatic deployment is able to handle multiple repos, so will trigger if this repo or your private-personalized-config repo changes, giving you automated continuous deployment. See the continuous deployment setup section -below and the variable ``AUTODEPLOY_EXTRA_REPOS`` in `env.local.example `_ (:download:`download `). +below and the variable ``BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS`` in `env.local.example `_ (:download:`download `). -The automatic deployment of the PAVICS platform, of the Jupyter tutorial +The automatic deployment of the Birdhouse platform, of the Jupyter tutorial notebooks and of the automatic deployment mechanism itself can all be enabled by following the `scheduling instructions `_. @@ -96,7 +138,7 @@ To launch all the containers, use the following command: .. code-block:: - ./pavics-compose.sh up -d + ./bin/birdhouse compose up -d If you get a ``'No applicable error code, please check error log'`` error from the WPS processes, please make sure that the WPS databases exists in the postgres instance. See `create-wps-pgsql-databases.sh `_ (:download:`download `). @@ -148,9 +190,9 @@ instructions below. Manual instructions: * Go to - ``https:///magpie/ui/login`` and login with the ``MAGPIE_ADMIN_USERNAME`` user. The password should be in ``env.local``. + ``https:///magpie/ui/login`` and login with the ``MAGPIE_ADMIN_USERNAME`` user. The password should be in ``env.local``. -* Then go to ``https:///magpie/ui/users/add``. +* Then go to ``https:///magpie/ui/users/add``. * Fill in: @@ -175,7 +217,7 @@ to prepare your new instance. Further documentation inside the script. Optional components `all-public-access <./optional-components#give-public-access-to-all-resources-for-testing-purposes>`_ and `secure-thredds <./optional-components/#control-secured-access-to-resources-example>`_ -also need to be enabled in ``env.local`` using ``EXTRA_CONF_DIRS`` variable. +also need to be enabled in ``env.local`` using ``BIRDHOUSE_EXTRA_CONF_DIRS`` variable. ESGF login is also needed for https://github.com/Ouranosinc/pavics-sdi/blob/master/docs/source/notebooks/esgf-dap.ipynb @@ -183,7 +225,7 @@ part of test suite. ESGF credentials can be given to Jenkins via https://github.com/Ouranosinc/jenkins-config/blob/aafaf6c33ea60faede2a32850604c07c901189e8/env.local.example#L11-L13 The canarie monitoring link -``https:///canarie/node/service/stats`` can be used to confirm the +``https:///canarie/node/service/stats`` can be used to confirm the instance is ready to run the automated end-to-end test suite. That link should return the HTTP response code ``200``. @@ -192,7 +234,7 @@ Vagrant instructions -------------------- Vagrant allows us to quickly spin up a VM to easily reproduce the runtime -environment for testing or to have multiple flavors of PAVICS with slightly +environment for testing or to have multiple flavors of Birdhouse with slightly different combinations of the parts all running simultaneously in their respective VM, allowing us to see the differences in behavior. @@ -222,16 +264,16 @@ Starting and managing the lifecycle of the VM: .. 
code-block:: # start everything, this is the only command needed to bring up the entire - # PAVICS platform + # Birdhouse platform vagrant up # get bridged IP address vagrant ssh -c "ip addr show enp0s8|grep 'inet '" # get inside the VM - # useful to manage the PAVICS platform as if Vagrant is not there - # and use pavics-compose.sh as before - # ex: cd /vagrant/birdhouse; ./pavics-compose.sh ps + # useful to manage the Birdhouse platform as if Vagrant is not there + # and use `birdhouse compose` as before + # ex: birdhouse compose ps vagrant ssh # power-off VM diff --git a/birdhouse/birdhouse-compose.sh b/birdhouse/birdhouse-compose.sh new file mode 100755 index 000000000..04475a12b --- /dev/null +++ b/birdhouse/birdhouse-compose.sh @@ -0,0 +1,180 @@ +#!/bin/sh + +# NOTE, this file and all the extra component pre/post scripts that it executes +# is used by the autodeploy mechanism inside a very minimalistic container, +# therefore: +# +# * When making change to this file or any extra component pre/post scripts, +# should also test that it does not break the autodeploy. +# +# * Try to keep the same behavior/code, inside and outside of the +# autodeploy container to catch error early with the autodeploy. + +# list of all variables to be substituted in templates +# some of these variables *could* employ provided values in 'default.env', +# but they must ultimately be defined one way or another for the server to work +VARS=' + $BIRDHOUSE_COMPOSE + $BIRDHOUSE_FQDN + $BIRDHOUSE_DOC_URL + $BIRDHOUSE_SUPPORT_EMAIL + $BIRDHOUSE_DATA_PERSIST_ROOT + $BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + $BIRDHOUSE_LOCAL_ENV + $BIRDHOUSE_LOG_DIR +' + +# list of vars to be substituted in template but they do not have to be set in env.local +# their default values are from 'default.env', so they do not have to be defined in 'env.local' if values are adequate +# they usually are intended to provide additional features or extended customization of their behavior +# when the value provided explicitly, it will be used instead of guessing it by inferred values from other variables +OPTIONAL_VARS=' + $BIRDHOUSE_FQDN_PUBLIC + $BIRDHOUSE_SSL_CERTIFICATE + $BIRDHOUSE_EXTRA_PYWPS_CONFIG + $BIRDHOUSE_NAME + $BIRDHOUSE_DESCRIPTION + $BIRDHOUSE_INSTITUTION + $BIRDHOUSE_SUBJECT + $BIRDHOUSE_TAGS + $BIRDHOUSE_DOCUMENTATION_URL + $BIRDHOUSE_RELEASE_NOTES_URL + $BIRDHOUSE_SUPPORT_URL + $BIRDHOUSE_LICENSE_URL +' + +THIS_FILE="$(readlink -f "$0" || realpath "$0")" +THIS_DIR="$(dirname "${THIS_FILE}")" + +export BIRDHOUSE_COMPOSE="${BIRDHOUSE_COMPOSE:-"${THIS_FILE}"}" + +# we switch to the real directory of the script, so it still works when used from $PATH +# tip: ln -s /path/to/birdhouse-compose.sh ~/bin/ +# Setup PWD for sourcing env.local. +cd "${THIS_DIR}" || exit 1 + +# Setup COMPOSE_DIR for sourcing env.local. +# Prevent un-expected difference when this script is run inside autodeploy +# container and manually from the host. +COMPOSE_DIR="$(pwd)" + +. "${COMPOSE_DIR}/read-configs.include.sh" +read_configs # this sets ALL_CONF_DIRS + +. "${COMPOSE_DIR}/scripts/get-components-json.include.sh" +. "${COMPOSE_DIR}/scripts/get-services-json.include.sh" +. "${COMPOSE_DIR}/scripts/get-version-json.include.sh" + +check_required_vars || exit $? + +## check fails when root access is required to access this file.. workaround possible by going through docker daemon... but +# will add delay +# if [ ! 
-f $BIRDHOUSE_SSL_CERTIFICATE ] +# then +# log ERROR "SSL certificate file $BIRDHOUSE_SSL_CERTIFICATE is missing" +# exit 1 +# fi + +TIMEWAIT_REUSE=$(/sbin/sysctl -n net.ipv4.tcp_tw_reuse) +if [ "${TIMEWAIT_REUSE}" -eq 0 ] +then + log WARN "the sysctl net.ipv4.tcp_tw_reuse is not enabled. " \ + "It it suggested to set it to 1, otherwise the birdhousecrawler may fail." +fi + +export BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES="" +for adir in ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS}; do + # 4 spaces in front of '--volume' is important + BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES="${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES} + --volume ${adir}:${adir}:rw" +done +export BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES + +# we apply all the templates +find ${ALL_CONF_DIRS} -name '*.template' 2>/dev/null | + while read FILE + do + DEST=${FILE%.template} + cat "${FILE}" | envsubst "$VARS" | envsubst "$OPTIONAL_VARS" > "${DEST}" + done + +SHELL_EXEC_FLAGS= +if [ "${BIRDHOUSE_LOG_LEVEL}" = "DEBUG" ]; then + SHELL_EXEC_FLAGS=-x +fi + +create_compose_conf_list # this sets COMPOSE_CONF_LIST +log INFO "Displaying resolved compose configurations:" +echo "COMPOSE_CONF_LIST=" +echo ${COMPOSE_CONF_LIST} | tr ' ' '\n' | grep -v '^-f' + +if [ x"$1" = x"info" ]; then + log INFO "Stopping before execution of docker-compose command." + exit 0 +fi + +COMPOSE_EXTRA_OPTS="" + +if [ x"$1" = x"up" ]; then + COMPOSE_EXTRA_OPTS="${BIRDHOUSE_COMPOSE_UP_EXTRA_OPTS}" + for adir in $ALL_CONF_DIRS; do + COMPONENT_PRE_COMPOSE_UP="$adir/pre-docker-compose-up" + if [ -x "$COMPONENT_PRE_COMPOSE_UP" ]; then + log INFO "Executing '$COMPONENT_PRE_COMPOSE_UP'" + sh ${SHELL_EXEC_FLAGS} "$COMPONENT_PRE_COMPOSE_UP" + fi + done +fi + +log INFO "Executing docker-compose with extra options: $* ${COMPOSE_EXTRA_OPTS}" +# the PROXY_SECURE_PORT is a little trick to make the compose file invalid without the usage of this wrapper script +PROXY_SECURE_PORT=443 HOSTNAME=${BIRDHOUSE_FQDN} docker-compose ${COMPOSE_CONF_LIST} $* ${COMPOSE_EXTRA_OPTS} +ERR=$? +if [ ${ERR} -gt 0 ]; then + log ERROR "docker-compose error, exit code ${ERR}" + exit ${ERR} +fi + +# execute post-compose function if exists and no error occurred +type post-compose 2>&1 | grep 'post-compose is a function' > /dev/null +if [ $? -eq 0 ] +then + [ ${ERR} -gt 0 ] && { log ERROR "Error occurred with docker-compose, not running post-compose"; exit $?; } + post-compose $* +fi + + +while [ $# -gt 0 ] +do + if [ x"$1" = x"up" ]; then + # we restart the proxy after an up to make sure nginx continue to work if any container IP address changes + PROXY_SECURE_PORT=443 HOSTNAME=${BIRDHOUSE_FQDN} docker-compose ${COMPOSE_CONF_LIST} restart proxy + + # run postgres post-startup setup script + # Note: this must run before the post-docker-compose-up scripts since some may expect postgres databases to exist + postgres_id=$(PROXY_SECURE_PORT=443 HOSTNAME=${BIRDHOUSE_FQDN} docker-compose ${COMPOSE_CONF_LIST} ps -q postgres 2> /dev/null) + if [ ! 
-z "$postgres_id" ]; then + docker exec ${postgres_id} /postgres-setup.sh + fi + + for adir in $ALL_CONF_DIRS; do + COMPONENT_POST_COMPOSE_UP="$adir/post-docker-compose-up" + if [ -x "$COMPONENT_POST_COMPOSE_UP" ]; then + log INFO "Executing '$COMPONENT_POST_COMPOSE_UP'" + sh ${SHELL_EXEC_FLAGS} "$COMPONENT_POST_COMPOSE_UP" + fi + done + + # Note: This command should stay last, as it can take a while depending on network and drive speeds + # immediately cache the new notebook images for faster startup by JupyterHub + for IMAGE in ${JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES} + do + docker pull $IMAGE + done + + fi + shift +done + + +# vi: tabstop=8 expandtab shiftwidth=2 softtabstop=2 diff --git a/birdhouse/components/README.rst b/birdhouse/components/README.rst index a56e460ac..f67a9767e 100644 --- a/birdhouse/components/README.rst +++ b/birdhouse/components/README.rst @@ -1,4 +1,4 @@ -PAVICS Components +Birdhouse Components ################# @@ -9,11 +9,11 @@ Scheduler ========= This component provides automated unattended continuous deployment for the -"PAVICS stack" (all the git repos in var ``AUTODEPLOY_EXTRA_REPOS``), for the +"Birdhouse stack" (all the git repos in var ``BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS``), for the tutorial notebooks on the Jupyter environment and for the automated deployment itself. -It can also be used to schedule other tasks on the PAVICS physical host. +It can also be used to schedule other tasks on the Birdhouse physical host. Everything is dockerized, the deployment runs inside a container that will update all other containers. @@ -27,12 +27,12 @@ The trigger for the deployment is new code change on the server on the current branch (PR merged, push). New code change locally will not trigger deployment so local development workflow is also supported. -Multiple remote repos are supported so the "PAVICS stack" can be made of +Multiple remote repos are supported so the "Birdhouse stack" can be made of multiple checkouts for modularity and extensibility. The autodeploy will -trigger if any of the checkouts (configured in ``AUTODEPLOY_EXTRA_REPOS``) is +trigger if any of the checkouts (configured in ``BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS``) is not up-to-date with its remote repo. -A suggested "PAVICS stack" is made of at least 2 repos, this repo and another +A suggested "Birdhouse stack" is made of at least 2 repos, this repo and another private repo containing the source controlled ``env.local`` file and any other docker-compose override for true infrastructure-as-code. @@ -45,11 +45,11 @@ Usage Given the unattended nature, there is no UI. Logs are used to keep trace. -- ``/var/log/PAVICS/autodeploy.log`` is for the PAVICS deployment. +- ``${BIRDHOUSE_LOG_DIR}/autodeploy.log`` is for the Birdhouse deployment. -- ``/var/log/PAVICS/notebookdeploy.log`` is for the tutorial notebooks deployment. +- ``${BIRDHOUSE_LOG_DIR}/notebookdeploy.log`` is for the tutorial notebooks deployment. -- logrotate is enabled for ``/var/log/PAVICS/*.log`` to avoid filling up the +- logrotate is enabled for ``${BIRDHOUSE_LOG_DIR}/*.log`` to avoid filling up the disk. Any new ``.log`` files in that folder will get logrotate for free. @@ -58,9 +58,9 @@ How to Enable the Component - Edit ``env.local`` (a copy of env.local.example_ (:download:`download <../env.local.example>`)) - - Add "./components/scheduler" to ``EXTRA_CONF_DIRS``. 
- - Set ``AUTODEPLOY_EXTRA_REPOS``, ``AUTODEPLOY_DEPLOY_KEY_ROOT_DIR``, - ``AUTODEPLOY_PLATFORM_FREQUENCY``, ``AUTODEPLOY_NOTEBOOK_FREQUENCY`` as desired, + - Add "./components/scheduler" to ``BIRDHOUSE_EXTRA_CONF_DIRS``. + - Set ``BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS``, ``BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR``, + ``BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY``, ``BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY`` as desired, full documentation in `env.local.example`_. - Run once fix-write-perm_ (:download:`download <../deployment/fix-write-perm>`), see doc in script. @@ -77,12 +77,12 @@ Configure logrotate for all following automations to prevent disk full:: deployment/install-logrotate-config .. $USER -To enable continuous deployment of PAVICS:: +To enable continuous deployment of Birdhouse:: deployment/install-automated-deployment.sh .. $USER [daily|5-mins] # read the script for more options/details -If you want to manually force a deployment of PAVICS (note this might not use +If you want to manually force a deployment of Birdhouse (note this might not use latest version of deploy.sh_ script (:download:`download <../deployment/deploy.sh>`):: deployment/deploy.sh . @@ -96,14 +96,14 @@ To enable continuous deployment of tutorial Jupyter notebooks:: To trigger tutorial Jupyter notebooks deploy manually:: # configure logrotate before because this script will log to - # /var/log/PAVICS/notebookdeploy.log + # ${BIRDHOUSE_LOG_DIR}/notebookdeploy.log deployment/trigger-deploy-notebook # read the script for more details Migrating to the new mechanism requires manual deletion of all the artifacts -created by the old install scripts: ``sudo rm /etc/cron.d/PAVICS-deploy -/etc/cron.hourly/PAVICS-deploy-notebooks /etc/logrotate.d/PAVICS-deploy +created by the old install scripts: ``sudo rm /etc/cron.d/Birdhouse-deploy +/etc/cron.hourly/birdhouse-deploy-notebooks /etc/logrotate.d/Birdhouse-deploy /usr/local/sbin/triggerdeploy.sh``. Both can not co-exist at the same time. @@ -112,7 +112,7 @@ Comparison between the old and new autodeploy mechanism Maximum backward-compatibility has been kept with the old install scripts style: -* Still log to the same existing log files under ``/var/log/PAVICS``. +* Still log to the same existing log files under ``${BIRDHOUSE_LOG_DIR}``. * Old single ssh deploy key is still compatible, but the new mechanism allows for different ssh deploy keys for each extra repos (again, public repos should use https clone path to avoid dealing with ssh deploy keys in the first place). @@ -122,7 +122,7 @@ Features missing in old install scripts or how the new mechanism improves on the * Autodeploy of the autodeploy itself ! This is the biggest win. Previously, if triggerdeploy.sh_ (:download:`download <../deployment/triggerdeploy.sh>`) - or the deployed ``/etc/cron.hourly/PAVICS-deploy-notebooks`` script changes, they have to be deployed manually. + or the deployed ``/etc/cron.hourly/birdhouse-deploy-notebooks`` script changes, they have to be deployed manually. It's very annoying. Now they are volume-mount in so are fresh on each run. * ``env.local`` now drives absolutely everything, source control that file and we've got a true DevOPS pipeline. * Configurable platform and notebook autodeploy frequency. Previously, this means manually editing the generated cron @@ -142,7 +142,7 @@ There are 2 tests that need to be performed: * Can autodeploy deploy the PR from ``master`` branch, the stable reference point? - * This could fail if some changes in the PR are incompatible with autodeploy. 
For example: ``./pavics-compose.sh`` calls some binaries that do not exist in the autodeploy docker image.
+  * This could fail if some changes in the PR are incompatible with autodeploy. For example: ``birdhouse compose`` calls some binaries that do not exist in the autodeploy docker image.
 
 * Can autodeploy be triggered again successfully, after the PR is live?
 
@@ -150,17 +150,17 @@ There are 2 tests that need to be performed:
 
 Here is a sample setup to test autodeploy:
 
-* Have 2 checkout directories. One is for starting the stack using ``./pavics-compose.sh``, the other one is to push new bogus changes to trigger the autodeploy mechanism.
+* Have 2 checkout directories. One is for starting the stack using ``birdhouse compose``, the other one is to push new bogus changes to trigger the autodeploy mechanism.
 
   .. code-block:: shell
 
-    # this one for running pavics-compose.sh
+    # this one for running birdhouse compose
     git clone git@github.com:bird-house/birdhouse-deploy.git birdhouse-deploy
 
     # this one for triggering autodeploy
     git clone git@github.com:bird-house/birdhouse-deploy.git birdhouse-deploy-trigger
 
-* Set ``AUTODEPLOY_PLATFORM_FREQUENCY`` in ``env.local`` to a very frequent value so you do not have to wait too long for autodeploy to trigger.
+* Set ``BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY`` in ``env.local`` to a very frequent value so you do not have to wait too long for autodeploy to trigger.
 
 .. code-block:: shell
 
@@ -168,18 +168,18 @@ Here is a sample setup to test autodeploy:
     cd birdhouse-deploy/birdhouse
 
     # ensure the scheduler component is enabled, otherwise autodeploy will not work
-    echo 'export EXTRA_CONF_DIRS="$EXTRA_CONF_DIRS ./components/scheduler" >> env.local
+    echo 'export BIRDHOUSE_EXTRA_CONF_DIRS="$BIRDHOUSE_EXTRA_CONF_DIRS ./components/scheduler"' >> env.local
 
-    # set AUTODEPLOY_PLATFORM_FREQUENCY
+    # set BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY
     # can set to more frequent than 5 minutes if your machine is capable enough
-    echo 'export AUTODEPLOY_PLATFORM_FREQUENCY="@every 5m"' >> env.local
+    echo 'export BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY="@every 5m"' >> env.local
 
     # if scheduler container already running:
-    # recreate scheduler container for new AUTODEPLOY_PLATFORM_FREQUENCY to be effective
-    ./pavics-compose.sh stop scheduler && ./pavics-compose.sh rm -vf scheduler && ./pavics-compose.sh up -d
+    # recreate scheduler container for new BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY to be effective
+    birdhouse compose stop scheduler && birdhouse compose rm -vf scheduler && birdhouse compose up -d
 
     # if scheduler container not running yet: start the newly added scheduler component
-    ./pavics-compose.sh up -d
+    birdhouse compose up -d
 
 * Create a ``${USER}-test`` branch so you can add bogus commits without affecting your real PR. Set up your main checkout (birdhouse-deploy) to track that test branch so it will detect new changes on the test branch and trigger the autodeploy.
@@ -197,7 +197,7 @@ Here is a sample setup to test autodeploy: # ensure your runnings code is at "master" and is working correctly # if you do not have a working baseline, you will not know if the breakage is due to autodeploy or your code - ./pavics-compose.sh up -d + birdhouse compose up -d * Test scenario 1, from ``master`` to your PR @@ -216,7 +216,7 @@ Here is a sample setup to test autodeploy: # the autodeploy mechanism will detect that the remote branch has changed and attempt to update the local branch # follow logs, check for errors - tail -f /var/log/PAVICS/autodeploy.log + tail -f ${BIRDHOUSE_LOG_DIR}/autodeploy.log # each autodeploy trigger will start the log with # ========== @@ -245,9 +245,9 @@ Here is a sample setup to test autodeploy: # the autodeploy mechanism will detect that the remote branch has changed and attempt to update the local branch # follow logs, check for errors - tail -f /var/log/PAVICS/autodeploy.log + tail -f ${BIRDHOUSE_LOG_DIR}/autodeploy.log -* Test done, clean up the bogus ``${USER}-test`` branch and optionally relax ``AUTODEPLOY_PLATFORM_FREQUENCY`` +* Test done, clean up the bogus ``${USER}-test`` branch and optionally relax ``BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY`` .. code-block:: shell @@ -266,21 +266,21 @@ Here is a sample setup to test autodeploy: git checkout YOUR_PR_BRANCH git branch -D ${USER}-test - # edit env.local and change AUTODEPLOY_PLATFORM_FREQUENCY to something less frequent to save your cpu + # edit env.local and change BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY to something less frequent to save your cpu # do not remove the scheduler component from the stack yet or the next command will fail - # recreate scheduler container for new AUTODEPLOY_PLATFORM_FREQUENCY to be effective - ./pavics-compose.sh stop scheduler && ./pavics-compose.sh rm -vf scheduler && ./pavics-compose.sh up -d + # recreate scheduler container for new BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY to be effective + birdhouse compose stop scheduler && birdhouse compose rm -vf scheduler && birdhouse compose up -d # optionally edit env.local to remove the scheduler component from the stack # then remove the running scheduler container - ./pavics-compose.sh up -d --remove-orphans + birdhouse compose up -d --remove-orphans Monitoring ========== -This component provides monitoring and alerting for the PAVICS physical host and containers. +This component provides monitoring and alerting for the Birdhouse physical host and containers. Prometheus stack is used: @@ -294,9 +294,9 @@ Prometheus stack is used: Usage ----- -- Grafana to view metric graphs: https://PAVICS_FQDN/grafana/d/pf6xQMWGz/docker-and-system-monitoring -- Prometheus alert rules: https://PAVICS_FQDN/prometheus/rules -- AlertManager to manage alerts: https://PAVICS_FQDN/alertmanager +- Grafana to view metric graphs: https://BIRDHOUSE_FQDN/grafana/d/pf6xQMWGz/docker-and-system-monitoring +- Prometheus alert rules: https://BIRDHOUSE_FQDN/prometheus/rules +- AlertManager to manage alerts: https://BIRDHOUSE_FQDN/alertmanager The paths above are by default only accessible to a user logged in to magpie as an administrator or as a member of group ``monitoring``. 
These routes provide sensitive information about the @@ -313,10 +313,10 @@ How to Enable the Component - Edit ``env.local`` (a copy of `env.local.example`_ (:download:`download <../env.local.example>`)) - - Add "./components/monitoring" to ``EXTRA_CONF_DIRS`` + - Add "./components/monitoring" to ``BIRDHOUSE_EXTRA_CONF_DIRS`` - Set ``GRAFANA_ADMIN_PASSWORD`` to login to Grafana - Set ``ALERTMANAGER_ADMIN_EMAIL_RECEIVER`` for receiving alerts - - Set ``SMTP_SERVER`` for sending alerts + - Set ``ALERTMANAGER_SMTP_SERVER`` for sending alerts - Optionally set - ``ALERTMANAGER_EXTRA_GLOBAL`` to further configure AlertManager @@ -408,17 +408,17 @@ and execution of custom applications and workflows. Usage ----- -Once this component is enabled, `Weaver`_ will be accessible at ``https:///weaver`` endpoint, -where ``PAVICS_FQDN_PUBLIC`` is defined in your ``env.local`` file. +Once this component is enabled, `Weaver`_ will be accessible at ``https:///weaver`` endpoint, +where ``BIRDHOUSE_FQDN_PUBLIC`` is defined in your ``env.local`` file. Full process listing (across WPS providers) should be available using request: .. code-block:: - GET https:///weaver/processes?providers=true + GET https:///weaver/processes?providers=true Please refer to the `Weaver OpenAPI`_ for complete description of available requests. -This description will also be accessible via ``https:///weaver/api`` once the instance is started. +This description will also be accessible via ``https:///weaver/api`` once the instance is started. For any specific details about `Weaver`_ configuration parameters, functionalities or questions, please refer to its `documentation `_. @@ -428,7 +428,7 @@ How to Enable the Component - Edit ``env.local`` (a copy of `env.local.example`_) - - Add ``./components/weaver`` to ``EXTRA_CONF_DIRS``. + - Add ``./components/weaver`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. - Component ``birdhouse/optional-components/all-public-access`` should also be enabled to ensure that `Weaver`_ can request ``GetCapabilities`` of every WPS provider to be registered. Publicly inaccessible services will not @@ -499,14 +499,14 @@ the various services of the platform when changes are detected. Therefore, it do from users. In case the platform maintainer desires to perform manual syncing operations with Cowbird, its REST API should be used. -It will be accessible under ``https://{PAVICS_FQDN_PUBLIC}/cowbird`` and details of available endpoints will be served +It will be accessible under ``https://{BIRDHOUSE_FQDN_PUBLIC}/cowbird`` and details of available endpoints will be served under ``/cowbird/api``. Note that Magpie administrator credentials will be required to access those endpoints. How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/cowbird`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/cowbird`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Customizing the Component ------------------------- @@ -535,11 +535,11 @@ Usage The STAC API can be browsed via the ``stac-browser`` component. By default, the browser will point to the STAC API exposed by the current stack instance. Once this component is enabled, STAC API will be accessible at -``https:///stac`` endpoint and the STAC browser will be available at -``https:///stac-browser`` endpoint. In order to make the STAC browser the default entrypoint, +``https:///stac`` endpoint and the STAC browser will be available at +``https:///stac-browser`` endpoint. 
In order to make the STAC browser the default entrypoint, define the following in the ``env.local`` file:: - export PROXY_ROOT_LOCATION="return 302 https://\$host/stac-browser;" + export BIRDHOUSE_PROXY_ROOT_LOCATION="return 302 https://\$host/stac-browser;" Here is a sample search query using a CLI:: @@ -556,7 +556,7 @@ How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/stac`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/stac`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Canarie-API =========== @@ -566,13 +566,13 @@ An endpoint monitoring tool that shows the current status of other components in Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}/canarie`` +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}/canarie`` How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/canarie`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/canarie`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. data-volume =========== @@ -598,13 +598,13 @@ degree-days of cooling, the duration of heatwaves, etc. This returns annual valu Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch`` +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch`` How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/finch`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/finch`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Geoserver ========= @@ -617,7 +617,7 @@ Geospatial Web. Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}/geoserver``. For usage and +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}/geoserver``. For usage and configuration options please refer to the `Geoserver documentation`_. .. _Geoserver documentation: https://docs.geoserver.org @@ -626,7 +626,7 @@ How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/geoserver`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/geoserver`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Hummingbird =========== @@ -636,13 +636,13 @@ A Web Processing Service for compliance checks used in the climate science commu Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird`` +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird`` How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/hummingbird`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/hummingbird`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Jupyterhub ========== @@ -653,14 +653,14 @@ end-users. Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}/jupyter``. Users are able to log in to Jupyterhub using the +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}/jupyter``. Users are able to log in to Jupyterhub using the same user name and password as Magpie. They will then be able to launch a personal jupyterlab server. How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/jupyterhub`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/jupyterhub`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. 
- Set the ``JUPYTERHUB_CRYPT_KEY`` environment variable Magpie @@ -672,7 +672,7 @@ User/Group/Service/Resource/Permission management and integrates with Twitcher. Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}/magpie``. For usage and configuration options please +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}/magpie``. For usage and configuration options please refer to the `Magpie documentation`_. .. _Magpie documentation: https://pavics-magpie.readthedocs.io @@ -705,14 +705,14 @@ A web based container deployment and management tool. Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}/portainer/``. For usage and configuration options please +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}/portainer/``. For usage and configuration options please refer to the `portainer documentation`_. How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/portainer`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/portainer`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. .. _portainer documentation: https://docs.portainer.io/ @@ -756,13 +756,13 @@ processing as well as time series analysis. Usage ----- -The service is available at ``https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven`` +The service is available at ``https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven`` How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/raven`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/raven`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Thredds ======= @@ -774,13 +774,13 @@ Climate Data Catalog and Format Renderers. See the `Thredds documentation`_ for Usage ----- -The catalog is available at the ``https://${PAVICS_FQDN_PUBLIC}/thredds`` endpoint. +The catalog is available at the ``https://${BIRDHOUSE_FQDN_PUBLIC}/thredds`` endpoint. How to Enable the Component --------------------------- - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./components/thredds`` to ``EXTRA_CONF_DIRS``. +- Add ``./components/thredds`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Twitcher ======== @@ -810,7 +810,7 @@ of all processes executed by these services. Usage ----- -All outputs from these processes will become available at the ``https://${PAVICS_FQDN_PUBLIC}/wpsoutputs`` endpoint. +All outputs from these processes will become available at the ``https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs`` endpoint. By default, this endpoint is not protected. To secure access to this endpoint it is highly recommended to enable the `./optional-components/secure-data-proxy` component as well. diff --git a/birdhouse/components/canarie-api/docker_configuration.py.template b/birdhouse/components/canarie-api/docker_configuration.py.template index 43ea1079c..154e157c7 100644 --- a/birdhouse/components/canarie-api/docker_configuration.py.template +++ b/birdhouse/components/canarie-api/docker_configuration.py.template @@ -10,7 +10,7 @@ import requests_cache # see entrypoint script logger = logging.getLogger("canarie-api-config") -MY_SERVER_NAME = 'https://${PAVICS_FQDN_PUBLIC}/canarie' +MY_SERVER_NAME = 'https://${BIRDHOUSE_FQDN_PUBLIC}/canarie' DATABASE = { 'filename': '/data/stats.db', @@ -108,25 +108,25 @@ SERVICES = { # NOTE: # Below version and release time auto-managed by 'make VERSION=x.y.z bump'. # Do NOT modify it manually. See 'Tagging policy' in 'birdhouse/README.rst'. 
- 'version': '2.3.3', - 'releaseTime': '2024-05-29T12:13:00Z', - 'institution': '${SERVER_INSTITUTION}', - 'researchSubject': '${SERVER_SUBJECT}', - 'supportEmail': '${SUPPORT_EMAIL}', + 'version': '2.4.0', + 'releaseTime': '2024-06-04T17:41:34Z', + 'institution': '${BIRDHOUSE_INSTITUTION}', + 'researchSubject': '${BIRDHOUSE_SUBJECT}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Resource/Cloud Management', - 'tags': [tag.strip() for tag in "${SERVER_TAGS}".split(",") if tag.strip()], + 'tags': [tag.strip() for tag in "${BIRDHOUSE_TAGS}".split(",") if tag.strip()], }, 'stats': { 'method': '.*', 'route': '(?!)' # this will be set by CANARIE_STATS_ROUTES (see below) }, 'redirect': { - 'doc': '${SERVER_DOCUMENTATION_URL}', - 'releasenotes': '${SERVER_RELEASE_NOTES_URL}', - 'support': '${SERVER_SUPPORT_URL}', + 'doc': '${BIRDHOUSE_DOCUMENTATION_URL}', + 'releasenotes': '${BIRDHOUSE_RELEASE_NOTES_URL}', + 'support': '${BIRDHOUSE_SUPPORT_URL}', 'source': 'https://github.com/bird-house/birdhouse-deploy', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}', - 'licence': '${SERVER_LICENSE_URL}', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}', + 'licence': '${BIRDHOUSE_LICENSE_URL}', 'provenance': 'https://pavics-sdi.readthedocs.io/en/latest/provenance/index.html' }, 'monitoring': {} # filled in after processing everything, see end of script @@ -136,17 +136,17 @@ SERVICES = { PLATFORMS = { 'server': { 'info': { - 'name': '${SERVER_NAME}', - 'synopsis': """${SERVER_DESCRIPTION}""".replace("\n", " ").strip(), + 'name': '${BIRDHOUSE_NAME}', + 'synopsis': """${BIRDHOUSE_DESCRIPTION}""".replace("\n", " ").strip(), # NOTE: # Below version and release time auto-managed by 'make VERSION=x.y.z bump'. # Do NOT modify it manually. See 'Tagging policy' in 'birdhouse/README.rst'. 
- 'version': '2.3.3', - 'releaseTime': '2024-05-29T12:13:00Z', - 'institution': '${SERVER_INSTITUTION}', - 'researchSubject': '${SERVER_SUBJECT}', - 'supportEmail': '${SUPPORT_EMAIL}', - 'tags': [tag.strip() for tag in "${SERVER_TAGS}".split(",") if tag.strip()], + 'version': '2.4.0', + 'releaseTime': '2024-06-04T17:41:34Z', + 'institution': '${BIRDHOUSE_INSTITUTION}', + 'researchSubject': '${BIRDHOUSE_SUBJECT}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', + 'tags': [tag.strip() for tag in "${BIRDHOUSE_TAGS}".split(",") if tag.strip()], }, 'stats': { 'method': '.*', @@ -157,7 +157,7 @@ PLATFORMS = { 'releasenotes': 'https://github.com/bird-house/birdhouse-deploy/releases', 'support': 'https://github.com/Ouranosinc/pavics-sdi/issues', 'source': 'https://github.com/Ouranosinc/pavics-sdi', - 'tryme': 'https://pavics.ouranos.ca', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}', 'licence': 'https://pavics-sdi.readthedocs.io/en/latest/license.html', 'provenance': 'https://pavics-sdi.readthedocs.io/en/latest/provenance/index.html', 'factsheet': 'http://www.canarie.ca/software/pavics' diff --git a/birdhouse/components/cowbird/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/cowbird/config/canarie-api/canarie_api_monitoring.py.template index f97e0d380..1d693d201 100644 --- a/birdhouse/components/cowbird/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/cowbird/config/canarie-api/canarie_api_monitoring.py.template @@ -6,7 +6,7 @@ SERVICES['Cowbird'] = { 'institution': 'Ouranos, CRIM', 'releaseTime': get_release_time_from_repo_tag("github", "Ouranosinc/cowbird", "${COWBIRD_VERSION}"), 'researchSubject': 'Any', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Security', 'tags': ['Security', 'Management', 'Access', 'Policy Decision Point'] }, @@ -19,7 +19,7 @@ SERVICES['Cowbird'] = { 'releasenotes': 'https://github.com/Ouranosinc/cowbird//blob/master/CHANGES.rst', 'support': 'https://github.com/Ouranosinc/cowbird//issues', 'source': 'https://github.com/Ouranosinc/cowbird/', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/cowbird/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/cowbird/', 'licence': 'https://github.com/Ouranosinc/cowbird//blob/${COWBIRD_VERSION}/LICENSE', 'provenance': 'https://github.com/Ouranosinc/cowbird/' }, diff --git a/birdhouse/components/cowbird/config/cowbird/config.yml.template b/birdhouse/components/cowbird/config/cowbird/config.yml.template index 1b5397c67..096dcbc61 100644 --- a/birdhouse/components/cowbird/config/cowbird/config.yml.template +++ b/birdhouse/components/cowbird/config/cowbird/config.yml.template @@ -15,13 +15,13 @@ handlers: Geoserver: active: true # FIXME: replace by the protected/proxy endpoint once component is made available - url: http://${PAVICS_FQDN_PUBLIC}:8765/geoserver + url: http://${BIRDHOUSE_FQDN_PUBLIC}:8765/geoserver workspace_dir: ${WORKSPACE_DIR} admin_user: ${GEOSERVER_ADMIN_USER} admin_password: ${GEOSERVER_ADMIN_PASSWORD} Catalog: active: true - url: https://${PAVICS_FQDN_PUBLIC}/twitcher/ows/proxy/catalog + url: https://${BIRDHOUSE_FQDN_PUBLIC}/twitcher/ows/proxy/catalog workspace_dir: ${WORKSPACE_DIR} Thredds: active: true diff --git a/birdhouse/components/cowbird/config/cowbird/cowbird.ini.template b/birdhouse/components/cowbird/config/cowbird/cowbird.ini.template index 3aa33da29..a1b3daca9 100644 --- a/birdhouse/components/cowbird/config/cowbird/cowbird.ini.template +++ 
b/birdhouse/components/cowbird/config/cowbird/cowbird.ini.template @@ -32,7 +32,7 @@ mongo_uri = mongodb://${COWBIRD_MONGODB_HOST}:${COWBIRD_MONGODB_PORT}/cowbird # below values are for the external definitions after proxy resolution # internal app access is defined in [server:main] section cowbird.port = -cowbird.url = https://${PAVICS_FQDN_PUBLIC}/cowbird +cowbird.url = https://${BIRDHOUSE_FQDN_PUBLIC}/cowbird [app:api_app] use = egg:Paste#static diff --git a/birdhouse/components/cowbird/config/jupyterhub/docker-compose-extra.yml b/birdhouse/components/cowbird/config/jupyterhub/docker-compose-extra.yml index 4565450e9..8ed8894c9 100644 --- a/birdhouse/components/cowbird/config/jupyterhub/docker-compose-extra.yml +++ b/birdhouse/components/cowbird/config/jupyterhub/docker-compose-extra.yml @@ -13,7 +13,7 @@ services: - ${JUPYTERHUB_USER_DATA_DIR}:${JUPYTERHUB_USER_DATA_DIR} jupyterhub: environment: - WORKSPACE_DIR: ${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} + WORKSPACE_DIR: ${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR: ${COWBIRD_PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR} volumes: - - "${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}:${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}" + - "${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}:${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}" diff --git a/birdhouse/components/cowbird/config/proxy/conf.extra-service.d/cowbird.conf.template b/birdhouse/components/cowbird/config/proxy/conf.extra-service.d/cowbird.conf.template index d0fae67b1..00d297a06 100644 --- a/birdhouse/components/cowbird/config/proxy/conf.extra-service.d/cowbird.conf.template +++ b/birdhouse/components/cowbird/config/proxy/conf.extra-service.d/cowbird.conf.template @@ -1,6 +1,6 @@ location /cowbird { - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/cowbird; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/cowbird; proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Host $host:$server_port; diff --git a/birdhouse/components/cowbird/default.env b/birdhouse/components/cowbird/default.env index eed5df5f3..d815c4c77 100644 --- a/birdhouse/components/cowbird/default.env +++ b/birdhouse/components/cowbird/default.env @@ -9,7 +9,7 @@ # are applied and must be added to the list of DELAYED_EVAL. 
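The "single quotes / DELAYED_EVAL" convention referenced in the comment above deserves a quick illustration: a default written with single quotes keeps its variable references literal until the compose script re-evaluates it after the ``env.local`` overrides have been sourced. Below is a minimal, self-contained shell sketch of that idiom; the names ``MY_ROOT`` and ``MY_DATA_DIR`` are hypothetical, and the expansion loop is a simplification of what the stack's scripts actually do:

    #!/bin/sh
    # Default uses single quotes, so ${MY_ROOT} stays literal for now.
    MY_ROOT="/data"
    MY_DATA_DIR='${MY_ROOT}/my_component_persist'
    DELAYED_EVAL="$DELAYED_EVAL MY_DATA_DIR"

    # ... later, env.local overrides are sourced and may change MY_ROOT ...
    MY_ROOT="/custom/disk"

    # Finally, each delayed variable is expanded against the overridden values.
    for var in $DELAYED_EVAL; do
        template=$(eval "echo \"\$$var\"")   # fetch the literal template, e.g. ${MY_ROOT}/...
        eval "$var=\"$template\""            # expand it now and store the final value
    done

    echo "$MY_DATA_DIR"   # prints /custom/disk/my_component_persist

This is the same pattern the various ``default.env`` files in this change rely on when they append names such as ``BIRDHOUSE_POSTGRES_DB`` or ``BIRDHOUSE_LOGROTATE_DATA_DIR`` to ``DELAYED_EVAL``.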
# add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' ${COWBIRD_VERSION} ${COWBIRD_MONGODB_VERSION} @@ -21,7 +21,7 @@ EXTRA_VARS=' ${COWBIRD_PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR} ${SECURE_DATA_PROXY_NAME} ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" @@ -47,7 +47,7 @@ export COWBIRD_MONGODB_PORT=27017 # WARN: only potential problems/unexpected results reported export COWBIRD_LOG_LEVEL=INFO -# Subdirectory of DATA_PERSIST_SHARED_ROOT containing the user workspaces used by Cowbird +# Subdirectory of BIRDHOUSE_DATA_PERSIST_SHARED_ROOT containing the user workspaces used by Cowbird export USER_WORKSPACES="user_workspaces" # Path to the README.ipynb for welcoming and guiding new users to Jupyterhub. @@ -64,9 +64,9 @@ export JUPYTERHUB_README="${JUPYTERHUB_README:-${JUPYTERHUB_COWBIRD_README}}" # NOTE: # Most WPS birds do not have a concept of Public vs User-specific outputs. # These birds will employ the same WPS output directory for all jobs, regardless of the user running it. -# By default, WPS output files will be stored under '${WPS_OUTPUTS_DIR}/', and must all be considered 'public'. +# By default, WPS output files will be stored under '${BIRDHOUSE_WPS_OUTPUTS_DIR}/', and must all be considered 'public'. # Some WPS-capable birds such as Weaver do have a concept of Public/User-context for WPS outputs. -# In this case, files under '${WPS_OUTPUTS_DIR}/' should have an additional nesting +# In this case, files under '${BIRDHOUSE_WPS_OUTPUTS_DIR}/' should have an additional nesting # with 'public' and 'users/{user_id}'. Variable 'PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR' will be shared for such cases. export PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR=public export COWBIRD_PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR='${PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR}/wps_outputs' @@ -74,7 +74,7 @@ export COWBIRD_PUBLIC_WORKSPACE_WPS_OUTPUTS_SUBDIR='${PUBLIC_WORKSPACE_WPS_OUTPU # Default name for the secure-data-proxy service from Magpie. export SECURE_DATA_PROXY_NAME="secure-data-proxy" -COWBIRD_MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_cowbird_persist' +COWBIRD_MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_cowbird_persist' DELAYED_EVAL=" $DELAYED_EVAL diff --git a/birdhouse/components/cowbird/docker-compose-extra.yml b/birdhouse/components/cowbird/docker-compose-extra.yml index 5ad767499..4583ca874 100644 --- a/birdhouse/components/cowbird/docker-compose-extra.yml +++ b/birdhouse/components/cowbird/docker-compose-extra.yml @@ -27,9 +27,9 @@ services: SECURE_DATA_PROXY_NAME: ${SECURE_DATA_PROXY_NAME} # Note that WPS_OUTPUTS_DIR and WORKSPACE_DIR must both point to paths from the same volume. # This is to allow the creation of hardlinks between the wpsoutputs and the user workspace. - # By default, both are mounted under the "${DATA_PERSIST_SHARED_ROOT}" volume. 
- WPS_OUTPUTS_DIR: ${WPS_OUTPUTS_DIR} - WORKSPACE_DIR: ${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} + # By default, both are mounted under the "${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}" volume. + WPS_OUTPUTS_DIR: ${BIRDHOUSE_WPS_OUTPUTS_DIR} + WORKSPACE_DIR: ${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} links: - cowbird-mongodb networks: @@ -42,7 +42,7 @@ services: - ./components/cowbird/config/cowbird/cowbird.ini:/opt/local/src/cowbird/config/cowbird.ini # even if not running tasks here, they must be registered to send them off to the right place! - ./components/cowbird/config/cowbird/celeryconfig.py:/opt/local/src/cowbird/config/celeryconfig.py - - "${DATA_PERSIST_SHARED_ROOT}:${DATA_PERSIST_SHARED_ROOT}" + - "${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}:${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}" restart: always logging: *default-logging @@ -58,7 +58,7 @@ services: # root user COWBIRD_FILESYSTEM_ADMIN_UID: 0 COWBIRD_FILESYSTEM_ADMIN_GID: 0 - WORKSPACE_DIR: ${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} + WORKSPACE_DIR: ${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} links: - cowbird-mongodb networks: @@ -71,7 +71,7 @@ services: - ./components/cowbird/config/cowbird/config.yml:/opt/local/src/cowbird/config/cowbird.yml - ./components/cowbird/config/cowbird/cowbird.ini:/opt/local/src/cowbird/config/cowbird.ini - ./components/cowbird/config/cowbird/celeryconfig.py:/opt/local/src/cowbird/config/celeryconfig.py - - "${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}:${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}" + - "${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}:${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES}" restart: always logging: *default-logging diff --git a/birdhouse/components/finch/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/finch/config/canarie-api/canarie_api_monitoring.py.template index 9b9a8a75c..58b113166 100644 --- a/birdhouse/components/finch/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/finch/config/canarie-api/canarie_api_monitoring.py.template @@ -14,7 +14,7 @@ SERVICES['indices'] = { 'institution': 'Ouranos', 'releaseTime': FINCH_RELEASE_TIME, 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Climatology', 'Cloud'] }, @@ -34,7 +34,7 @@ SERVICES['indices'] = { 'monitoring': { 'Finch': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&version=1.0.0&request=GetCapabilities' } }, } @@ -47,7 +47,7 @@ SERVICES['slicer'] = { 'institution': 'Ouranos', 'releaseTime': '2021-03-25T00:00:00Z', 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Climatology', 'Cloud'] }, @@ -85,7 +85,7 @@ SERVICES['Finch'] = { 'institution': 'Ouranos', 'releaseTime': FINCH_RELEASE_TIME, 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Climatology', 'Cloud'] }, diff --git a/birdhouse/components/finch/service-config.json.template b/birdhouse/components/finch/service-config.json.template index bceffe0fa..37642e732 100644 --- a/birdhouse/components/finch/service-config.json.template +++ 
b/birdhouse/components/finch/service-config.json.template @@ -14,7 +14,7 @@ { "rel": "service", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&request=GetCapabilities" }, { "rel": "service-doc", @@ -24,7 +24,7 @@ { "rel": "service-desc", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&request=GetCapabilities" }, { "rel": "service-meta", diff --git a/birdhouse/components/finch/wps.cfg.template b/birdhouse/components/finch/wps.cfg.template index e61c8af86..19aee28f1 100644 --- a/birdhouse/components/finch/wps.cfg.template +++ b/birdhouse/components/finch/wps.cfg.template @@ -1,5 +1,5 @@ [server] -outputurl = https://${PAVICS_FQDN_PUBLIC}/wpsoutputs/finch +outputurl = https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs/finch outputpath = /data/wpsoutputs/finch # default 3mb, fix "Broken pipe" between the proxy and the wps service @@ -10,6 +10,6 @@ parallelprocesses = 10 [logging] level = INFO -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/finch +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/finch -${EXTRA_PYWPS_CONFIG} +${BIRDHOUSE_EXTRA_PYWPS_CONFIG} diff --git a/birdhouse/components/geoserver/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/geoserver/config/canarie-api/canarie_api_monitoring.py.template index 67fe4d41f..876128992 100644 --- a/birdhouse/components/geoserver/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/geoserver/config/canarie-api/canarie_api_monitoring.py.template @@ -11,7 +11,7 @@ SERVICES['GeoServer'] = { 'institution': 'Ouranos', 'releaseTime': get_release_time_from_repo_tag("docker", "${GEOSERVER_DOCKER}", "${GEOSERVER_TAGGED}"), 'researchSubject': 'Geospatial', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Catalog', 'tags': ['Data', 'Geospatial', 'Catalog', 'OGC', 'WFS', 'WMS', 'WPS'] }, @@ -24,14 +24,14 @@ SERVICES['GeoServer'] = { 'releasenotes': 'https://geoserver.org/release/${GEOSERVER_VERSION}/', 'support': 'https://github.com/kartoza/docker-geoserver/issues', 'source': 'https://github.com/kartoza/docker-geoserver', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/geoserver/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/geoserver/', 'licence': 'https://github.com/geoserver/geoserver/blob/${GEOSERVER_VERSION}/LICENSE.txt', 'provenance': 'https://github.com/kartoza/docker-geoserver' }, "monitoring": { "GeoServer": { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/geoserver/web/' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/geoserver/web/' } } } diff --git a/birdhouse/components/geoserver/config/proxy/conf.extra-service.d/geoserver.conf.template b/birdhouse/components/geoserver/config/proxy/conf.extra-service.d/geoserver.conf.template index 9534a1bf0..8b3e0de6b 100644 --- a/birdhouse/components/geoserver/config/proxy/conf.extra-service.d/geoserver.conf.template +++ b/birdhouse/components/geoserver/config/proxy/conf.extra-service.d/geoserver.conf.template @@ -19,7 +19,7 @@ # If GEOSERVER_SKIP_AUTH is "True" then the following section is skipped and this # location block will always return 200 (which means that the /geoserver/ location, 
above, # will be publicly available. - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/geoserver$request_uri; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/geoserver$request_uri; proxy_pass_request_body off; proxy_set_header Host $host; proxy_set_header Content-Length ""; diff --git a/birdhouse/components/geoserver/default.env b/birdhouse/components/geoserver/default.env index 9db03a085..35a907baf 100644 --- a/birdhouse/components/geoserver/default.env +++ b/birdhouse/components/geoserver/default.env @@ -25,7 +25,7 @@ export GEOSERVER_STABLE_EXTENSIONS="grib-plugin,netcdf-plugin,netcdf-out-plugin, export GEOSERVER_COMMUNITY_EXTENSIONS="geopkg-plugin" # Must use single-quote for delayed eval. -export GEOSERVER_DATA_DIR='${DATA_PERSIST_ROOT}/geoserver' +export GEOSERVER_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/geoserver' # If set, requests to the geoserver endpoint will not be authorized through twitcher/magpie export GEOSERVER_SKIP_AUTH=False diff --git a/birdhouse/components/geoserver/docker-compose-extra.yml b/birdhouse/components/geoserver/docker-compose-extra.yml index ca11dc0b9..b51c7e395 100644 --- a/birdhouse/components/geoserver/docker-compose-extra.yml +++ b/birdhouse/components/geoserver/docker-compose-extra.yml @@ -20,7 +20,7 @@ services: JAVA_OPTS: --add-exports=java.desktop/com.sun.imageio.plugins.jpeg=ALL-UNNAMED MAXIMUM_MEMORY: 8G # https://github.com/kartoza/docker-geoserver#proxy-base-url - HTTP_PROXY_NAME: ${PAVICS_FQDN_PUBLIC} + HTTP_PROXY_NAME: ${BIRDHOUSE_FQDN_PUBLIC} HTTP_SCHEME: https volumes: # run deployment/fix-geoserver-data-dir-perm on existing diff --git a/birdhouse/components/geoserver/service-config.json.template b/birdhouse/components/geoserver/service-config.json.template index b30a5bfdb..acf7c38aa 100644 --- a/birdhouse/components/geoserver/service-config.json.template +++ b/birdhouse/components/geoserver/service-config.json.template @@ -20,7 +20,7 @@ { "rel": "service", "type": "text/html", - "href": "https://${PAVICS_FQDN_PUBLIC}/geoserver/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/geoserver/" }, { "rel": "service-doc", diff --git a/birdhouse/components/hummingbird/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/hummingbird/config/canarie-api/canarie_api_monitoring.py.template index 935399dbb..cfc5ffc83 100644 --- a/birdhouse/components/hummingbird/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/hummingbird/config/canarie-api/canarie_api_monitoring.py.template @@ -12,7 +12,7 @@ SERVICES['hummingbird'] = { 'institution': 'bird-house', 'releaseTime': HUMMINGBIRD_RELEASE, 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Processing', 'tags': ['Climatology', 'Checker', 'Compliance', 'CF-conventions', 'WPS', 'OGC'], }, @@ -25,7 +25,7 @@ SERVICES['hummingbird'] = { 'releasenotes': 'https://github.com/bird-house/hummingbird/blob/master/CHANGES.rst', 'support': 'https://github.com/bird-house/hummingbird/issues', 'source': 'https://github.com/bird-house/hummingbird', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird/wps?service=WPS&version=1.0.0&request=GetCapabilities', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird/wps?service=WPS&version=1.0.0&request=GetCapabilities', 'licence': 'https://github.com/bird-house/hummingbird/blob/master/LICENSE.txt', 'provenance': 'https://github.com/bird-house/hummingbird' }, diff 
--git a/birdhouse/components/hummingbird/custom.cfg.template b/birdhouse/components/hummingbird/custom.cfg.template index 71dd99b10..bf8f1301f 100644 --- a/birdhouse/components/hummingbird/custom.cfg.template +++ b/birdhouse/components/hummingbird/custom.cfg.template @@ -5,4 +5,4 @@ extends = profiles/docker.cfg enable-https = true https-port = 443 https-output-port = 443 -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/hummingbird +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/hummingbird diff --git a/birdhouse/components/hummingbird/service-config.json.template b/birdhouse/components/hummingbird/service-config.json.template index 0d3890849..7e36d64c2 100644 --- a/birdhouse/components/hummingbird/service-config.json.template +++ b/birdhouse/components/hummingbird/service-config.json.template @@ -14,7 +14,7 @@ { "rel": "service", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&request=GetCapabilities" }, { "rel": "service-doc", @@ -24,7 +24,7 @@ { "rel": "service-desc", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&request=GetCapabilities" }, { "rel": "service-meta", diff --git a/birdhouse/components/jupyterhub/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/jupyterhub/config/canarie-api/canarie_api_monitoring.py.template index 18af01348..1681c8804 100644 --- a/birdhouse/components/jupyterhub/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/jupyterhub/config/canarie-api/canarie_api_monitoring.py.template @@ -6,7 +6,7 @@ SERVICES['Jupyter'] = { 'releaseTime': get_release_time_from_repo_tag("docker", "${JUPYTERHUB_DOCKER}", "${JUPYTERHUB_VERSION}"), 'institution': 'Ouranos', 'researchSubject': 'Any', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Research', 'tags': ['Development', 'Research', 'Notebooks'] }, @@ -19,14 +19,14 @@ SERVICES['Jupyter'] = { 'releasenotes': 'https://github.com/Ouranosinc/jupyterhub/tags', # no CHANGES file available 'support': 'https://github.com/Ouranosinc/jupyterhub/issues', 'source': 'https://github.com/Ouranosinc/jupyterhub', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/jupyter/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/jupyter/', 'licence': 'https://github.com/Ouranosinc/jupyterhub/blob/${JUPYTERHUB_VERSION}/LICENSE', 'provenance': '' }, "monitoring": { "Jupyter": { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/jupyter/hub/login' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/jupyter/hub/login' }, } } diff --git a/birdhouse/components/jupyterhub/custom_templates/login.html.template b/birdhouse/components/jupyterhub/custom_templates/login.html.template index 9c986c66f..3832d5bd1 100644 --- a/birdhouse/components/jupyterhub/custom_templates/login.html.template +++ b/birdhouse/components/jupyterhub/custom_templates/login.html.template @@ -10,7 +10,7 @@ ${JUPYTER_LOGIN_BANNER_TOP_SECTION} resources.

- Contact ${SUPPORT_EMAIL} for information on how to + Contact ${BIRDHOUSE_SUPPORT_EMAIL} for information on how to get an account and a private workspace or the password of the public demo account.
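The JupyterHub variables renamed in ``birdhouse/components/jupyterhub/default.env`` just below also need to be carried over into any existing ``env.local`` overrides. A small illustrative sketch of such a migration (the image names and values are placeholders, not shipped defaults):

    # before (old names)
    export DOCKER_NOTEBOOK_IMAGES="example-org/notebook:1.0 example-org/notebook-gpu:1.0"
    export MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=true

    # after (new names introduced by this change)
    export JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES="example-org/notebook:1.0 example-org/notebook-gpu:1.0"
    export JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=true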

diff --git a/birdhouse/components/jupyterhub/default.env b/birdhouse/components/jupyterhub/default.env index 5b08b6936..b52b7ff2e 100644 --- a/birdhouse/components/jupyterhub/default.env +++ b/birdhouse/components/jupyterhub/default.env @@ -10,21 +10,21 @@ export JUPYTERHUB_IMAGE='${JUPYTERHUB_DOCKER}:${JUPYTERHUB_VERSION}' export JUPYTERHUB_IMAGE_URI='registry.hub.docker.com/${JUPYTERHUB_IMAGE}' # Jupyter single-user server images, can be overriden in env.local to have a space separated list of multiple images -export DOCKER_NOTEBOOK_IMAGES="pavics/workflow-tests:py311-240506-update240508" +export JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES="pavics/workflow-tests:py311-240506-update240508" # Name of the image displayed on the JupyterHub image selection page # Can be overriden in env.local to have a space separated list of multiple images, the name order must correspond -# to the order of the DOCKER_NOTEBOOK_IMAGES variable +# to the order of the JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES variable # Note that the selection names are also used as directory names for the tutorial-notebooks directories mounted when # starting the corresponding image. The name can use the '' or the ':' format. The version will be # excluded when mounting the corresponding directory. export JUPYTERHUB_IMAGE_SELECTION_NAMES="pavics" # Folder on the host to persist Jupyter user data (noteboooks, HOME settings) -export JUPYTERHUB_USER_DATA_DIR='${DATA_PERSIST_ROOT}/jupyterhub_user_data' +export JUPYTERHUB_USER_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/jupyterhub_user_data' # Activates mounting a tutorial-notebooks subfolder that has the same name as the spawned image on JupyterHub -export MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=false +export JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=false # Path to the file containing the clientID for the google drive extension for jupyterlab export JUPYTER_GOOGLE_DRIVE_SETTINGS="" @@ -89,7 +89,7 @@ VARS=" OPTIONAL_VARS=" $OPTIONAL_VARS - \$ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS + \$JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS \$JUPYTER_DEMO_USER \$JUPYTER_LOGIN_BANNER_TOP_SECTION \$JUPYTER_LOGIN_BANNER_BOTTOM_SECTION diff --git a/birdhouse/components/jupyterhub/docker-compose-extra.yml b/birdhouse/components/jupyterhub/docker-compose-extra.yml index d7722d2ee..5c2cc0a1d 100644 --- a/birdhouse/components/jupyterhub/docker-compose-extra.yml +++ b/birdhouse/components/jupyterhub/docker-compose-extra.yml @@ -13,7 +13,7 @@ services: container_name: jupyterhub hostname: jupyterhub environment: - DOCKER_NOTEBOOK_IMAGES: ${DOCKER_NOTEBOOK_IMAGES} + JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES: ${JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES} JUPYTERHUB_IMAGE_SELECTION_NAMES: ${JUPYTERHUB_IMAGE_SELECTION_NAMES} DOCKER_NETWORK_NAME: jupyterhub_network JUPYTERHUB_USER_DATA_DIR: ${JUPYTERHUB_USER_DATA_DIR} @@ -24,7 +24,7 @@ services: JUPYTER_DEMO_USER_CPU_LIMIT: ${JUPYTER_DEMO_USER_CPU_LIMIT} JUPYTER_GOOGLE_DRIVE_SETTINGS: ${JUPYTER_GOOGLE_DRIVE_SETTINGS} JUPYTERHUB_README: ${JUPYTERHUB_README} - MOUNT_IMAGE_SPECIFIC_NOTEBOOKS: ${MOUNT_IMAGE_SPECIFIC_NOTEBOOKS} + JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS: ${JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS} USER_WORKSPACE_UID: ${USER_WORKSPACE_UID} USER_WORKSPACE_GID: ${USER_WORKSPACE_GID} JUPYTERHUB_CRYPT_KEY: ${JUPYTERHUB_CRYPT_KEY} diff --git a/birdhouse/components/jupyterhub/jupyterhub_config.py.template b/birdhouse/components/jupyterhub/jupyterhub_config.py.template index ccc4afb4c..0fce675b0 100644 --- a/birdhouse/components/jupyterhub/jupyterhub_config.py.template +++ 
b/birdhouse/components/jupyterhub/jupyterhub_config.py.template @@ -16,7 +16,7 @@ c.JupyterHub.hub_ip = 'jupyterhub' c.JupyterHub.authenticator_class = 'jupyterhub_magpie_authenticator.MagpieAuthenticator' c.MagpieAuthenticator.magpie_url = "http://magpie:2001" -c.MagpieAuthenticator.public_fqdn = "${PAVICS_FQDN_PUBLIC}" +c.MagpieAuthenticator.public_fqdn = "${BIRDHOUSE_FQDN_PUBLIC}" c.MagpieAuthenticator.authorization_url = "${JUPYTERHUB_AUTHENTICATOR_AUTHORIZATION_URL}" if os.getenv("JUPYTERHUB_CRYPT_KEY"): @@ -59,7 +59,7 @@ class CustomDockerSpawner(DockerSpawner): return self.user.name async def start(self): - if(os.environ['MOUNT_IMAGE_SPECIFIC_NOTEBOOKS'] == 'true'): + if(os.environ['JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS'] == 'true'): host_dir = join(os.environ['JUPYTERHUB_USER_DATA_DIR'], 'tutorial-notebooks-specific-images') # Mount a volume with a tutorial-notebook subfolder corresponding to the image name, if it exists @@ -90,7 +90,7 @@ class CustomDockerSpawner(DockerSpawner): c.JupyterHub.spawner_class = CustomDockerSpawner # Selects the first image from the list by default -c.DockerSpawner.image = os.environ['DOCKER_NOTEBOOK_IMAGES'].split()[0] +c.DockerSpawner.image = os.environ['JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES'].split()[0] c.DockerSpawner.use_internal_ip = True c.DockerSpawner.network_name = os.environ['DOCKER_NETWORK_NAME'] @@ -107,10 +107,10 @@ c.DockerSpawner.environment = { # Post on Panel forum: # https://discourse.holoviz.org/t/how-to-customize-the-display-url-from-panel-serve-for-use-behind-jupyterhub-with-jupyter-server-proxy/3571 # Issue about Panel Preview: https://github.com/holoviz/panel/issues/3440 - "PAVICS_HOST_URL": "https://${PAVICS_FQDN_PUBLIC}", + "BIRDHOUSE_HOST_URL": "https://${BIRDHOUSE_FQDN_PUBLIC}", # https://docs.dask.org/en/stable/configuration.html # https://jupyterhub-on-hadoop.readthedocs.io/en/latest/dask.html - "DASK_DISTRIBUTED__DASHBOARD__LINK": "https://${PAVICS_FQDN_PUBLIC}{JUPYTERHUB_SERVICE_PREFIX}proxy/{port}/status" + "DASK_DISTRIBUTED__DASHBOARD__LINK": "https://${BIRDHOUSE_FQDN_PUBLIC}{JUPYTERHUB_SERVICE_PREFIX}proxy/{port}/status" } host_user_data_dir = join(os.environ['WORKSPACE_DIR'], "{username}") @@ -177,7 +177,7 @@ c.Spawner.disable_user_config = True c.DockerSpawner.default_url = '/lab' c.DockerSpawner.remove = True # delete containers when servers are stopped -${ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS} # noqa +${JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS} # noqa c.DockerSpawner.pull_policy = "always" # for images not using pinned version c.DockerSpawner.debug = True c.JupyterHub.log_level = logging.DEBUG diff --git a/birdhouse/components/jupyterhub/service-config.json.template b/birdhouse/components/jupyterhub/service-config.json.template index cbf7c8f72..59850d054 100644 --- a/birdhouse/components/jupyterhub/service-config.json.template +++ b/birdhouse/components/jupyterhub/service-config.json.template @@ -14,7 +14,7 @@ { "rel": "service", "type": "text/html", - "href": "https://${PAVICS_FQDN_PUBLIC}/jupyter" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/jupyter" }, { "rel": "service-doc", diff --git a/birdhouse/components/magpie/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/magpie/config/canarie-api/canarie_api_monitoring.py.template index 60b030b38..0f9e4b23f 100644 --- a/birdhouse/components/magpie/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/magpie/config/canarie-api/canarie_api_monitoring.py.template @@ -10,7 +10,7 @@ SERVICES['Magpie'] = { 'institution': 'Ouranos', 
'releaseTime': get_release_time_from_repo_tag("github", "Ouranosinc/Magpie", "${MAGPIE_VERSION}"), 'researchSubject': 'Security', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Security', 'tags': ['Security', 'Management', 'Access', 'Policy Decision Point'] }, @@ -23,14 +23,14 @@ SERVICES['Magpie'] = { 'releasenotes': 'https://github.com/Ouranosinc/Magpie/blob/master/CHANGES.rst', 'support': 'https://github.com/Ouranosinc/Magpie/issues', 'source': 'https://github.com/Ouranosinc/Magpie', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/magpie/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/magpie/', 'licence': 'https://github.com/Ouranosinc/Magpie/blob/${MAGPIE_VERSION}/LICENSE', 'provenance': 'https://ouranosinc.github.io/pavics-sdi/provenance/index.html' }, "monitoring": { "Magpie": { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/magpie/version' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/magpie/version' }, 'response': { 'text': r'\{.*"code": 200.*"type": "application/json".*\}' diff --git a/birdhouse/components/magpie/default.env b/birdhouse/components/magpie/default.env index 6b97051af..6fe22de40 100644 --- a/birdhouse/components/magpie/default.env +++ b/birdhouse/components/magpie/default.env @@ -9,7 +9,7 @@ export MAGPIE_VERSION=3.38.0 export MAGPIE_DB_NAME="magpiedb" -export MAGPIE_PERSIST_DIR='${DATA_PERSIST_ROOT}/magpie_persist' +export MAGPIE_PERSIST_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/magpie_persist' # Magpie user registration and approvals export MAGPIE_USER_REGISTRATION_ENABLED=false @@ -48,8 +48,8 @@ VARS=" OPTIONAL_VARS=" $OPTIONAL_VARS - \$GITHUB_CLIENT_ID - \$GITHUB_CLIENT_SECRET + \$MAGPIE_GITHUB_CLIENT_ID + \$MAGPIE_GITHUB_CLIENT_SECRET \$MAGPIE_DB_NAME \$MAGPIE_USER_REGISTRATION_ENABLED \$MAGPIE_USER_REGISTRATION_SUBMISSION_EMAIL_TEMPLATE diff --git a/birdhouse/components/magpie/docker-compose-extra.yml b/birdhouse/components/magpie/docker-compose-extra.yml index 0be77492c..4c464fd3c 100644 --- a/birdhouse/components/magpie/docker-compose-extra.yml +++ b/birdhouse/components/magpie/docker-compose-extra.yml @@ -12,7 +12,7 @@ services: image: pavics/magpie:${MAGPIE_VERSION} container_name: magpie environment: - TWITCHER_PROTECTED_URL: https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH} + TWITCHER_PROTECTED_URL: https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH} # target directories to allow loading multiple config files of corresponding category # each compose override should volume mount its files inside the matching directories # (note: DO NOT use 'MAGPIE_CONFIG_PATH' that would disable multi-config loading capability) diff --git a/birdhouse/components/magpie/magpie.ini.template b/birdhouse/components/magpie/magpie.ini.template index 8f2eb920b..f1c0ed93d 100644 --- a/birdhouse/components/magpie/magpie.ini.template +++ b/birdhouse/components/magpie/magpie.ini.template @@ -28,7 +28,7 @@ pyramid.includes = pyramid_tm ziggurat_foundations.ext.pyramid.sign_in ziggurat_ # other overridable variables available in magpie/constants.py # magpie.port = 2001 -magpie.url = https://${PAVICS_FQDN}/magpie +magpie.url = https://${BIRDHOUSE_FQDN}/magpie magpie.max_restart = 5 magpie.push_phoenix = true # This secret should be the same in Twitcher ! 
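For instances that enable the GitHub login provider, the renamed optional variables used in the next hunk would be supplied through ``env.local``; a minimal sketch with placeholder credentials (use the values of your own GitHub OAuth application):

    export MAGPIE_GITHUB_CLIENT_ID="replace-with-client-id"
    export MAGPIE_GITHUB_CLIENT_SECRET="replace-with-client-secret"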
@@ -46,8 +46,8 @@ ziggurat_foundations.sign_in.sign_out_pattern = /signout ziggurat_foundations.session_provider_callable = magpie.models:get_session_callable # github (login provider) -github.client_id=${GITHUB_CLIENT_ID} -github.client_secret=${GITHUB_CLIENT_SECRET} +github.client_id=${MAGPIE_GITHUB_CLIENT_ID} +github.client_secret=${MAGPIE_GITHUB_CLIENT_SECRET} # temporary token definition (defaults below if omitted) # note: token here refers to uuids employed in temporary URL endpoints, not security auth tokens diff --git a/birdhouse/components/mongodb/default.env b/birdhouse/components/mongodb/default.env index e031e852c..4c2e68997 100644 --- a/birdhouse/components/mongodb/default.env +++ b/birdhouse/components/mongodb/default.env @@ -4,7 +4,7 @@ # must use single quotes to avoid early expansion before overrides in env.local # are applied and must be added to the list of DELAYED_EVAL. -export MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_persist' +export MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_persist' export DELAYED_EVAL=" $DELAYED_EVAL diff --git a/birdhouse/components/monitoring/alertmanager.yml.template b/birdhouse/components/monitoring/alertmanager.yml.template index 4221feaea..c33ea7444 100644 --- a/birdhouse/components/monitoring/alertmanager.yml.template +++ b/birdhouse/components/monitoring/alertmanager.yml.template @@ -1,10 +1,10 @@ # https://prometheus.io/docs/alerting/latest/configuration/ -# http://${PAVICS_FQDN}:9093/#/status +# http://${BIRDHOUSE_FQDN}:9093/#/status global: # The smarthost and SMTP sender used for mail notifications. - smtp_smarthost: '${SMTP_SERVER}' - smtp_from: 'alertmanager@${PAVICS_FQDN}' - smtp_hello: '${PAVICS_FQDN}' + smtp_smarthost: '${ALERTMANAGER_SMTP_SERVER}' + smtp_from: 'alertmanager@${BIRDHOUSE_FQDN}' + smtp_hello: '${BIRDHOUSE_FQDN}' ${ALERTMANAGER_EXTRA_GLOBAL} # Below example of candidates for ALERTMANAGER_EXTRA_GLOBAL # smtp_auth_username: 'alertmanager' diff --git a/birdhouse/components/monitoring/config/proxy/conf.extra-service.d/monitoring.conf.template b/birdhouse/components/monitoring/config/proxy/conf.extra-service.d/monitoring.conf.template index 0c21bdd43..07a62c224 100644 --- a/birdhouse/components/monitoring/config/proxy/conf.extra-service.d/monitoring.conf.template +++ b/birdhouse/components/monitoring/config/proxy/conf.extra-service.d/monitoring.conf.template @@ -22,7 +22,7 @@ location = /secure-grafana-auth { internal; - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/grafana$request_uri; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/grafana$request_uri; proxy_pass_request_body off; proxy_set_header Host $host; proxy_set_header Content-Length ""; @@ -34,7 +34,7 @@ location = /secure-prometheus-auth { internal; - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/prometheus$request_uri; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/prometheus$request_uri; proxy_pass_request_body off; proxy_set_header Host $host; proxy_set_header Content-Length ""; @@ -46,7 +46,7 @@ location = /secure-alertmanager-auth { internal; - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/alertmanager$request_uri; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/alertmanager$request_uri; proxy_pass_request_body off; proxy_set_header Host $host; proxy_set_header Content-Length ""; diff --git a/birdhouse/components/monitoring/default.env b/birdhouse/components/monitoring/default.env index 1ecb61b81..c6f781fde 100644 --- 
a/birdhouse/components/monitoring/default.env +++ b/birdhouse/components/monitoring/default.env @@ -25,8 +25,14 @@ export ALERTMANAGER_DOCKER=prom/alertmanager export ALERTMANAGER_IMAGE='${ALERTMANAGER_DOCKER}:${ALERTMANAGER_VERSION}' export GRAFANA_ADMIN_PASSWORD="changeme!" +__DEFAULT_GRAFANA_DEFAULT_PROVIDER_FOLDER="Local-Birdhouse" +export GRAFANA_DEFAULT_PROVIDER_FOLDER='${__DEFAULT_GRAFANA_DEFAULT_PROVIDER_FOLDER}' +__DEFAULT_GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID="local-birdhouse" +export GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID='${__DEFAULT_GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID}' +__GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID="local_birdhouse_prometheus" +export GRAFANA_PROMETHEUS_DATASOURCE_UUID='${__GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID}' export ALERTMANAGER_ADMIN_EMAIL_RECEIVER="" # "user1@example.com,user2@example.com" -export SMTP_SERVER="" # "smtp.example.com:25" +export ALERTMANAGER_SMTP_SERVER="" # "smtp.example.com:25" export ALERTMANAGER_EXTRA_GLOBAL="" export ALERTMANAGER_EXTRA_ROUTES="" export ALERTMANAGER_EXTRA_INHIBITION="" @@ -53,17 +59,27 @@ export PROMETHEUS_ContainerMemoryUsage_ALERT=80 # percent use export PROMETHEUS_ContainerVolumeUsage_ALERT=80 # percent use export PROMETHEUS_ContainerVolumeIoUsage_ALERT=80 # percent use +DELAYED_EVAL=" + $DELAYED_EVAL + GRAFANA_DEFAULT_PROVIDER_FOLDER + GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID + GRAFANA_PROMETHEUS_DATASOURCE_UUID +" + # add vars only needed to be substituted in templates VARS=" $VARS \$ALERTMANAGER_ADMIN_EMAIL_RECEIVER - \$SMTP_SERVER + \$ALERTMANAGER_SMTP_SERVER " OPTIONAL_VARS=" $OPTIONAL_VARS + \$GRAFANA_DEFAULT_PROVIDER_FOLDER + \$GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID + \$GRAFANA_PROMETHEUS_DATASOURCE_UUID \$ALERTMANAGER_EXTRA_GLOBAL \$ALERTMANAGER_EXTRA_ROUTES \$ALERTMANAGER_EXTRA_INHIBITION diff --git a/birdhouse/components/monitoring/docker-compose-extra.yml b/birdhouse/components/monitoring/docker-compose-extra.yml index c45d793e0..29f6f46e9 100644 --- a/birdhouse/components/monitoring/docker-compose-extra.yml +++ b/birdhouse/components/monitoring/docker-compose-extra.yml @@ -45,7 +45,7 @@ services: # https://prometheus.io/docs/prometheus/latest/storage/ - --storage.tsdb.retention.time=90d # wrong default was http://container-hash:9090/ - - --web.external-url=https://${PAVICS_FQDN_PUBLIC}/prometheus/ + - --web.external-url=https://${BIRDHOUSE_FQDN_PUBLIC}/prometheus/ restart: always # https://grafana.com/docs/grafana/latest/installation/docker/ @@ -61,9 +61,9 @@ services: - grafana_persistence:/var/lib/grafana:rw environment: GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_ADMIN_PASSWORD} - GF_SERVER_ROOT_URL: https://${PAVICS_FQDN_PUBLIC}/grafana + GF_SERVER_ROOT_URL: https://${BIRDHOUSE_FQDN_PUBLIC}/grafana GF_SERVER_SERVE_FROM_SUB_PATH: 'true' - GF_SERVER_DOMAIN: ${PAVICS_FQDN_PUBLIC} + GF_SERVER_DOMAIN: ${BIRDHOUSE_FQDN_PUBLIC} restart: always # https://github.com/prometheus/alertmanager @@ -83,7 +83,7 @@ services: # enable debug logging - --log.level=debug # wrong default was http://container-hash:9093/ - - --web.external-url=https://${PAVICS_FQDN_PUBLIC}/alertmanager + - --web.external-url=https://${BIRDHOUSE_FQDN_PUBLIC}/alertmanager restart: always volumes: diff --git a/birdhouse/components/monitoring/grafana_dashboards.yml.template b/birdhouse/components/monitoring/grafana_dashboards.yml.template index 99d0078f3..b1515f12c 100644 --- a/birdhouse/components/monitoring/grafana_dashboards.yml.template +++ b/birdhouse/components/monitoring/grafana_dashboards.yml.template @@ -3,8 +3,8 @@ apiVersion: 1 providers: - name: 
'default' - folder: 'Local-PAVICS' - folderUid: 'local-pavics' + folder: '${GRAFANA_DEFAULT_PROVIDER_FOLDER}' + folderUid: '${GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID}' disableDeletion: false type: file editable: false diff --git a/birdhouse/components/monitoring/grafana_datasources.yml.template b/birdhouse/components/monitoring/grafana_datasources.yml.template index b8ba534fb..51dc709fa 100644 --- a/birdhouse/components/monitoring/grafana_datasources.yml.template +++ b/birdhouse/components/monitoring/grafana_datasources.yml.template @@ -5,7 +5,7 @@ datasources: - name: Prometheus type: prometheus access: proxy - uid: local_pavics_prometheus + uid: ${GRAFANA_PROMETHEUS_DATASOURCE_UUID} url: http://prometheus:9090/prometheus isDefault: true editable: false diff --git a/birdhouse/components/monitoring/prometheus.yml.template b/birdhouse/components/monitoring/prometheus.yml.template index 7265da2bb..f5d7cd09e 100644 --- a/birdhouse/components/monitoring/prometheus.yml.template +++ b/birdhouse/components/monitoring/prometheus.yml.template @@ -1,5 +1,5 @@ # https://prometheus.io/docs/prometheus/latest/configuration/configuration/ -# http://PAVICS_FQDN/prometheus/config +# http://BIRDHOUSE_FQDN/prometheus/config global: scrape_interval: 60s evaluation_interval: 30s @@ -13,12 +13,12 @@ scrape_configs: - cadvisor:8080 # Node exporter is required to run on the host network so it is not accessible through the docker network. -# It is only accessible via the host network which can be accessed using the PAVICS_FQDN variable. +# It is only accessible via the host network which can be accessed using the BIRDHOUSE_FQDN variable. - job_name: node-exporter honor_labels: true static_configs: - targets: - - ${PAVICS_FQDN}:9100 + - ${BIRDHOUSE_FQDN}:9100 rule_files: - "/etc/prometheus/*.rules" diff --git a/birdhouse/components/postgres/credentials.env.template b/birdhouse/components/postgres/credentials.env.template index 577d821c5..5fc8ee8f5 100644 --- a/birdhouse/components/postgres/credentials.env.template +++ b/birdhouse/components/postgres/credentials.env.template @@ -1,3 +1,3 @@ -POSTGRES_USER=${POSTGRES_PAVICS_USERNAME} -POSTGRES_PASSWORD=${POSTGRES_PAVICS_PASSWORD} -POSTGRES_DB=pavics +POSTGRES_USER=${BIRDHOUSE_POSTGRES_USERNAME} +POSTGRES_PASSWORD=${BIRDHOUSE_POSTGRES_PASSWORD} +POSTGRES_DB=${BIRDHOUSE_POSTGRES_DB} diff --git a/birdhouse/components/postgres/default.env b/birdhouse/components/postgres/default.env index 38496f27c..741fb9b4b 100644 --- a/birdhouse/components/postgres/default.env +++ b/birdhouse/components/postgres/default.env @@ -4,16 +4,20 @@ # must use single quotes to avoid early expansion before overrides in env.local # are applied and must be added to the list of DELAYED_EVAL. 
-export POSTGRES_DATA_DIR='${DATA_PERSIST_ROOT}/frontend_persist' +export POSTGRES_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/frontend_persist' +export __DEFAULT_BIRDHOUSE_POSTGRES_DB=birdhouse +export BIRDHOUSE_POSTGRES_DB='${__DEFAULT_BIRDHOUSE_POSTGRES_DB}' export DELAYED_EVAL=" $DELAYED_EVAL POSTGRES_DATA_DIR + BIRDHOUSE_POSTGRES_DB " # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here VARS=" $VARS - \$POSTGRES_PAVICS_USERNAME - \$POSTGRES_PAVICS_PASSWORD + \$BIRDHOUSE_POSTGRES_USERNAME + \$BIRDHOUSE_POSTGRES_PASSWORD + \$BIRDHOUSE_POSTGRES_DB " diff --git a/birdhouse/components/proxy/conf.d/all-services.include.template b/birdhouse/components/proxy/conf.d/all-services.include.template index 5704bb12b..e9c9fff42 100644 --- a/birdhouse/components/proxy/conf.d/all-services.include.template +++ b/birdhouse/components/proxy/conf.d/all-services.include.template @@ -1,5 +1,5 @@ location / { - ${PROXY_ROOT_LOCATION} + ${BIRDHOUSE_PROXY_ROOT_LOCATION} } location /components { @@ -21,7 +21,7 @@ } location /doc { - return 302 ${DOC_URL}; + return 302 ${BIRDHOUSE_DOC_URL}; } # for other extra components to extend Nginx diff --git a/birdhouse/components/proxy/default.env b/birdhouse/components/proxy/default.env index 9d7ac7b4f..08f9dde89 100644 --- a/birdhouse/components/proxy/default.env +++ b/birdhouse/components/proxy/default.env @@ -11,9 +11,9 @@ export PROXY_READ_TIMEOUT_VALUE="240s" # Note that the default homepage will become the jupyterhub login page if the jupyterhub component is enabled. # If the jupyterhub component is not enabled, it is highly recommended to create a custom homepage since the magpie # landing page is not the most user-friendly option. -export PROXY_ROOT_LOCATION="return 302 https://\$host/jupyter/hub/login;" +export BIRDHOUSE_PROXY_ROOT_LOCATION="return 302 https://\$host/jupyter/hub/login;" -export INCLUDE_FOR_PORT_80='$([ x"$ALLOW_UNSECURE_HTTP" = x"True" ] && echo "include /etc/nginx/conf.d/all-services.include;" || echo "include /etc/nginx/conf.d/redirect-to-https.include;")' +export INCLUDE_FOR_PORT_80='$([ x"$BIRDHOUSE_ALLOW_UNSECURE_HTTP" = x"True" ] && echo "include /etc/nginx/conf.d/all-services.include;" || echo "include /etc/nginx/conf.d/redirect-to-https.include;")' export PROXY_LOG_DIR="/var/log/nginx/" export PROXY_LOG_FILE="access_file.log" @@ -37,7 +37,7 @@ export OPTIONAL_VARS=" $OPTIONAL_VARS \$INCLUDE_FOR_PORT_80 \$PROXY_READ_TIMEOUT_VALUE - \$PROXY_ROOT_LOCATION + \$BIRDHOUSE_PROXY_ROOT_LOCATION \$PROXY_LOG_FILE \$PROXY_LOG_PATH " diff --git a/birdhouse/components/proxy/docker-compose-extra.yml b/birdhouse/components/proxy/docker-compose-extra.yml index 3cad6523e..66ce09ef4 100644 --- a/birdhouse/components/proxy/docker-compose-extra.yml +++ b/birdhouse/components/proxy/docker-compose-extra.yml @@ -17,7 +17,7 @@ services: volumes: - ./components/proxy/conf.d:/etc/nginx/conf.d - ./components/proxy/nginx.conf:/etc/nginx/nginx.conf - - ${SSL_CERTIFICATE}:/etc/nginx/cert.pem + - ${BIRDHOUSE_SSL_CERTIFICATE}:/etc/nginx/cert.pem - ./components/proxy/static:/static environment: # https://github.com/bird-house/birdhouse-deploy/issues/198 diff --git a/birdhouse/components/raven/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/raven/config/canarie-api/canarie_api_monitoring.py.template index 6e11fd02b..44682efb2 100644 --- a/birdhouse/components/raven/config/canarie-api/canarie_api_monitoring.py.template +++ 
b/birdhouse/components/raven/config/canarie-api/canarie_api_monitoring.py.template @@ -12,7 +12,7 @@ SERVICES['raven'] = { 'institution': 'Ouranos', 'releaseTime': RAVEN_RELEASE, 'researchSubject': 'Hydrology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Hydrology'] }, diff --git a/birdhouse/components/raven/default.env b/birdhouse/components/raven/default.env index a97f6a281..24f115a56 100644 --- a/birdhouse/components/raven/default.env +++ b/birdhouse/components/raven/default.env @@ -4,8 +4,9 @@ # This is the production Geoserver that is always available with appropriate data. # For site that want to run your own Geoserver with your own data, please # override this variable with your own Geoserver instance. -# Ex: RAVEN_GEO_URL="https://${PAVICS_FQDN}/geoserver/" -export RAVEN_GEO_URL="https://pavics.ouranos.ca/geoserver/" +# Ex: RAVEN_GEO_URL="https://${BIRDHOUSE_FQDN}/geoserver/" +__DEFAULT__RAVEN_GEO_URL="https://pavics.ouranos.ca/geoserver/" +export RAVEN_GEO_URL='${__DEFAULT__RAVEN_GEO_URL}' export RAVEN_VERSION="0.18.1" export RAVEN_DOCKER=pavics/raven @@ -34,11 +35,13 @@ export DELAYED_EVAL=" $DELAYED_EVAL RAVEN_IMAGE RAVEN_IMAGE_URI + RAVEN_GEO_URL " OPTIONAL_VARS=" $OPTIONAL_VARS \$RAVEN_VERSION + \$RAVEN_GEO_URL \$RAVEN_DOCKER \$RAVEN_IMAGE \$RAVEN_IMAGE_URI diff --git a/birdhouse/components/raven/service-config.json.template b/birdhouse/components/raven/service-config.json.template index dfaa0ba4a..96517aea4 100644 --- a/birdhouse/components/raven/service-config.json.template +++ b/birdhouse/components/raven/service-config.json.template @@ -14,7 +14,7 @@ { "rel": "service", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&request=GetCapabilities" }, { "rel": "service-doc", @@ -24,7 +24,7 @@ { "rel": "service-desc", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&request=GetCapabilities" }, { "rel": "service-meta", @@ -33,5 +33,4 @@ } ] } - ] diff --git a/birdhouse/components/raven/wps.cfg.template b/birdhouse/components/raven/wps.cfg.template index 3ee21076c..822129ee5 100644 --- a/birdhouse/components/raven/wps.cfg.template +++ b/birdhouse/components/raven/wps.cfg.template @@ -1,5 +1,5 @@ [server] -outputurl = https://${PAVICS_FQDN_PUBLIC}/wpsoutputs/raven +outputurl = https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs/raven outputpath = /data/wpsoutputs/raven # default 3mb, fix "Broken pipe" between the proxy and the wps service @@ -10,6 +10,6 @@ parallelprocesses = 10 [logging] level = INFO -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/raven +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/raven -${EXTRA_PYWPS_CONFIG} +${BIRDHOUSE_EXTRA_PYWPS_CONFIG} diff --git a/birdhouse/components/scheduler/config.yml.template b/birdhouse/components/scheduler/config.yml.template index 31a056593..67a38285b 100644 --- a/birdhouse/components/scheduler/config.yml.template +++ b/birdhouse/components/scheduler/config.yml.template @@ -1,47 +1,54 @@ --- - name: logrotate - comment: Rotate log files under /var/log/PAVICS + comment: Rotate log files under ${BIRDHOUSE_LOG_DIR} schedule: '@daily' command: 
bash -c 'cp /etc/logrotate.conf.orig /etc/logrotate.conf && chown root:root /etc/logrotate.conf && chmod 644 /etc/logrotate.conf && /usr/sbin/logrotate -v /etc/logrotate.conf' dockerargs: >- --rm --name logrotate - --volume /var/log/PAVICS:/var/log/PAVICS:rw - --volume ${LOGROTATE_DATA_DIR}:/var/lib:rw - --volume ${COMPOSE_DIR}/deployment/PAVICS-deploy.logrotate:/etc/logrotate.conf.orig:ro + --volume ${BIRDHOUSE_LOG_DIR}:/var/log/birdhouse:rw + --volume ${BIRDHOUSE_LOGROTATE_DATA_DIR}:/var/lib:rw + --volume ${COMPOSE_DIR}/deployment/birdhouse-deploy.logrotate:/etc/logrotate.conf.orig:ro image: 'stakater/logrotate:3.13.0' - name: notebookdeploy comment: Auto-deploy tutorial notebooks - schedule: '${AUTODEPLOY_NOTEBOOK_FREQUENCY}' + schedule: '${BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY}' command: '${COMPOSE_DIR}/deployment/trigger-deploy-notebook' dockerargs: >- --rm --name notebookdeploy --volume /var/run/docker.sock:/var/run/docker.sock:ro - --volume /var/log/PAVICS:/var/log/PAVICS:rw + --volume ${BIRDHOUSE_LOG_DIR}:${BIRDHOUSE_LOG_DIR}:rw --volume ${COMPOSE_DIR}:${COMPOSE_DIR}:ro --volume ${JUPYTERHUB_USER_DATA_DIR}:${JUPYTERHUB_USER_DATA_DIR}:rw --volume /tmp/notebookdeploy:/tmp/notebookdeploy:rw + --volume ${BIRDHOUSE_LOCAL_ENV}:${BIRDHOUSE_LOCAL_ENV} --env COMPOSE_DIR=${COMPOSE_DIR} --env TMP_BASE_DIR=/tmp/notebookdeploy + --env BIRDHOUSE_LOCAL_ENV=${BIRDHOUSE_LOCAL_ENV} + --env BIRDHOUSE_COMPOSE=${BIRDHOUSE_COMPOSE} --env JUPYTERHUB_USER_DATA_DIR=${JUPYTERHUB_USER_DATA_DIR}${AUTODEPLOY_NOTEBOOK_EXTRA_DOCKER_ARGS} image: 'docker:19.03.6-git' - name: autodeploy - comment: Auto-deploy entire PAVICS platform - schedule: '${AUTODEPLOY_PLATFORM_FREQUENCY}' + comment: Auto-deploy entire Birdhouse platform + schedule: '${BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY}' command: '${COMPOSE_DIR}/deployment/triggerdeploy.sh ${COMPOSE_DIR}' dockerargs: >- - --rm --name autodeploy${AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES} + --rm --name autodeploy${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES} --volume /var/run/docker.sock:/var/run/docker.sock:ro - --volume /var/log/PAVICS:/var/log/PAVICS:rw + --volume ${BIRDHOUSE_LOG_DIR}:${BIRDHOUSE_LOG_DIR}:rw --volume ${COMPOSE_DIR}/..:${COMPOSE_DIR}/..:rw - --volume ${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}:${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}:ro + --volume ${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}:${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}:ro --volume ${JUPYTERHUB_USER_DATA_DIR}:${JUPYTERHUB_USER_DATA_DIR}:rw + --volume ${BIRDHOUSE_LOCAL_ENV}:${BIRDHOUSE_LOCAL_ENV} --env COMPOSE_DIR=${COMPOSE_DIR} - --env AUTODEPLOY_DEPLOY_KEY_ROOT_DIR=${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR} + --env BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR=${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR} --env JUPYTERHUB_USER_DATA_DIR=${JUPYTERHUB_USER_DATA_DIR} --env CODE_OWNERSHIP=${CODE_OWNERSHIP} + --env BIRDHOUSE_LOCAL_ENV=${BIRDHOUSE_LOCAL_ENV} + --env BIRDHOUSE_COMPOSE=${BIRDHOUSE_COMPOSE} + --env BIRDHOUSE_LOG_DIR=${BIRDHOUSE_LOG_DIR} --env AUTODEPLOY_SILENT=true${AUTODEPLOY_PLATFORM_EXTRA_DOCKER_ARGS} image: 'pavics/docker-compose-git:docker-18.09.7-compose-1.25.1' -${AUTODEPLOY_EXTRA_SCHEDULER_JOBS} +${BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS} diff --git a/birdhouse/components/scheduler/default.env b/birdhouse/components/scheduler/default.env index 1261f3c08..406480dc7 100644 --- a/birdhouse/components/scheduler/default.env +++ b/birdhouse/components/scheduler/default.env @@ -9,19 +9,19 @@ # Note when overriding this variable in env.local, do not use HOME environment # var, use its fully resolved 
value. This default value is suitable only for # backward-compatibility when autodeploy do not run in its own container. -export AUTODEPLOY_DEPLOY_KEY_ROOT_DIR="$HOME/.ssh" +export BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR="$HOME/.ssh" # Daily at 5:07 AM -export AUTODEPLOY_PLATFORM_FREQUENCY="7 5 * * *" +export BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY="7 5 * * *" # Hourly -export AUTODEPLOY_NOTEBOOK_FREQUENCY="@hourly" +export BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY="@hourly" -export LOGROTATE_DATA_DIR='${DATA_PERSIST_ROOT}/logrotate' +export BIRDHOUSE_LOGROTATE_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/logrotate' export DELAYED_EVAL=" $DELAYED_EVAL - LOGROTATE_DATA_DIR + BIRDHOUSE_LOGROTATE_DATA_DIR " if [ -L "$BIRDHOUSE_LOCAL_ENV" ]; then @@ -37,15 +37,15 @@ fi # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here VARS=" $VARS - \$LOGROTATE_DATA_DIR + \$BIRDHOUSE_LOGROTATE_DATA_DIR " OPTIONAL_VARS=" $OPTIONAL_VARS - \$AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES - \$AUTODEPLOY_PLATFORM_FREQUENCY - \$AUTODEPLOY_NOTEBOOK_FREQUENCY - \$AUTODEPLOY_EXTRA_SCHEDULER_JOBS + \$BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS_AS_DOCKER_VOLUMES + \$BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY + \$BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY + \$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS \$AUTODEPLOY_NOTEBOOK_EXTRA_DOCKER_ARGS \$AUTODEPLOY_PLATFORM_EXTRA_DOCKER_ARGS " diff --git a/birdhouse/components/scheduler/deploy_data_job.env b/birdhouse/components/scheduler/deploy_data_job.env index ac7ae0d3f..020eac8b2 100644 --- a/birdhouse/components/scheduler/deploy_data_job.env +++ b/birdhouse/components/scheduler/deploy_data_job.env @@ -33,12 +33,12 @@ fi # Location for local cache of git clone to save bandwidth and time from always # re-cloning from scratch. if [ -z "$DEPLOY_DATA_JOB_CHECKOUT_CACHE" ]; then - DEPLOY_DATA_JOB_CHECKOUT_CACHE="${DATA_PERSIST_ROOT:-/data}/deploy_data_cache/${DEPLOY_DATA_JOB_JOB_NAME}" + DEPLOY_DATA_JOB_CHECKOUT_CACHE="${BIRDHOUSE_DATA_PERSIST_ROOT:-/data}/deploy_data_cache/${DEPLOY_DATA_JOB_JOB_NAME}" fi -# Log file location. Default location under /var/log/PAVICS/ has built-in logrotate. +# Log file location. Default location under /var/log/birdhouse/ has built-in logrotate. if [ -z "$DEPLOY_DATA_JOB_LOGFILE" ]; then - DEPLOY_DATA_JOB_LOGFILE="/var/log/PAVICS/${DEPLOY_DATA_JOB_JOB_NAME}.log" + DEPLOY_DATA_JOB_LOGFILE="${BIRDHOUSE_LOG_DIR}/${DEPLOY_DATA_JOB_JOB_NAME}.log" fi # Docker image to run deploy-date script. @@ -61,7 +61,7 @@ fi ############################################################################## -if [ -z "`echo "$AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep $DEPLOY_DATA_JOB_JOB_NAME`" ]; then +if [ -z "`echo "$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep $DEPLOY_DATA_JOB_JOB_NAME`" ]; then # Add job only if not already added (config is read more than once during # autodeploy process). 
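The guard just above exists because these ``*.env`` job files are sourced more than once during an autodeploy run, so a job is appended to ``BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS`` only if it is not already present. A minimal sketch of that append-if-absent idiom; ``my_custom_job`` and its command are made up for illustration:

    #!/bin/sh
    JOB_NAME="my_custom_job"  # hypothetical job name
    if [ -z "$(echo "$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep "$JOB_NAME")" ]; then
        # Append the job definition (scheduler config.yml syntax) exactly once.
        export BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS="
    $BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS
    - name: $JOB_NAME
      comment: Example job registered only if absent
      schedule: '@daily'
      command: 'echo hello from my_custom_job'
      image: 'alpine:3.19'
    "
    fi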
@@ -75,8 +75,8 @@ if [ -z "`echo "$AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep $DEPLOY_DATA_JOB_JOB_NA --env DEPLOY_DATA_GIT_SSH_IDENTITY_FILE=${DEPLOY_DATA_JOB_GIT_SSH_IDENTITY_FILE}" fi - export AUTODEPLOY_EXTRA_SCHEDULER_JOBS=" -$AUTODEPLOY_EXTRA_SCHEDULER_JOBS + export BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS=" +$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS - name: $DEPLOY_DATA_JOB_JOB_NAME comment: $DEPLOY_DATA_JOB_JOB_DESCRIPTION diff --git a/birdhouse/components/scheduler/docker-compose-extra.yml b/birdhouse/components/scheduler/docker-compose-extra.yml index a7faf401c..cbf0c79d2 100644 --- a/birdhouse/components/scheduler/docker-compose-extra.yml +++ b/birdhouse/components/scheduler/docker-compose-extra.yml @@ -9,8 +9,8 @@ services: - ./components/scheduler/config.yml:/opt/crontab/config.yml:ro environment: COMPOSE_DIR: ${PWD} - AUTODEPLOY_DEPLOY_KEY_ROOT_DIR: ${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR} - CODE_OWNERSHIP: ${AUTODEPLOY_CODE_OWNERSHIP} + BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR: ${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR} + CODE_OWNERSHIP: ${BIRDHOUSE_AUTODEPLOY_CODE_OWNERSHIP} restart: always # vi: tabstop=8 expandtab shiftwidth=2 softtabstop=2 diff --git a/birdhouse/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env b/birdhouse/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env index bf04dbc41..771cc74a5 100644 --- a/birdhouse/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env +++ b/birdhouse/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env @@ -2,8 +2,8 @@ # Configuration vars, set in env.local before sourcing this file. # This job assume the "scheduler" component is enabled. # -# This job will write to the value of SSL_CERTIFICATE in env.local so make sure -# this job is sourced after the last definition of SSL_CERTIFICATE. +# This job will write to the value of BIRDHOUSE_SSL_CERTIFICATE in env.local so make sure +# this job is sourced after the last definition of BIRDHOUSE_SSL_CERTIFICATE. # ## Sample way to override default configs here in env.local: # @@ -36,8 +36,8 @@ fi # NOTE: # -# * Make sure SSL_CERTIFICATE is an absolute path for volume-mount to work -# properly. SSL_CERTIFICATE should also *not* be under this repo since this +# * Make sure BIRDHOUSE_SSL_CERTIFICATE is an absolute path for volume-mount to work +# properly. BIRDHOUSE_SSL_CERTIFICATE should also *not* be under this repo since this # repo is volume-mount read-only. # # * If env.local is a relative symlink (absolute symlink unsupported at the @@ -60,13 +60,13 @@ fi ############################################################################## -if [ -z "`echo "$AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep renew_letsencrypt_ssl`" ]; then +if [ -z "`echo "$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS" | grep renew_letsencrypt_ssl`" ]; then # Add job only if not already added (config is read twice during # autodeploy process. 
- export AUTODEPLOY_EXTRA_SCHEDULER_JOBS=" -$AUTODEPLOY_EXTRA_SCHEDULER_JOBS + export BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS=" +$BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS - name: renew_letsencrypt_ssl comment: Auto-renew LetsEncrypt SSL certificate @@ -76,12 +76,12 @@ $AUTODEPLOY_EXTRA_SCHEDULER_JOBS --rm --name renew_letsencrypt_ssl --volume /var/run/docker.sock:/var/run/docker.sock:ro --volume ${COMPOSE_DIR}/..${RENEW_LETSENCRYPT_SSL_NUM_PARENTS_MOUNT}:${COMPOSE_DIR}/..${RENEW_LETSENCRYPT_SSL_NUM_PARENTS_MOUNT}:ro - --volume /var/log/PAVICS:/var/log/PAVICS:rw - --volume `dirname ${SSL_CERTIFICATE}`:`dirname ${SSL_CERTIFICATE}`:rw + --volume ${BIRDHOUSE_LOG_DIR}:/var/log/birdhouse:rw + --volume `dirname ${BIRDHOUSE_SSL_CERTIFICATE}`:`dirname ${BIRDHOUSE_SSL_CERTIFICATE}`:rw --env COMPOSE_DIR=${COMPOSE_DIR} --env FORCE_CERTBOT_E2E=1 --env CERTBOT_RENEW=1 - --env CERTBOTWRAPPER_LOGFILE=/var/log/PAVICS/renew_letsencrypt_ssl.log + --env CERTBOTWRAPPER_LOGFILE=/var/log/birdhouse/renew_letsencrypt_ssl.log image: 'pavics/docker-compose-git:docker-18.09.7-compose-1.25.1' " diff --git a/birdhouse/components/stac/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/stac/config/canarie-api/canarie_api_monitoring.py.template index 3c728409f..360fd0387 100644 --- a/birdhouse/components/stac/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/stac/config/canarie-api/canarie_api_monitoring.py.template @@ -6,7 +6,7 @@ SERVICES['STAC'] = { 'institution': 'CRIM', 'releaseTime': "2023-06-16T00:00:00Z", 'researchSubject': 'Any', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Catalog', 'Data', 'OGC'] }, @@ -19,7 +19,7 @@ SERVICES['STAC'] = { 'releasenotes': 'https://github.com/crim-ca/sac-app/blob/master/CHANGES.rst', 'support': 'https://github.com/crim-ca/stac-app/issues', 'source': 'https://github.com/crim-ca/stac-app', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/stac/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/stac/', 'licence': 'https://github.com/crim-ca/stac-app/blob/master/LICENSE', 'provenance': 'https://github.com/crim-ca/stac-app' }, diff --git a/birdhouse/components/stac/config/proxy/conf.extra-service.d/stac.conf.template b/birdhouse/components/stac/config/proxy/conf.extra-service.d/stac.conf.template index 36dde7605..cdaee3575 100644 --- a/birdhouse/components/stac/config/proxy/conf.extra-service.d/stac.conf.template +++ b/birdhouse/components/stac/config/proxy/conf.extra-service.d/stac.conf.template @@ -4,7 +4,7 @@ # We need the second `/stac` for API redirect in STAC (see `root-path` and `ROUTER_PREFIX`). 
# See https://github.com/stac-utils/stac-fastapi/issues/427 # See https://github.com/crim-ca/stac-app/blob/main/stac_app.py#L60 - proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/stac/stac; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/stac/stac; proxy_set_header Host $host; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Host $host:$server_port; diff --git a/birdhouse/components/stac/default.env b/birdhouse/components/stac/default.env index 85460bd40..97b31cb0e 100644 --- a/birdhouse/components/stac/default.env +++ b/birdhouse/components/stac/default.env @@ -1,7 +1,7 @@ -export STAC_POSTGRES_USER=${POSTGRES_PAVICS_USERNAME} -export STAC_POSTGRES_PASSWORD=${POSTGRES_PAVICS_PASSWORD} -export STAC_PGUSER=${POSTGRES_PAVICS_USERNAME} -export STAC_PGPASSWORD=${POSTGRES_PAVICS_PASSWORD} +export STAC_POSTGRES_USER='${BIRDHOUSE_POSTGRES_USERNAME}' +export STAC_POSTGRES_PASSWORD='${BIRDHOUSE_POSTGRES_PASSWORD}' +export STAC_PGUSER='${BIRDHOUSE_POSTGRES_USERNAME}' +export STAC_PGPASSWORD='${BIRDHOUSE_POSTGRES_PASSWORD}' # 'main' branch points at https://github.com/stac-utils/stac-fastapi/commit/d53e792 # (see: https://github.com/crim-ca/stac-app/blob/40cad1aa7a094d58fca2d3184182761e248f781d/Dockerfile#L15-L20) @@ -18,19 +18,23 @@ export STAC_BROWSER_IMAGE='ghcr.io/crim-ca/stac-browser:docker_image_push' export STAC_BROWSER_IMAGE_URI='${STAC_BROWSER_IMAGE}' # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' $STAC_POSTGRES_USER $STAC_POSTGRES_PASSWORD $STAC_PGUSER $STAC_PGPASSWORD ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" export DELAYED_EVAL=" $DELAYED_EVAL + STAC_POSTGRES_USER + STAC_POSTGRES_PASSWORD + STAC_PGUSER + STAC_PGPASSWORD STAC_IMAGE STAC_IMAGE_URI STAC_BROWSER_IMAGE diff --git a/birdhouse/components/stac/docker-compose-extra.yml b/birdhouse/components/stac/docker-compose-extra.yml index 6b7143ab7..2362aa967 100644 --- a/birdhouse/components/stac/docker-compose-extra.yml +++ b/birdhouse/components/stac/docker-compose-extra.yml @@ -30,7 +30,7 @@ services: container_name: stac-browser image: ${STAC_BROWSER_IMAGE} environment: - - CATALOG_URL=https://${PAVICS_FQDN_PUBLIC}/stac/ + - CATALOG_URL=https://${BIRDHOUSE_FQDN_PUBLIC}/stac/ - ROOT_PATH=/stac-browser/ stac-db: diff --git a/birdhouse/components/stac/service-config.json.template b/birdhouse/components/stac/service-config.json.template index f81be9a05..5334fbc36 100644 --- a/birdhouse/components/stac/service-config.json.template +++ b/birdhouse/components/stac/service-config.json.template @@ -15,7 +15,7 @@ { "rel": "service", "type": "application/json", - "href": "https://${PAVICS_FQDN_PUBLIC}/stac/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/stac/" }, { "rel": "service-doc", @@ -30,7 +30,7 @@ { "rel": "alternate", "type": "text/html", - "href": "https://${PAVICS_FQDN_PUBLIC}/stac-browser/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/stac-browser/" }, { "rel": 
"service-meta", @@ -55,7 +55,7 @@ { "rel": "service", "type": "text/html", - "href": "https://${PAVICS_FQDN_PUBLIC}/stac-browser/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/stac-browser/" }, { "rel": "service-doc", @@ -65,7 +65,7 @@ { "rel": "alternate", "type": "application/json", - "href": "https://${PAVICS_FQDN_PUBLIC}/stac/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/stac/" }, { "rel": "service-meta", diff --git a/birdhouse/components/thredds/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/components/thredds/config/canarie-api/canarie_api_monitoring.py.template index 910202d60..f0357e880 100644 --- a/birdhouse/components/thredds/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/components/thredds/config/canarie-api/canarie_api_monitoring.py.template @@ -9,7 +9,7 @@ SERVICES['renderer'] = { 'institution': 'Unidata', 'releaseTime': '2020-06-16T00:00:00Z', 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Climatology'] }, @@ -29,7 +29,7 @@ SERVICES['renderer'] = { 'monitoring': { 'ncWMS': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds/wms/${THREDDS_SERVICE_DATA_URL_PATH}/testdata/ta_Amon_MRI-CGCM3_decadal1980_r1i1p1_199101-200012.nc?service=WMS&version=1.3.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds/wms/${THREDDS_SERVICE_DATA_URL_PATH}/testdata/ta_Amon_MRI-CGCM3_decadal1980_r1i1p1_199101-200012.nc?service=WMS&version=1.3.0&request=GetCapabilities' } }, } @@ -43,7 +43,7 @@ SERVICES['Thredds'] = { 'releaseTime': get_release_time_from_repo_tag("docker", "${THREDDS_DOCKER}", "${THREDDS_VERSION}"), 'institution': 'Ouranos', 'researchSubject': 'Catalog', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Catalog', 'tags': ['Catalog', 'Climate Data'] }, @@ -56,7 +56,7 @@ SERVICES['Thredds'] = { 'releasenotes': 'https://docs.unidata.ucar.edu/tds/current/userguide/upgrade.html', 'support': 'https://www.unidata.ucar.edu/software/tds/#help', 'source': 'https://github.com/Unidata/tds', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds/', 'licence': 'https://github.com/Unidata/tds/blob/main/LICENSE', 'provenance': 'https://downloads.unidata.ucar.edu/tds/' }, diff --git a/birdhouse/components/thredds/config/proxy/conf.extra-service.d/thredds.conf.template b/birdhouse/components/thredds/config/proxy/conf.extra-service.d/thredds.conf.template index c67670592..671da15ab 100644 --- a/birdhouse/components/thredds/config/proxy/conf.extra-service.d/thredds.conf.template +++ b/birdhouse/components/thredds/config/proxy/conf.extra-service.d/thredds.conf.template @@ -1,7 +1,7 @@ location /thredds/ { #return 302 /twitcher/ows/proxy$request_uri; - proxy_pass https://${PAVICS_FQDN}${TWITCHER_PROTECTED_PATH}/thredds/; + proxy_pass https://${BIRDHOUSE_FQDN}${TWITCHER_PROTECTED_PATH}/thredds/; # direct hit Thredds, bypassing twitcher, for debugging only # proxy_pass http://thredds:8080${TWITCHER_PROTECTED_PATH}/thredds/; proxy_set_header Host $host; diff --git a/birdhouse/components/thredds/default.env b/birdhouse/components/thredds/default.env index e97a1a41c..e39a11007 100644 --- a/birdhouse/components/thredds/default.env +++ b/birdhouse/components/thredds/default.env @@ -8,22 +8,20 @@ export 
THREDDS_IMAGE_URI='registry.hub.docker.com/${THREDDS_IMAGE}' export THREDDS_ORGANIZATION="Birdhouse" export THREDDS_ADDITIONAL_CATALOG="" -# All default values for data locations are set to ensure backward compatibility -export THREDDS_DATASET_LOCATION_ON_CONTAINER='/pavics-ncml' # this default is for backward compatibility -export THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER='/pavics-data' # this default is for backward compatibility -export THREDDS_DATASET_LOCATION_ON_HOST='${DATA_PERSIST_ROOT}/ncml' # this default is for backward compatibility -export THREDDS_SERVICE_DATA_LOCATION_ON_HOST='${DATA_PERSIST_ROOT}/datasets' # this default is for backward compatibility -export THREDDS_DATASET_LOCATION_NAME='Datasets' # this default is for backward compatibility -export THREDDS_SERVICE_DATA_LOCATION_NAME='Birdhouse' # this default is for backward compatibility -export THREDDS_DATASET_URL_PATH='datasets' # this default is for backward compatibility -export THREDDS_SERVICE_DATA_URL_PATH='birdhouse' # this default is for backward compatibility +export THREDDS_DATASET_LOCATION_ON_CONTAINER='/birdhouse-ncml' +export THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER='/birdhouse-data' +export THREDDS_DATASET_LOCATION_ON_HOST='${BIRDHOUSE_DATA_PERSIST_ROOT}/ncml' +export THREDDS_SERVICE_DATA_LOCATION_ON_HOST='${BIRDHOUSE_DATA_PERSIST_ROOT}/datasets' +export THREDDS_DATASET_LOCATION_NAME='Datasets' +export THREDDS_SERVICE_DATA_LOCATION_NAME='Birdhouse' +export THREDDS_DATASET_URL_PATH='datasets' +export THREDDS_SERVICE_DATA_URL_PATH='birdhouse' # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here VARS=" $VARS - \$CMIP5_THREDDS_ROOT \$THREDDS_SERVICE_DATA_LOCATION_NAME \$THREDDS_SERVICE_DATA_URL_PATH \$THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER diff --git a/birdhouse/components/thredds/docker-compose-extra.yml b/birdhouse/components/thredds/docker-compose-extra.yml index 3cc191c31..7bae79bc4 100644 --- a/birdhouse/components/thredds/docker-compose-extra.yml +++ b/birdhouse/components/thredds/docker-compose-extra.yml @@ -15,7 +15,7 @@ services: environment: # for reconstructing proper URL back to user when Thredds behind proxy # because Twitcher eats the "Host" http header set by Nginx - PAVICS_FQDN_PUBLIC: $PAVICS_FQDN_PUBLIC + BIRDHOUSE_FQDN_PUBLIC: $BIRDHOUSE_FQDN_PUBLIC volumes: - thredds_persistence:/usr/local/tomcat/content/thredds - ${THREDDS_SERVICE_DATA_LOCATION_ON_HOST}:${THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER} diff --git a/birdhouse/components/thredds/entrypointwrapper b/birdhouse/components/thredds/entrypointwrapper index 1572c441b..540253e1b 100755 --- a/birdhouse/components/thredds/entrypointwrapper +++ b/birdhouse/components/thredds/entrypointwrapper @@ -9,10 +9,10 @@ if ! 
grep ' relaxedQueryChars=' $CONF_FILE; then sed -i 's/' as the rest of the stack -export WEAVER_WPS_OUTPUTS_DIR='${WPS_OUTPUTS_DIR}/weaver' +export WEAVER_WPS_OUTPUTS_DIR='${BIRDHOUSE_WPS_OUTPUTS_DIR}/weaver' export WEAVER_WPS_WORKDIR="/tmp/wps_workdir/weaver" # logging @@ -107,7 +107,7 @@ export WEAVER_WPS_PROVIDERS_RETRY_COUNT=5 # control interval time between retries (duration in seconds, counts toward maximum timeout) export WEAVER_WPS_PROVIDERS_RETRY_AFTER=5 -export WEAVER_MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_weaver_persist' +export WEAVER_MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_weaver_persist' # If "True", Weaver providers that are no longer working (not responding when deployed) and are not named in # WEAVER_WPS_PROVIDERS will be unregistered. This is useful when deploying Weaver with fewer providers than a previous diff --git a/birdhouse/components/weaver/docker-compose-extra.yml b/birdhouse/components/weaver/docker-compose-extra.yml index 6091cd4a4..f1a7bf518 100644 --- a/birdhouse/components/weaver/docker-compose-extra.yml +++ b/birdhouse/components/weaver/docker-compose-extra.yml @@ -18,7 +18,7 @@ services: # WPS applications themselves are not necessarily completed. Successful HTTP responses ensure they are 'ready'. image: ${WEAVER_MANAGER_IMAGE} environment: - HOSTNAME: ${PAVICS_FQDN} + HOSTNAME: ${BIRDHOUSE_FQDN} FORWARDED_ALLOW_IPS: "*" #env_file: # - ./components/mongodb/credentials.env diff --git a/birdhouse/components/weaver/post-docker-compose-up b/birdhouse/components/weaver/post-docker-compose-up index 29e90c1d3..652a8c95e 100755 --- a/birdhouse/components/weaver/post-docker-compose-up +++ b/birdhouse/components/weaver/post-docker-compose-up @@ -23,7 +23,7 @@ # # WEAVER_WPS_PROVIDERS: # list of provider names (comma or space delimited), all are assumed to be available at -# "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/" +# "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/" # WEAVER_WPS_PROVIDERS_MAX_TIME: # limit script execution up to a maximum of this number of seconds # WEAVER_WPS_PROVIDERS_RETRY_COUNT: @@ -34,7 +34,7 @@ # Following configurations are expected to be inherited from bird-house/weaver-component env.local/default.env: # - MAGPIE_ADMIN_USERNAME # - MAGPIE_ADMIN_PASSWORD -# - PAVICS_FQDN_PUBLIC +# - BIRDHOUSE_FQDN_PUBLIC # - TWITCHER_PROTECTED_PATH # - WEAVER_MANAGER_NAME # @@ -52,7 +52,7 @@ # # Parameters: # -# PAVICS_LOG_DIR (optional, default=/tmp/pavics-compose): +# BIRDHOUSE_LOG_DIR (optional, default=/tmp/birdhouse-compose): # Location to log results from celery healthcheck outputs. 
# @@ -82,8 +82,8 @@ WARN="${PREFIX}${YELLOW}WARNING${NORMAL}: " echo "${PREFIX}Running: $0" -MAGPIE_URL="https://${PAVICS_FQDN_PUBLIC}/magpie" -WEAVER_URL="https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${WEAVER_MANAGER_NAME}" +MAGPIE_URL="https://${BIRDHOUSE_FQDN_PUBLIC}/magpie" +WEAVER_URL="https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${WEAVER_MANAGER_NAME}" WEAVER_WPS_PROVIDERS_MAX_TIME=${WEAVER_WPS_PROVIDERS_MAX_TIME:-120} WEAVER_WPS_PROVIDERS_RETRY_AFTER=${WEAVER_WPS_PROVIDERS_RETRY_AFTER:-5} WEAVER_WPS_PROVIDERS_RETRY_COUNT=${WEAVER_WPS_PROVIDERS_RETRY_COUNT:-5} @@ -246,7 +246,7 @@ for prov in ${WEAVER_WPS_PROVIDERS}; do if [ -z "${prov}" ]; then continue fi - prov_url="https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${prov}" + prov_url="https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${prov}" prov_cap="${prov_url}?service=WPS&request=GetCapabilities" # wait for WPS provider to respond @@ -367,20 +367,20 @@ fi echo "${PREFIX}Starting Weaver WebApp/Worker Celery tasks validation..." CUR_SCRIPT_DIR="$(dirname "$(realpath "$0")")" -PAVICS_COMPOSE="$(realpath "${CUR_SCRIPT_DIR}/../../pavics-compose.sh")" -PAVICS_LOG_DIR="${PAVICS_LOG_DIR:-/tmp/pavics-compose}" +BIRDHOUSE_COMPOSE="${BIRDHOUSE_COMPOSE:-"$(realpath "${CUR_SCRIPT_DIR}/../../birdhouse-compose.sh")"}" +BIRDHOUSE_LOG_DIR="${BIRDHOUSE_LOG_DIR:-/tmp/birdhouse-compose}" CELERY_HEALTHCHECK="/opt/local/bin/weaver/celery-healthcheck" -mkdir -p "${PAVICS_LOG_DIR}" +mkdir -p "${BIRDHOUSE_LOG_DIR}" # note: use 'tee' instead of capturing in variable to allow displaying results directly when running command -${PAVICS_COMPOSE} exec weaver bash "${CELERY_HEALTHCHECK}" | tee "${PAVICS_LOG_DIR}/weaver.log" +${BIRDHOUSE_COMPOSE} exec weaver bash "${CELERY_HEALTHCHECK}" | tee "${BIRDHOUSE_LOG_DIR}/weaver.log" ret_weaver=$? -out_weaver=$(cat "${PAVICS_LOG_DIR}/weaver.log" | tail -n 1 | grep -c "ERROR") -${PAVICS_COMPOSE} exec weaver-worker bash "${CELERY_HEALTHCHECK}" | tee "${PAVICS_LOG_DIR}/weaver-worker.log" +out_weaver=$(cat "${BIRDHOUSE_LOG_DIR}/weaver.log" | tail -n 1 | grep -c "ERROR") +${BIRDHOUSE_COMPOSE} exec weaver-worker bash "${CELERY_HEALTHCHECK}" | tee "${BIRDHOUSE_LOG_DIR}/weaver-worker.log" ret_worker=$? -out_worker=$(cat "${PAVICS_LOG_DIR}/weaver-worker.log" | tail -n 1 | grep -c "ERROR") +out_worker=$(cat "${BIRDHOUSE_LOG_DIR}/weaver-worker.log" | tail -n 1 | grep -c "ERROR") if [ ${ret_weaver} -ne 0 ] || [ ${ret_worker} -ne 0 ] || [ "${out_weaver}" -ne 0 ] || [ "${out_worker}" -ne 0 ]; then echo "${PREFIX}Weaver WebApp and/or Worker Celery tasks were not ready. Restarting both..." - ${PAVICS_COMPOSE} restart weaver weaver-worker + ${BIRDHOUSE_COMPOSE} restart weaver weaver-worker else echo "${PREFIX}Weaver WebApp and/or Worker Celery tasks are both ready." 
fi diff --git a/birdhouse/components/weaver/service-config.json.template b/birdhouse/components/weaver/service-config.json.template index 19f5a818a..0e3f79fcb 100644 --- a/birdhouse/components/weaver/service-config.json.template +++ b/birdhouse/components/weaver/service-config.json.template @@ -15,7 +15,7 @@ { "rel": "service", "type": "application/json", - "href": "https://${PAVICS_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/" }, { "rel": "service-doc", @@ -25,12 +25,12 @@ { "rel": "service-desc", "type": "application/json", - "href": "https://${PAVICS_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/" }, { "rel": "conformance", "type": "application/json", - "href": "https://${PAVICS_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/conformance/" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}/${WEAVER_MANAGER_NAME}/conformance/" }, { "rel": "service-meta", diff --git a/birdhouse/components/wps_outputs-volume/default.env b/birdhouse/components/wps_outputs-volume/default.env index 84fc84ba8..d9516ca65 100644 --- a/birdhouse/components/wps_outputs-volume/default.env +++ b/birdhouse/components/wps_outputs-volume/default.env @@ -4,11 +4,11 @@ OPTIONAL_VARS=" " # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' ${WPS_OUTPUTS_RES_NAME} ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" diff --git a/birdhouse/default.env b/birdhouse/default.env index bec5cec1c..cefaeae76 100644 --- a/birdhouse/default.env +++ b/birdhouse/default.env @@ -16,85 +16,84 @@ export BASH_IMAGE="bash:5.1.4" # Root directory under which all data persistence should be nested under -export DATA_PERSIST_ROOT="/data" +export BIRDHOUSE_DATA_PERSIST_ROOT="/data" # shellcheck disable=SC2016 # Root directory for all files that are persisted on disk and may contain links (ie. the files # are "shared" between subdirectories). -export DATA_PERSIST_SHARED_ROOT='${DATA_PERSIST_ROOT}' +export BIRDHOUSE_DATA_PERSIST_SHARED_ROOT='${BIRDHOUSE_DATA_PERSIST_ROOT}' # Log directory used for the various scheduler tasks -# TODO: use this variable for other references of the log path (only used in the pavics-jupyter-base's .env file for now) -export PAVICS_LOG_DIR=/var/log/PAVICS +export BIRDHOUSE_LOG_DIR=/var/log/birdhouse # Allow different public and internal hostname. -# Default to PAVICS_FQDN, which must be set in env.local. +# Default to BIRDHOUSE_FQDN, which must be set in env.local. # Must use single-quote for delayed eval. -export PAVICS_FQDN_PUBLIC='${PAVICS_FQDN}' +export BIRDHOUSE_FQDN_PUBLIC='${BIRDHOUSE_FQDN}' # Append to DELAYED_EVAL list. 
export DELAYED_EVAL=" $DELAYED_EVAL - PAVICS_FQDN_PUBLIC - DOC_URL - SUPPORT_EMAIL - SSL_CERTIFICATE - DATA_PERSIST_SHARED_ROOT - WPS_OUTPUTS_DIR - SERVER_NAME - SERVER_DESCRIPTION - SERVER_INSTITUTION - SERVER_SUBJECT - SERVER_TAGS - SERVER_DOCUMENTATION_URL - SERVER_RELEASE_NOTES_URL - SERVER_SUPPORT_URL - SERVER_LICENSE_URL + BIRDHOUSE_FQDN_PUBLIC + BIRDHOUSE_DOC_URL + BIRDHOUSE_SUPPORT_EMAIL + BIRDHOUSE_SSL_CERTIFICATE + BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + BIRDHOUSE_WPS_OUTPUTS_DIR + BIRDHOUSE_NAME + BIRDHOUSE_DESCRIPTION + BIRDHOUSE_INSTITUTION + BIRDHOUSE_SUBJECT + BIRDHOUSE_TAGS + BIRDHOUSE_DOCUMENTATION_URL + BIRDHOUSE_RELEASE_NOTES_URL + BIRDHOUSE_SUPPORT_URL + BIRDHOUSE_LICENSE_URL " # Server Identification Details # Following definitions should definitely be updated. # Previous defaults are defined for backward-compatibility. # If not overridden explicitly by their non '__' prefixed variant, -# a WARN message will be displayed by pavics-compose. -__DEFAULT__SERVER_NAME="PAVICS" -__DEFAULT__SERVER_DESCRIPTION=" -The PAVICS (Power Analytics for Visualization of Climate Science) platform is a collection of +# a WARN message will be displayed by birdhouse-compose. +__DEFAULT__BIRDHOUSE_NAME="Birdhouse" +__DEFAULT__BIRDHOUSE_DESCRIPTION=" +The Birdhouse platform is a collection of climate analysis services served through Open Geospatial Consortium (OGC) protocols. These services include data access, processing and visualization. Both data and algorithms can be accessed either programmatically, through OGC-compliant clients such as QGIS or ArcGIS, or a custom web interface. " -__DEFAULT__SERVER_INSTITUTION="Ouranos" -__DEFAULT__SERVER_SUBJECT="Climatology" +__DEFAULT__BIRDHOUSE_INSTITUTION="Ouranos" +__DEFAULT__BIRDHOUSE_SUBJECT="Climatology" # below can be a CSV list of tags -__DEFAULT__SERVER_TAGS="Climatology" -__DEFAULT__SERVER_DOCUMENTATION_URL="https://pavics-sdi.readthedocs.io/en/latest/arch/backend.html" -__DEFAULT__SERVER_RELEASE_NOTES_URL="https://github.com/bird-house/birdhouse-deploy/blob/master/CHANGES.md" -__DEFAULT__SERVER_SUPPORT_URL="https://github.com/bird-house/birdhouse-deploy/issues" +__DEFAULT__BIRDHOUSE_TAGS="Climatology" +__DEFAULT__BIRDHOUSE_DOCUMENTATION_URL="https://pavics-sdi.readthedocs.io/en/latest/arch/backend.html" +__DEFAULT__BIRDHOUSE_RELEASE_NOTES_URL="https://github.com/bird-house/birdhouse-deploy/blob/master/CHANGES.md" +__DEFAULT__BIRDHOUSE_SUPPORT_URL="https://github.com/bird-house/birdhouse-deploy/issues" # NOTE: # This value does not use the previously hard coded default. # Previous default pointed at the wrong repository with a mismatching LICENSE file. -__DEFAULT__SERVER_LICENSE_URL="https://github.com/bird-house/birdhouse-deploy/blob/master/LICENSE" -__DEFAULT__SUPPORT_EMAIL="helpdesk@example.com" -__DEFAULT__DOC_URL="https://www.example.com/" -__DEFAULT__PAVICS_FQDN="hostname.domainname" -__DEFAULT__SSL_CERTIFICATE="/path/to/ssl/cert.pem" +__DEFAULT__BIRDHOUSE_LICENSE_URL="https://github.com/bird-house/birdhouse-deploy/blob/master/LICENSE" +__DEFAULT__BIRDHOUSE_SUPPORT_EMAIL="helpdesk@example.com" +__DEFAULT__BIRDHOUSE_DOC_URL="https://www.example.com/" +__DEFAULT__BIRDHOUSE_FQDN="hostname.domainname" +__DEFAULT__BIRDHOUSE_SSL_CERTIFICATE="/path/to/ssl/cert.pem" # apply overrides or fallback above defaults with delayed evaluation -# exceptions for 'SUPPORT_EMAIL' and 'DOC_URL' using the old name for backward compatibility.
-export SUPPORT_EMAIL='${__DEFAULT__SUPPORT_EMAIL}' -export DOC_URL='${__DEFAULT__DOC_URL}' -export SSL_CERTIFICATE='${__DEFAULT__SSL_CERTIFICATE}' -export SERVER_NAME='${__DEFAULT__SERVER_NAME}' -export SERVER_DESCRIPTION='${__DEFAULT__SERVER_DESCRIPTION}' -export SERVER_INSTITUTION='${__DEFAULT__SERVER_INSTITUTION}' -export SERVER_SUBJECT='${__DEFAULT__SERVER_SUBJECT}' -export SERVER_TAGS='${__DEFAULT__SERVER_TAGS}' -export SERVER_DOCUMENTATION_URL='${__DEFAULT__SERVER_DOCUMENTATION_URL}' -export SERVER_RELEASE_NOTES_URL='${__DEFAULT__SERVER_RELEASE_NOTES_URL}' -export SERVER_SUPPORT_URL='${__DEFAULT__SERVER_SUPPORT_URL}' -export SERVER_LICENSE_URL='${__DEFAULT__SERVER_LICENSE_URL}' +# exceptions for 'BIRDHOUSE_SUPPORT_EMAIL' and 'BIRDHOUSE_DOC_URL' using the old name for backward compatibility. +export BIRDHOUSE_SUPPORT_EMAIL='${__DEFAULT__BIRDHOUSE_SUPPORT_EMAIL}' +export BIRDHOUSE_DOC_URL='${__DEFAULT__BIRDHOUSE_DOC_URL}' +export BIRDHOUSE_SSL_CERTIFICATE='${__DEFAULT__BIRDHOUSE_SSL_CERTIFICATE}' +export BIRDHOUSE_NAME='${__DEFAULT__BIRDHOUSE_NAME}' +export BIRDHOUSE_DESCRIPTION='${__DEFAULT__BIRDHOUSE_DESCRIPTION}' +export BIRDHOUSE_INSTITUTION='${__DEFAULT__BIRDHOUSE_INSTITUTION}' +export BIRDHOUSE_SUBJECT='${__DEFAULT__BIRDHOUSE_SUBJECT}' +export BIRDHOUSE_TAGS='${__DEFAULT__BIRDHOUSE_TAGS}' +export BIRDHOUSE_DOCUMENTATION_URL='${__DEFAULT__BIRDHOUSE_DOCUMENTATION_URL}' +export BIRDHOUSE_RELEASE_NOTES_URL='${__DEFAULT__BIRDHOUSE_RELEASE_NOTES_URL}' +export BIRDHOUSE_SUPPORT_URL='${__DEFAULT__BIRDHOUSE_SUPPORT_URL}' +export BIRDHOUSE_LICENSE_URL='${__DEFAULT__BIRDHOUSE_LICENSE_URL}' # Defaults for required variables recommended for override for security reasons. # Those will not be set explicitly as defaults to ensure they are overridden explicitly by the instance. @@ -102,8 +101,8 @@ export SERVER_LICENSE_URL='${__DEFAULT__SERVER_LICENSE_URL}' __DEFAULT__MAGPIE_SECRET="itzaseekrit" __DEFAULT__MAGPIE_ADMIN_USERNAME="admin" __DEFAULT__MAGPIE_ADMIN_PASSWORD="qwertyqwerty!" -__DEFAULT__POSTGRES_PAVICS_USERNAME="postgres-pavics" -__DEFAULT__POSTGRES_PAVICS_PASSWORD="postgres-qwerty" +__DEFAULT__BIRDHOUSE_POSTGRES_USERNAME="postgres-birdhouse" +__DEFAULT__BIRDHOUSE_POSTGRES_PASSWORD="postgres-qwerty" __DEFAULT__POSTGRES_MAGPIE_USERNAME="postgres-magpie" __DEFAULT__POSTGRES_MAGPIE_PASSWORD="postgres-qwerty" __DEFAULT__GEOSERVER_ADMIN_USER="admingeo" @@ -117,7 +116,80 @@ __DEFAULT__CATALOG_PASSWORD="qwerty" __DEFAULT__PHOENIX_PASSWORD="phoenix_pass" __DEFAULT__PHOENIX_PASSWORD_HASH="sha256:123456789012:1234567890123456789012345678901234567890123456789012345678901234" -export DEFAULT_CONF_DIRS=' +# Deprecated variable names are to the left of the equals sign, their non-deprecated equivalent is to the right. +# Note: if adding to this later on, make sure that you add new overrides to the *end* of this list so that they will be +# parsed in the correct order. 
+BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES=" + PAVICS_FQDN=BIRDHOUSE_FQDN + PAVICS_FQDN_PUBLIC=BIRDHOUSE_FQDN_PUBLIC + POSTGRES_PAVICS_USERNAME=BIRDHOUSE_POSTGRES_USERNAME + POSTGRES_PAVICS_PASSWORD=BIRDHOUSE_POSTGRES_PASSWORD + OWNER_PAVICS_CHECKOUT=BIRDHOUSE_REPO_CHECKOUT_OWNER + PAVICS_LOG_DIR=BIRDHOUSE_LOG_DIR + PAVICS_FRONTEND_IP=BIRDHOUSE_FRONTEND_IP + PAVICS_FRONTEND_PORT=BIRDHOUSE_FRONTEND_PORT + PAVICS_FRONTEND_PROTO=BIRDHOUSE_FRONTEND_PROTO + PAVICS_HOST_URL=BIRDHOUSE_HOST_URL + DATA_PERSIST_ROOT=BIRDHOUSE_DATA_PERSIST_ROOT + DATA_PERSIST_SHARED_ROOT=BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + SSL_CERTIFICATE=BIRDHOUSE_SSL_CERTIFICATE + DOC_URL=BIRDHOUSE_DOC_URL + SUPPORT_EMAIL=BIRDHOUSE_SUPPORT_EMAIL + EXTRA_CONF_DIRS=BIRDHOUSE_EXTRA_CONF_DIRS + DEFAULT_CONF_DIRS=BIRDHOUSE_DEFAULT_CONF_DIRS + AUTODEPLOY_EXTRA_REPOS=BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS + AUTODEPLOY_DEPLOY_KEY_ROOT_DIR=BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR + AUTODEPLOY_PLATFORM_FREQUENCY=BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY + AUTODEPLOY_NOTEBOOK_FREQUENCY=BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY + AUTODEPLOY_EXTRA_SCHEDULER_JOBS=BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS + LOGROTATE_DATA_DIR=BIRDHOUSE_LOGROTATE_DATA_DIR + ALLOW_UNSECURE_HTTP=BIRDHOUSE_ALLOW_UNSECURE_HTTP + DOCKER_NOTEBOOK_IMAGES=JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES + ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS=JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS + MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS + EXTRA_PYWPS_CONFIG=BIRDHOUSE_EXTRA_PYWPS_CONFIG + GITHUB_CLIENT_ID=MAGPIE_GITHUB_CLIENT_ID + GITHUB_CLIENT_SECRET=MAGPIE_GITHUB_CLIENT_SECRET + VERIFY_SSL=BIRDHOUSE_VERIFY_SSL + SMTP_SERVER=ALERTMANAGER_SMTP_SERVER + COMPOSE_UP_EXTRA_OPTS=BIRDHOUSE_COMPOSE_UP_EXTRA_OPTS + WPS_OUTPUTS_DIR=BIRDHOUSE_WPS_OUTPUTS_DIR + SERVER_DOC_URL=BIRDHOUSE_DOC_URL + SERVER_SUPPORT_EMAIL=BIRDHOUSE_SUPPORT_EMAIL + SERVER_SSL_CERTIFICATE=BIRDHOUSE_SSL_CERTIFICATE + SERVER_DATA_PERSIST_SHARED_ROOT=BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + SERVER_WPS_OUTPUTS_DIR=BIRDHOUSE_WPS_OUTPUTS_DIR + SERVER_NAME=BIRDHOUSE_NAME + SERVER_DESCRIPTION=BIRDHOUSE_DESCRIPTION + SERVER_INSTITUTION=BIRDHOUSE_INSTITUTION + SERVER_SUBJECT=BIRDHOUSE_SUBJECT + SERVER_TAGS=BIRDHOUSE_TAGS + SERVER_DOCUMENTATION_URL=BIRDHOUSE_DOCUMENTATION_URL + SERVER_RELEASE_NOTES_URL=BIRDHOUSE_RELEASE_NOTES_URL + SERVER_SUPPORT_URL=BIRDHOUSE_SUPPORT_URL + SERVER_LICENSE_URL=BIRDHOUSE_LICENSE_URL + +" + +# Process only these backwards compatible variables before the components default.env files are processed +BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES_PRE_COMPONENTS=" + EXTRA_CONF_DIRS + DEFAULT_CONF_DIRS +" + +BIRDHOUSE_BACKWARDS_COMPATIBLE_DEFAULTS=""" + POSTGRES_PAVICS_USERNAME=postgres-pavics +""" + +BIRDHOUSE_BACKWARDS_COMPATIBLE_HARDCODED_DEFAULTS=""" + BIRDHOUSE_POSTGRES_DB=pavics + GRAFANA_DEFAULT_PROVIDER_FOLDER=Local-PAVICS + GRAFANA_DEFAULT_PROVIDER_FOLDER_UUID=local-pavics + GRAFANA_PROMETHEUS_DATASOURCE_UUID=local_pavics_prometheus +""" + + +export BIRDHOUSE_DEFAULT_CONF_DIRS=' ./components/proxy ./components/magpie ./components/twitcher @@ -128,4 +200,4 @@ export DEFAULT_CONF_DIRS=' export USER_WORKSPACE_UID=1000 export USER_WORKSPACE_GID=1000 -export WPS_OUTPUTS_DIR='${DATA_PERSIST_SHARED_ROOT}/wps_outputs' +export BIRDHOUSE_WPS_OUTPUTS_DIR='${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/wps_outputs' diff --git a/birdhouse/deployment/PAVICS-deploy.logrotate b/birdhouse/deployment/birdhouse-deploy.logrotate similarity index 87% rename from birdhouse/deployment/PAVICS-deploy.logrotate rename to 
birdhouse/deployment/birdhouse-deploy.logrotate index b67f8f88d..9714b87be 100644 --- a/birdhouse/deployment/PAVICS-deploy.logrotate +++ b/birdhouse/deployment/birdhouse-deploy.logrotate @@ -1,4 +1,4 @@ -/var/log/PAVICS/*.log { +/var/log/birdhouse/*.log { # keep at least 6 weeks' worth of logs (do not rotate # until the file is > 500k) rotate 6 diff --git a/birdhouse/deployment/certbotwrapper b/birdhouse/deployment/certbotwrapper index 34828d67c..1ecf3802c 100755 --- a/birdhouse/deployment/certbotwrapper +++ b/birdhouse/deployment/certbotwrapper @@ -2,8 +2,8 @@ # Renew LetsEncrypt SSL certificate using certbot docker image. # # Important: -# * SSL_CERTIFICATE from env.local will be updated, backup that file first! -# * SUPPORT_EMAIL from env.local is used as renew email, make sure it's valid! +# * BIRDHOUSE_SSL_CERTIFICATE from env.local will be updated, backup that file first! +# * BIRDHOUSE_SUPPORT_EMAIL from env.local is used as renew email, make sure it's valid! # * certbot requires your port 80 and 443 be accessible directly on the internet # # Useful extra options: @@ -52,12 +52,12 @@ SAVED_PWD="`pwd`" . "$THIS_DIR/../read-configs.include.sh" -# Get PAVICS_FQDN_PUBLIC, PAVICS_FQDN, SUPPORT_EMAIL, SSL_CERTIFICATE, BASH_IMAGE. +# Get BIRDHOUSE_FQDN_PUBLIC, BIRDHOUSE_FQDN, BIRDHOUSE_SUPPORT_EMAIL, BIRDHOUSE_SSL_CERTIFICATE, BASH_IMAGE. read_configs -CERT_DOMAIN="$PAVICS_FQDN_PUBLIC" +CERT_DOMAIN="$BIRDHOUSE_FQDN_PUBLIC" if [ -z "$CERT_DOMAIN" ]; then - CERT_DOMAIN="$PAVICS_FQDN" + CERT_DOMAIN="$BIRDHOUSE_FQDN" fi if [ ! -z "$FORCE_CERTBOT_E2E" ]; then @@ -74,7 +74,7 @@ else --agree-tos \ --no-eff-email \ --standalone \ - --email $SUPPORT_EMAIL \ + --email $BIRDHOUSE_SUPPORT_EMAIL \ --domain $CERT_DOMAIN \ --cert-name $CERT_DOMAIN" fi @@ -97,9 +97,9 @@ if [ ! -z "$FORCE_CERTBOT_E2E" ]; then -v "/etc/letsencrypt:/etc/letsencrypt" \ $BASH_IMAGE \ cat $CERTPATH/fullchain.pem $CERTPATH/privkey.pem > $TMP_SSL_CERT - if [ -s "$TMP_SSL_CERT" ] && ! diff $SSL_CERTIFICATE $TMP_SSL_CERT && [ $RC -eq 0 ]; then - # Only modify SSL_CERTIFICATE if there are real changes. - cp -v $TMP_SSL_CERT $SSL_CERTIFICATE + if [ -s "$TMP_SSL_CERT" ] && ! diff $BIRDHOUSE_SSL_CERTIFICATE $TMP_SSL_CERT && [ $RC -eq 0 ]; then + # Only modify BIRDHOUSE_SSL_CERTIFICATE if there are real changes. + cp -v $TMP_SSL_CERT $BIRDHOUSE_SSL_CERTIFICATE fi rm -v $TMP_SSL_CERT if [ -z "$FORCE_CERTBOT_E2E_NO_START_PROXY" ]; then @@ -113,8 +113,8 @@ What to do next: CERTPATH=\"/etc/letsencrypt/live/$CERT_DOMAIN\" cd $THIS_DIR/.. 
-sudo cat \$CERTPATH/fullchain.pem \$CERTPATH/privkey.pem > $SSL_CERTIFICATE -openssl x509 -noout -text -in $SSL_CERTIFICATE +sudo cat \$CERTPATH/fullchain.pem \$CERTPATH/privkey.pem > $BIRDHOUSE_SSL_CERTIFICATE +openssl x509 -noout -text -in $BIRDHOUSE_SSL_CERTIFICATE docker start proxy " fi diff --git a/birdhouse/deployment/cron.template b/birdhouse/deployment/cron.template index 36136a5e2..4ae09a798 100644 --- a/birdhouse/deployment/cron.template +++ b/birdhouse/deployment/cron.template @@ -1,4 +1,4 @@ PATH="/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin" # check if should deploy ${CRON_FREQUENCY_TXT} -${CRON_SCHEDULE} ${OWNER_PAVICS_CHECKOUT} /usr/local/sbin/triggerdeploy.sh ${PATH_TO_PAVICS_CHECKOUT}/birdhouse >> /var/log/PAVICS/autodeploy.log 2>&1 +${CRON_SCHEDULE} ${BIRDHOUSE_REPO_CHECKOUT_OWNER} /usr/local/sbin/triggerdeploy.sh ${BIRDHOUSE_REPO_CHECKOUT_PATH}/birdhouse >> ${BIRDHOUSE_LOG_DIR}/autodeploy.log 2>&1 diff --git a/birdhouse/deployment/deploy.sh b/birdhouse/deployment/deploy.sh index 8df019bf3..c40a7c59b 100755 --- a/birdhouse/deployment/deploy.sh +++ b/birdhouse/deployment/deploy.sh @@ -1,7 +1,7 @@ #!/bin/sh # Script to automate local deployment process. # -# Log to "/var/log/PAVICS/autodeploy.log" if AUTODEPLOY_SILENT is not empty. +# Log to "${BIRDHOUSE_LOG_DIR}/autodeploy.log" if AUTODEPLOY_SILENT is not empty. # # Still have to ssh to target machine but at least this single script # takes care of all the common steps for a standard deployment (see corner @@ -53,65 +53,70 @@ # are re-read. docker-compose is not aware of any changes outside of the # docker-compose.yml file. -if [ ! -z "${AUTODEPLOY_SILENT}" ]; then - LOG_FILE="/var/log/PAVICS/autodeploy.log" - exec >> "${LOG_FILE}" 2>&1 -fi - usage() { echo "USAGE: $0 [path to env.local]" } COMPOSE_DIR="$1" -ENV_LOCAL_FILE="$2" if [ -z "${COMPOSE_DIR}" ]; then - echo "ERROR: please provide path to PAVICS docker-compose dir." 1>&2 + echo "ERROR: please provide path to Birdhouse docker-compose dir by setting the COMPOSE_DIR variable." 1>&2 usage exit 2 else shift fi -if [ -z "${ENV_LOCAL_FILE}" ]; then - ENV_LOCAL_FILE="${COMPOSE_DIR}/env.local" -else - shift +BIRDHOUSE_LOCAL_ENV="${1:-${BIRDHOUSE_LOCAL_ENV:-"${COMPOSE_DIR}/env.local"}}" + +# Setup COMPOSE_DIR and PWD for sourcing env.local. +# Prevent un-expected difference when this script is run inside autodeploy +# container and manually from the host. +cd "${COMPOSE_DIR}" || exit + +. "${COMPOSE_DIR}/read-configs.include.sh" + +# Read BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS +read_basic_configs_only + +if [ ! -z "${AUTODEPLOY_SILENT}" ]; then + LOG_FILE="${BIRDHOUSE_LOG_DIR}/autodeploy.log" + mkdir -p "${BIRDHOUSE_LOG_DIR}" + exec >> "${LOG_FILE}" 2>&1 fi COMPOSE_DIR="$(realpath "${COMPOSE_DIR}")" +# This `if` block is required when upgrading from a version of the birdhouse-deploy code +# without birdhouse-compose.sh to one with birdhouse-compose.sh. When pavics-compose.sh +# is eventually deprecated and removed we can also remove this block. +if [ -z "${BIRDHOUSE_COMPOSE}" ] && [ ! -f "${COMPOSE_DIR}/birdhouse-compose.sh" ]; then + BIRDHOUSE_COMPOSE="${COMPOSE_DIR}/pavics-compose.sh" +fi + +BIRDHOUSE_COMPOSE=${BIRDHOUSE_COMPOSE:-"${COMPOSE_DIR}/birdhouse-compose.sh"} + if [ ! -f "${COMPOSE_DIR}/docker-compose.yml" ] || \ - [ ! -f "${COMPOSE_DIR}/pavics-compose.sh" ]; then - echo "ERROR: missing docker-compose.yml or pavics-compose.sh file in '${COMPOSE_DIR}'" 1>&2 + [ ! 
-f "${BIRDHOUSE_COMPOSE}" ]; then + echo "ERROR: missing docker-compose.yml or birdhouse-compose.sh file in '${COMPOSE_DIR}'" 1>&2 exit 2 fi -if [ ! -f "${ENV_LOCAL_FILE}" ]; then - echo "ERROR: env.local '${ENV_LOCAL_FILE}' not found, please instantiate from '${COMPOSE_DIR}/env.local.example'" 1>&2 +if [ ! -f "${BIRDHOUSE_LOCAL_ENV}" ]; then + echo "ERROR: env.local '${BIRDHOUSE_LOCAL_ENV}' not found, please instantiate from '${COMPOSE_DIR}/env.local.example'" 1>&2 exit 2 fi if [ -f "$COMPOSE_DIR/docker-compose.override.yml" ]; then - echo "WARNING: docker-compose.override.yml found, should use EXTRA_CONF_DIRS in env.local instead" + echo "WARNING: docker-compose.override.yml found, should use BIRDHOUSE_EXTRA_CONF_DIRS in env.local instead" fi -# Setup COMPOSE_DIR and PWD for sourcing env.local. -# Prevent un-expected difference when this script is run inside autodeploy -# container and manually from the host. -cd "${COMPOSE_DIR}" || exit - START_TIME="$(date -Isecond)" echo "deploy START_TIME=${START_TIME}" -. "${COMPOSE_DIR}/read-configs.include.sh" - -# Read AUTODEPLOY_EXTRA_REPOS -read_basic_configs_only - set -x -for adir in "${COMPOSE_DIR}" ${AUTODEPLOY_EXTRA_REPOS}; do +for adir in "${COMPOSE_DIR}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS}; do if [ -d "${adir}" ]; then cd "${adir}" || exit @@ -130,15 +135,15 @@ cd "${COMPOSE_DIR}" || exit read_basic_configs_only # stop all to force reload any changed config that are volume-mount into the containers -./pavics-compose.sh stop +"${BIRDHOUSE_COMPOSE}" stop -for adir in "${COMPOSE_DIR}" ${AUTODEPLOY_EXTRA_REPOS}; do +for adir in "${COMPOSE_DIR}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS}; do if [ -d "${adir}" ]; then cd "${adir}" || exit EXTRA_REPO="$(git rev-parse --show-toplevel)" - DEPLOY_KEY="${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/$(basename "${EXTRA_REPO}")_deploy_key" - DEFAULT_DEPLOY_KEY="${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/id_rsa_git_ssh_read_only" + DEPLOY_KEY="${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/$(basename "${EXTRA_REPO}")_deploy_key" + DEFAULT_DEPLOY_KEY="${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/id_rsa_git_ssh_read_only" if [ ! -e "${DEPLOY_KEY}" ] && [ -e "${DEFAULT_DEPLOY_KEY}" ]; then DEPLOY_KEY="${DEFAULT_DEPLOY_KEY}" fi @@ -177,7 +182,7 @@ cd "${COMPOSE_DIR}" || exit read_basic_configs_only # restart everything, only changed containers will be destroyed and recreated -./pavics-compose.sh up -d +"${BIRDHOUSE_COMPOSE}" up -d set +x diff --git a/birdhouse/deployment/fix-write-perm b/birdhouse/deployment/fix-write-perm index 00d73df55..2c5be014c 100755 --- a/birdhouse/deployment/fix-write-perm +++ b/birdhouse/deployment/fix-write-perm @@ -1,7 +1,7 @@ #!/bin/sh -e # Fix write permission lost due to files owned by root user. # -# Run once for this repo and once each time AUTODEPLOY_EXTRA_REPOS changes. +# Run once for this repo and once each time BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS changes. # # Set FIX_WRITE_PERM_EXTRA=1 to also grant write access other data typical # dir, useful for staging server to get update from production server. @@ -9,7 +9,7 @@ # Other data dir can also be given directly as arguments to this script. # # The 'autodeploy' container spawned by the 'scheduler' container runs as -# user root. It writes to all the git checkout in AUTODEPLOY_EXTRA_REPOS +# user root. It writes to all the git checkout in BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS # and the current birdhouse-deploy checkout as well. All the new files # created will be owned by the root user. 
# @@ -31,8 +31,8 @@ # (https://docs.docker.com/engine/security/userns-remap/): this is a # global configuration for the docker daemon that impacts all containers # on the same host (we only need for the 'autodeploy' container). Not -# portable if a host is used to run other services than PAVICS, we do not -# want to force all other services to the same owner as PAVICS checkout. +# portable if a host is used to run other services than Birdhouse, we do not -# want to force all other services to the same owner as Birdhouse checkout. # # So the setfacl solution is the simplest, most portable/generic and most # localized (only the directories we need) solution. @@ -59,5 +59,5 @@ fi set -x -sudo setfacl -Rdm "u:${USER}:rwX" "${PWD}" ${AUTODEPLOY_EXTRA_REPOS} ${EXTRA_DATA_DIR} "$@" # for future files -sudo setfacl -Rm "u:${USER}:rwX" "${PWD}" ${AUTODEPLOY_EXTRA_REPOS} ${EXTRA_DATA_DIR} "$@" # for existing files +sudo setfacl -Rdm "u:${USER}:rwX" "${PWD}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS} ${EXTRA_DATA_DIR} "$@" # for future files +sudo setfacl -Rm "u:${USER}:rwX" "${PWD}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS} ${EXTRA_DATA_DIR} "$@" # for existing files diff --git a/birdhouse/deployment/install-automated-deployment.sh b/birdhouse/deployment/install-automated-deployment.sh index 16c83fc44..b7062d4f4 100755 --- a/birdhouse/deployment/install-automated-deployment.sh +++ b/birdhouse/deployment/install-automated-deployment.sh @@ -5,11 +5,11 @@ # Will deploy: # # * cron job to periodically check if a deployment is needed -# (/etc/cron.d/PAVICS-deploy), update check frequency here +# (/etc/cron.d/birdhouse-deploy), update check frequency here # # * script called by cron job (/usr/local/sbin/triggerdeploy.sh) # -# * deploy check logs can be found in /var/log/PAVICS/autodeploy.log +# * deploy check logs can be found in ${BIRDHOUSE_LOG_DIR}/autodeploy.log # # # 2 cron frequency presets are available: @@ -24,7 +24,7 @@ # usage() { - echo "USAGE: $0 pavics-checkout owner-pavics-checkout [daily | 5-mins]" + echo "USAGE: $0 birdhouse-checkout owner-birdhouse-checkout [daily | 5-mins]" } if [ -z "$1" ]; then @@ -34,9 +34,10 @@ if [ -z "$1" ]; then fi -REPO_ROOT="`realpath "$1"`"; shift # path to PAVICS checkout -REPO_OWNER="$1"; shift # user owning (have write access) the PAVICS checkout +REPO_ROOT="`realpath "$1"`"; shift # path to Birdhouse checkout +REPO_OWNER="$1"; shift # user owning (have write access) the Birdhouse checkout CRON_FREQUENCY="$1" +COMPOSE_DIR="${COMPOSE_DIR:-"${REPO_ROOT}/birdhouse"}" # defaults, overridable if [ -z "$CRON_FREQUENCY_TXT" ]; then @@ -60,7 +61,7 @@ elif [ -n "$CRON_FREQUENCY" ]; then fi if [ ! -e "$REPO_ROOT/birdhouse/deployment/triggerdeploy.sh" ]; then - echo "ERROR: bad/wrong pavics-checkout '$REPO_ROOT' " 1>&2 + echo "ERROR: bad/wrong birdhouse-checkout '$REPO_ROOT' " 1>&2 usage exit 2 fi @@ -71,14 +72,18 @@ set -x sudo cp -v $REPO_ROOT/birdhouse/deployment/triggerdeploy.sh /usr/local/sbin/ -CRON_FILE="/etc/cron.d/PAVICS-deploy" +CRON_FILE=${CRON_FILE:-"/etc/cron.d/birdhouse-deploy"} + +. "${COMPOSE_DIR}/read-configs.include.sh" + +read_basic_configs_only export CRON_FREQUENCY_TXT="$CRON_FREQUENCY_TXT" export CRON_SCHEDULE="$CRON_SCHEDULE" -export OWNER_PAVICS_CHECKOUT="$REPO_OWNER" -export PATH_TO_PAVICS_CHECKOUT="$REPO_ROOT" +export BIRDHOUSE_REPO_CHECKOUT_OWNER="$REPO_OWNER" +export BIRDHOUSE_REPO_CHECKOUT_PATH="$REPO_ROOT" -[ ! -d "/var/log/PAVICS" ] && echo "WARNING: The logging directory doesn't exist. Run 'install-logrotate-config'." +[ !
-d "${BIRDHOUSE_LOG_DIR}" ] && echo "WARNING: The logging directory doesn't exist. Run 'install-logrotate-config'." cat $REPO_ROOT/birdhouse/deployment/cron.template | envsubst | sudo tee $CRON_FILE sudo chown root:root $CRON_FILE diff --git a/birdhouse/deployment/install-deploy-notebook b/birdhouse/deployment/install-deploy-notebook index 2a64b33bb..6982384d4 100755 --- a/birdhouse/deployment/install-deploy-notebook +++ b/birdhouse/deployment/install-deploy-notebook @@ -5,13 +5,14 @@ # Will deploy: # # * cron job to hourly deploy the tutorial notebooks -# (/etc/cron.hourly/PAVICS-deploy-notebooks), update check frequency here +# (/etc/cron.hourly/birdhouse-deploy-notebooks), update check frequency here +# The cron file name can be overridden by setting the CRON_FILE environment variable # -CRON_FILE="/etc/cron.hourly/PAVICS-deploy-notebooks" +CRON_FILE=${CRON_FILE:-"/etc/cron.hourly/birdhouse-deploy-notebooks"} usage() { - echo "USAGE: $0 pavics-checkout" + echo "USAGE: $0 birdhouse-checkout" } if [ -z "$1" ]; then @@ -21,21 +22,22 @@ if [ -z "$1" ]; then fi -REPO_ROOT="$(realpath "$1")"; shift # path to PAVICS checkout +REPO_ROOT="$(realpath "$1")"; shift # path to birdhouse checkout +COMPOSE_DIR="${COMPOSE_DIR:-"${REPO_ROOT}/birdhouse"}" if [ ! -e "${REPO_ROOT}/birdhouse/deployment/trigger-deploy-notebook" ]; then - echo "ERROR: bad/wrong pavics-checkout '${REPO_ROOT}' " 1>&2 + echo "ERROR: bad/wrong birdhouse-checkout '${REPO_ROOT}' " 1>&2 usage exit 2 fi -. "${REPO_ROOT}/birdhouse/read-configs.include.sh" +. "${COMPOSE_DIR}/read-configs.include.sh" # Get JUPYTERHUB_USER_DATA_DIR read_configs set -x -cat "${REPO_ROOT}/birdhouse/deployment/trigger-deploy-notebook" | envsubst '${JUPYTERHUB_USER_DATA_DIR}' | sudo tee "${CRON_FILE}" +cat "${COMPOSE_DIR}/deployment/trigger-deploy-notebook" | envsubst '${JUPYTERHUB_USER_DATA_DIR} ${BIRDHOUSE_LOG_DIR}' | sudo tee "${CRON_FILE}" sudo chown root:root "${CRON_FILE}" sudo chmod 755 "${CRON_FILE}" diff --git a/birdhouse/deployment/install-logrotate-config b/birdhouse/deployment/install-logrotate-config index 4118a9aa3..c902168d4 100755 --- a/birdhouse/deployment/install-logrotate-config +++ b/birdhouse/deployment/install-logrotate-config @@ -4,11 +4,13 @@ # Will deploy: # # * logrotate config for various automation scripts output -# (/etc/logrotate.d/PAVICS-deploy) +# (/etc/logrotate.d/birdhouse-deploy) # +LOGROTATE_FILE=${LOGROTATE_FILE:-"/etc/logrotate.d/birdhouse-deploy"} + usage() { - echo "USAGE: $0 pavics-checkout owner-pavics-checkout" + echo "USAGE: $0 birdhouse-checkout owner-birdhouse-checkout" } if [ -z "$1" ]; then @@ -17,22 +19,25 @@ if [ -z "$1" ]; then exit 2 fi -REPO_ROOT="`realpath "$1"`"; shift # path to PAVICS checkout -REPO_OWNER="$1"; shift # user owning (have write access) the PAVICS checkout +REPO_ROOT="`realpath "$1"`"; shift # path to Birdhouse checkout +REPO_OWNER="$1"; shift # user owning (have write access) the Birdhouse checkout +COMPOSE_DIR="${COMPOSE_DIR:-"${REPO_ROOT}/birdhouse"}" -if [ ! -e "$REPO_ROOT/birdhouse/deployment/PAVICS-deploy.logrotate" ]; then - echo "ERROR: bad/wrong pavics-checkout '$REPO_ROOT' " 1>&2 +if [ ! -e "$REPO_ROOT/birdhouse/deployment/birdhouse-deploy.logrotate" ]; then + echo "ERROR: bad/wrong birdhouse-checkout '$REPO_ROOT' " 1>&2 usage exit 2 fi +. 
"${COMPOSE_DIR}/read-configs.include.sh" + +read_basic_configs_only set -x -sudo mkdir -p /var/log/PAVICS -sudo chown $REPO_OWNER /var/log/PAVICS +sudo mkdir -p "${BIRDHOUSE_LOG_DIR}" +sudo chown $REPO_OWNER "${BIRDHOUSE_LOG_DIR}" -LOGROTATE_FILE="/etc/logrotate.d/PAVICS-deploy" -sudo cp -v $REPO_ROOT/birdhouse/deployment/PAVICS-deploy.logrotate $LOGROTATE_FILE +sudo cp -v "${COMPOSE_DIR}/deployment/birdhouse-deploy.logrotate" $LOGROTATE_FILE sudo chown root:root $LOGROTATE_FILE sudo chmod 644 $LOGROTATE_FILE diff --git a/birdhouse/deployment/trigger-deploy-notebook b/birdhouse/deployment/trigger-deploy-notebook index c0da0aa70..54b2ac192 100755 --- a/birdhouse/deployment/trigger-deploy-notebook +++ b/birdhouse/deployment/trigger-deploy-notebook @@ -8,15 +8,24 @@ # Same notebooks as tested by Jenkins and same notebooks as the Binder # https://mybinder.org/v2/gh/Ouranosinc/PAVICS-e2e-workflow-tests/master. # -# This is meant to be run on the same host running PAVICS. +# This is meant to be run on the same host running Birdhouse. # -# Logs to /var/log/PAVICS/notebookdeploy.log, re-use existing logrotate. +# Logs to ${BIRDHOUSE_LOG_DIR}/notebookdeploy.log, re-use existing logrotate. THIS_FILE="$(readlink -f "$0" || realpath "$0")" THIS_DIR="$(dirname "${THIS_FILE}")" COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" -LOG_FILE="/var/log/PAVICS/notebookdeploy.log" +# running script manually (not with cron) source env.local file. +if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then + . "${COMPOSE_DIR}/read-configs.include.sh" + + # Get JUPYTERHUB_USER_DATA_DIR + # Get BASH_IMAGE + read_configs +fi + +LOG_FILE="${BIRDHOUSE_LOG_DIR}/notebookdeploy.log" exec >>$LOG_FILE 2>&1 cleanup_on_exit() { @@ -33,15 +42,6 @@ START_TIME="$(date -Isecond)" echo "========== notebookdeploy START_TIME=$START_TIME" -# running script manually (not with cron) source env.local file. -if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then - . "${COMPOSE_DIR}/read-configs.include.sh" - - # Get JUPYTERHUB_USER_DATA_DIR - # Get BASH_IMAGE - read_configs -fi - set -x NOTEBOOK_DIR_MNT="/notebook_dir" diff --git a/birdhouse/deployment/triggerdeploy.sh b/birdhouse/deployment/triggerdeploy.sh index 82752dfb8..d76b96961 100755 --- a/birdhouse/deployment/triggerdeploy.sh +++ b/birdhouse/deployment/triggerdeploy.sh @@ -19,8 +19,8 @@ # # Sample /autodeploy/conditional-trigger content: # ==================== -# if [ -n "`echo "$GIT_CHANGED_FILES" | grep pavics-config/`" ]; then -# # Only changes under pavics-config/ will need to trigger autodeploy. +# if [ -n "`echo "$GIT_CHANGED_FILES" | grep birdhouse-config/`" ]; then +# # Only changes under birdhouse-config/ will need to trigger autodeploy. # echo "trigger autodeploy" # exit 0 # else @@ -33,8 +33,11 @@ # # Follow same instructions in deploy.sh. +BIRDHOUSE_LOG_DIR=${BIRDHOUSE_LOG_DIR:-"/var/log/birdhouse"} + if [ ! -z "$AUTODEPLOY_SILENT" ]; then - LOG_FILE="/var/log/PAVICS/autodeploy.log" + LOG_FILE="${BIRDHOUSE_LOG_DIR}/autodeploy.log" + mkdir -p "${BIRDHOUSE_LOG_DIR}" exec >>$LOG_FILE 2>&1 fi @@ -43,22 +46,16 @@ usage() { } COMPOSE_DIR="$1" -ENV_LOCAL_FILE="$2" +BIRDHOUSE_LOCAL_ENV="${2:-${BIRDHOUSE_LOCAL_ENV:-"${COMPOSE_DIR}/env.local"}}" if [ -z "$COMPOSE_DIR" ]; then - echo "ERROR: please provide path to PAVICS docker-compose dir." 1>&2 + echo "ERROR: please provide path to Birdhouse docker-compose dir." 
1>&2 usage exit 2 else shift fi -if [ -z "$ENV_LOCAL_FILE" ]; then - ENV_LOCAL_FILE="$COMPOSE_DIR/env.local" -else - shift -fi - COMPOSE_DIR="$(realpath "$COMPOSE_DIR")" if [ ! -f "$COMPOSE_DIR/docker-compose.yml" ]; then @@ -66,8 +63,8 @@ if [ ! -f "$COMPOSE_DIR/docker-compose.yml" ]; then exit 2 fi -if [ ! -f "$ENV_LOCAL_FILE" ]; then - echo "ERROR: env.local not found at '$ENV_LOCAL_FILE'" 1>&2 +if [ ! -f "$BIRDHOUSE_LOCAL_ENV" ]; then + echo "ERROR: env.local not found at '$BIRDHOUSE_LOCAL_ENV'" 1>&2 exit 2 fi @@ -80,8 +77,8 @@ cd $COMPOSE_DIR should_trigger() { EXTRA_REPO="$(git rev-parse --show-toplevel)" - DEPLOY_KEY="${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/$(basename "$EXTRA_REPO")_deploy_key" - DEFAULT_DEPLOY_KEY="${AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/id_rsa_git_ssh_read_only" + DEPLOY_KEY="${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/$(basename "$EXTRA_REPO")_deploy_key" + DEFAULT_DEPLOY_KEY="${BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR}/id_rsa_git_ssh_read_only" if [ ! -e "$DEPLOY_KEY" ] && [ -e "${DEFAULT_DEPLOY_KEY}" ]; then DEPLOY_KEY="${DEFAULT_DEPLOY_KEY}" fi @@ -179,13 +176,13 @@ triggerdeploy START_TIME=${START_TIME}" . "${COMPOSE_DIR}/read-configs.include.sh" -# Read AUTODEPLOY_EXTRA_REPOS +# Read BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS read_basic_configs_only set -x SHOULD_TRIGGER="" -for adir in "${COMPOSE_DIR}" ${AUTODEPLOY_EXTRA_REPOS}; do +for adir in "${COMPOSE_DIR}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS}; do if [ -d "${adir}" ]; then cd "${adir}" || exit @@ -207,7 +204,7 @@ if [ -n "${SHOULD_TRIGGER}" ]; then git show "${CURRENT_REMOTE_BRANCH}":./deployment/deploy.sh > "${TMP_SCRIPT}" chmod a+x "${TMP_SCRIPT}" - $TMP_SCRIPT "${COMPOSE_DIR}" "${ENV_LOCAL_FILE}" + $TMP_SCRIPT "${COMPOSE_DIR}" "${BIRDHOUSE_LOCAL_ENV}" EXIT_CODE=$? rm "${TMP_SCRIPT}" fi diff --git a/birdhouse/deprecated-components/README.rst b/birdhouse/deprecated-components/README.rst index a4857d1f0..cbedd0d6f 100644 --- a/birdhouse/deprecated-components/README.rst +++ b/birdhouse/deprecated-components/README.rst @@ -19,9 +19,9 @@ This directory also contains additional configurations for these deprecated comp These contain the settings to extend the deprecated components that have been moved from the corresponding directories under `birdhouse/optional-components`. -To enable these additional configurations; add them to the `EXTRA_CONF_DIRS` variable (in `env.local`) +To enable these additional configurations; add them to the `BIRDHOUSE_EXTRA_CONF_DIRS` variable (in `env.local`) as you would to enable any component. For example, to enable the deprecated malleefowl component as well as the -wps-healthchecks for malleefowl. The `EXTRA_CONF_DIRS` variable should contain: +wps-healthchecks for malleefowl. The `BIRDHOUSE_EXTRA_CONF_DIRS` variable should contain: .. 
code-block:: shell diff --git a/birdhouse/deprecated-components/catalog/catalog.cfg.template b/birdhouse/deprecated-components/catalog/catalog.cfg.template index 19b74860b..07d6a71f0 100644 --- a/birdhouse/deprecated-components/catalog/catalog.cfg.template +++ b/birdhouse/deprecated-components/catalog/catalog.cfg.template @@ -1,9 +1,9 @@ [catalog] -solr_host=http://${PAVICS_FQDN}:8983/solr/${THREDDS_SERVICE_DATA_URL_PATH}/ +solr_host=http://${BIRDHOUSE_FQDN}:8983/solr/${THREDDS_SERVICE_DATA_URL_PATH}/ # Multiple thredds hosts can be given, comma separated # note: this URL is also used as prefix when comparing authorizations from magpie -thredds_host=https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds +thredds_host=https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/thredds # Multiple esgf nodes can be given, comma separated esgf_nodes=https://esgf-node.llnl.gov/esg-search @@ -11,7 +11,7 @@ esgf_nodes=https://esgf-node.llnl.gov/esg-search # Provide a magpie host to filter results based on access permissions. # Must also provide credentials with read access so that the crawler can parse the thredds host(s) # Leave as a comment for a public catalog. -magpie_host=https://${PAVICS_FQDN_PUBLIC}/magpie +magpie_host=https://${BIRDHOUSE_FQDN_PUBLIC}/magpie magpie_user=${CATALOG_USERNAME} magpie_pw=${CATALOG_PASSWORD} # SSL verification (true or false) @@ -22,13 +22,13 @@ thredds_host_magpie_svc_name=${CATALOG_THREDDS_SERVICE} # WMS service url with replaced by each instance of the thredds_host, # without the port number and replaced by the base url in thredds. # Leave as comment to use the default WMS service -wms_alternate_server=https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms?SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0&DATASET=outputs/ +wms_alternate_server=https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms?SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0&DATASET=outputs/ [pywps] -outputurl=https://${PAVICS_FQDN_PUBLIC}/wpsoutputs/catalog +outputurl=https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs/catalog parallelprocesses=30 [logging] #level=DEBUG #file=/tmp/wps.log -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/catalog +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/catalog diff --git a/birdhouse/deprecated-components/catalog/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/catalog/config/canarie-api/canarie_api_monitoring.py.template index d172d058a..73705dcbc 100644 --- a/birdhouse/deprecated-components/catalog/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/catalog/config/canarie-api/canarie_api_monitoring.py.template @@ -6,20 +6,20 @@ SERVICES['Catalog'] = { 'institution': 'Ouranos', 'releaseTime': get_release_time_from_repo_tag("docker", "${CATALOG_DOCKER}", "${CATALOG_VERSION}"), 'researchSubject': 'Climate', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Catalog', 'tags': ['Catalog', 'Data'] }, 'stats': { 'method': '.*', - 'route': 'http://${PAVICS_FQDN}:8086/.*' + 'route': 'http://${BIRDHOUSE_FQDN}:8086/.*' }, 'redirect': { 'doc': 'https://ouranosinc.github.io/pavics-sdi/arch/data_catalog.html#pavics-datacatalog', 'releasenotes': 'https://github.com/Ouranosinc/PAVICS-DataCatalog/blob/master/CHANGES.md', 'support': 'https://github.com/ouranosinc/PAVICS-DataCatalog/issues', 'source': 
'https://github.com/ouranosinc/PAVICS-DataCatalog', - 'tryme': 'http://${PAVICS_FQDN}:8086/pywps?service=WPS&version=1.0.0&request=GetCapabilities', + 'tryme': 'http://${BIRDHOUSE_FQDN}:8086/pywps?service=WPS&version=1.0.0&request=GetCapabilities', 'licence': 'https://ouranosinc.github.io/pavics-sdi/arch/data_catalog.html#credits', 'provenance': 'https://ouranosinc.github.io/pavics-sdi/arch/data_catalog.html#pavics-datacatalog' }, @@ -27,7 +27,7 @@ SERVICES['Catalog'] = { "Catalog": { 'request': { # FIXME: remove port by design (https://github.com/bird-house/birdhouse-deploy/issues/222) - 'url': 'http://${PAVICS_FQDN}:8086/pywps?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'http://${BIRDHOUSE_FQDN}:8086/pywps?service=WPS&version=1.0.0&request=GetCapabilities' } } } diff --git a/birdhouse/deprecated-components/catalog/config/magpie/providers.cfg.template b/birdhouse/deprecated-components/catalog/config/magpie/providers.cfg.template index 1f6525cba..a13054089 100644 --- a/birdhouse/deprecated-components/catalog/config/magpie/providers.cfg.template +++ b/birdhouse/deprecated-components/catalog/config/magpie/providers.cfg.template @@ -1,6 +1,6 @@ providers: catalog: - url: http://${PAVICS_FQDN}:8086/pywps + url: http://${BIRDHOUSE_FQDN}:8086/pywps title: Catalog public: true c4i: false diff --git a/birdhouse/deprecated-components/flyingpigeon/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/flyingpigeon/config/canarie-api/canarie_api_monitoring.py.template index 6947e396b..ec4110010 100644 --- a/birdhouse/deprecated-components/flyingpigeon/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/flyingpigeon/config/canarie-api/canarie_api_monitoring.py.template @@ -11,7 +11,7 @@ SERVICES['flyingpigeon'] = { 'institution': 'bird-house', 'releaseTime': FLYINGPIGEON_RELEASE, 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Processing', 'tags': ['Climatology', 'WPS', 'OGC'], }, @@ -24,7 +24,7 @@ SERVICES['flyingpigeon'] = { 'releasenotes': 'https://github.com/bird-house/flyingpigeon/blob/master/CHANGES.rst', 'support': 'https://github.com/bird-house/flyingpigeon/issues', 'source': 'https://github.com/bird-house/flyingpigeon', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/flyingpigeon/wps?service=WPS&version=1.0.0&request=GetCapabilities', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/flyingpigeon/wps?service=WPS&version=1.0.0&request=GetCapabilities', 'licence': 'https://github.com/bird-house/flyingpigeon/blob/master/LICENSE.txt', 'provenance': 'https://github.com/bird-house/flyingpigeon' }, diff --git a/birdhouse/deprecated-components/flyingpigeon/service-config.json.template b/birdhouse/deprecated-components/flyingpigeon/service-config.json.template index 23dfc61f0..2924e081b 100644 --- a/birdhouse/deprecated-components/flyingpigeon/service-config.json.template +++ b/birdhouse/deprecated-components/flyingpigeon/service-config.json.template @@ -14,7 +14,7 @@ { "rel": "service", "type": "text/xml", - "href": "https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/flyingpigeon?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/flyingpigeon?service=WPS&request=GetCapabilities" }, { "rel": "service-doc", @@ -24,7 +24,7 @@ { "rel": "service-desc", "type": "text/xml", - "href": 
"https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/flyingpigeon?service=WPS&request=GetCapabilities" + "href": "https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/flyingpigeon?service=WPS&request=GetCapabilities" }, { "rel": "service-meta", diff --git a/birdhouse/deprecated-components/flyingpigeon/wps.cfg.template b/birdhouse/deprecated-components/flyingpigeon/wps.cfg.template index b239b21e4..ff15cd061 100644 --- a/birdhouse/deprecated-components/flyingpigeon/wps.cfg.template +++ b/birdhouse/deprecated-components/flyingpigeon/wps.cfg.template @@ -1,10 +1,10 @@ [server] -outputurl = https://${PAVICS_FQDN_PUBLIC}/wpsoutputs/flyingpigeon +outputurl = https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs/flyingpigeon outputpath = /data/wpsoutputs/flyingpigeon maxsingleinputsize = 2097152000.0 [logging] level = INFO -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/flyingpigeon +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/flyingpigeon -${EXTRA_PYWPS_CONFIG} +${BIRDHOUSE_EXTRA_PYWPS_CONFIG} diff --git a/birdhouse/deprecated-components/frontend/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/frontend/config/canarie-api/canarie_api_monitoring.py.template index 4c5c0299d..7a0a2175c 100644 --- a/birdhouse/deprecated-components/frontend/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/frontend/config/canarie-api/canarie_api_monitoring.py.template @@ -1,5 +1,5 @@ PLATFORMS['server']['monitoring']['Frontend'] = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}' } } diff --git a/birdhouse/deprecated-components/frontend/config/proxy/conf.extra-service.d/frontend.conf.template b/birdhouse/deprecated-components/frontend/config/proxy/conf.extra-service.d/frontend.conf.template index be972a983..c68c5d541 100644 --- a/birdhouse/deprecated-components/frontend/config/proxy/conf.extra-service.d/frontend.conf.template +++ b/birdhouse/deprecated-components/frontend/config/proxy/conf.extra-service.d/frontend.conf.template @@ -1,6 +1,6 @@ location /frontend/ { - proxy_pass http://${PAVICS_FQDN}:3000; + proxy_pass http://${BIRDHOUSE_FQDN}:3000; proxy_set_header Host $host; proxy_set_header X-Forwarded-Proto $real_scheme; } diff --git a/birdhouse/deprecated-components/frontend/frontend.env.template b/birdhouse/deprecated-components/frontend/frontend.env.template index 46570df97..e588a127b 100644 --- a/birdhouse/deprecated-components/frontend/frontend.env.template +++ b/birdhouse/deprecated-components/frontend/frontend.env.template @@ -1,8 +1,8 @@ -PAVICS_FRONTEND_IP=${PAVICS_FQDN} +PAVICS_FRONTEND_IP=${BIRDHOUSE_FQDN} PAVICS_FRONTEND_PORT=443 PAVICS_FRONTEND_PROTO=https -BIRDHOUSE_HOST=${PAVICS_FQDN} +BIRDHOUSE_HOST=${BIRDHOUSE_FQDN} NODE_TLS_REJECT_UNAUTHORIZED=0 -NCWMS_HOST=https://${PAVICS_FQDN}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms -CATALOG_HOST=https://${PAVICS_FQDN}${TWITCHER_PROTECTED_PATH}/catalog/pywps -MALLEEFOWL_HOST=https://${PAVICS_FQDN}${TWITCHER_PROTECTED_PATH}/malleefowl/wps +NCWMS_HOST=https://${BIRDHOUSE_FQDN}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms +CATALOG_HOST=https://${BIRDHOUSE_FQDN}${TWITCHER_PROTECTED_PATH}/catalog/pywps +MALLEEFOWL_HOST=https://${BIRDHOUSE_FQDN}${TWITCHER_PROTECTED_PATH}/malleefowl/wps diff --git a/birdhouse/deprecated-components/malleefowl/config/canarie-api/canarie_api_monitoring.py.template 
b/birdhouse/deprecated-components/malleefowl/config/canarie-api/canarie_api_monitoring.py.template index 84eae18e3..7ce8ef425 100644 --- a/birdhouse/deprecated-components/malleefowl/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/malleefowl/config/canarie-api/canarie_api_monitoring.py.template @@ -6,7 +6,7 @@ SERVICES['Malleefowl'] = { 'releaseTime': get_release_time_from_repo_tag("github", "Ouranosinc/malleefowl", "${MALLEEFOWL_VERSION}"), 'institution': 'Ouranos', 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Resource/Cloud Management', 'tags': ['Climatology'] }, @@ -19,7 +19,7 @@ SERVICES['Malleefowl'] = { 'releasenotes': 'https://github.com/Ouranosinc/malleefowl/blob/master/CHANGES.rst', 'support': 'https://github.com/Ouranosinc/malleefowl/issues', 'source': 'https://github.com/Ouranosinc/malleefowl', - 'tryme': 'https://${PAVICS_FQDN_PUBLIC}/malleefowl/', + 'tryme': 'https://${BIRDHOUSE_FQDN_PUBLIC}/malleefowl/', 'licence': '', 'provenance': '' }, @@ -27,7 +27,7 @@ SERVICES['Malleefowl'] = { "Malleefowl": { 'request': { # FIXME: remove port by design (https://github.com/bird-house/birdhouse-deploy/issues/222) - 'url': 'http://${PAVICS_FQDN}:8091/wps?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'http://${BIRDHOUSE_FQDN}:8091/wps?service=WPS&version=1.0.0&request=GetCapabilities' } } } diff --git a/birdhouse/deprecated-components/malleefowl/config/magpie/providers.cfg.template b/birdhouse/deprecated-components/malleefowl/config/magpie/providers.cfg.template index 074db01d5..ea57d9e07 100644 --- a/birdhouse/deprecated-components/malleefowl/config/magpie/providers.cfg.template +++ b/birdhouse/deprecated-components/malleefowl/config/magpie/providers.cfg.template @@ -1,6 +1,6 @@ providers: malleefowl: - url: http://${PAVICS_FQDN}:8091/wps + url: http://${BIRDHOUSE_FQDN}:8091/wps title: Malleefowl public: true c4i: false diff --git a/birdhouse/deprecated-components/malleefowl/custom.cfg.template b/birdhouse/deprecated-components/malleefowl/custom.cfg.template index 9d435a691..02d5cba58 100644 --- a/birdhouse/deprecated-components/malleefowl/custom.cfg.template +++ b/birdhouse/deprecated-components/malleefowl/custom.cfg.template @@ -5,8 +5,8 @@ extends=profiles/docker.cfg persist-path = /data archive-root = / thredds_url = https://${environment:HOSTNAME}/twitcher/ows/proxy/thredds/fileServer/birdhouse -wps_url = https://${environment:PAVICS_FQDN_PUBLIC}/wpsoutputs -wps_url2 = https://${environment:PAVICS_FQDN_PUBLIC}:443/wpsoutputs +wps_url = https://${environment:BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs +wps_url2 = https://${environment:BIRDHOUSE_FQDN_PUBLIC}:443/wpsoutputs ncwms_url = https://${environment:HOSTNAME}/twitcher/ows/proxy/ncWMS2/wms?SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0&DATASET=outputs opendap_url = https://${environment:HOSTNAME}/twitcher/ows/proxy/thredds/dodsC/birdhouse magpie_hostname = https://${environment:HOSTNAME}/magpie @@ -18,7 +18,7 @@ https-output-port = 443 workers = 10 parallelprocesses = 10 maxprocesses = 100 -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres/malleefowl +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres/malleefowl extra-options = archive_root=${settings:archive-root} persist_path=${settings:persist-path} diff --git a/birdhouse/deprecated-components/malleefowl/docker-compose-extra.yml 
b/birdhouse/deprecated-components/malleefowl/docker-compose-extra.yml index d528ce45f..f1596ff9a 100644 --- a/birdhouse/deprecated-components/malleefowl/docker-compose-extra.yml +++ b/birdhouse/deprecated-components/malleefowl/docker-compose-extra.yml @@ -12,7 +12,7 @@ services: container_name: malleefowl environment: HOSTNAME: $HOSTNAME - PAVICS_FQDN_PUBLIC: $PAVICS_FQDN_PUBLIC + BIRDHOUSE_FQDN_PUBLIC: $BIRDHOUSE_FQDN_PUBLIC HTTP_PORT: 8091 HTTPS_PORT: 28091 OUTPUT_PORT: 38091 @@ -22,7 +22,7 @@ services: - "38091:38091" - "48091:9001" volumes: - - ${DATA_PERSIST_ROOT}/datasets:/data + - ${BIRDHOUSE_DATA_PERSIST_ROOT}/datasets:/data - ./deprecated-components/malleefowl/custom.cfg:/opt/birdhouse/src/malleefowl/custom.cfg depends_on: - postgres diff --git a/birdhouse/deprecated-components/ncops/ncops.cfg.template b/birdhouse/deprecated-components/ncops/ncops.cfg.template index 3845644f1..03b414999 100644 --- a/birdhouse/deprecated-components/ncops/ncops.cfg.template +++ b/birdhouse/deprecated-components/ncops/ncops.cfg.template @@ -1,2 +1,2 @@ -GEOSERVER_HOST=${PAVICS_FQDN}:8087 -WPS_HOST=${PAVICS_FQDN}:8079 +GEOSERVER_HOST=${BIRDHOUSE_FQDN}:8087 +WPS_HOST=${BIRDHOUSE_FQDN}:8079 diff --git a/birdhouse/deprecated-components/ncwms2/config/proxy/conf.extra-service.d/ncwms2.conf.template b/birdhouse/deprecated-components/ncwms2/config/proxy/conf.extra-service.d/ncwms2.conf.template index c09f19b97..7d855cbfa 100644 --- a/birdhouse/deprecated-components/ncwms2/config/proxy/conf.extra-service.d/ncwms2.conf.template +++ b/birdhouse/deprecated-components/ncwms2/config/proxy/conf.extra-service.d/ncwms2.conf.template @@ -1,5 +1,5 @@ location /ncWMS2/ { - # proxy_pass http://${PAVICS_FQDN}:8080; + # proxy_pass http://${BIRDHOUSE_FQDN}:8080; # proxy_set_header Host $host; # proxy_set_header X-Forwarded-Proto $real_scheme; # include /etc/nginx/conf.d/cors.include; diff --git a/birdhouse/deprecated-components/ncwms2/docker-compose-extra.yml b/birdhouse/deprecated-components/ncwms2/docker-compose-extra.yml index 974deb484..34faf3f73 100644 --- a/birdhouse/deprecated-components/ncwms2/docker-compose-extra.yml +++ b/birdhouse/deprecated-components/ncwms2/docker-compose-extra.yml @@ -11,7 +11,7 @@ services: image: pavics/ncwms2:2.0.4 container_name: ncwms2 volumes: - - ${DATA_PERSIST_ROOT}/datasets:/data + - ${BIRDHOUSE_DATA_PERSIST_ROOT}/datasets:/data - ./deprecated-components/ncwms2/custom.cfg:/opt/birdhouse/custom.cfg - ./deprecated-components/ncwms2/server.xml:/opt/birdhouse/eggs/birdhousebuilder.recipe.tomcat-0.2.9-py2.7.egg/birdhousebuilder/recipe/tomcat/server.xml restart: always diff --git a/birdhouse/deprecated-components/phoenix/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/phoenix/config/canarie-api/canarie_api_monitoring.py.template index 1f7330255..8bd804ceb 100644 --- a/birdhouse/deprecated-components/phoenix/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/phoenix/config/canarie-api/canarie_api_monitoring.py.template @@ -6,7 +6,7 @@ SERVICES['Phoenix'] = { 'institution': 'Ouranos', 'releaseTime': get_release_time_from_repo_tag("docker", "${PHOENIX_DOCKER}", "${PHOENIX_VERSION}"), 'researchSubject': 'Authentication', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Authentication', 'tags': ['Authentication', 'Legacy'] }, @@ -19,7 +19,7 @@ SERVICES['Phoenix'] = { 'releasenotes': 'https://github.com/ouranosinc/pyramid-phoenix/CHANGES.rst', 'support': 
'https://github.com/ouranosinc/pyramid-phoenix/issues', 'source': 'https://github.com/ouranosinc/pyramid-phoenix', - 'tryme': 'https://${PAVICS_FQDN}:8443/', + 'tryme': 'https://${BIRDHOUSE_FQDN}:8443/', 'licence': 'https://github.com/ouranosinc/pyramid-phoenix/blob/master/LICENSE.txt', 'provenance': 'https://ouranosinc.github.io/pavics-sdi/provenance/index.html' }, @@ -27,7 +27,7 @@ SERVICES['Phoenix'] = { "Phoenix": { 'request': { # FIXME: remove port by design (https://github.com/bird-house/birdhouse-deploy/issues/222) - 'url': 'https://${PAVICS_FQDN}:8443/' + 'url': 'https://${BIRDHOUSE_FQDN}:8443/' } } } diff --git a/birdhouse/deprecated-components/phoenix/docker-compose-extra.yml b/birdhouse/deprecated-components/phoenix/docker-compose-extra.yml index 2205263ec..f157c3065 100644 --- a/birdhouse/deprecated-components/phoenix/docker-compose-extra.yml +++ b/birdhouse/deprecated-components/phoenix/docker-compose-extra.yml @@ -23,7 +23,7 @@ services: - "9001:9001" volumes: - ./deprecated-components/phoenix/custom.cfg:/opt/birdhouse/src/phoenix/custom.cfg - - ${SSL_CERTIFICATE}:/opt/birdhouse/etc/nginx/cert.pem + - ${BIRDHOUSE_SSL_CERTIFICATE}:/opt/birdhouse/etc/nginx/cert.pem links: - mongodb networks: diff --git a/birdhouse/deprecated-components/project-api/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/project-api/config/canarie-api/canarie_api_monitoring.py.template index 060448f2b..9b3b4e7e4 100644 --- a/birdhouse/deprecated-components/project-api/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/project-api/config/canarie-api/canarie_api_monitoring.py.template @@ -1,6 +1,6 @@ PLATFORMS['server']['monitoring']['Project'] = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/project-api/explorer/' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/project-api/explorer/' } } PLATFORMS['server']['stats']['route'] = '/project-api/.*' diff --git a/birdhouse/deprecated-components/project-api/config/proxy/conf.extra-service.d/project-api.conf.template b/birdhouse/deprecated-components/project-api/config/proxy/conf.extra-service.d/project-api.conf.template index 902ded2f7..0f85db11a 100644 --- a/birdhouse/deprecated-components/project-api/config/proxy/conf.extra-service.d/project-api.conf.template +++ b/birdhouse/deprecated-components/project-api/config/proxy/conf.extra-service.d/project-api.conf.template @@ -1,6 +1,6 @@ location /project-api/ { - proxy_pass http://${PAVICS_FQDN}:3005/; + proxy_pass http://${BIRDHOUSE_FQDN}:3005/; proxy_set_header Host $host; proxy_set_header X-Forwarded-Proto $real_scheme; } diff --git a/birdhouse/deprecated-components/solr/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/deprecated-components/solr/config/canarie-api/canarie_api_monitoring.py.template index 26a3f55db..28aaf4e80 100644 --- a/birdhouse/deprecated-components/solr/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/deprecated-components/solr/config/canarie-api/canarie_api_monitoring.py.template @@ -11,7 +11,7 @@ SERVICES['Solr'] = { 'institution': 'Ouranos', 'releaseTime': get_release_time_from_repo_tag("docker", "${SOLR_DOCKER}", "${SOLR_VERSION}"), 'researchSubject': 'Climatology', - 'supportEmail': '${SUPPORT_EMAIL}', + 'supportEmail': '${BIRDHOUSE_SUPPORT_EMAIL}', 'category': 'Data Manipulation', 'tags': ['Indexation', 'Search'] }, @@ -24,7 +24,7 @@ SERVICES['Solr'] = { 'releasenotes': 'https://github.com/Ouranosinc/PAVICS/tags', 'support': 
'https://github.com/Ouranosinc/PAVICS/issues', 'source': 'https://github.com/Ouranosinc/PAVICS/tree/master/birdhouse/docker/solr', - 'tryme': 'http://${PAVICS_FQDN}:8983/solr/', + 'tryme': 'http://${BIRDHOUSE_FQDN}:8983/solr/', 'licence': 'https://github.com/bird-house/finch/blob/master/LICENSE.txt', 'provenance': 'https://ouranosinc.github.io/pavics-sdi/arch/backend.html#indexation' }, @@ -32,7 +32,7 @@ SERVICES['Solr'] = { 'Solr': { 'request': { # FIXME: remove port by design (https://github.com/bird-house/birdhouse-deploy/issues/222) - 'url': 'http://${PAVICS_FQDN}:8983/solr/${THREDDS_SERVICE_DATA_URL_PATH}/select' + 'url': 'http://${BIRDHOUSE_FQDN}:8983/solr/${THREDDS_SERVICE_DATA_URL_PATH}/select' } }, } diff --git a/birdhouse/deprecated-components/solr/docker-compose-extra.yml b/birdhouse/deprecated-components/solr/docker-compose-extra.yml index d61c6e5cb..c9e08e492 100644 --- a/birdhouse/deprecated-components/solr/docker-compose-extra.yml +++ b/birdhouse/deprecated-components/solr/docker-compose-extra.yml @@ -15,6 +15,6 @@ services: - "8983:8983" - "48983:9001" volumes: - - ${DATA_PERSIST_ROOT}/solr:/data + - ${BIRDHOUSE_DATA_PERSIST_ROOT}/solr:/data restart: always logging: *default-logging diff --git a/birdhouse/env.local.example b/birdhouse/env.local.example index 347d79b85..99be5ea12 100644 --- a/birdhouse/env.local.example +++ b/birdhouse/env.local.example @@ -1,8 +1,8 @@ ############################################################################# -# Mandatory vars (will be enforced by pavics-compose.sh) +# Mandatory vars (will be enforced by BIRDHOUSE_COMPOSE) # Can add new vars but do not remove, else automated deployment will break # -# Do NOT use environment variables in here since when pavics-compose.sh runs +# Do NOT use environment variables in here since when BIRDHOUSE_COMPOSE runs # inside a container, the environment vars do not have the same value. # # Any default value that can pose a security concern or that are strongly @@ -13,25 +13,24 @@ ############################################################################# # Override data persistence root directory -# export DATA_PERSIST_ROOT="/data/custom/path" # otherwise use value of 'default.env', directory must exist +# export BIRDHOUSE_DATA_PERSIST_ROOT="/data/custom/path" # otherwise use value of 'default.env', directory must exist # Root directory for all files that are persisted on disk and may contain links (ie. the files # are "shared" between subdirectories). This means that the subdirectory structure is fixed. 
-#export DATA_PERSIST_SHARED_ROOT='${DATA_PERSIST_ROOT}' # otherwise use the value from 'default.env', must exist +#export BIRDHOUSE_DATA_PERSIST_SHARED_ROOT='${BIRDHOUSE_DATA_PERSIST_ROOT}' # otherwise use the value from 'default.env', must exist -export SSL_CERTIFICATE="${__DEFAULT__SSL_CERTIFICATE}" # *absolute* path to the nginx ssl certificate, path and key bundle -export PAVICS_FQDN="${__DEFAULT__PAVICS_FQDN}" # Fully qualified domain name of this Pavics installation -export DOC_URL="${__DEFAULT__DOC_URL}" # URL where /doc gets redirected +export BIRDHOUSE_SSL_CERTIFICATE="${__DEFAULT__BIRDHOUSE_SSL_CERTIFICATE}" # *absolute* path to the nginx ssl certificate, path and key bundle +export BIRDHOUSE_FQDN="${__DEFAULT__BIRDHOUSE_FQDN}" # Fully qualified domain name of this Birdhouse installation +export BIRDHOUSE_DOC_URL="${__DEFAULT__BIRDHOUSE_DOC_URL}" # URL where /doc gets redirected export MAGPIE_SECRET="${__DEFAULT__MAGPIE_SECRET}" export MAGPIE_ADMIN_USERNAME="${__DEFAULT__MAGPIE_ADMIN_USERNAME}" # Magpie now requires a password length of at least 12 characters # For initial bootstrap only, change in the Magpie Web UI after initial boostrap. export MAGPIE_ADMIN_PASSWORD="${__DEFAULT__MAGPIE_ADMIN_PASSWORD}" export TWITCHER_PROTECTED_PATH="/twitcher/ows/proxy" -export SUPPORT_EMAIL="${__DEFAULT__SUPPORT_EMAIL}" -export CMIP5_THREDDS_ROOT="birdhouse/CMIP5/CCCMA" -export POSTGRES_PAVICS_USERNAME="${__DEFAULT__POSTGRES_PAVICS_USERNAME}" -export POSTGRES_PAVICS_PASSWORD="${__DEFAULT__POSTGRES_PAVICS_PASSWORD}" +export BIRDHOUSE_SUPPORT_EMAIL="${__DEFAULT__BIRDHOUSE_SUPPORT_EMAIL}" +export BIRDHOUSE_POSTGRES_USERNAME="${__DEFAULT__BIRDHOUSE_POSTGRES_USERNAME}" +export BIRDHOUSE_POSTGRES_PASSWORD="${__DEFAULT__BIRDHOUSE_POSTGRES_PASSWORD}" export POSTGRES_MAGPIE_USERNAME="${__DEFAULT__POSTGRES_MAGPIE_USERNAME}" export POSTGRES_MAGPIE_PASSWORD="${__DEFAULT__POSTGRES_MAGPIE_PASSWORD}" export GEOSERVER_ADMIN_USER="${__DEFAULT__GEOSERVER_ADMIN_USER}" @@ -64,7 +63,7 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # * Assemble different combinations of components/functionalities by including # only the config/docker-compose fragment necessary. # -# Last dir/component in the EXTRA_CONF_DIRS list have highest override +# Last dir/component in the BIRDHOUSE_EXTRA_CONF_DIRS list have highest override # precedence, example: # # * Last docker-compose volume mount to same destination win over the @@ -78,14 +77,14 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # Suggested to keep the private-config-repo last in the list to be able to # override anything. # -# Note that a component listed in DEFAULT_CONF_DIRS and EXTRA_CONF_DIRS +# Note that a component listed in BIRDHOUSE_DEFAULT_CONF_DIRS and BIRDHOUSE_EXTRA_CONF_DIRS # will load any dependant components (defined in the COMPONENT_DEPENDENCIES # variable) immediately after the specified component. # # Format: space separated list of dirs # -#export EXTRA_CONF_DIRS="/path/to/dir1 ./path/to/dir2 dir3 dir4" -#export EXTRA_CONF_DIRS=" +#export BIRDHOUSE_EXTRA_CONF_DIRS="/path/to/dir1 ./path/to/dir2 dir3 dir4" +#export BIRDHOUSE_EXTRA_CONF_DIRS=" # ./components/canarie-api # ./components/geoserver # ./components/finch @@ -114,7 +113,7 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # Any changes to these extra repos will also trigger autodeploy. 
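# (Illustrative sketch only -- all values below are placeholders, not project
#  defaults.) A fragment of a migrated env.local using the renamed BIRDHOUSE_*
#  variables from this section, with the private-config repo kept last so it
#  can override anything:
#
# export BIRDHOUSE_FQDN="birdhouse.example.com"
# export BIRDHOUSE_SSL_CERTIFICATE="/path/to/cert-and-key-bundle.pem"
# export BIRDHOUSE_SUPPORT_EMAIL="admin@example.com"
# export BIRDHOUSE_POSTGRES_USERNAME="postgres-user"
# export BIRDHOUSE_POSTGRES_PASSWORD="change-me"
# export BIRDHOUSE_EXTRA_CONF_DIRS="
#     ./components/canarie-api
#     ./components/geoserver
#     ./components/finch
#     /path/to/private-config-repo
# "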
# # Useful to save the instanciated version of this env.local config file and -# any custom docker-compose-extra.yml from the previous EXTRA_CONF_DIRS var. +# any custom docker-compose-extra.yml from the previous BIRDHOUSE_EXTRA_CONF_DIRS var. # # Note: # @@ -123,14 +122,14 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # it here, out-of-date detection currently only works for git repos. # # * To preserve write permissions for your user, run once for this repo and -# once each time AUTODEPLOY_EXTRA_REPOS changes: +# once each time BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS changes: # deployment/fix-write-perm # # Format: space separated list of full path to dirs -#export AUTODEPLOY_EXTRA_REPOS="/path/to/dir1 /path/to/dir2 /path/to/dir3" -#export AUTODEPLOY_EXTRA_REPOS="/path/to/private-config-containing-env.local" +#export BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS="/path/to/dir1 /path/to/dir2 /path/to/dir3" +#export BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS="/path/to/private-config-containing-env.local" -# For each git repo in AUTODEPLOY_EXTRA_REPOS that use ssh to clone/fetch +# For each git repo in BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS that use ssh to clone/fetch # instead of https, provide its corresponding ssh deploy key in this dir. # # See instructions in deployment/deploy.sh or @@ -153,7 +152,7 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # private-config-containing-env.local_deploy_key, # id_rsa_git_ssh_read_only # -#export AUTODEPLOY_DEPLOY_KEY_ROOT_DIR="/path/to/ssh-deploy-keys-for-all-repos" +#export BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR="/path/to/ssh-deploy-keys-for-all-repos" # Frequency to trigger the various autodeploy tasks. # See default.env for default. @@ -168,26 +167,26 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # - every 2 hours: "*/120 * * * *" or "@every 2h" # - every 5 minutes: "*/5 * * * *" or "@every 5m" # -# "Platform" are all the git repos in AUTODEPLOY_EXTRA_REPOS. -#export AUTODEPLOY_PLATFORM_FREQUENCY="@every 5m" +# "Platform" are all the git repos in BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS. +#export BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY="@every 5m" # # "Notebook" are all the tutorial notebooks on Jupyter. -#export AUTODEPLOY_NOTEBOOK_FREQUENCY="@every 5m" +#export BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY="@every 5m" # Add more jobs to ./components/scheduler/config.yml # # Potential usages: other deployment, backup jobs on the same machine # -#export AUTODEPLOY_EXTRA_SCHEDULER_JOBS="" +#export BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS="" # The scheduler runs as the root user so new/updated files will be owned by root after the code is updated. -# If AUTODEPLOY_CODE_OWNERSHIP if set, the ownership of all files and directories in this repo will be set to that user +# If BIRDHOUSE_AUTODEPLOY_CODE_OWNERSHIP if set, the ownership of all files and directories in this repo will be set to that user # after each autodeploy update. -# AUTODEPLOY_CODE_OWNERSHIP should contain uids instead of usernames since usernames within a docker container will +# BIRDHOUSE_AUTODEPLOY_CODE_OWNERSHIP should contain uids instead of usernames since usernames within a docker container will # not necessarily line up with those on the host system. -# AUTODEPLOY_CODE_OWNERSHIP should be set to a constant value (eg. "1000" not "$(id -u)") since this will be evaluated +# BIRDHOUSE_AUTODEPLOY_CODE_OWNERSHIP should be set to a constant value (eg. 
"1000" not "$(id -u)") since this will be evaluated # within the autodeploy docker container as the root user. -#export AUTODEPLOY_CODE_OWNERSHIP="1000:1000" +#export BIRDHOUSE_AUTODEPLOY_CODE_OWNERSHIP="1000:1000" # Load pre-configured job to auto-renew LetsEncrypt SSL certificate if a # LetsEncrypt SSL certificate has previously been requested. @@ -198,8 +197,8 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # See the job for additional possible configurations. The "scheduler" # component needs to be enabled for this pre-configured job to work. # -# This job will write to the value of SSL_CERTIFICATE here so make sure this -# job is sourced after the last definition of SSL_CERTIFICATE. +# This job will write to the value of BIRDHOUSE_SSL_CERTIFICATE here so make sure this +# job is sourced after the last definition of BIRDHOUSE_SSL_CERTIFICATE. # #if [ -f "//components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env" ]; then # . //components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env @@ -230,26 +229,26 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" #fi # Mount point on host machine for the scheduler to write data from log rotations -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export LOGROTATE_DATA_DIR='${DATA_PERSIST_ROOT}/logrotate' +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export BIRDHOUSE_LOGROTATE_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/logrotate' # Content of "location /" in file config/proxy/conf.d/all-services.include.template # Useful to have a custom homepage. # Default: -#export PROXY_ROOT_LOCATION="return 302 https://\$host/magpie;" +#export BIRDHOUSE_PROXY_ROOT_LOCATION="return 302 https://\$host/magpie;" # Note that the default homepage will become the jupyterhub login page if the jupyterhub component is enabled. # If the jupyterhub component is not enabled, it is highly recommended to create a custom homepage since the magpie # landing page is not the most user-friendly option. # Sample, remember to add this /data/homepage volume mount to proxy container. # See PR https://github.com/bird-house/birdhouse-deploy-ouranos/pull/11. -#export PROXY_ROOT_LOCATION="alias /data/homepage/;" +#export BIRDHOUSE_PROXY_ROOT_LOCATION="alias /data/homepage/;" -# Public (on the internet) fully qualified domain name of this Pavics -# installation. This is optional so default to the same internal PAVICS_FQDN if +# Public (on the internet) fully qualified domain name of this Birdhouse +# installation. This is optional so default to the same internal BIRDHOUSE_FQDN if # not set. -#export PAVICS_FQDN_PUBLIC="$PAVICS_FQDN" +#export BIRDHOUSE_FQDN_PUBLIC="$BIRDHOUSE_FQDN" -# If ALLOW_UNSECURE_HTTP is enabled, port 80 will not redirect to 443 +# If BIRDHOUSE_ALLOW_UNSECURE_HTTP is enabled, port 80 will not redirect to 443 # anymore and will have the same service definitions as 443. # # This is so we can use pagekite proper SSL certificate. Pagekite will @@ -260,17 +259,16 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # Set to "True" to allow traffic on unsecure port 80. # This is for debugging only, do Not set this on a Production server. # -#export ALLOW_UNSECURE_HTTP="" +#export BIRDHOUSE_ALLOW_UNSECURE_HTTP="" # Jupyter single-user server images -# See components/jupyterhub/default.env for the most updated version of pavics/workflow-tests. 
-#export DOCKER_NOTEBOOK_IMAGES="pavics/workflow-tests:py311-240506-update240508 \ -# pavics/crim-jupyter-eo:0.3.0 \ -# pavics/crim-jupyter-nlp:0.4.0 \ -# birdhouse/pavics-jupyter-base:mlflow-proxy" +#export JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES="pavics/workflow-tests:py311-240506-update240508 \ +# pavics/crim-jupyter-eo:0.3.0 \ +# pavics/crim-jupyter-nlp:0.4.0 \ +# birdhouse/pavics-jupyter-base:mlflow-proxy" # Name of the images displayed on the JupyterHub image selection page -# The name order must correspond to the order of the DOCKER_NOTEBOOK_IMAGES variable, +# The name order must correspond to the order of the JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES variable, # and both variables should have the same number of entries. # Note that the selection names are also used as directory names for the tutorial-notebooks directories mounted when # starting the corresponding image. The name can use the '' or the ':' format. The version will be @@ -282,12 +280,12 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # allow jupyterhub user selection of which notebook image to run # see https://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html -#export ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS=" +#export JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS=" #c.DockerSpawner.image_whitelist = { -# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[0]: os.environ['DOCKER_NOTEBOOK_IMAGES'].split()[0], -# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[1]: os.environ['DOCKER_NOTEBOOK_IMAGES'].split()[1], -# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[2]: os.environ['DOCKER_NOTEBOOK_IMAGES'].split()[2], -# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[3]: os.environ['DOCKER_NOTEBOOK_IMAGES'].split()[3], +# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[0]: os.environ['JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES'].split()[0], +# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[1]: os.environ['JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES'].split()[1], +# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[2]: os.environ['JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES'].split()[2], +# os.environ['JUPYTERHUB_IMAGE_SELECTION_NAMES'].split()[3]: os.environ['JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES'].split()[3], # 'jupyter/scipy-notebook': 'jupyter/scipy-notebook', # 'jupyter/r-notebook': 'jupyter/r-notebook', # 'jupyter/tensorflow-notebook': 'jupyter/tensorflow-notebook', @@ -303,7 +301,7 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # Path to a checked out repo of "pavics-jupyter-base" (https://github.com/bird-house/pavics-jupyter-base) # which contains the config required for the cronjob generation #CHECKOUT_PAVICS_JUPYTER_BASE="/path/to/checkout/pavics-jupyter-base" -#export AUTODEPLOY_EXTRA_REPOS="$AUTODEPLOY_EXTRA_REPOS $CHECKOUT_PAVICS_JUPYTER_BASE" +#export BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS="$BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS $CHECKOUT_PAVICS_JUPYTER_BASE" # Config for the generation of cronjobs, found on external repo #DEPLOY_DATA_PAVICS_JUPYTER_ENV="$CHECKOUT_PAVICS_JUPYTER_BASE/scheduler-jobs/deploy_data_pavics_jupyter.env" @@ -314,16 +312,16 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" #fi # Activates mounting a tutorial-notebooks subfolder that has the same name as the spawned image on JupyterHub -# This variable is only useful if there are more than one image in DOCKER_NOTEBOOK_IMAGES -# and ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS is set with a proper c.DockerSpawner.image_whitelist -# matching the images in DOCKER_NOTEBOOK_IMAGES 
and their corresponding JUPYTERHUB_IMAGE_SELECTION_NAMES. -# export MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=true +# This variable is only useful if there are more than one image in JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES +# and JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS is set with a proper c.DockerSpawner.image_whitelist +# matching the images in JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES and their corresponding JUPYTERHUB_IMAGE_SELECTION_NAMES. +# export JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=true # The parent folder where all the user notebooks will be stored. # For example, a user named "bob" will have his data in $JUPYTERHUB_USER_DATA_DIR/bob # and this folder will be mounted when he logs into JupyterHub. -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export JUPYTERHUB_USER_DATA_DIR="$DATA_PERSIST_ROOT/jupyterhub_user_data" +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export JUPYTERHUB_USER_DATA_DIR="$BIRDHOUSE_DATA_PERSIST_ROOT/jupyterhub_user_data" # Path to the file containing the clientID for the google drive extension for jupyterlab # This file will be mounted into JupyterLab instances. @@ -437,7 +435,7 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" #export JUPYTERHUB_ADMIN_USERS='{\"${MAGPIE_ADMIN_USERNAME}\", \"othername\"}' # python set syntax # Extra PyWPS config for **all** WPS services (currently only Flyingpigeon, Finch and Raven supported). -# export EXTRA_PYWPS_CONFIG=" +# export BIRDHOUSE_EXTRA_PYWPS_CONFIG=" # [logging] # level = DEBUG # " @@ -447,10 +445,10 @@ export GEOSERVER_ADMIN_PASSWORD="${__DEFAULT__GEOSERVER_ADMIN_PASSWORD}" # Name of organization hosting the Thredds server #export THREDDS_ORGANIZATION="Birdhouse" -#export THREDDS_DATASET_LOCATION_ON_CONTAINER='/pavics-ncml' # this default is for backward compatibility -#export THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER='/pavics-data' # this default is for backward compatibility -#export THREDDS_DATASET_LOCATION_ON_HOST='${DATA_PERSIST_ROOT}/ncml' # this default is for backward compatibility -#export THREDDS_SERVICE_DATA_LOCATION_ON_HOST='${DATA_PERSIST_ROOT}/datasets' # this default is for backward compatibility +#export THREDDS_DATASET_LOCATION_ON_CONTAINER='/birdhouse-ncml' # this default is for backward compatibility +#export THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER='/birdhouse-data' # this default is for backward compatibility +#export THREDDS_DATASET_LOCATION_ON_HOST='${BIRDHOUSE_DATA_PERSIST_ROOT}/ncml' # this default is for backward compatibility +#export THREDDS_SERVICE_DATA_LOCATION_ON_HOST='${BIRDHOUSE_DATA_PERSIST_ROOT}/datasets' # this default is for backward compatibility #export THREDDS_DATASET_LOCATION_NAME='Datasets' # this default is for backward compatibility #export THREDDS_SERVICE_DATA_LOCATION_NAME='Birdhouse' # this default is for backward compatibility #export THREDDS_DATASET_URL_PATH='datasets' # this default is for backward compatibility @@ -482,13 +480,13 @@ export THREDDS_ADDITIONAL_CATALOG="" # To setup Github as login, goto under section [OAuth Apps] # and create a new Magpie application with configurations: # -# Homepage URL: https://${PAVICS_FQDN} -# Authorization callback URL: https://${PAVICS_FQDN}/magpie/providers/github/signin +# Homepage URL: https://${BIRDHOUSE_FQDN} +# Authorization callback URL: https://${BIRDHOUSE_FQDN}/magpie/providers/github/signin # # Then, specify obtained Github client ID/Secret for this Magpie OAuth App 
with following variables. # -#export GITHUB_CLIENT_ID=#### -#export GITHUB_CLIENT_SECRET=#### +#export MAGPIE_GITHUB_CLIENT_ID=#### +#export MAGPIE_GITHUB_CLIENT_SECRET=#### # Magpie DB name #export MAGPIE_DB_NAME="magpiedb" @@ -521,7 +519,7 @@ export THREDDS_ADDITIONAL_CATALOG="" #export MAGPIE_SMTP_PASSWORD="" # Set to 'false' if using self-signed SSL certificate -#export VERIFY_SSL="true" +#export BIRDHOUSE_VERIFY_SSL="true" # Jupyter public demo account with limited computing resources for security reasons #export JUPYTER_DEMO_USER="demo" @@ -534,23 +532,23 @@ export THREDDS_ADDITIONAL_CATALOG="" # Raven to use the local Geoserver instead of the default production. # See raven/default.env for more info. -#export RAVEN_GEO_URL="https://${PAVICS_FQDN}/geoserver/" +#export RAVEN_GEO_URL="https://${BIRDHOUSE_FQDN}/geoserver/" # Mount point on host machine to store mongodb server data -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_persist' +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_persist' # Mount point on host machine for cowbird to store data from its mongodb server -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export COWBIRD_MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_cowbird_persist' +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export COWBIRD_MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_cowbird_persist' # Mount point on host machine to store postgres server data -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export POSTGRES_DATA_DIR='${DATA_PERSIST_ROOT}/frontend_persist' +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export POSTGRES_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/frontend_persist' # Mount point on host machine for weaver to store data from its mongodb server -# (note: if using 'DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') -#export WEAVER_MONGODB_DATA_DIR='${DATA_PERSIST_ROOT}/mongodb_weaver_persist' +# (note: if using 'BIRDHOUSE_DATA_PERSIST_ROOT', it must be defined earlier, either in this file or from 'default.env') +#export WEAVER_MONGODB_DATA_DIR='${BIRDHOUSE_DATA_PERSIST_ROOT}/mongodb_weaver_persist' # If "True", Weaver providers that are no longer working (not responding when deployed) and are not named in # WEAVER_WPS_PROVIDERS will be unregistered. This is useful when deploying Weaver with fewer providers than a previous @@ -568,7 +566,7 @@ export THREDDS_ADDITIONAL_CATALOG="" # Below are Mandatory if monitoring component is enabled: #export GRAFANA_ADMIN_PASSWORD=changeme! #export ALERTMANAGER_ADMIN_EMAIL_RECEIVER="user1@example.com,user2@example.com" -#export SMTP_SERVER="smtp.example.com:25" +#export ALERTMANAGER_SMTP_SERVER="smtp.example.com:25" # Below are optional for monitoring component #export ALERTMANAGER_EXTRA_GLOBAL="" @@ -580,7 +578,7 @@ export THREDDS_ADDITIONAL_CATALOG="" # Emu optional vars ############################################################################# -# To enable emu: add './optional-components/emu' to EXTRA_CONF_DIRS above. 
+# To enable emu: add './optional-components/emu' to BIRDHOUSE_EXTRA_CONF_DIRS above. # Emu WPS service image if that testing component is enabled #export EMU_IMAGE="tlvu/emu:watchdog" @@ -605,9 +603,9 @@ export PHOENIX_PASSWORD_HASH="${__DEFAULT__PHOENIX_PASSWORD_HASH}" # Remove orphans containers, useful when disabling components. # Harmless when left enabled all the time. # Not working at the time of this writing, see https://github.com/docker/compose/issues/11374. -# Use COMPOSE_UP_EXTRA_OPTS below as a work-around. +# Use BIRDHOUSE_COMPOSE_UP_EXTRA_OPTS below as a work-around. #export COMPOSE_REMOVE_ORPHANS=true -# Extra options for 'pavics-compose.sh up'. +# Extra options for 'docker-compose up'. # --remove-orphans useful when disabling components. Harmless when left enabled all the time. -#export COMPOSE_UP_EXTRA_OPTS="--remove-orphans" +#export BIRDHOUSE_COMPOSE_UP_EXTRA_OPTS="--remove-orphans" diff --git a/birdhouse/optional-components/README.rst b/birdhouse/optional-components/README.rst index 4c829abcf..b1c0df792 100644 --- a/birdhouse/optional-components/README.rst +++ b/birdhouse/optional-components/README.rst @@ -8,7 +8,7 @@ Optional components Monitor all components in CANARIE node, both public and internal url -------------------------------------------------------------------- -So that the url ``https:///canarie/node/service/stats`` also return +So that the url ``https:///canarie/node/service/stats`` also return what the end user really see (a component might work but is not accessible to the end user). @@ -18,7 +18,7 @@ this config and adjust accordingly. How to enable this config in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/canarie-api-full-monitoring`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/canarie-api-full-monitoring`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Emu WPS service for testing @@ -34,16 +34,16 @@ instead. How to enable Emu in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/emu`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/emu`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. * Optionally set ``EMU_IMAGE``, ``EMU_NAME``, ``EMU_INTERNAL_PORT``, ``EMU_WPS_OUTPUTS_VOL`` in ``env.local`` for further customizations. Default values are in `optional-components/emu/default.env `_ (:download:`download `). -Emu service will be available at ``http://PAVICS_FQDN:EMU_PORT/wps`` or -``https://PAVICS_FQDN_PUBLIC/TWITCHER_PROTECTED_PATH/EMU_NAME`` where -``PAVICS_FQDN``\ , ``PAVICS_FQDN_PUBLIC`` and ``TWITCHER_PROTECTED_PATH`` are defined +Emu service will be available at ``http://BIRDHOUSE_FQDN:EMU_PORT/wps`` or +``https://BIRDHOUSE_FQDN_PUBLIC/TWITCHER_PROTECTED_PATH/EMU_NAME`` where +``BIRDHOUSE_FQDN``\ , ``BIRDHOUSE_FQDN_PUBLIC`` and ``TWITCHER_PROTECTED_PATH`` are defined in your ``env.local``. Magpie will be automatically configured to give complete public anonymous @@ -58,7 +58,7 @@ A second THREDDS server for testing How to enable in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/testthredds`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/testthredds`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. * Optionally set ``TESTTHREDDS_IMAGE``\ , ``TESTTHREDDS_PORT``\ , ``TESTTHREDDS_CONTEXT_ROOT``\ , ``TESTTHREDDS_WARFILE_NAME``\ , @@ -66,9 +66,9 @@ How to enable in ``env.local`` (a copy from env.local.example_ (:download:`downl customizations. 
Default values are in: `optional-components/testthredds/default.env `_ (:download:`download `). Test THREDDS service will be available at -``http://PAVICS_FQDN:TESTTHREDDS_PORT/TESTTHREDDS_CONTEXT_ROOT`` or -``https://PAVICS_FQDN_PUBLIC/TESTTHREDDS_CONTEXT_ROOT`` where ``PAVICS_FQDN`` and -``PAVICS_FQDN_PUBLIC`` are defined in your ``env.local``. +``http://BIRDHOUSE_FQDN:TESTTHREDDS_PORT/TESTTHREDDS_CONTEXT_ROOT`` or +``https://BIRDHOUSE_FQDN_PUBLIC/TESTTHREDDS_CONTEXT_ROOT`` where ``BIRDHOUSE_FQDN`` and +``BIRDHOUSE_FQDN_PUBLIC`` are defined in your ``env.local``. Use same docker image as regular THREDDS by default but can be customized. @@ -97,7 +97,7 @@ Good to preview new birds or test alternative configuration of existing birds. How to enable in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/generic_bird`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/generic_bird`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. * Optionally set ``GENERIC_BIRD_IMAGE``, ``GENERIC_BIRD_PORT``, ``GENERIC_BIRD_NAME``, ``GENERIC_BIRD_INTERNAL_PORT``, and @@ -105,16 +105,18 @@ How to enable in ``env.local`` (a copy from env.local.example_ (:download:`downl Default values are in `optional-components/generic_bird/default.env `_ (:download:`download `). -The WPS service will be available at ``http://PAVICS_FQDN:GENERIC_BIRD_PORT/wps`` -or ``https://PAVICS_FQDN_PUBLIC/TWITCHER_PROTECTED_PATH/GENERIC_BIRD_NAME`` where -``PAVICS_FQDN``\ , ``PAVICS_FQDN_PUBLIC`` and ``TWITCHER_PROTECTED_PATH`` are defined +The WPS service will be available at ``http://BIRDHOUSE_FQDN:GENERIC_BIRD_PORT/wps`` +or ``https://BIRDHOUSE_FQDN_PUBLIC/TWITCHER_PROTECTED_PATH/GENERIC_BIRD_NAME`` where +``BIRDHOUSE_FQDN``\ , ``BIRDHOUSE_FQDN_PUBLIC`` and ``TWITCHER_PROTECTED_PATH`` are defined in your ``env.local``. Use same docker image as regular Finch by default but can be customized. Use a separate Postgres DB for this optional component to be completely self-contained and to allow experimenting with different versions of Postgres -DB. +DB. This Postgres DB will be named ``generic_bird`` by default but can be customized by +setting the ``BIRDHOUSE_GENERIC_BIRD_POSTGRES_DB`` environment variable in ``env.local`` +in case that name clashes with the ``BIRDHOUSE_POSTGRES_DB`` variable. Magpie will be automatically configured to give complete public anonymous access for this WPS service. @@ -131,14 +133,14 @@ rapidly identify if a service might be misbehaving. Since the various WPS services are executed using a different applications and dependencies in their respective Docker images, the method required to validate their status can vary a lot for each case. This optional component -defines all the appropriate ``healthcheck`` for all known WPS services in PAVICS. +defines all the appropriate ``healthcheck`` for all known WPS services in Birdhouse. How to enable in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/wps-healthchecks`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/wps-healthchecks`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Once enabled, every WPS service will be monitored at regular intervals and ``docker-compose`` will indicate in their -health status. Command ``pavics-compose ps`` can be employed to list running images, and along with it, the statuses +health status. Command ``birdhouse-compose ps`` can be employed to list running images, and along with it, the statuses reported by each ``healthcheck``. 
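As a concrete sketch of the above (assuming the stack is managed from the ``birdhouse/`` directory; the exact invocation may differ for your setup):

.. code-block:: shell

    # env.local: enable the healthcheck definitions for the known WPS services
    export BIRDHOUSE_EXTRA_CONF_DIRS="
        ./optional-components/wps-healthchecks
    "

    # list the running containers together with the status reported by each healthcheck
    ./birdhouse-compose.sh ps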
@@ -158,7 +160,7 @@ https://github.com/Ouranosinc/PAVICS-e2e-workflow-tests. How to enable in ``env.local`` (a copy from `env.local.example`_ (:download:`download `)): -* Add ``./optional-components/all-public-access`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/all-public-access`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. The anonymous user will now have all the permissions described in |magpie-public-perms|_ (:download:`download `). @@ -191,7 +193,7 @@ request user can obtain access to it. How to enable in ``env.local`` (a copy from `env.local.example`_ (:download:`download `)): -* Add ``./optional-components/secure-data-proxy`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/secure-data-proxy`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Once enabled, users will *NOT* have public access to files under ``/wpsoutputs`` anymore, except for items defined with authorized ``read`` permissions for the ``anonymous`` group under |secure-data-proxy-perms|_. As any other Magpie @@ -212,7 +214,7 @@ desired access rules. How to enable in ``env.local`` (a copy from `env.local.example`_ (:download:`download `)): -* Add ``./optional-components/secure-thredds`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/secure-thredds`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. The anonymous user will *NOT* have access anymore to THREDDS test directory ``birdhouse/testdata/secure`` and any other directories and files under it. Directories above and next to ``secure`` will still be accessible if @@ -236,7 +238,7 @@ This component is intended to automatically map the databases (``PostgreSQL``, ` How to enable in ``env.local`` (a copy from env.local.example_ (:download:`download `)): -* Add ``./optional-components/database-external-ports`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/database-external-ports`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. That's it. Databases will be accessible using the mapped ports in then optional component configuration. @@ -258,10 +260,10 @@ This optional component is intended to be employed in combination with test note How to enable in ``env.local`` (a copy from `env.local.example`_ (:download:`download `)): -* Add ``./optional-components/test-weaver`` to ``EXTRA_CONF_DIRS`` +* Add ``./optional-components/test-weaver`` to ``BIRDHOUSE_EXTRA_CONF_DIRS`` .. note:: - Definition ``./components/weaver`` is also expected to be in ``EXTRA_CONF_DIRS`` for permissions to have any effect. + Definition ``./components/weaver`` is also expected to be in ``BIRDHOUSE_EXTRA_CONF_DIRS`` for permissions to have any effect. Ensure that ``./optional-components/test-weaver`` is placed **AFTER** ``./components/weaver``. Otherwise, the ``request_options.yml`` override applied by this optional component will be discarded by the main component. @@ -332,7 +334,7 @@ except time required to feed the catalog. To enable this optional-component: - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./optional-components/stac-populator`` to ``EXTRA_CONF_DIRS``. +- Add ``./optional-components/stac-populator`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. Allow public access to STAC catalog @@ -343,13 +345,13 @@ STAC Public Access allows STAC catalog to be accessed by anyone, without authent To enable this optional-component: - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./optional-components/stac-public-access`` to ``EXTRA_CONF_DIRS``. +- Add ``./optional-components/stac-public-access`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. 
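The ordering requirement noted for ``test-weaver`` above can be made concrete with an ``env.local`` fragment such as the following sketch (the component list is illustrative and omits whatever base components a deployment already enables; the key point is that ``./components/weaver`` appears before ``./optional-components/test-weaver`` so the ``request_options.yml`` override is not discarded):

.. code-block:: shell

    export BIRDHOUSE_EXTRA_CONF_DIRS="
        ./components/weaver
        ./optional-components/test-weaver
        ./optional-components/stac-populator
        ./optional-components/stac-public-access
    "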
Provide a proxy for local STAC asset hosting -------------------------------------------------------- -STAC data proxy allows to host the URL location defined by ``PAVICS_FQDN_PUBLIC`` and ``STAC_DATA_PROXY_URL_PATH`` +STAC data proxy allows to host the URL location defined by ``BIRDHOUSE_FQDN_PUBLIC`` and ``STAC_DATA_PROXY_URL_PATH`` to provide access to files contained within ``STAC_DATA_PROXY_DIR_PATH``. The ``STAC_DATA_PROXY_DIR_PATH`` location can be used to hold STAC Assets defined by the current server node @@ -359,7 +361,7 @@ location of new data, or to make a new local replication of remote data. To enable this optional-component: - Edit ``env.local`` (a copy of `env.local.example`_) -- Add ``./optional-components/stac-data-proxy`` to ``EXTRA_CONF_DIRS``. +- Add ``./optional-components/stac-data-proxy`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. - Optionally, add any other relevant components to control access as desired (see below). When using this component, access to the endpoint defined by ``STAC_DATA_PROXY_URL_PATH``, and therefore all @@ -411,7 +413,7 @@ indexing and serving. How to enable X-Robots-Tag Header in ``env.local`` (a copy from `env.local.example`_ (:download:`download `)): -* Add ``./optional-components/x-robots-tag-header`` to ``EXTRA_CONF_DIRS``. +* Add ``./optional-components/x-robots-tag-header`` to ``BIRDHOUSE_EXTRA_CONF_DIRS``. * Optionally set ``X_ROBOTS_TAG_HEADER`` to an alternate directive as desired. Default values are in `optional-components/x-robots-tag-header/default.env `_ (:download:`download `). @@ -429,15 +431,15 @@ How to enable X-Robots-Tag Header in ``env.local`` (a copy from `env.local.examp Note however that most Nginx configurations are predefined for this stack. Custom definitions would need to be added to apply additional operations. One exception to this case is the *Homepage* location - (i.e.: where the ``/`` location will be redirected), which can take advantage of the ``PROXY_ROOT_LOCATION`` + (i.e.: where the ``/`` location will be redirected), which can take advantage of the ``BIRDHOUSE_PROXY_ROOT_LOCATION`` environment variable to override the endpoint as follows: .. code-block:: shell - export PROXY_ROOT_LOCATION=' + export BIRDHOUSE_PROXY_ROOT_LOCATION=' add_header X-Robots-Tag: "all"; alias /data/homepage/; # or any other desired redirection (e.g.: "return 302 ") ' .. seealso:: - See the `env.local.example`_ file for more details about this ``PROXY_ROOT_LOCATION`` behaviour. + See the `env.local.example`_ file for more details about this ``BIRDHOUSE_PROXY_ROOT_LOCATION`` behaviour. 
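For instance, enabling the X-Robots-Tag component with a more restrictive directive than the one shipped by the component could look like the following ``env.local`` sketch (the directive value is illustrative; see the component's ``default.env`` for the actual default):

.. code-block:: shell

    export BIRDHOUSE_EXTRA_CONF_DIRS="
        ./optional-components/x-robots-tag-header
    "
    # standard X-Robots-Tag directives; adjust as desired
    export X_ROBOTS_TAG_HEADER="noindex, nofollow"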
diff --git a/birdhouse/optional-components/backwards-compatible-overrides/config/jupyterhub/docker-compose-extra.yml b/birdhouse/optional-components/backwards-compatible-overrides/config/jupyterhub/docker-compose-extra.yml new file mode 100644 index 000000000..dab8f33b0 --- /dev/null +++ b/birdhouse/optional-components/backwards-compatible-overrides/config/jupyterhub/docker-compose-extra.yml @@ -0,0 +1,6 @@ +version: "3.4" +services: + jupyterhub: + environment: + DOCKER_NOTEBOOK_IMAGES: ${JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES} + MOUNT_IMAGE_SPECIFIC_NOTEBOOKS: ${JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS} diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/a_demo_override_precedence.py b/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/a_demo_override_precedence.py index 25a0c455a..b3c238cf6 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/a_demo_override_precedence.py +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/a_demo_override_precedence.py @@ -5,7 +5,7 @@ # SERVICES['node']['monitoring'].update({ # 'Thredds-public': { # 'request': { -# 'url': 'https://${PAVICS_FQDN_PUBLIC}/toto', +# 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/toto', # }, # }, # }) diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/z_demo_only_py_file_are_loaded.wrongsuffix b/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/z_demo_only_py_file_are_loaded.wrongsuffix index d9dab430c..a85d12907 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/z_demo_only_py_file_are_loaded.wrongsuffix +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/canarie-api/z_demo_only_py_file_are_loaded.wrongsuffix @@ -2,7 +2,7 @@ SERVICES['node']['monitoring'].update({ 'Thredds-public': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/wrong-suffix', + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/wrong-suffix', }, }, }) diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/catalog/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/catalog/canarie_api_full_monitoring.py.template index 41bb48c6d..dc8296d8b 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/catalog/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/catalog/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Catalog-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/catalog?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/catalog?service=WPS&version=1.0.0&request=GetCapabilities' }, } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/cowbird/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/cowbird/canarie_api_full_monitoring.py.template index fd4972b79..1e8596a70 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/cowbird/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/cowbird/canarie_api_full_monitoring.py.template @@ -4,7 +4,7 @@ if "Cowbird" in SERVICES: # See notes in 'components/cowbird/docker-compose-extra.yml' # about 
the conditional loading of this config in 'proxy' service. cowbird_cfg = copy.deepcopy(SERVICES["Cowbird"]["monitoring"]["Cowbird"]) - cowbird_cfg["request"]["url"] = "https://${PAVICS_FQDN_PUBLIC}/cowbird/" + cowbird_cfg["request"]["url"] = "https://${BIRDHOUSE_FQDN_PUBLIC}/cowbird/" SERVICES["Cowbird"]["monitoring"]["Cowbird-public"] = cowbird_cfg # vi: tabstop=8 expandtab shiftwidth=4 softtabstop=4 syntax=python diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/finch/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/finch/canarie_api_full_monitoring.py.template index b30f52ed5..89b2066a8 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/finch/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/finch/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Finch-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/finch?service=WPS&version=1.0.0&request=GetCapabilities' } } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/hummingbird/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/hummingbird/canarie_api_full_monitoring.py.template index dd7f0780a..9062bd33a 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/hummingbird/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/hummingbird/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Hummingbird-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/hummingbird?service=WPS&version=1.0.0&request=GetCapabilities' } } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/malleefowl/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/malleefowl/canarie_api_full_monitoring.py.template index ffcd08bc4..8db3c6fbc 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/malleefowl/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/malleefowl/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Malleefowl-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/malleefowl?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/malleefowl?service=WPS&version=1.0.0&request=GetCapabilities' } } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/ncwms2/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/ncwms2/canarie_api_full_monitoring.py.template index 6d926c929..3fb67f44a 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/ncwms2/canarie_api_full_monitoring.py.template +++ 
b/birdhouse/optional-components/canarie-api-full-monitoring/config/ncwms2/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "ncWMS2-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms?SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/ncWMS2/wms?SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0' } } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/raven/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/raven/canarie_api_full_monitoring.py.template index 2d3df52f1..3420fa0dc 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/raven/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/raven/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Raven-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/raven?service=WPS&version=1.0.0&request=GetCapabilities' } } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/thredds/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/thredds/canarie_api_full_monitoring.py.template index 43b82437d..03f44ab94 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/thredds/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/thredds/canarie_api_full_monitoring.py.template @@ -3,7 +3,7 @@ import copy service_public = "Thredds-public" service_public_cfg = { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/thredds/catalog.html', + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/thredds/catalog.html', }, } diff --git a/birdhouse/optional-components/canarie-api-full-monitoring/config/weaver/canarie_api_full_monitoring.py.template b/birdhouse/optional-components/canarie-api-full-monitoring/config/weaver/canarie_api_full_monitoring.py.template index 16f0ee8f4..12ea37be9 100644 --- a/birdhouse/optional-components/canarie-api-full-monitoring/config/weaver/canarie_api_full_monitoring.py.template +++ b/birdhouse/optional-components/canarie-api-full-monitoring/config/weaver/canarie_api_full_monitoring.py.template @@ -4,7 +4,7 @@ if "Weaver" in SERVICES: # See notes in 'components/weaver/docker-compose-extra.yml' # about the conditional loading of this config in 'proxy' service. 
weaver_cfg = copy.deepcopy(SERVICES["Weaver"]["monitoring"]["Weaver"]) - weaver_cfg["request"]["url"] = "https://${PAVICS_FQDN_PUBLIC}/weaver/" + weaver_cfg["request"]["url"] = "https://${BIRDHOUSE_FQDN_PUBLIC}/weaver/" SERVICES["Weaver"]["monitoring"]["Weaver-public"] = weaver_cfg # vi: tabstop=8 expandtab shiftwidth=4 softtabstop=4 syntax=python diff --git a/birdhouse/optional-components/emu/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/optional-components/emu/config/canarie-api/canarie_api_monitoring.py.template index d76f21ae4..2b822d1b6 100644 --- a/birdhouse/optional-components/emu/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/optional-components/emu/config/canarie-api/canarie_api_monitoring.py.template @@ -1,7 +1,7 @@ SERVICES['node']['monitoring'].update({ '${EMU_NAME}-public': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${EMU_NAME}?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${EMU_NAME}?service=WPS&version=1.0.0&request=GetCapabilities' }, }, '${EMU_NAME}': { diff --git a/birdhouse/optional-components/emu/wps.cfg.template b/birdhouse/optional-components/emu/wps.cfg.template index 0ecc1f1cb..53001b15e 100644 --- a/birdhouse/optional-components/emu/wps.cfg.template +++ b/birdhouse/optional-components/emu/wps.cfg.template @@ -1,8 +1,8 @@ [server] -outputurl = https://${PAVICS_FQDN_PUBLIC}/wpsoutputs +outputurl = https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs outputpath = /data/wpsoutputs [logging] level = DEBUG -${EXTRA_PYWPS_CONFIG} +${BIRDHOUSE_EXTRA_PYWPS_CONFIG} diff --git a/birdhouse/optional-components/generic_bird/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/optional-components/generic_bird/config/canarie-api/canarie_api_monitoring.py.template index 6555dc672..c71b88ef5 100644 --- a/birdhouse/optional-components/generic_bird/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/optional-components/generic_bird/config/canarie-api/canarie_api_monitoring.py.template @@ -1,7 +1,7 @@ SERVICES['node']['monitoring'].update({ '${GENERIC_BIRD_NAME}-public': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${GENERIC_BIRD_NAME}?service=WPS&version=1.0.0&request=GetCapabilities' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/${GENERIC_BIRD_NAME}?service=WPS&version=1.0.0&request=GetCapabilities' }, }, '${GENERIC_BIRD_NAME}': { diff --git a/birdhouse/optional-components/generic_bird/default.env b/birdhouse/optional-components/generic_bird/default.env index 451862382..34b914457 100644 --- a/birdhouse/optional-components/generic_bird/default.env +++ b/birdhouse/optional-components/generic_bird/default.env @@ -9,6 +9,7 @@ export GENERIC_BIRD_INTERNAL_PORT="5000" # name in Twitcher/Magpie and Canarie monitoring export GENERIC_BIRD_NAME="generic_bird" export GENERIC_BIRD_POSTGRES_IMAGE="postgres:10.12" +export BIRDHOUSE_GENERIC_BIRD_POSTGRES_DB="generic_bird" # add vars only needed to be substituted in templates diff --git a/birdhouse/optional-components/generic_bird/postgres/.gitignore b/birdhouse/optional-components/generic_bird/postgres/.gitignore index 983353b4d..532e6ece0 100644 --- a/birdhouse/optional-components/generic_bird/postgres/.gitignore +++ b/birdhouse/optional-components/generic_bird/postgres/.gitignore @@ -1 +1,2 @@ credentials.env +docker-entrypoint-initdb.d/create-wps-databases.sql diff --git 
a/birdhouse/optional-components/generic_bird/postgres/credentials.env.template b/birdhouse/optional-components/generic_bird/postgres/credentials.env.template index 577d821c5..3615969f7 100644 --- a/birdhouse/optional-components/generic_bird/postgres/credentials.env.template +++ b/birdhouse/optional-components/generic_bird/postgres/credentials.env.template @@ -1,3 +1,3 @@ -POSTGRES_USER=${POSTGRES_PAVICS_USERNAME} -POSTGRES_PASSWORD=${POSTGRES_PAVICS_PASSWORD} -POSTGRES_DB=pavics +POSTGRES_USER=${BIRDHOUSE_POSTGRES_USERNAME} +POSTGRES_PASSWORD=${BIRDHOUSE_POSTGRES_PASSWORD} +POSTGRES_DB=${BIRDHOUSE_GENERIC_BIRD_POSTGRES_DB} diff --git a/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql b/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql deleted file mode 100644 index 7cebd314d..000000000 --- a/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE DATABASE generic_bird; diff --git a/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql.template b/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql.template new file mode 100644 index 000000000..a3b1c5811 --- /dev/null +++ b/birdhouse/optional-components/generic_bird/postgres/docker-entrypoint-initdb.d/create-wps-databases.sql.template @@ -0,0 +1 @@ +CREATE DATABASE ${BIRDHOUSE_GENERIC_BIRD_POSTGRES_DB}; diff --git a/birdhouse/optional-components/generic_bird/wps.cfg.template b/birdhouse/optional-components/generic_bird/wps.cfg.template index a35a2f011..9a029e823 100644 --- a/birdhouse/optional-components/generic_bird/wps.cfg.template +++ b/birdhouse/optional-components/generic_bird/wps.cfg.template @@ -1,5 +1,5 @@ [server] -outputurl = https://${PAVICS_FQDN_PUBLIC}/wpsoutputs +outputurl = https://${BIRDHOUSE_FQDN_PUBLIC}/wpsoutputs outputpath = /data/wpsoutputs # default 3mb, fix "Broken pipe" between the proxy and the wps service @@ -10,6 +10,6 @@ parallelprocesses = 10 [logging] level = INFO -database=postgresql://${POSTGRES_PAVICS_USERNAME}:${POSTGRES_PAVICS_PASSWORD}@postgres_generic_bird/generic_bird +database=postgresql://${BIRDHOUSE_POSTGRES_USERNAME}:${BIRDHOUSE_POSTGRES_PASSWORD}@postgres_generic_bird/generic_bird -${EXTRA_PYWPS_CONFIG} +${BIRDHOUSE_EXTRA_PYWPS_CONFIG} diff --git a/birdhouse/optional-components/secure-data-proxy/config/proxy/conf.extra-service.d/secure-data-auth.conf.template b/birdhouse/optional-components/secure-data-proxy/config/proxy/conf.extra-service.d/secure-data-auth.conf.template index 0c28a8593..8034910c3 100644 --- a/birdhouse/optional-components/secure-data-proxy/config/proxy/conf.extra-service.d/secure-data-auth.conf.template +++ b/birdhouse/optional-components/secure-data-proxy/config/proxy/conf.extra-service.d/secure-data-auth.conf.template @@ -4,7 +4,7 @@ # note: using 'TWITCHER_VERIFY_PATH' path to avoid performing the request via 'proxy' endpoint # This ensures that the data access is validated for the user, but does not trigger its access/download twice. # Also, avoids getting an error as 'secure-data-proxy' private URL in Magpie doesn't resolve to a valid path. 
- proxy_pass https://${PAVICS_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/secure-data-proxy$request_uri; + proxy_pass https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_VERIFY_PATH}/secure-data-proxy$request_uri; proxy_pass_request_body off; proxy_set_header Host $host; proxy_set_header Content-Length ""; diff --git a/birdhouse/optional-components/secure-data-proxy/default.env b/birdhouse/optional-components/secure-data-proxy/default.env index d1dc388c3..793312bdd 100644 --- a/birdhouse/optional-components/secure-data-proxy/default.env +++ b/birdhouse/optional-components/secure-data-proxy/default.env @@ -7,11 +7,11 @@ # are applied and must be added to the list of DELAYED_EVAL. # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' $SECURE_DATA_PROXY_AUTH_INCLUDE ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" diff --git a/birdhouse/optional-components/stac-data-proxy/default.env b/birdhouse/optional-components/stac-data-proxy/default.env index 8182fcc72..67cd98435 100644 --- a/birdhouse/optional-components/stac-data-proxy/default.env +++ b/birdhouse/optional-components/stac-data-proxy/default.env @@ -7,13 +7,13 @@ # are applied and must be added to the list of DELAYED_EVAL. # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' $STAC_DATA_PROXY_DIR_PATH $STAC_DATA_PROXY_URL_PATH ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" @@ -21,9 +21,9 @@ VARS="$VARS $EXTRA_VARS" # NOTE: # Hosting is not performed by the API itself. Data is expected to already reside in that # location when referenced by STAC Collections and Items to make them accessible externally. 
-export STAC_DATA_PROXY_DIR_PATH='${DATA_PERSIST_ROOT}/stac-data' +export STAC_DATA_PROXY_DIR_PATH='${BIRDHOUSE_DATA_PERSIST_ROOT}/stac-data' -# URL path (after PAVICS_FQDN_PUBLIC) that will be used to proxy local STAC assets data +# URL path (after BIRDHOUSE_FQDN_PUBLIC) that will be used to proxy local STAC assets data export STAC_DATA_PROXY_URL_PATH="/data/stac" DELAYED_EVAL=" @@ -34,9 +34,9 @@ DELAYED_EVAL=" # add any component that this component requires to run # NOTE: # './optional-component/secure-data-proxy' is purposely omitted from dependencies -# if 'EXTRA_CONF_DIRS' enabled it as well, the proxy path/alias will have relevant auth request enabled +# if 'BIRDHOUSE_EXTRA_CONF_DIRS' enabled it as well, the proxy path/alias will have relevant auth request enabled # otherwise, it will use by default the public access with no prior nginx auth validation COMPONENT_DEPENDENCIES=" ./components/stac - ./config/proxy + ./components/proxy " diff --git a/birdhouse/optional-components/stac-populator/default.env b/birdhouse/optional-components/stac-populator/default.env index 115b28865..8a68eb6eb 100644 --- a/birdhouse/optional-components/stac-populator/default.env +++ b/birdhouse/optional-components/stac-populator/default.env @@ -1,11 +1,11 @@ export STAC_ASSET_GENERATOR_TIMEOUT=200 # add any new variables not already in 'VARS' or 'OPTIONAL_VARS' that must be replaced in templates here -# single quotes are important in below list to keep variable names intact until 'pavics-compose' parses them +# single quotes are important in below list to keep variable names intact until 'birdhouse-compose' parses them EXTRA_VARS=' $STAC_ASSET_GENERATOR_TIMEOUT ' -# extend the original 'VARS' from 'birdhouse/pavics-compose.sh' to employ them for template substitution +# extend the original 'VARS' from 'birdhouse/birdhouse-compose.sh' to employ them for template substitution # adding them to 'VARS', they will also be validated in case of override of 'default.env' using 'env.local' VARS="$VARS $EXTRA_VARS" diff --git a/birdhouse/optional-components/test-cowbird-jupyter-access/default.env b/birdhouse/optional-components/test-cowbird-jupyter-access/default.env index 343df4b8e..de7f11cc1 100644 --- a/birdhouse/optional-components/test-cowbird-jupyter-access/default.env +++ b/birdhouse/optional-components/test-cowbird-jupyter-access/default.env @@ -11,7 +11,7 @@ export COWBIRD_JUPYTER_ACCESS_DIR="$COMPOSE_DIR/optional-components/test-cowbird export TEST_COWBIRD_JUPYTERHUB_USERNAME="testcowbirdjupyter" export TEST_COWBIRD_JUPYTERHUB_PASSWORD="qwertyqwerty" -export DOCKER_NOTEBOOK_WORKFLOW_IMAGE="$(echo ${DOCKER_NOTEBOOK_IMAGES} | grep pavics/workflow-tests | xargs)" +export DOCKER_NOTEBOOK_WORKFLOW_IMAGE="$(echo ${JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES} | grep pavics/workflow-tests | xargs)" export JUPYTERHUB_CONFIG_OVERRIDE=" ${JUPYTERHUB_CONFIG_OVERRIDE} @@ -51,10 +51,10 @@ c.DockerSpawner.environment['WORKSPACE_DIR'] = container_workspace_dir # add any component that this component requires to run COMPONENT_DEPENDENCIES=" - ./config/cowbird - ./config/geoserver - ./config/jupyterhub - ./config/magpie + ./components/cowbird + ./components/geoserver + ./components/jupyterhub + ./components/magpie ./optional-components/all-public-access ./optional-components/test-geoserver-secured-access ./optional-components/secure-data-proxy diff --git a/birdhouse/optional-components/test-cowbird-jupyter-access/docker-compose-extra.yml b/birdhouse/optional-components/test-cowbird-jupyter-access/docker-compose-extra.yml index 
a279e1884..e32c111cb 100644 --- a/birdhouse/optional-components/test-cowbird-jupyter-access/docker-compose-extra.yml +++ b/birdhouse/optional-components/test-cowbird-jupyter-access/docker-compose-extra.yml @@ -9,15 +9,15 @@ services: environment: MAGPIE_ADMIN_USERNAME: ${MAGPIE_ADMIN_USERNAME} MAGPIE_ADMIN_PASSWORD: ${MAGPIE_ADMIN_PASSWORD} - PAVICS_HOST_URL: https://${PAVICS_FQDN_PUBLIC} - WPS_OUTPUTS_DIR: ${WPS_OUTPUTS_DIR} - WORKSPACE_DIR: ${DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} + BIRDHOUSE_HOST_URL: https://${BIRDHOUSE_FQDN_PUBLIC} + WPS_OUTPUTS_DIR: ${BIRDHOUSE_WPS_OUTPUTS_DIR} + WORKSPACE_DIR: ${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}/${USER_WORKSPACES} TEST_COWBIRD_JUPYTERHUB_USERNAME: ${TEST_COWBIRD_JUPYTERHUB_USERNAME} TEST_COWBIRD_JUPYTERHUB_PASSWORD: ${TEST_COWBIRD_JUPYTERHUB_PASSWORD} volumes: - ${COWBIRD_JUPYTER_ACCESS_DIR}/test_cowbird_jupyter.py:/test_cowbird_jupyter.py - ./optional-components/test-geoserver-secured-access/test-data:/geoserver-test-data:ro - - ${DATA_PERSIST_SHARED_ROOT}:/data + - ${BIRDHOUSE_DATA_PERSIST_SHARED_ROOT}:/data command: python /test_cowbird_jupyter.py # root access required to create files in the user workspace and other WPS outputs files in the data dir user: root:root diff --git a/birdhouse/optional-components/test-cowbird-jupyter-access/test_cowbird_jupyter.py b/birdhouse/optional-components/test-cowbird-jupyter-access/test_cowbird_jupyter.py index 9f96c6691..f985472a1 100644 --- a/birdhouse/optional-components/test-cowbird-jupyter-access/test_cowbird_jupyter.py +++ b/birdhouse/optional-components/test-cowbird-jupyter-access/test_cowbird_jupyter.py @@ -19,14 +19,14 @@ TIMEOUT_DELAY = 5 MAX_ATTEMPTS = 8 -VERIFY_SSL = False -if not VERIFY_SSL: +BIRDHOUSE_VERIFY_SSL = False +if not BIRDHOUSE_VERIFY_SSL: urllib3.disable_warnings() # disable warnings for using https without certificate verification enabled HEADERS = {"Accept": "application/json", "Content-Type": "application/json"} -PAVICS_HOST_URL = os.getenv("PAVICS_HOST_URL") +BIRDHOUSE_HOST_URL = os.getenv("BIRDHOUSE_HOST_URL") -COWBIRD_URL = f"{PAVICS_HOST_URL}/cowbird" -MAGPIE_URL = f"{PAVICS_HOST_URL}/magpie" +COWBIRD_URL = f"{BIRDHOUSE_HOST_URL}/cowbird" +MAGPIE_URL = f"{BIRDHOUSE_HOST_URL}/magpie" WPS_OUTPUTS_DIR = os.getenv("WPS_OUTPUTS_DIR") WORKSPACE_DIR = os.getenv("WORKSPACE_DIR") @@ -46,7 +46,7 @@ def get_credentials(var_name): TEST_COWBIRD_JUPYTERHUB_USERNAME = os.getenv("TEST_COWBIRD_JUPYTERHUB_USERNAME") TEST_COWBIRD_JUPYTERHUB_PASSWORD = os.getenv("TEST_COWBIRD_JUPYTERHUB_PASSWORD") -print(" Verify SSL : {}".format(VERIFY_SSL)) +print(" Verify SSL : {}".format(BIRDHOUSE_VERIFY_SSL)) print(" Will use Magpie URL: [{}]".format(MAGPIE_URL)) def response_msg(message, response, is_json=True): @@ -66,7 +66,7 @@ def magpie_signin(user_name, password): signin_url = f"{MAGPIE_URL}/signin" data = {"user_name": user_name, "password": password} try: - resp = requests.request(url=signin_url, headers=HEADERS, method="POST", json=data, timeout=10, verify=VERIFY_SSL) + resp = requests.request(url=signin_url, headers=HEADERS, method="POST", json=data, timeout=10, verify=BIRDHOUSE_VERIFY_SSL) except Exception as exc: raise RuntimeError(f"Failed to sign in to Magpie (url: `{signin_url}`) with user `{data['user_name']}`. " f"Exception : {exc}. 
") @@ -92,7 +92,7 @@ def create_magpie_user(user_name, password, session): for i in range(MAX_ATTEMPTS): time.sleep(i * TIMEOUT_DELAY) try: - resp = requests.get(svc_url, verify=VERIFY_SSL) + resp = requests.get(svc_url, verify=BIRDHOUSE_VERIFY_SSL) assert resp.status_code == 200 print(f"{svc_name} availability checked successfully.") break @@ -103,12 +103,12 @@ def create_magpie_user(user_name, password, session): # ------------------------------------------------------------------ magpie_admin_session = requests.Session() -magpie_admin_session.verify = VERIFY_SSL +magpie_admin_session.verify = BIRDHOUSE_VERIFY_SSL magpie_admin_session.headers = HEADERS magpie_admin_session.cookies = magpie_signin(TEST_MAGPIE_ADMIN_USERNAME, TEST_MAGPIE_ADMIN_PASSWORD).cookies test_user_session = requests.Session() -test_user_session.verify = VERIFY_SSL +test_user_session.verify = BIRDHOUSE_VERIFY_SSL test_user_session.headers = HEADERS # ------------------------------------------------------------------ diff --git a/birdhouse/optional-components/test-geoserver-secured-access/config/proxy/conf.d/test-geoserver-secured.conf.template b/birdhouse/optional-components/test-geoserver-secured-access/config/proxy/conf.d/test-geoserver-secured.conf.template index c4d83c066..601d1ffde 100644 --- a/birdhouse/optional-components/test-geoserver-secured-access/config/proxy/conf.d/test-geoserver-secured.conf.template +++ b/birdhouse/optional-components/test-geoserver-secured-access/config/proxy/conf.d/test-geoserver-secured.conf.template @@ -1,5 +1,5 @@ location /geoserver-secured/ { - proxy_pass http://${PAVICS_FQDN}${TWITCHER_PROTECTED_PATH}/geoserver-secured/; + proxy_pass http://${BIRDHOUSE_FQDN}${TWITCHER_PROTECTED_PATH}/geoserver-secured/; proxy_set_header Host $host; proxy_set_header X-Forwarded-Proto $real_scheme; proxy_set_header Accept-Encoding ""; diff --git a/birdhouse/optional-components/test-weaver/config/weaver/request_options.yml.template b/birdhouse/optional-components/test-weaver/config/weaver/request_options.yml.template index 40a837908..f4434b3b7 100644 --- a/birdhouse/optional-components/test-weaver/config/weaver/request_options.yml.template +++ b/birdhouse/optional-components/test-weaver/config/weaver/request_options.yml.template @@ -11,5 +11,5 @@ requests: # disable SSL verification for test instance using self-signed certificate # avoid doing this on real instance to keep it secure against man-in-the-middle attacks - - url: https://${PAVICS_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/weaver/providers/hummingbird/processes/ncdump/jobs + - url: https://${BIRDHOUSE_FQDN_PUBLIC}${TWITCHER_PROTECTED_PATH}/weaver/providers/hummingbird/processes/ncdump/jobs verify: false diff --git a/birdhouse/optional-components/testthredds/catalog.xml.template b/birdhouse/optional-components/testthredds/catalog.xml.template index 15f6d3174..7af6a1a20 100644 --- a/birdhouse/optional-components/testthredds/catalog.xml.template +++ b/birdhouse/optional-components/testthredds/catalog.xml.template @@ -14,7 +14,7 @@ - + all diff --git a/birdhouse/optional-components/testthredds/config/canarie-api/canarie_api_monitoring.py.template b/birdhouse/optional-components/testthredds/config/canarie-api/canarie_api_monitoring.py.template index 739323bc8..9c0051f35 100644 --- a/birdhouse/optional-components/testthredds/config/canarie-api/canarie_api_monitoring.py.template +++ b/birdhouse/optional-components/testthredds/config/canarie-api/canarie_api_monitoring.py.template @@ -1,7 +1,7 @@ SERVICES['node']['monitoring'].update({ 
'${TESTTHREDDS_NAME}-public': { 'request': { - 'url': 'https://${PAVICS_FQDN_PUBLIC}/${TESTTHREDDS_CONTEXT_ROOT}/catalog.html' + 'url': 'https://${BIRDHOUSE_FQDN_PUBLIC}/${TESTTHREDDS_CONTEXT_ROOT}/catalog.html' }, }, '${TESTTHREDDS_NAME}': { diff --git a/birdhouse/optional-components/testthredds/docker-compose-extra.yml b/birdhouse/optional-components/testthredds/docker-compose-extra.yml index 4e7546ec5..655e0c87f 100644 --- a/birdhouse/optional-components/testthredds/docker-compose-extra.yml +++ b/birdhouse/optional-components/testthredds/docker-compose-extra.yml @@ -8,12 +8,12 @@ services: environment: # for reconstructing proper URL back to user when Thredds behind proxy # because Twitcher eats the "Host" http header set by Nginx - PAVICS_FQDN_PUBLIC: $PAVICS_FQDN_PUBLIC + BIRDHOUSE_FQDN_PUBLIC: $BIRDHOUSE_FQDN_PUBLIC WANTED_CONTEXT_ROOT: $TESTTHREDDS_CONTEXT_ROOT WANTED_CONTEXT_ROOT_WARFILE_NAME: $TESTTHREDDS_WARFILE_NAME volumes: - testthredds_persistence:/usr/local/tomcat/content/thredds - - ${DATA_PERSIST_ROOT}/testdatasets:/pavics-testdata:ro + - ${BIRDHOUSE_DATA_PERSIST_ROOT}/testdatasets:/birdhouse-testdata:ro - ${THREDDS_SERVICE_DATA_LOCATION_ON_HOST}:${THREDDS_SERVICE_DATA_LOCATION_ON_CONTAINER}:ro - ${THREDDS_DATASET_LOCATION_ON_HOST}:${THREDDS_DATASET_LOCATION_ON_CONTAINER}:ro - testwps_outputs:/testwps_outputs:ro diff --git a/birdhouse/optional-components/testthredds/entrypointwrapper b/birdhouse/optional-components/testthredds/entrypointwrapper index 1572c441b..540253e1b 100755 --- a/birdhouse/optional-components/testthredds/entrypointwrapper +++ b/birdhouse/optional-components/testthredds/entrypointwrapper @@ -9,10 +9,10 @@ if ! grep ' relaxedQueryChars=' $CONF_FILE; then sed -i 's//dev/null | - while read FILE - do - DEST=${FILE%.template} - cat "${FILE}" | envsubst "$VARS" | envsubst "$OPTIONAL_VARS" > "${DEST}" - done - -SHELL_EXEC_FLAGS= -if [ "${BIRDHOUSE_LOG_LEVEL}" = "DEBUG" ]; then - SHELL_EXEC_FLAGS=-x -fi - -create_compose_conf_list # this sets COMPOSE_CONF_LIST -log INFO "Displaying resolved compose configurations:" -echo "COMPOSE_CONF_LIST=" -echo ${COMPOSE_CONF_LIST} | tr ' ' '\n' | grep -v '^-f' - -if [ x"$1" = x"info" ]; then - log INFO "Stopping before execution of docker-compose command." - exit 0 -fi - -COMPOSE_EXTRA_OPTS="" - -if [ x"$1" = x"up" ]; then - COMPOSE_EXTRA_OPTS="${COMPOSE_UP_EXTRA_OPTS}" - for adir in $ALL_CONF_DIRS; do - COMPONENT_PRE_COMPOSE_UP="$adir/pre-docker-compose-up" - if [ -x "$COMPONENT_PRE_COMPOSE_UP" ]; then - log INFO "Executing '$COMPONENT_PRE_COMPOSE_UP'" - sh ${SHELL_EXEC_FLAGS} "$COMPONENT_PRE_COMPOSE_UP" - fi - done -fi - -log INFO "Executing docker-compose with extra options: $* ${COMPOSE_EXTRA_OPTS}" -# the PROXY_SECURE_PORT is a little trick to make the compose file invalid without the usage of this wrapper script -PROXY_SECURE_PORT=443 HOSTNAME=${PAVICS_FQDN} docker-compose ${COMPOSE_CONF_LIST} $* ${COMPOSE_EXTRA_OPTS} -ERR=$? -if [ ${ERR} -gt 0 ]; then - log ERROR "docker-compose error, exit code ${ERR}" - exit ${ERR} -fi - -# execute post-compose function if exists and no error occurred -type post-compose 2>&1 | grep 'post-compose is a function' > /dev/null -if [ $? 
-eq 0 ] -then - [ ${ERR} -gt 0 ] && { log ERROR "Error occurred with docker-compose, not running post-compose"; exit $?; } - post-compose $* -fi - - -while [ $# -gt 0 ] -do - if [ x"$1" = x"up" ]; then - # we restart the proxy after an up to make sure nginx continue to work if any container IP address changes - PROXY_SECURE_PORT=443 HOSTNAME=${PAVICS_FQDN} docker-compose ${COMPOSE_CONF_LIST} restart proxy - - # run postgres post-startup setup script - # Note: this must run before the post-docker-compose-up scripts since some may expect postgres databases to exist - postgres_id=$(PROXY_SECURE_PORT=443 HOSTNAME=${PAVICS_FQDN} docker-compose ${COMPOSE_CONF_LIST} ps -q postgres 2> /dev/null) - if [ ! -z "$postgres_id" ]; then - docker exec ${postgres_id} /postgres-setup.sh - fi - - for adir in $ALL_CONF_DIRS; do - COMPONENT_POST_COMPOSE_UP="$adir/post-docker-compose-up" - if [ -x "$COMPONENT_POST_COMPOSE_UP" ]; then - log INFO "Executing '$COMPONENT_POST_COMPOSE_UP'" - sh ${SHELL_EXEC_FLAGS} "$COMPONENT_POST_COMPOSE_UP" - fi - done - - # Note: This command should stay last, as it can take a while depending on network and drive speeds - # immediately cache the new notebook images for faster startup by JupyterHub - for IMAGE in ${DOCKER_NOTEBOOK_IMAGES} - do - docker pull $IMAGE - done - - fi - shift -done - - -# vi: tabstop=8 expandtab shiftwidth=2 softtabstop=2 +"${THIS_DIR}/birdhouse-compose.sh" "$@" diff --git a/birdhouse/read-configs.include.sh b/birdhouse/read-configs.include.sh index 1e59d99c6..bad87dcfa 100644 --- a/birdhouse/read-configs.include.sh +++ b/birdhouse/read-configs.include.sh @@ -11,7 +11,7 @@ # values. # # USAGE: -# # Set variable COMPOSE_DIR to the dir containing pavics-compose.sh and +# # Set variable COMPOSE_DIR to the dir containing birdhouse-compose.sh and # # docker-compose.yml. # # # Source the script providing function read_configs. @@ -30,29 +30,32 @@ # WARNING: cannot use 'log' calls within this function until the following logging script gets resolved and sourced. discover_compose_dir() { if [ -z "${COMPOSE_DIR}" ] || [ ! -e "${COMPOSE_DIR}" ]; then - if [ -f "./pavics-compose.sh" ]; then + if [ -n "${BIRDHOUSE_COMPOSE}" ] && [ -f "${BIRDHOUSE_COMPOSE}" ]; then + # Parent of the BIRDHOUSE_COMPOSE file is the COMPOSE_DIR + COMPOSE_DIR=$(dirname "${BIRDHOUSE_COMPOSE}") + elif [ -f "./birdhouse-compose.sh" ]; then # Current dir is COMPOSE_DIR COMPOSE_DIR="$(realpath .)" - elif [ -f "../pavics-compose.sh" ]; then + elif [ -f "../birdhouse-compose.sh" ]; then # Parent dir is COMPOSE_DIR # Case of all the scripts under deployment/ or scripts/ COMPOSE_DIR="$(realpath ..)" - elif [ -f "../birdhouse/pavics-compose.sh" ]; then + elif [ -f "../birdhouse/birdhouse-compose.sh" ]; then # Sibling dir is COMPOSE_DIR # Case of all the tests under tests/ COMPOSE_DIR="$(realpath ../birdhouse)" - elif [ -f "./birdhouse/pavics-compose.sh" ]; then + elif [ -f "./birdhouse/birdhouse-compose.sh" ]; then # Child dir is COMPOSE_DIR COMPOSE_DIR="$(realpath birdhouse)" # Below assume checkout is named birdhouse-deploy, which might not # always be true. - elif [ -f "../birdhouse-deploy/birdhouse/pavics-compose.sh" ]; then + elif [ -f "../birdhouse-deploy/birdhouse/birdhouse-compose.sh" ]; then # Case of sibling checkout at same level as birdhouse-deploy.
COMPOSE_DIR="$(realpath "../birdhouse-deploy/birdhouse")" - elif [ -f "../../birdhouse-deploy/birdhouse/pavics-compose.sh" ]; then + elif [ -f "../../birdhouse-deploy/birdhouse/birdhouse-compose.sh" ]; then # Case of subdir of sibling checkout at same level as birdhouse-deploy. COMPOSE_DIR="$(realpath "../../birdhouse-deploy/birdhouse")" - elif [ -f "../../../birdhouse-deploy/birdhouse/pavics-compose.sh" ]; then + elif [ -f "../../../birdhouse-deploy/birdhouse/birdhouse-compose.sh" ]; then # Case of sub-subdir of sibling checkout at same level as birdhouse-deploy. COMPOSE_DIR="$(realpath "../../../birdhouse-deploy/birdhouse")" fi @@ -60,9 +63,9 @@ discover_compose_dir() { fi # Perform last-chance validation in case 'COMPOSE_DIR' was incorrectly set explicitly # and that 'read-configs.include.sh' was sourced directly from an invalid location. - if [ ! -f "${COMPOSE_DIR}/pavics-compose.sh" ]; then + if [ ! -f "${COMPOSE_DIR}/birdhouse-compose.sh" ]; then echo \ - "CRITICAL: [${COMPOSE_DIR}/pavics-compose.sh] not found," \ + "CRITICAL: [${COMPOSE_DIR}/birdhouse-compose.sh] not found," \ "please set variable 'COMPOSE_DIR' to a valid location." \ "Many features depend on this variable." 1>&2 return 2 @@ -81,6 +84,9 @@ discover_env_local() { BIRDHOUSE_LOCAL_ENV="${COMPOSE_DIR}/env.local" fi + BIRDHOUSE_LOCAL_ENV=$(readlink -f "$BIRDHOUSE_LOCAL_ENV" || realpath "$BIRDHOUSE_LOCAL_ENV") + export BIRDHOUSE_LOCAL_ENV + # env.local can be a symlink to the private config repo where the real # env.local file is source controlled. # Docker volume-mount will need the real dir of the file for symlink to @@ -145,7 +151,7 @@ source_conf_files() { # Allowing not existing conf dir also helps for smooth # transition of component path when they are new/renamed/deleted. # - # New component names can be added to EXTRA_CONF_DIRS before the + # New component names can be added to BIRDHOUSE_EXTRA_CONF_DIRS before the # corresponding PR are merged and old component names can be removed # after the corresponding PR are merge without any impact on the # autodeploy process. @@ -157,7 +163,9 @@ source_conf_files() { unset COMPONENT_DEPENDENCIES dependencies="$(. "${adir}/default.env" && echo "${COMPONENT_DEPENDENCIES}")" if [ -n "${dependencies}" ]; then + old_conf_locations="${conf_locations}" source_conf_files "${dependencies}" "a dependency of ${adir}" + conf_locations="${old_conf_locations}" # reset the adir variable in case it was changed in a recursive call adir="$(printf '%b' "${_adir_stack}" | tail -1)" fi @@ -177,15 +185,15 @@ source_conf_files() { } read_components_default_env() { - # EXTRA_CONF_DIRS and DEFAULT_CONF_DIRS normally set by env.local so should read_env_local() first. + # BIRDHOUSE_EXTRA_CONF_DIRS and BIRDHOUSE_DEFAULT_CONF_DIRS normally set by env.local so should read_env_local() first. - # EXTRA_CONF_DIRS and DEFAULT_CONF_DIRS relative paths are relative to COMPOSE_DIR. + # BIRDHOUSE_EXTRA_CONF_DIRS and BIRDHOUSE_DEFAULT_CONF_DIRS relative paths are relative to COMPOSE_DIR. if [ -d "${COMPOSE_DIR}" ]; then cd "${COMPOSE_DIR}" >/dev/null || true fi - source_conf_files "${DEFAULT_CONF_DIRS}" 'DEFAULT_CONF_DIRS' - source_conf_files "${EXTRA_CONF_DIRS}" 'EXTRA_CONF_DIRS' + source_conf_files "${BIRDHOUSE_DEFAULT_CONF_DIRS}" 'BIRDHOUSE_DEFAULT_CONF_DIRS' + source_conf_files "${BIRDHOUSE_EXTRA_CONF_DIRS}" 'BIRDHOUSE_EXTRA_CONF_DIRS' # Return to previous pwd. 
if [ -d "${COMPOSE_DIR}" ]; then @@ -238,6 +246,93 @@ check_default_vars() { } +# If BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED is True then allow environment variables listed in +# BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES to override the equivalent deprecated variables as long as that +# equivalent deprecated variable is unset. +set_old_backwards_compatible_variables() { + [ x"${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED}" = x"True" ] || return 0 + BIRDHOUSE_OLD_VARS_OVERRIDDEN="" + # Reverse the variable list so that old variables are overridden in the correct order. + reverse_backwards_compatible_variables="" + for back_compat_vars in ${BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES} + do + reverse_backwards_compatible_variables="${back_compat_vars} ${reverse_backwards_compatible_variables}" + done + for back_compat_vars in ${reverse_backwards_compatible_variables} + do + old_var="${back_compat_vars%%=*}" + new_var="${back_compat_vars#*=}" + old_var_set="`eval "echo \\${${old_var}+set}"`" # will equal 'set' if the variable is set, null otherwise + new_var_set="`eval "echo \\${${new_var}+set}"`" # will equal 'set' if the variable is set, null otherwise + if [ "${new_var_set}" = "set" ] && [ ! "${old_var_set}" = "set" ]; then + new_value="`eval "echo \\$${new_var}"`" + eval 'export ${old_var}="${new_value}"' + BIRDHOUSE_OLD_VARS_OVERRIDDEN="${BIRDHOUSE_OLD_VARS_OVERRIDDEN} ${old_var} " # space before and after old_var is for grep (below) + log DEBUG "Variable [${new_var}] is being used to set the deprecated variable [${old_var}]." + fi + done + for hardcoded_var in ${BIRDHOUSE_BACKWARDS_COMPATIBLE_HARDCODED_DEFAULTS} + do + new_var="${hardcoded_var%%=*}" + hardcoded_old_value="${hardcoded_var#*=}" + new_value="`eval "echo \\$${new_var}"`" + if [ "${new_value}" = "\${__DEFAULT_${new_var}}" ]; then + log WARN "Variable [${new_var}] is being set to the previously hardcoded default value [${hardcoded_old_value}]." + eval 'export ${new_var}="${hardcoded_old_value}"' + fi + done +} + +# If BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED is True then allow environment variables listed in +# BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES to override the equivalent non-deprecated variable. +# Otherwise, warn the user about deprecated variables that may still exist in the BIRDHOUSE_LOCAL_ENV +# file without overriding. +# If the first argument to this function is "pre-components", only the variables from +# BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES_PRE_COMPONENTS will be processed. +process_backwards_compatible_variables() { + for back_compat_vars in ${BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES} + do + old_var="${back_compat_vars%%=*}" + echo "${BIRDHOUSE_OLD_VARS_OVERRIDDEN}" | grep -q "[[:space:]]${old_var}[[:space:]]" && continue + if [ "$1" = "pre-components" ] && \ + ! echo "${BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES_PRE_COMPONENTS}" | grep -q "^[[:space:]]*${old_var}[[:space:]]*$"; then + continue + fi + + new_var="${back_compat_vars#*=}" + old_var_set="`eval "echo \\${${old_var}+set}"`" # will equal 'set' if the variable is set, null otherwise + if [ "${old_var_set}" = "set" ]; then + old_value="`eval "echo \\$${old_var}"`" + if [ x"${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED}" = x"True" ]; then + log WARN "Deprecated variable [${old_var}] is overriding [${new_var}]. Check env.local file." + eval 'export ${new_var}="${old_value}"' + else + new_value="`eval "echo \\$${new_var}"`" + if [ x"${old_value}" = x"${new_value}" ]; then + log WARN "Deprecated variable [${old_var}] can be removed as it has been superseded by [${new_var}]. 
Check env.local file." + else + log WARN "Deprecated variable [${old_var}] is present but ignored in favour of [${new_var}]. Check env.local file." + fi + fi + fi + done + if [ x"${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED}" = x"True" ]; then + BIRDHOUSE_EXTRA_CONF_DIRS="$BIRDHOUSE_EXTRA_CONF_DIRS ./optional-components/backwards-compatible-overrides" + fi + if [ ! "$1" = "pre-components" ]; then + for default_old_var in ${BIRDHOUSE_BACKWARDS_COMPATIBLE_DEFAULTS} + do + old_var="${default_old_var%%=*}" + default_old_value="${default_old_var#*=}" + old_value="`eval "echo \\$${old_var}"`" + if [ "${old_value}" = "${default_old_value}" ]; then + log WARN "Variable [${old_var}] employs a deprecated default value recommended for override. Check env.local file." + fi + done + fi +} + + # Verify that all required variables are set, and error out otherwise with an error log message. check_required_vars() { for i in ${VARS} @@ -328,16 +423,31 @@ create_compose_conf_list() { fi } +# If unset, BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED is set to True to enable backwards compatible mode by default if this +# file is not being run through a supported interface. +set_backwards_compatible_as_default() { + if [ ! "${__BIRDHOUSE_SUPPORTED_INTERFACE}" = 'True' ]; then + log WARN "This file [$(readlink -f "$0" || realpath "$0")] is being executed through a non-supported interface for the Birdhouse software. This file may be moved or updated without warning." + if [ ! "${BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED+set}" = 'set' ];then + BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED=True + log WARN "The BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED variable is being set to 'True' by default. To avoid this behaviour set this variable or execute this file through a supported interface." + fi + fi +} # Main function to read all config files in appropriate order and call # process_delayed_eval() at the appropriate moment. read_configs() { + set_backwards_compatible_as_default discover_compose_dir discover_env_local read_default_env - read_env_local # for EXTRA_CONF_DIRS and DEFAULT_CONF_DIRS, need discover_env_local - read_components_default_env # uses EXTRA_CONF_DIRS and DEFAULT_CONF_DIRS, sets ALL_CONF_DIRS + read_env_local # for BIRDHOUSE_EXTRA_CONF_DIRS and BIRDHOUSE_DEFAULT_CONF_DIRS, need discover_env_local + process_backwards_compatible_variables pre-components + read_components_default_env # uses BIRDHOUSE_EXTRA_CONF_DIRS and BIRDHOUSE_DEFAULT_CONF_DIRS, sets ALL_CONF_DIRS + set_old_backwards_compatible_variables read_env_local # again to override components default.env, need discover_env_local + process_backwards_compatible_variables check_default_vars process_delayed_eval } @@ -347,10 +457,13 @@ read_configs() { # of various components. Use only when you know what you are doing. Else use # read_configs() to be safe. 
read_basic_configs_only() { + set_backwards_compatible_as_default discover_compose_dir discover_env_local read_default_env + set_old_backwards_compatible_variables read_env_local # need discover_env_local + process_backwards_compatible_variables check_default_vars process_delayed_eval } diff --git a/birdhouse/scripts/backup-jupyterhub-notebooks.sh b/birdhouse/scripts/backup-jupyterhub-notebooks.sh index b7805afe4..ea24a99d3 100755 --- a/birdhouse/scripts/backup-jupyterhub-notebooks.sh +++ b/birdhouse/scripts/backup-jupyterhub-notebooks.sh @@ -6,7 +6,7 @@ if [ -z "${BACKUP_OUT_DIR}" ]; then fi if [ -z "$JUPYTERHUB_USER_DATA_DIR" ]; then - JUPYTERHUB_USER_DATA_DIR="${DATA_PERSIST_ROOT:-/data}/jupyterhub_user_data" + JUPYTERHUB_USER_DATA_DIR="${BIRDHOUSE_DATA_PERSIST_ROOT:-/data}/jupyterhub_user_data" fi docker run --rm \ diff --git a/birdhouse/scripts/bootstrap-instance-for-testsuite b/birdhouse/scripts/bootstrap-instance-for-testsuite index 7715488ad..e10209d0b 100755 --- a/birdhouse/scripts/bootstrap-instance-for-testsuite +++ b/birdhouse/scripts/bootstrap-instance-for-testsuite @@ -1,11 +1,11 @@ #!/bin/sh -# Bootstrap fresh new PAVICS instance to run testsuite at +# Bootstrap fresh new Birdhouse instance to run testsuite at # https://github.com/Ouranosinc/PAVICS-e2e-workflow-tests. # # This is a stable interface for test automation shielding it from knowing the # intimate details of how to bootstrap a fresh new instance. # -# Assume PAVICS instance is already fully up (`./pavics-compose.sh up -d` has +# Assume Birdhouse instance is already fully up (`birdhouse compose up -d` has # been called). # @@ -16,16 +16,16 @@ SCRIPTS_DIR="${COMPOSE_DIR}/scripts" set -x # Populate test .nc file on Thredds. -# Need to open temporary Thredds "testdata/secure/" on PAVICS production to anonymous group. +# Need to open temporary Thredds "testdata/secure/" on Birdhouse production to anonymous group. # Need write-access to DATASET_ROOT (/data/datasets/). "${SCRIPTS_DIR}/bootstrap-testdata" if [ -n "$(docker ps --filter name=solr)" ]; then # Index Thredds catalog. - # Need to open temporary Thredds "testdata/secure/" on local PAVICS host to anonymous group. + # Need to open temporary Thredds "testdata/secure/" on local Birdhouse host to anonymous group. # Only crawl the subset enough to pass canarie-api monitoring # see config/canarie-api/docker_configuration.py.template - "${SCRIPTS_DIR}/trigger-pavicscrawler" target_files=birdhouse/testdata/flyingpigeon/cmip5 + "${SCRIPTS_DIR}/trigger-birdhousecrawler" target_files=birdhouse/testdata/flyingpigeon/cmip5 # For crawler to complete, assuming minimal dataset from bootstrap-testdata so # should be super fast to finish crawling. diff --git a/birdhouse/scripts/bootstrap-testdata b/birdhouse/scripts/bootstrap-testdata index 63168d61f..0c2f4c278 100755 --- a/birdhouse/scripts/bootstrap-testdata +++ b/birdhouse/scripts/bootstrap-testdata @@ -2,7 +2,7 @@ # Bootstrap minimum test data on Thredds. # To run testsuite in https://github.com/Ouranosinc/PAVICS-e2e-workflow-tests. # -# Need to open temporary Thredds "testdata/secure/" on PAVICS production to anonymous group. +# Need to open temporary Thredds "testdata/secure/" on Birdhouse production to anonymous group. # # To be run locally on the host running Thredds. Will populate test data files # under DATASET_ROOT, customizable by env var. 
@@ -11,10 +11,10 @@ if [ -z "${DATASET_ROOT}" ]; then - DATASET_ROOT="${DATA_PERSIST_ROOT}/${THREDDS_SERVICE_DATA_LOCATION_ON_HOST}" + DATASET_ROOT="${BIRDHOUSE_DATA_PERSIST_ROOT}/${THREDDS_SERVICE_DATA_LOCATION_ON_HOST}" fi -FROM_SERVER="https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/fileServer/birdhouse" +FROM_SERVER=${FROM_SERVER:-"https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/fileServer/birdhouse"} FILE_LIST=" nrcan/nrcan_canada_daily/tasmin/nrcan_canada_daily_tasmin_2013.nc diff --git a/birdhouse/scripts/check-autodeploy-repos b/birdhouse/scripts/check-autodeploy-repos index 3e623936b..6ddbd4754 100755 --- a/birdhouse/scripts/check-autodeploy-repos +++ b/birdhouse/scripts/check-autodeploy-repos @@ -9,11 +9,11 @@ COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then . "${COMPOSE_DIR}/read-configs.include.sh" - # Get AUTODEPLOY_EXTRA_REPOS + # Get BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS read_configs fi -for r in "${COMPOSE_DIR}" ${AUTODEPLOY_EXTRA_REPOS}; do +for r in "${COMPOSE_DIR}" ${BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS}; do echo "=== $r" cd "$r" git status -v diff --git a/birdhouse/scripts/check-instance-ready b/birdhouse/scripts/check-instance-ready index 6e1ceaee0..122bd6924 100755 --- a/birdhouse/scripts/check-instance-ready +++ b/birdhouse/scripts/check-instance-ready @@ -1,5 +1,5 @@ #!/bin/sh -# Quick smoke test for PAVICS instance. +# Quick smoke test for Birdhouse instance. # # This is absolutely not a comprehensive test. # @@ -14,12 +14,12 @@ COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then . "${COMPOSE_DIR}/read-configs.include.sh" - # Get PAVICS_FQDN + # Get BIRDHOUSE_FQDN read_configs fi set -x -curl --include --silent "https://${PAVICS_FQDN}/canarie/node/service/stats" | head +curl --include --silent "https://${BIRDHOUSE_FQDN}/canarie/node/service/stats" | head set +x echo " @@ -28,7 +28,7 @@ The curl above should return the HTTP response code 200 to confirm instance is r set -x HTTP_RESPONSE_CODE="$( \ - curl --write-out '%{http_code}' --output /dev/null --silent "https://${PAVICS_FQDN}/canarie/node/service/stats" \ + curl --write-out '%{http_code}' --output /dev/null --silent "https://${BIRDHOUSE_FQDN}/canarie/node/service/stats" \ )" if [ "${HTTP_RESPONSE_CODE}" -ne 200 ]; then set +x @@ -41,5 +41,5 @@ Will retry only once more and exit immediately. " set -x sleep 65 - curl --include --silent "https://${PAVICS_FQDN}/canarie/node/service/stats" | head + curl --include --silent "https://${BIRDHOUSE_FQDN}/canarie/node/service/stats" | head fi diff --git a/birdhouse/scripts/clear-running-wps-jobs-in-db.sh b/birdhouse/scripts/clear-running-wps-jobs-in-db.sh index d0ae56177..897e3deb1 100755 --- a/birdhouse/scripts/clear-running-wps-jobs-in-db.sh +++ b/birdhouse/scripts/clear-running-wps-jobs-in-db.sh @@ -18,7 +18,7 @@ shift POSTGRES_USER="$1" if [ -z "$POSTGRES_USER" ]; then - POSTGRES_USER=pavics + POSTGRES_USER=birdhouse else shift fi diff --git a/birdhouse/scripts/create-magpie-authtest-user b/birdhouse/scripts/create-magpie-authtest-user index e7c0700c5..cd318b0cd 100755 --- a/birdhouse/scripts/create-magpie-authtest-user +++ b/birdhouse/scripts/create-magpie-authtest-user @@ -15,7 +15,7 @@ TMP_CONFIG_FILE="/tmp/create-magpie-authtest-user.yml" # Test user is added to 'thredds-secure-authtest-group' to obtain access to 'secure' directory under 'thredds' service. 
# Those permissions are defined in 'optional-components/secure-thredds/secure-access-magpie-permission.cfg' -# which should also be included in 'EXTRA_CONF_DIRS' of your custom 'env.local' file. +# which should also be included in 'BIRDHOUSE_EXTRA_CONF_DIRS' of your custom 'env.local' file. # This user is also automatically added to 'anonymous' group without the need of explicit membership here. cat <<__OEF__ > "${TMP_CONFIG_FILE}" users: diff --git a/birdhouse/scripts/create-magpie-users b/birdhouse/scripts/create-magpie-users index 1584eab89..c52fefce2 100755 --- a/birdhouse/scripts/create-magpie-users +++ b/birdhouse/scripts/create-magpie-users @@ -4,7 +4,7 @@ # /tmp/create_magpie_users/config.yml (configurable using MAGPIE_CLI_CONF env # var). # -# Should run from checkout on same host running PAVICS to have access to +# Should run from checkout on same host running Birdhouse to have access to # env.local file for Magpie credentials. Else these can be provided via # environment variables. # @@ -35,8 +35,8 @@ # # $ scripts/create-magpie-users # + docker run --rm -it --name create_magpie_users -v /tmp/create_magpie_users:/tmp/create_magpie_users:rw pavics/magpie:2.0.0 magpie_cli batch_update_users https://pavics.ouranos.ca/magpie admin 'sanitized' -f /tmp/create_magpie_users/config.yml -o /tmp/create_magpie_users/ -# Constant could not be found: GITHUB_CLIENT_ID (using default: None) -# Constant could not be found: GITHUB_CLIENT_SECRET (using default: None) +# Constant could not be found: MAGPIE_GITHUB_CLIENT_ID (using default: None) +# Constant could not be found: MAGPIE_GITHUB_CLIENT_SECRET (using default: None) # Constant could not be found: WSO2_HOSTNAME (using default: None) # Constant could not be found: WSO2_CLIENT_ID (using default: None) # Constant could not be found: WSO2_CLIENT_SECRET (using default: None) @@ -72,7 +72,7 @@ COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then . "${COMPOSE_DIR}/read-configs.include.sh" - # Get MAGPIE_VERSION, PAVICS_FQDN, MAGPIE_ADMIN_PASSWORD, MAGPIE_ADMIN_USERNAME + # Get MAGPIE_VERSION, BIRDHOUSE_FQDN, MAGPIE_ADMIN_PASSWORD, MAGPIE_ADMIN_USERNAME read_configs fi @@ -80,7 +80,7 @@ fi # Allow override using same name env var. if [ -z "$MAGPIE_SERVER_URL" ]; then - MAGPIE_SERVER_URL="https://$PAVICS_FQDN/magpie" + MAGPIE_SERVER_URL="https://$BIRDHOUSE_FQDN/magpie" fi if [ -z "$MAGPIE_CLI_USER" ]; then diff --git a/birdhouse/scripts/deprecated/backup-restore-solr-index b/birdhouse/scripts/deprecated/backup-restore-solr-index index 71fc3432c..6f6aec88d 100755 --- a/birdhouse/scripts/deprecated/backup-restore-solr-index +++ b/birdhouse/scripts/deprecated/backup-restore-solr-index @@ -15,7 +15,7 @@ else fi if [ -z "$BACKUP_BASEDIR" ]; then - BACKUP_BASEDIR="/data" # all PAVICS deployment have /data + BACKUP_BASEDIR="/data" # all Birdhouse deployment have /data fi if [ -z "$BACKUP_FILENAME" ]; then diff --git a/birdhouse/scripts/deprecated/trigger-birdhousecrawler b/birdhouse/scripts/deprecated/trigger-birdhousecrawler new file mode 100755 index 000000000..4ea6752ab --- /dev/null +++ b/birdhouse/scripts/deprecated/trigger-birdhousecrawler @@ -0,0 +1,48 @@ +#!/bin/sh +# Trigger birdhousecrawler on local Birdhouse host. +# +# Need to open temporary Thredds "testdata/secure/" on local Birdhouse host to anonymous group. +# +# birdhousecrawler is a method of the catalog WPS service to index Thredds +# catalog into Solr DB for quick searching. 
+# +# To crawl only 1 file: +# trigger-birdhousecrawler target_files=birdhouse/testdata/secure/tasmax_Amon_MPI-ESM-MR_rcp45_r2i1p1_200601-200612.nc +# +# To crawl only 1 dir: +# trigger-birdhousecrawler target_files=birdhouse/testdata +# +# Set env var BIRDHOUSE_CRAWLER_HOST to target different BIRDHOUSE host. + +THIS_FILE="$(readlink -f "$0" || realpath "$0")" +THIS_DIR="$(dirname "${THIS_FILE}")" +COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" + +if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then + . "${COMPOSE_DIR}/read-configs.include.sh" + + # Get BIRDHOUSE_FQDN + read_configs +fi + +# Allow override using same name env var. +if [ -z "${BIRDHOUSE_CRAWLER_HOST}" ]; then + BIRDHOUSE_CRAWLER_HOST="${BIRDHOUSE_FQDN}" +fi + +set -x + +curl --include "http://${BIRDHOUSE_CRAWLER_HOST}:8086/pywps?service=WPS&request=execute&version=1.0.0&identifier=pavicrawler&storeExecuteResponse=true&status=true&DataInputs=$*" + +set +x + +echo " +NOTE the +statusLocation=\"https://HOST/wpsoutputs/catalog/e31a4914-16e8-11ea-aab9-0242ac130014.xml\" +returned in the XML body of the curl command. The status of the crawl, +whether ongoing, failed or success will be in that link. + +Once crawler is done, go check the Solr DB at +http://${BIRDHOUSE_CRAWLER_HOST}:8983/solr/#/${THREDDS_SERVICE_DATA_URL_PATH}/query for content inserted by the +crawler. Just click on \"Execute Query\". +" diff --git a/birdhouse/scripts/deprecated/trigger-pavicscrawler b/birdhouse/scripts/deprecated/trigger-pavicscrawler deleted file mode 100755 index 13b19cf95..000000000 --- a/birdhouse/scripts/deprecated/trigger-pavicscrawler +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/sh -# Trigger pavicscrawler on local PAVICS host. -# -# Need to open temporary Thredds "testdata/secure/" on local PAVICS host to anonymous group. -# -# pavicscrawler is a method of the catalog WPS service to index Thredds -# catalog into Solr DB for quick searching. -# -# To crawl only 1 file: -# trigger-pavicscrawler target_files=birdhouse/testdata/secure/tasmax_Amon_MPI-ESM-MR_rcp45_r2i1p1_200601-200612.nc -# -# To crawl only 1 dir: -# trigger-pavicscrawler target_files=birdhouse/testdata -# -# Set env var PAVICS_CRAWLER_HOST to target different PAVICS host. - -THIS_FILE="$(readlink -f "$0" || realpath "$0")" -THIS_DIR="$(dirname "${THIS_FILE}")" -COMPOSE_DIR="${COMPOSE_DIR:-$(dirname "${THIS_DIR}")}" - -if [ -f "${COMPOSE_DIR}/read-configs.include.sh" ]; then - . "${COMPOSE_DIR}/read-configs.include.sh" - - # Get PAVICS_FQDN - read_configs -fi - -# Allow override using same name env var. -if [ -z "${PAVICS_CRAWLER_HOST}" ]; then - PAVICS_CRAWLER_HOST="${PAVICS_FQDN}" -fi - -set -x - -curl --include "http://${PAVICS_CRAWLER_HOST}:8086/pywps?service=WPS&request=execute&version=1.0.0&identifier=pavicrawler&storeExecuteResponse=true&status=true&DataInputs=$*" - -set +x - -echo " -NOTE the -statusLocation=\"https://HOST/wpsoutputs/catalog/e31a4914-16e8-11ea-aab9-0242ac130014.xml\" -returned in the XML body of the curl command. The status of the crawl, -whether ongoing, failed or success will be in that link. - -Once crawler is done, go check the Solr DB at -http://${PAVICS_CRAWLER_HOST}:8983/solr/#/${THREDDS_SERVICE_DATA_URL_PATH}/query for content inserted by the -crawler. Just click on \"Execute Query\". 
-" diff --git a/birdhouse/scripts/get-components-json.include.sh b/birdhouse/scripts/get-components-json.include.sh index 598f65722..5f770dac5 100755 --- a/birdhouse/scripts/get-components-json.include.sh +++ b/birdhouse/scripts/get-components-json.include.sh @@ -28,11 +28,11 @@ fi # default value in case of error or missing definitions export BIRDHOUSE_DEPLOY_COMPONENTS_JSON='{"components": []}' if [ -z "${ALL_CONF_DIRS}" ]; then - log WARN "No components in DEFAULT_CONF_DIRS and EXTRA_CONF_DIRS. Components JSON list will be empty!" + log WARN "No components in BIRDHOUSE_DEFAULT_CONF_DIRS and BIRDHOUSE_EXTRA_CONF_DIRS. Components JSON list will be empty!" return fi -# resolve path considering if sourced or executed, and whether from current dir, pavics-compose include or another dir +# resolve path considering if sourced or executed, and whether from current dir, birdhouse-compose include or another dir BIRDHOUSE_DEPLOY_COMPONENTS_ROOT=$(dirname -- "$(realpath "$0")") if [ "$(echo "${BIRDHOUSE_DEPLOY_COMPONENTS_ROOT}" | grep -cE "/birdhouse/?\$" 2>/dev/null)" -eq 1 ]; then BIRDHOUSE_DEPLOY_COMPONENTS_ROOT=. diff --git a/birdhouse/scripts/get-services-json.include.sh b/birdhouse/scripts/get-services-json.include.sh index dad1b4f97..931af8f5f 100755 --- a/birdhouse/scripts/get-services-json.include.sh +++ b/birdhouse/scripts/get-services-json.include.sh @@ -20,6 +20,6 @@ for adir in ${ALL_CONF_DIRS}; do done if [ -z "${SERVICES}" ]; then - log WARN "No services in DEFAULT_CONF_DIRS and EXTRA_CONF_DIRS. SERVICES JSON list will be empty!" + log WARN "No services in BIRDHOUSE_DEFAULT_CONF_DIRS and BIRDHOUSE_EXTRA_CONF_DIRS. SERVICES JSON list will be empty!" fi export BIRDHOUSE_DEPLOY_SERVICES_JSON="{\"services\": [${SERVICES}]}" diff --git a/birdhouse/scripts/sync-data b/birdhouse/scripts/sync-data index 7ea415cad..e4024833b 100755 --- a/birdhouse/scripts/sync-data +++ b/birdhouse/scripts/sync-data @@ -53,7 +53,7 @@ for item in ${GEOSERVER_DATA_DIR}/ ${JUPYTERHUB_USER_DATA_DIR}/ ${MAGPIE_PERSIST # FIX_WRITE_PERM_EXTRA=1 which does the same and more so could be slower # than needed. - # Assume DATA_PERSIST_ROOT is same between both hosts ! + # Assume BIRDHOUSE_DATA_PERSIST_ROOT is same between both hosts ! ${RSYNC_BASE_CMD} "${SOURCE_HOST}:${item}" "${item}" done diff --git a/birdhouse/templates/docker-compose_shortcut.sh b/birdhouse/templates/docker-compose_shortcut.sh index 619633c8e..f94e674be 100755 --- a/birdhouse/templates/docker-compose_shortcut.sh +++ b/birdhouse/templates/docker-compose_shortcut.sh @@ -1,3 +1,3 @@ -export SSL_CERTIFICATE=${PATH_TO_CERTIFICATE} +export BIRDHOUSE_SSL_CERTIFICATE=${PATH_TO_CERTIFICATE} export HOSTNAME=localhost docker-compose $* diff --git a/birdhouse/vagrant-utils/configure-pavics.sh b/birdhouse/vagrant-utils/configure-birdhouse.sh similarity index 62% rename from birdhouse/vagrant-utils/configure-pavics.sh rename to birdhouse/vagrant-utils/configure-birdhouse.sh index 935dce9c2..da581ad50 100755 --- a/birdhouse/vagrant-utils/configure-pavics.sh +++ b/birdhouse/vagrant-utils/configure-birdhouse.sh @@ -1,13 +1,13 @@ #!/bin/sh -x if [ -f env.local ]; then - # Get SSL_CERTIFICATE from existing env.local. + # Get BIRDHOUSE_SSL_CERTIFICATE from existing env.local. . ./env.local fi -if [ -z "$SSL_CERTIFICATE" ]; then +if [ -z "$BIRDHOUSE_SSL_CERTIFICATE" ]; then # Overridable by existing env.local or existing env var. - SSL_CERTIFICATE="/home/vagrant/certkey.pem" + BIRDHOUSE_SSL_CERTIFICATE="/home/vagrant/certkey.pem" fi if [ ! 
-f env.local ]; then @@ -15,13 +15,13 @@ if [ ! -f env.local ]; then cat <> env.local # override with values needed for vagrant -export SSL_CERTIFICATE='$SSL_CERTIFICATE' # *absolute* path to the nginx ssl certificate, path and key bundle -export PAVICS_FQDN='${VM_HOSTNAME}.$VM_DOMAIN' # Fully qualified domain name of this Pavics installation +export BIRDHOUSE_SSL_CERTIFICATE='$BIRDHOUSE_SSL_CERTIFICATE' # *absolute* path to the nginx ssl certificate, path and key bundle +export BIRDHOUSE_FQDN='${VM_HOSTNAME}.$VM_DOMAIN' # Fully qualified domain name of this Birdhouse installation EOF if [ -n "$LETSENCRYPT_EMAIL" ]; then cat <> env.local -export SUPPORT_EMAIL="$LETSENCRYPT_EMAIL" +export BIRDHOUSE_SUPPORT_EMAIL="$LETSENCRYPT_EMAIL" # Modify schedule so test systems do not hit LetsEncrypt at the same time as # prod systems to avoid loading LetsEncrypt server (be a nice netizen). @@ -33,16 +33,16 @@ RENEW_LETSENCRYPT_SSL_NUM_PARENTS_MOUNT="/" # Only source if file exist. Allow for config file to be backward-compat with # older version of the repo where the .env file do not exist yet. # Keep this sourcing of renew_letsencrypt_ssl_cert_extra_job.env after -# latest definition of SSL_CERTIFICATE because it needs the valid value of -# SSL_CERTIFICATE. +# latest definition of BIRDHOUSE_SSL_CERTIFICATE because it needs the valid value of +# BIRDHOUSE_SSL_CERTIFICATE. if [ -f "$PWD/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env" ]; then . $PWD/components/scheduler/renew_letsencrypt_ssl_cert_extra_job.env fi EOF elif [ -n "$KITENAME" -a -n "$KITESUBDOMAIN" ]; then cat <> env.local -export PAVICS_FQDN_PUBLIC="$KITESUBDOMAIN-$KITENAME" -export ALLOW_UNSECURE_HTTP="True" +export BIRDHOUSE_FQDN_PUBLIC="$KITESUBDOMAIN-$KITENAME" +export BIRDHOUSE_ALLOW_UNSECURE_HTTP="True" EOF fi @@ -50,7 +50,7 @@ else echo "existing env.local file, not overriding" fi -if [ ! -f "$SSL_CERTIFICATE" ]; then +if [ ! -f "$BIRDHOUSE_SSL_CERTIFICATE" ]; then . ./env.local if [ -n "$LETSENCRYPT_EMAIL" ]; then @@ -64,16 +64,18 @@ if [ ! 
-f "$SSL_CERTIFICATE" ]; then else openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -keyout key.pem -out cert.pem \ -subj "/C=CA/ST=Quebec/L=Montreal/O=RnD/CN=${VM_HOSTNAME}.$VM_DOMAIN" - cp cert.pem "$SSL_CERTIFICATE" - cat key.pem >> "$SSL_CERTIFICATE" - if [ -z "$VERIFY_SSL" ]; then + cp cert.pem "$BIRDHOUSE_SSL_CERTIFICATE" + cat key.pem >> "$BIRDHOUSE_SSL_CERTIFICATE" + if [ -z "$BIRDHOUSE_VERIFY_SSL" ]; then cat <> env.local -export VERIFY_SSL="false" +export BIRDHOUSE_VERIFY_SSL="false" EOF fi fi else - echo "existing '$SSL_CERTIFICATE' file, not overriding" + echo "existing '$BIRDHOUSE_SSL_CERTIFICATE' file, not overriding" fi -./pavics-compose.sh up -d +export PATH="$(readlink -f ../bin):$PATH" + +birdhouse compose up -d diff --git a/birdhouse/vagrant-utils/provision.sh b/birdhouse/vagrant-utils/provision.sh index 17bb35afc..9d6eb34f6 100755 --- a/birdhouse/vagrant-utils/provision.sh +++ b/birdhouse/vagrant-utils/provision.sh @@ -2,5 +2,5 @@ cd /vagrant/birdhouse vagrant-utils/install-docker.sh -vagrant-utils/configure-pavics.sh +vagrant-utils/configure-birdhouse.sh vagrant-utils/configure-pagekite diff --git a/docs/Makefile b/docs/Makefile index 358eaccbb..9e648ff21 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -104,9 +104,9 @@ qthelp: @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PAVICS.qhcp" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/birdhouse.qhcp" @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PAVICS.qhc" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/birdhouse.qhc" .PHONY: applehelp applehelp: @@ -123,8 +123,8 @@ devhelp: @echo @echo "Build finished." @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/PAVICS" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PAVICS" + @echo "# mkdir -p $$HOME/.local/share/devhelp/birdhouse" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/birdhouse" @echo "# devhelp" .PHONY: epub diff --git a/docs/source/conf.py b/docs/source/conf.py index a2d993b6b..24ad705cf 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- # -# PAVICS documentation build configuration file, created by +# Birdhouse-deploy documentation build configuration file, created by # sphinx-quickstart on Mon Oct 3 13:56:31 2016. # # This file is execfile()d with the current directory set to its @@ -60,7 +60,7 @@ master_doc = 'index' # General information about the project. -project = 'PAVICS' +project = 'birdhouse-deploy' copyright = '2016, Ouranos & CRIM' author = 'Ouranos & CRIM' @@ -69,9 +69,9 @@ # built documents. # # The short X.Y version. -version = '2.3.3' +version = '2.4.0' # The full version, including alpha/beta/rc tags. -release = '2.3.3' +release = '2.4.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -145,7 +145,7 @@ # The name for this set of Sphinx documents. # " v documentation" by default. # -# html_title = 'PAVICS v0.1' +# html_title = 'birdhouse-deploy v0.1' # A shorter title for the navigation bar. Default is the same as html_title. 
# @@ -175,7 +175,7 @@ html_extra_path = [ 'birdhouse/README.rst', 'birdhouse/env.local.example', - 'birdhouse/pavics-compose.sh', + 'birdhouse/birdhouse-compose.sh', 'birdhouse/docker-compose.yml', ] @@ -251,7 +251,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'PAVICSdoc' +htmlhelp_basename = 'Birdhousedoc' # -- Options for LaTeX output --------------------------------------------- @@ -277,7 +277,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'PAVICS.tex', 'PAVICS Documentation', + (master_doc, 'birdhouse.tex', 'Birdhouse-deploy Documentation', 'Ouranos \\& CRIM', 'manual'), ] @@ -319,7 +319,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'pavics', 'PAVICS Documentation', + (master_doc, 'birdhouse', 'Birdhouse-deploy Documentation', [author], 1) ] @@ -334,8 +334,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'PAVICS', 'PAVICS Documentation', - author, 'PAVICS', 'One line description of project.', + (master_doc, 'Birdhouse', 'Birdhouse-deploy Documentation', + author, 'Birdhouse-deploy', 'One line description of project.', 'Miscellaneous'), ] diff --git a/docs/source/data_catalog.rst b/docs/source/data_catalog.rst index c92c065d9..971fa0eb0 100644 --- a/docs/source/data_catalog.rst +++ b/docs/source/data_catalog.rst @@ -1,10 +1,10 @@ Data Catalog ============ -Pavics bundles a number of cataloguing services that allow you to search +Birdhouse bundles a number of cataloguing services that allow you to search for data and add data to the catalog. -PAVICS Catalogue Search +Birdhouse Catalogue Search ----------------------- +--------------+---------------------------------------------------------------------+ | |deprecated| | this is no longer maintained and may be removed in a future version | diff --git a/docs/source/images/multi_organizations_management.jpg b/docs/source/images/multi_organizations_management.jpg index 4d3adffd2..b9105616c 100644 Binary files a/docs/source/images/multi_organizations_management.jpg and b/docs/source/images/multi_organizations_management.jpg differ diff --git a/docs/source/index.rst b/docs/source/index.rst index 7dd58c9c2..5706c95b9 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,4 +1,4 @@ -.. PAVICS documentation master file, created by +.. Birdhouse documentation master file, created by sphinx-quickstart on Mon Oct 3 13:56:31 2016. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. @@ -6,12 +6,12 @@ Introduction ============ -PAVICS is a research platform dedicated to climate analysis and visualization. It bundles +Birdhouse is a research platform dedicated to climate analysis and visualization. It bundles data search, analytics and visualization services. .. include:: ../../README.rst -PAVICS is led by Ouranos to provide climate scientists with a set of tools to acquire and analyze climate data. +Birdhouse is led by Ouranos to provide climate scientists with a set of tools to acquire and analyze climate data. 
Contents: diff --git a/tests/test_deployment.py b/tests/test_deployment.py index c6d4f4e68..9566ec7fa 100644 --- a/tests/test_deployment.py +++ b/tests/test_deployment.py @@ -10,7 +10,7 @@ COMPONENT_LOCATIONS = ("components", "optional-components", "config") TEMPLATE_SUBSTITUTIONS = { - "PAVICS_FQDN_PUBLIC": os.environ.get("PAVICS_FQDN_PUBLIC", "example.com"), + "BIRDHOUSE_FQDN_PUBLIC": os.environ.get("BIRDHOUSE_FQDN_PUBLIC", "example.com"), "WEAVER_MANAGER_NAME": os.environ.get("WEAVER_MANAGER_NAME", "weaver"), "TWITCHER_PROTECTED_PATH": os.environ.get("TWITCHER_PROTECTED_PATH", "/twitcher/ows/proxy"), } diff --git a/tests/test_read_configs_include.py b/tests/test_read_configs_include.py index 71fb9728f..2abeda355 100644 --- a/tests/test_read_configs_include.py +++ b/tests/test_read_configs_include.py @@ -1,11 +1,19 @@ import io import os +import re import tempfile import pytest import subprocess -from typing import Union, Optional +from typing import Optional, Union ENV_SPLIT_STR: str = "#env for testing#" +ENV_SPLIT_STR_ALT: str = "#env for testing alt#" + +# Set backwards compatible allowed to False explicitly since the current default +# is True when not executing through the CLI. +# tput may add a bunch of messages to stderr if this is not set. This may cause confusion when trying to debug a +# pytest error since these messages are unrelated to failing tests. +DEFAULT_BIRDHOUSE_ENV = {"BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False", "TERM": os.getenv("TERM", "")} @pytest.fixture(scope="module") @@ -32,9 +40,11 @@ def read_config_include_file(root_dir) -> str: def set_local_env(env_file: io.FileIO, content: Union[str, dict]) -> None: env_file.truncate() if isinstance(content, dict): + content = {**DEFAULT_BIRDHOUSE_ENV, **content} env_file.write("\n".join(f"export {k}={v}" for k, v in content.items())) else: - env_file.write(content) + default_content = "\n".join([f"{k}={v}" for k, v in DEFAULT_BIRDHOUSE_ENV.items()]) + env_file.write(f"{default_content}\n{content}") def split_and_strip(s: str, split_on="\n") -> list[str]: @@ -61,16 +71,12 @@ def run_func(self, include_file: str, if command is None: command = self.command - # tmut may add a bunch of messages to stderr if this is not set. This may cause confusion when trying to debug a - # pytest error since these messages are unrelated to failing tests. - env = {"TERM": os.getenv("TERM", "")} - command_sequence = [f". 
{include_file}", command] if command_suffix: command_sequence.extend([f"echo '{ENV_SPLIT_STR}'", f"{command_suffix}"]) if exit_on_error: - command_sequence.insert(1, "set -e") - command_sequence.insert(3, "set +e") + command_sequence.insert(1, "set -ex") + command_sequence.insert(3, "set +ex") command = " ; ".join(command_sequence) @@ -79,7 +85,7 @@ def run_func(self, include_file: str, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - env={**env, **(local_env or {})}, + env={**DEFAULT_BIRDHOUSE_ENV, **(local_env or {})}, universal_newlines=True, ) if proc.returncode: @@ -89,9 +95,22 @@ def run_func(self, include_file: str, return proc +class _ReadConfigsFromEnvFile(_ReadConfigs): + def run_func(self, include_file: str, + local_env: dict, + command_suffix: str = "", + command: Optional[str] = None, + exit_on_error: bool = True) -> subprocess.CompletedProcess: + try: + with tempfile.NamedTemporaryFile(delete=False, mode="w") as f: + set_local_env(f, local_env) + return super().run_func(include_file, + {"BIRDHOUSE_LOCAL_ENV": f.name}, command_suffix, command, exit_on_error) + finally: + os.unlink(f.name) -class TestReadConfigs(_ReadConfigs): +class TestReadConfigs(_ReadConfigsFromEnvFile): command: str = "read_configs" default_all_conf_order: list[str] = [ @@ -124,18 +143,6 @@ class TestReadConfigs(_ReadConfigs): "./components/jupyterhub" ] - def run_func(self, include_file: str, - local_env: dict, - command_suffix: str = "", - command: Optional[str] = None, - exit_on_error: bool = True) -> subprocess.CompletedProcess: - try: - with tempfile.NamedTemporaryFile(delete=False, mode="w") as f: - set_local_env(f, local_env) - return super().run_func(include_file, {"BIRDHOUSE_LOCAL_ENV": f.name}, command_suffix, command, exit_on_error) - finally: - os.unlink(f.name) - def test_return_code(self, read_config_include_file, exit_on_error) -> None: """Test that the return code is 0""" proc = self.run_func(read_config_include_file, {}, exit_on_error=exit_on_error) @@ -157,7 +164,7 @@ def test_all_conf_dirs_default_order(self, read_config_include_file, exit_on_err def test_all_conf_dirs_extra_last(self, read_config_include_file, exit_on_error) -> None: """Test that any extra components are loaded last""" - extra = {"EXTRA_CONF_DIRS": '"./components/finch\n./components/weaver"'} + extra = {"BIRDHOUSE_EXTRA_CONF_DIRS": '"./components/finch\n./components/weaver"'} proc = self.run_func(read_config_include_file, extra, 'echo "$ALL_CONF_DIRS"', exit_on_error=exit_on_error) assert split_and_strip(get_command_stdout(proc))[-2:] == [ "./components/finch", @@ -167,7 +174,7 @@ def test_all_conf_dirs_extra_last(self, read_config_include_file, exit_on_error) @pytest.mark.usefixtures("run_in_compose_dir") def test_dependencies_loaded_first(self, read_config_include_file, exit_on_error) -> None: """Test that dependencies are loaded first""" - extra = {"EXTRA_CONF_DIRS": '"./optional-components/test-weaver"'} + extra = {"BIRDHOUSE_EXTRA_CONF_DIRS": '"./optional-components/test-weaver"'} proc = self.run_func(read_config_include_file, extra, 'echo "$ALL_CONF_DIRS"', exit_on_error=exit_on_error) print(proc.stdout) # useful for debugging when assert fail assert split_and_strip(get_command_stdout(proc))[-2:] == [ @@ -177,37 +184,37 @@ def test_dependencies_loaded_first(self, read_config_include_file, exit_on_error def test_non_project_components_included(self, read_config_include_file, exit_on_error) -> None: """Test that extra components can be included""" - extra = {"EXTRA_CONF_DIRS": 
'"./blah/other-random-component"'} + extra = {"BIRDHOUSE_EXTRA_CONF_DIRS": '"./blah/other-random-component"'} proc = self.run_func(read_config_include_file, extra, 'echo "$ALL_CONF_DIRS"', exit_on_error=exit_on_error) assert split_and_strip(get_command_stdout(proc))[-1] == "./blah/other-random-component" @pytest.mark.usefixtures("run_in_compose_dir") def test_delayed_eval_default_value(self, read_config_include_file, exit_on_error) -> None: """Test delayed eval when value not set in env.local""" - extra = {"PAVICS_FQDN": '"fqdn.example.com"', - "EXTRA_CONF_DIRS": '"./components/jupyterhub ./components/geoserver"'} + extra = {"BIRDHOUSE_FQDN": '"fqdn.example.com"', + "BIRDHOUSE_EXTRA_CONF_DIRS": '"./components/jupyterhub ./components/geoserver"'} proc = self.run_func(read_config_include_file, extra, - 'echo "$PAVICS_FQDN_PUBLIC - $JUPYTERHUB_USER_DATA_DIR - $GEOSERVER_DATA_DIR"', + 'echo "$BIRDHOUSE_FQDN_PUBLIC - $JUPYTERHUB_USER_DATA_DIR - $GEOSERVER_DATA_DIR"', exit_on_error=exit_on_error) print(proc.stdout) # useful for debugging when assert fail - # By default, PAVICS_FQDN_PUBLIC has same value as PAVICS_FQDN. + # By default, BIRDHOUSE_FQDN_PUBLIC has same value as BIRDHOUSE_FQDN. assert (split_and_strip(get_command_stdout(proc))[-1] == "fqdn.example.com - /data/jupyterhub_user_data - /data/geoserver") @pytest.mark.usefixtures("run_in_compose_dir") def test_delayed_eval_custom_value(self, read_config_include_file, exit_on_error) -> None: """Test delayed eval when value is set in env.local""" - extra = {"PAVICS_FQDN": '"fqdn.example.com"', - "PAVICS_FQDN_PUBLIC": '"public.example.com"', - "EXTRA_CONF_DIRS": '"./components/jupyterhub ./components/geoserver"', - "DATA_PERSIST_ROOT": '"/my-data-root"', # indirectly change JUPYTERHUB_USER_DATA_DIR + extra = {"BIRDHOUSE_FQDN": '"fqdn.example.com"', + "BIRDHOUSE_FQDN_PUBLIC": '"public.example.com"', + "BIRDHOUSE_EXTRA_CONF_DIRS": '"./components/jupyterhub ./components/geoserver"', + "BIRDHOUSE_DATA_PERSIST_ROOT": '"/my-data-root"', # indirectly change JUPYTERHUB_USER_DATA_DIR "GEOSERVER_DATA_DIR": '"/my-geoserver-data"', } proc = self.run_func(read_config_include_file, extra, - 'echo "$PAVICS_FQDN_PUBLIC - $JUPYTERHUB_USER_DATA_DIR - $GEOSERVER_DATA_DIR"', + 'echo "$BIRDHOUSE_FQDN_PUBLIC - $JUPYTERHUB_USER_DATA_DIR - $GEOSERVER_DATA_DIR"', exit_on_error=exit_on_error) print(proc.stdout) # useful for debugging when assert fail - # If PAVICS_FQDN_PUBLIC is set in env.local, that value should be effective. + # If BIRDHOUSE_FQDN_PUBLIC is set in env.local, that value should be effective. 
assert (split_and_strip(get_command_stdout(proc))[-1] == "public.example.com - /my-data-root/jupyterhub_user_data - /my-geoserver-data") @@ -219,6 +226,199 @@ def test_delayed_eval_quoting(self, read_config_include_file, exit_on_error) -> assert split_and_strip(get_command_stdout(proc))[-1] == "{'123'}" +class TestBackwardsCompatible(_ReadConfigsFromEnvFile): + command = "read_configs" + + # copy of BIRDHOUSE_BACKWARDS_COMPATIBLE_VARIABLES from birdhouse/default.env + all_overrides = """ + PAVICS_FQDN=BIRDHOUSE_FQDN + PAVICS_FQDN_PUBLIC=BIRDHOUSE_FQDN_PUBLIC + POSTGRES_PAVICS_USERNAME=BIRDHOUSE_POSTGRES_USERNAME + POSTGRES_PAVICS_PASSWORD=BIRDHOUSE_POSTGRES_PASSWORD + OWNER_PAVICS_CHECKOUT=BIRDHOUSE_REPO_CHECKOUT_OWNER + PAVICS_LOG_DIR=BIRDHOUSE_LOG_DIR + PAVICS_FRONTEND_IP=BIRDHOUSE_FRONTEND_IP + PAVICS_FRONTEND_PORT=BIRDHOUSE_FRONTEND_PORT + PAVICS_FRONTEND_PROTO=BIRDHOUSE_FRONTEND_PROTO + PAVICS_HOST_URL=BIRDHOUSE_HOST_URL + DATA_PERSIST_ROOT=BIRDHOUSE_DATA_PERSIST_ROOT + DATA_PERSIST_SHARED_ROOT=BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + SSL_CERTIFICATE=BIRDHOUSE_SSL_CERTIFICATE + DOC_URL=BIRDHOUSE_DOC_URL + SUPPORT_EMAIL=BIRDHOUSE_SUPPORT_EMAIL + EXTRA_CONF_DIRS=BIRDHOUSE_EXTRA_CONF_DIRS + DEFAULT_CONF_DIRS=BIRDHOUSE_DEFAULT_CONF_DIRS + AUTODEPLOY_EXTRA_REPOS=BIRDHOUSE_AUTODEPLOY_EXTRA_REPOS + AUTODEPLOY_DEPLOY_KEY_ROOT_DIR=BIRDHOUSE_AUTODEPLOY_DEPLOY_KEY_ROOT_DIR + AUTODEPLOY_PLATFORM_FREQUENCY=BIRDHOUSE_AUTODEPLOY_PLATFORM_FREQUENCY + AUTODEPLOY_NOTEBOOK_FREQUENCY=BIRDHOUSE_AUTODEPLOY_NOTEBOOK_FREQUENCY + AUTODEPLOY_EXTRA_SCHEDULER_JOBS=BIRDHOUSE_AUTODEPLOY_EXTRA_SCHEDULER_JOBS + LOGROTATE_DATA_DIR=BIRDHOUSE_LOGROTATE_DATA_DIR + ALLOW_UNSECURE_HTTP=BIRDHOUSE_ALLOW_UNSECURE_HTTP + DOCKER_NOTEBOOK_IMAGES=JUPYTERHUB_DOCKER_NOTEBOOK_IMAGES + ENABLE_JUPYTERHUB_MULTI_NOTEBOOKS=JUPYTERHUB_ENABLE_MULTI_NOTEBOOKS + MOUNT_IMAGE_SPECIFIC_NOTEBOOKS=JUPYTERHUB_MOUNT_IMAGE_SPECIFIC_NOTEBOOKS + EXTRA_PYWPS_CONFIG=BIRDHOUSE_EXTRA_PYWPS_CONFIG + GITHUB_CLIENT_ID=MAGPIE_GITHUB_CLIENT_ID + GITHUB_CLIENT_SECRET=MAGPIE_GITHUB_CLIENT_SECRET + VERIFY_SSL=BIRDHOUSE_VERIFY_SSL + SMTP_SERVER=ALERTMANAGER_SMTP_SERVER + COMPOSE_UP_EXTRA_OPTS=BIRDHOUSE_COMPOSE_UP_EXTRA_OPTS + WPS_OUTPUTS_DIR=BIRDHOUSE_WPS_OUTPUTS_DIR + SERVER_DOC_URL=BIRDHOUSE_DOC_URL + SERVER_SUPPORT_EMAIL=BIRDHOUSE_SUPPORT_EMAIL + SERVER_SSL_CERTIFICATE=BIRDHOUSE_SSL_CERTIFICATE + SERVER_DATA_PERSIST_SHARED_ROOT=BIRDHOUSE_DATA_PERSIST_SHARED_ROOT + SERVER_WPS_OUTPUTS_DIR=BIRDHOUSE_WPS_OUTPUTS_DIR + SERVER_NAME=BIRDHOUSE_NAME + SERVER_DESCRIPTION=BIRDHOUSE_DESCRIPTION + SERVER_INSTITUTION=BIRDHOUSE_INSTITUTION + SERVER_SUBJECT=BIRDHOUSE_SUBJECT + SERVER_TAGS=BIRDHOUSE_TAGS + SERVER_DOCUMENTATION_URL=BIRDHOUSE_DOCUMENTATION_URL + SERVER_RELEASE_NOTES_URL=BIRDHOUSE_RELEASE_NOTES_URL + SERVER_SUPPORT_URL=BIRDHOUSE_SUPPORT_URL + SERVER_LICENSE_URL=BIRDHOUSE_LICENSE_URL + """ + + old_vars = {line.strip().split("=")[0]: "old" for line in all_overrides.splitlines() if line.strip()} + new_vars = {line.strip().split("=")[1]: "new" for line in all_overrides.splitlines() if line.strip()} + + def test_allowed_simple_substitution(self, read_config_include_file, exit_on_error) -> None: + """ + Test that a deprecated variable can be used to set the new version if backwards compatible + variables are allowed. 
+ """ + extra = {"PAVICS_FQDN": "fqdn.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True"} + proc = self.run_func(read_config_include_file, extra, 'echo "${BIRDHOUSE_FQDN}"', exit_on_error=exit_on_error) + assert split_and_strip(get_command_stdout(proc))[-1] == "fqdn.example.com" + + def test_not_allowed_simple_substitution(self, read_config_include_file, exit_on_error): + """ + Test that a deprecated variable cannot be used to set the new version if backwards compatible + variables are not allowed. + """ + extra = {"PAVICS_FQDN": "fqdn.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False"} + proc = self.run_func(read_config_include_file, extra, 'echo "${BIRDHOUSE_FQDN}"', exit_on_error=exit_on_error) + assert not split_and_strip(get_command_stdout(proc)) + + def test_allowed_simple_override(self, read_config_include_file, exit_on_error) -> None: + """ + Test that a deprecated variable can be used to override the new version if backwards compatible + variables are allowed. + """ + extra = {"PAVICS_FQDN": "pavics.example.com", + "BIRDHOUSE_FQDN": "birdhouse.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True"} + proc = self.run_func(read_config_include_file, extra, 'echo "${BIRDHOUSE_FQDN}"', exit_on_error=exit_on_error) + assert split_and_strip(get_command_stdout(proc))[-1] == "pavics.example.com" + + def test_not_allowed_simple_override(self, read_config_include_file, exit_on_error): + """ + Test that a deprecated variable cannot be used to override the new version if backwards compatible + variables are not allowed. + """ + extra = {"PAVICS_FQDN": "pavics.example.com", + "BIRDHOUSE_FQDN": "birdhouse.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False"} + proc = self.run_func(read_config_include_file, extra, 'echo "${BIRDHOUSE_FQDN}"', exit_on_error=exit_on_error) + assert split_and_strip(get_command_stdout(proc))[-1] == "birdhouse.example.com" + + def test_allowed_substitution_all(self, read_config_include_file, exit_on_error): + """ + Test that all deprecated variables can be used to set the new versions if backwards compatible + variables are allowed. + """ + command_suffix = f'echo "{ENV_SPLIT_STR_ALT.join(f"{k}=${k}" for k in self.new_vars)}"' + proc = self.run_func(read_config_include_file, + {"BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True", **self.old_vars}, + command_suffix, + exit_on_error=exit_on_error) + expected = set() + for k in self.new_vars: + if k == "BIRDHOUSE_EXTRA_CONF_DIRS": + expected.add(f"{k}=old ./optional-components/backwards-compatible-overrides") + else: + expected.add(f"{k}=old") + assert {re.sub(r'[\s\n]+', ' ', val.strip()) for val in + get_command_stdout(proc).split(ENV_SPLIT_STR_ALT)} == expected + + def test_not_allowed_substitution_all(self, read_config_include_file, exit_on_error): + """ + Test that all deprecated variables are not used to set the new versions if backwards compatible + variables are not allowed. 
+ """ + command_suffix = f'echo "{ENV_SPLIT_STR_ALT.join(f"{k}=${k}" for k in self.new_vars)}"' + proc = self.run_func(read_config_include_file, + {"BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False", **self.old_vars}, + command_suffix, + exit_on_error=exit_on_error) + expected = set() + for k in self.new_vars: + expected.add(f"{k}=new") + actual = [re.sub(r'[\s\n]+', ' ', val.strip()) for val in get_command_stdout(proc).split(ENV_SPLIT_STR_ALT)] + assert all(val != "new" for val in actual) + + def test_allowed_override_all(self, read_config_include_file, exit_on_error): + """ + Test that all deprecated variables can be used to override the new versions if backwards compatible + variables are allowed. + """ + command_suffix = f'echo "{ENV_SPLIT_STR_ALT.join(f"{k}=${k}" for k in self.new_vars)}"' + proc = self.run_func(read_config_include_file, + {"BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True", **self.old_vars, **self.new_vars}, + command_suffix, + exit_on_error=exit_on_error) + expected = set() + for k in self.new_vars: + if k == "BIRDHOUSE_EXTRA_CONF_DIRS": + expected.add(f"{k}=old ./optional-components/backwards-compatible-overrides") + else: + expected.add(f"{k}=old") + assert {re.sub(r'[\s\n]+', ' ', val.strip()) for val in + get_command_stdout(proc).split(ENV_SPLIT_STR_ALT)} == expected + + def test_not_allowed_override_all(self, read_config_include_file, exit_on_error): + """ + Test that all deprecated variables are not used to override the new versions if backwards compatible + variables are not allowed. + """ + command_suffix = f'echo "{ENV_SPLIT_STR_ALT.join(f"{k}=${k}" for k in self.new_vars)}"' + proc = self.run_func(read_config_include_file, + {"BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False", **self.old_vars, **self.new_vars}, + command_suffix, + exit_on_error=exit_on_error) + assert {re.sub(r'[\s\n]+', ' ', val.strip()) for val in + get_command_stdout(proc).split(ENV_SPLIT_STR_ALT)} == {f"{k}=new" for k in self.new_vars} + + def test_allowed_set_old_variables_when_unset(self, read_config_include_file, exit_on_error): + """ + Test that new variables can be used to set deprecated variables when the deprecated variable is unset if + backwards compatible variables are allowed. + """ + extra = {"BIRDHOUSE_FQDN": "birdhouse.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True"} + proc = self.run_func(read_config_include_file, extra, 'echo "${PAVICS_FQDN}"', exit_on_error=exit_on_error) + assert split_and_strip(get_command_stdout(proc))[-1] == "birdhouse.example.com" + + def test_not_allowed_set_old_variables_when_unset(self, read_config_include_file, exit_on_error): + """ + Test that new variables cannot be used to set deprecated variables when the deprecated variable is unset if + backwards compatible variables are not allowed. + """ + extra = {"BIRDHOUSE_FQDN": "birdhouse.example.com", "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "False"} + proc = self.run_func(read_config_include_file, extra, 'echo "${PAVICS_FQDN}"', exit_on_error=exit_on_error) + assert not split_and_strip(get_command_stdout(proc)) + + def test_allowed_no_override_old_variables_when_set(self, read_config_include_file, exit_on_error): + """ + Test that new variables cannot be used to override deprecated variables when the deprecated variable is set if + backwards compatible variables are allowed. 
+ """ + extra = {"PAVICS_FQDN": "pavics.example.com", "BIRDHOUSE_FQDN": "birdhouse.example.com", + "BIRDHOUSE_BACKWARD_COMPATIBLE_ALLOWED": "True"} + proc = self.run_func(read_config_include_file, extra, 'echo "${PAVICS_FQDN}"', exit_on_error=exit_on_error) + print(proc.stdout) + assert split_and_strip(get_command_stdout(proc))[-1] == "pavics.example.com" + + class TestCreateComposeConfList(_ReadConfigs): command: str = " create_compose_conf_list" @@ -289,7 +489,6 @@ def test_compose_no_overrides(self, read_config_include_file, exit_on_error): 'echo "$COMPOSE_CONF_LIST"', exit_on_error=exit_on_error ) - print(proc.stdout) # useful for debugging when assert fail assert split_and_strip(get_command_stdout(proc), split_on="-f") == [ "docker-compose.yml", "./components/finch/docker-compose-extra.yml", @@ -320,7 +519,6 @@ def test_compose_overrides(self, read_config_include_file, exit_on_error): 'echo "$COMPOSE_CONF_LIST"', exit_on_error=exit_on_error ) - print(proc.stdout) # useful for debugging when assert fail assert split_and_strip(get_command_stdout(proc), split_on="-f") == [ "docker-compose.yml", "./components/finch/docker-compose-extra.yml", diff --git a/vagrant_variables.yml.example b/vagrant_variables.yml.example index 35f423c54..e3e990608 100644 --- a/vagrant_variables.yml.example +++ b/vagrant_variables.yml.example @@ -2,14 +2,14 @@ # Ensure this combinabion of "hostname.domain" name really exist on the DNS of # the network you are bridging onto. # -# "hostname.domain" will be used as PAVICS_FQDN in env.local file. +# "hostname.domain" will be used as BIRDHOUSE_FQDN in env.local file. # # If this combinabion of "hostname.domain" name do not exist on the DNS, use # the override mechanism https://docs.docker.com/compose/extends/ to add the # extra DNS entry and also set it manually in your # /etc/hosts file. -hostname: mypavics -domain: ouranos.ca +hostname: mybirdhouse +domain: example.com # Set network bridge, else you'll simply be prompted on vagrant up. # network_bridge: enp0s25 @@ -35,11 +35,11 @@ domain: ouranos.ca # If you want to provide a SSL certificate yourself, name it # '/home/vagrant/certkey.pem' and it won't be overriden, see -# vagrant-utils/configure-pavics.sh. Otherwise one +# vagrant-utils/configure-birdhouse.sh. Otherwise one # will be generated automatically for you. # If you want to provide an env.local yourself, just create it and it won't be -# overriden, see vagrant-utils/configure-pavics.sh for what values vagrant +# overriden, see vagrant-utils/configure-birdhouse.sh for what values vagrant # expects and try to match it. Otherwise one will be generated automatically # for you from the default values in env.local.example. @@ -55,7 +55,7 @@ domain: ouranos.ca # If this is not possible, use Pagekite below to have a real SSL certificate # and to expose your VM on the internet. # -# letsencrypt_email is also used as SUPPORT_EMAIL in env.local. +# letsencrypt_email is also used as BIRDHOUSE_SUPPORT_EMAIL in env.local. # # If letsencrypt_email is set, will get SSL cert from LetsEncrypt. #