diff --git a/.github/workflows/code_analysis.yml b/.github/workflows/code_analysis.yml
index 18028c23..e36a2f2f 100644
--- a/.github/workflows/code_analysis.yml
+++ b/.github/workflows/code_analysis.yml
@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        pythonversion: [3.7]
+        pythonversion: [3.9]
 
     steps:
     - uses: actions/checkout@v2
@@ -38,7 +38,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        pythonversion: [3.7]
+        pythonversion: [3.9]
 
     steps:
     - uses: actions/checkout@v2
@@ -66,7 +66,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        pythonversion: [3.7]
+        pythonversion: [3.9]
 
     steps:
     - uses: actions/checkout@v2
@@ -88,4 +88,3 @@ jobs:
     - name: prospector
       run: |
         prospector dashboard_viewer
-        prospector docker/superset
diff --git a/.github/workflows/publish_docker_images.yml b/.github/workflows/publish_docker_images.yml
new file mode 100644
index 00000000..8f8b81e6
--- /dev/null
+++ b/.github/workflows/publish_docker_images.yml
@@ -0,0 +1,62 @@
+name: Build & Publish Docker Images
+
+on:
+  release:
+    types: [published]
+
+jobs:
+  dashboards:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: aspedrosa/networkdashboards
+          tags: |
+            type=pep440,pattern={{version}}
+
+      - name: Log in to the Container registry
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v2
+        with:
+          context: dashboard_viewer
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
+
+  superset:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+        with:
+          submodules: true
+
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: aspedrosa/networkdashboards_superset
+          tags: |
+            type=pep440,pattern={{version}}
+
+      - name: Log in to the Container registry
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v2
+        with:
+          context: superset
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
diff --git a/.github/workflows/test_docker_images.yml b/.github/workflows/test_docker_images.yml
new file mode 100644
index 00000000..c291db40
--- /dev/null
+++ b/.github/workflows/test_docker_images.yml
@@ -0,0 +1,50 @@
+name: Test Build Docker Images
+
+on:
+  pull_request:
+    branches: [ dev ]
+
+jobs:
+  # JOB to run change detection
+  changes:
+    runs-on: ubuntu-latest
+    # Set job outputs to values from filter step
+    outputs:
+      dashboards: ${{ steps.filter.outputs.dashboards }}
+      superset: ${{ steps.filter.outputs.superset }}
+    steps:
+      # For pull requests it's not necessary to checkout the code
+      - uses: dorny/paths-filter@v2
+        id: filter
+        with:
+          filters: |
+            dashboards:
+              - 'dashboard_viewer/**'
+            superset:
+              - 'superset/**'
+
+  dashboards:
+    needs: changes
+    if: ${{ needs.changes.outputs.dashboards == 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+
+      - name: Test build
+        run: |
+          docker build dashboard_viewer
+
+  superset:
+    needs: changes
+    if: ${{ needs.changes.outputs.superset == 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repo
+        uses: actions/checkout@v2
+        with:
+          submodules: true
+
+      - name: Test build
+        run: |
+          docker build superset
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 04f3707e..decab2c3 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        pythonversion: [3.7]
+        pythonversion: [3.9]
 
     steps:
     - uses: actions/checkout@v2
@@ -27,10 +27,8 @@ jobs:
           ${{ runner.os }}-pip-
     - name: Setup Data Containers
      run: |
-        export $(grep -v '^#' tests/.env | xargs -d '\n')
-        cp tests/docker-compose.yml docker
-        cp tests/init-dbs.sh docker/postgres-entrypoint
-        cd docker
+        cd tests
+        export $(grep -v '^#' .env | xargs -d '\n')
         docker-compose up -d
     - name: Install Dependencies
       run: |
@@ -40,5 +38,5 @@ jobs:
       run: |
         export $(grep -v '^#' tests/.env | xargs -d '\n')
         cd dashboard_viewer
-        python manage.py migrate
-        python manage.py test
+        python manage.py test --exclude-tag third-party-app
+        SINGLE_APPLICATION_MODE=n MAIN_APPLICATION_HOST=mainapp.host.com python manage.py test --tag third-party-app
diff --git a/.gitignore b/.gitignore
index de644992..c3bd3f72 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+# documentation temporary files
+docs/src/_book
+
 # dashboard_viewer django app ERRORs logs
 logs
 
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..ede3c35e
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,111 @@
+[MESSAGES CONTROL]
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=unnecessary-semicolon,
+        superfluous-parens,
+        bad-mcs-classmethod-argument,
+        too-many-lines,
+        bad-whitespace,
+        I0014,
+        relative-import,
+        E1103,
+        C0322,
+        fixme,
+        too-many-instance-attributes,
+        no-init,
+        C0323,
+        trailing-whitespace,
+        property-on-old-class,
+        too-few-public-methods,
+        invalid-name,
+        exec-used,
+        assignment-from-none,
+        C0324,
+        abstract-class-little-used,
+        pointless-except,
+        redefined-outer-name,
+        star-args,
+        missing-final-newline,
+        bad-mcs-method-argument,
+        empty-docstring,
+        no-name-in-module,
+        global-variable-undefined,
+        too-many-return-statements,
+        W0701,
+        bad-builtin,
+        W5103,
+        deprecated-lambda,
+        abstract-method,
+        no-member,
+        unused-format-string-key,
+        no-self-use,
+        wildcard-import,
+        too-many-public-methods,
+        broad-except,
+        bad-classmethod-argument,
+        bad-continuation,
+        attribute-defined-outside-init,
+        W0713,
+        ungrouped-imports,
+        anomalous-backslash-in-string,
+        too-many-ancestors,
+        wrong-import-order,
+        multiple-statements,
+        missing-docstring,
+        no-self-argument,
+        global-statement,
+        super-init-not-called,
+        raise-missing-from,
+        protected-access,
+        invalid-str-returned,
+        invalid-repr-returned,
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=159
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+
+[DESIGN]
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of branch for function / method body.
+max-branches=15
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=60
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=1
diff --git a/backups/README.md b/backups/README.md
index 770292e8..c577f93d 100644
--- a/backups/README.md
+++ b/backups/README.md
@@ -1,100 +1,3 @@
 # Backup
 
-1. Create a credentials file (the structure of the file depends on the target cloud server)
-
-2. Create a `.dashboards_backups.conf` file under your home directory (variable `$HOME`) using `dashboards_backups.conf.example` as base, setting the appropriate value for the several variables.
-
-   For variables associated with files and directories always use *absolute* paths.
-
-   Variables:
-
-   - `RUN`: Set it to `0` if you don't want the next scheduled backup to run.
-
-     This variable allows you to cancel any backup runs while you are doing some maintenance on the application.
-
-   - `CONSTANCE_REDIS_DB`: Number of the Redis database where the django constance config is stored. The default value is 2. This value should be the same as the environment variable `REDIS_CONSTANCE_DB` of the dashboard container.
-
-   - The following variables are associated with the arguemtns of the `backup_uploader` python package. Check its [usage](https://github.com/aspedrosa/BackupUploader#usage) for more details:
-
-     - `APP_NAME`: The backup process will generate some directories with this name in places that are shared with other applications.
-
-     - `SERVER`: The name of the target cloud server to where backups should be uploaded (dropbox or mega).
-
-     - `BACKUP_CHAIN_CONFIG`: Allows having different directories with backups of different ages.
-
-     - `CREDENTIALS_FILE_PATH`: File containing the credentials to access the server to upload the backup file.
-
-3. Install the `backup_uploader` python package by following its [install](https://github.com/aspedrosa/BackupUploader#install) instructions.
-
-4. Schedule your backups
-
-   ```sh
-   * * * * * Command_to_execute
-   | | | | |
-   | | | | Day of the Week ( 0 - 6 ) ( Sunday = 0 )
-   | | | |
-   | | | Month ( 1 - 12 )
-   | | |
-   | | Day of Month ( 1 - 31 )
-   | |
-   | Hour ( 0 - 23 )
-   |
-   Min ( 0 - 59 )
-   ```
-
-   (Retrived from: [Tutorialspoint](https://www.tutorialspoint.com/unix_commands/crontab.htm))
-
-   Ex: To run every day at 3:00 am
-
-   1. `crontab -e`
-
-   2. Add entry `0 3 * * * $HOME/NetworkDashboards/backups/backup.sh` (The path to the backup script might be different)
-
-### Restore
-
-1. Select the compressed backup you want to restore and decompress it:
-
-   `tar -xJf BACKUP_FILE.tar.xz`.
-
-2. 1. **Redis**
-
-      1. Make sure the redis docker container is down.
-
-      2. (Re)place the file `dump.rdb` on the redis volume by the file `redis.rdb`. By default the redis volume is located where this repository was cloned on the directory `docker/volumes/redis`.
-
-      3. Change its permissions, owner and group:
-
-         ```shell
-         chmod 0644 docker/volumes/redis/dump.rdb
-         sudo chown -R 999:999 docker/volumes/redis
-         ```
-
-   2. **Postgres**
-
-      1. Make sure all containers that make changes on the database are stopped.
-
-      2. Copy the file `postgres_backup.sql` into the postgres container
-
-         `docker cp postgres.sql [CONTAINER_ID]:/tmp`.
-
-      5. Execute the backup script:
-
-         `docker exec -u root dashboard_viewer_postgres_1 psql -f /tmp/postgres_backup.sql -U \$POSTGRES_USER -d \$POSTGRES_DB`.
-
-   3. **Media Files** If you have a volume pointing to where the media files are stored, replace all files with the ones present on the downloaded backup file. Else:
-
-      1. Bring the dashoard container up `docker-compose up -d dashboard`
-
-      2. Enter in the container `docker exec -it [CONTAINER_ID] bash`
-
-      3. If you don't know where the media files are stored you can check the value of the MEDIA_ROOT variable
-
-         1. `python manage.py shell`
-
-         2. `from django.conf import settings`
-
-         3. `print(settings.MEDIA_ROOT)`
-
-      4. Remove the entire MEDIA_ROOT directory and exit the container
-
-      5. Copy the media directory present on the backup file to the catalogue container `docker cp -a collected-media [CONTAINER_ID]:[MEDIA_ROOT_PARENT_PATH]`
+Refer to the [Backups](https://ehden.github.io/NetworkDashboards/backups.html) chapter of the documentation
\ No newline at end of file
diff --git a/backups/backup.sh b/backups/backup.sh
index edc23d3a..8d753fe8 100755
--- a/backups/backup.sh
+++ b/backups/backup.sh
@@ -1,5 +1,11 @@
 #!/bin/sh
 
+STEP_COUNT=7
+
+echo_step() {
+    printf "%3s/%s %s\n" "$1" "$STEP_COUNT" "$2"
+}
+
 set -e
 
 . $HOME/.dashboards_backups.conf
@@ -9,64 +15,78 @@ if [ $RUN -eq 0 ] ; then
     exit 0
 fi
 
-
+echo_step "1" "Create temporary directory"
 BACKUP_DIRECTORY_NAME=dashboards_backups_$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 40)
-TMP_BACKUP_DIRECTORY=/tmp/$BACKUP_DIRECTORY_NAME
+TMP_BACKUP_DIRECTORY=$TMP_DIRECTORY/$BACKUP_DIRECTORY_NAME
 mkdir $TMP_BACKUP_DIRECTORY
+EXIT_STATUS=0
 
-# 2. Get into the docker directory
-previous_pwd=$(pwd)
-cd $(dirname "$0")
-cd ../docker
+(
+    echo_step "2" "Get into the docker directory"
+    PREVIOUS_PWD=$(pwd)
+    cd $(dirname "$0")
+    (
+        cd ../docker
 
-# postgres
-echo "getting postgres backup"
-docker-compose exec postgres sh -c "pg_dumpall -c -U \$POSTGRES_USER" > $TMP_BACKUP_DIRECTORY/postgres.sql
+        echo_step "3" "Extract Dashboards's database"
+        docker-compose exec -T postgres sh -c "pg_dumpall --clean -U \$POSTGRES_USER -l \$POSTGRES_USER" > $TMP_BACKUP_DIRECTORY/postgres.sql
 
-# redis
-LAST_SAVE=$(docker-compose exec redis redis-cli -n $CONSTANCE_REDIS_DB LASTSAVE)
+        echo_step "4" "Extract Redis's data"
+        LAST_SAVE=$(docker-compose exec -T redis redis-cli -n $CONSTANCE_REDIS_DB LASTSAVE)
 
-echo "started redis backup"
-docker-compose exec redis redis-cli -n $CONSTANCE_REDIS_DB BGSAVE
+        # started redis backup
+        docker-compose exec -T redis redis-cli -n $CONSTANCE_REDIS_DB BGSAVE
 
-echo "waiting for redis backup"
-while [ "$LAST_SAVE" = "$(docker-compose exec redis redis-cli -n $CONSTANCE_REDIS_DB LASTSAVE)" ] ; do
-    sleep 5
-done
+        # waiting for redis backup
+        while [ "$LAST_SAVE" = "$(docker-compose exec -T redis redis-cli -n $CONSTANCE_REDIS_DB LASTSAVE)" ] ; do
+            sleep 5
+        done
 
-REDIS_CONTAINER_ID=$(docker-compose ps -q redis)
+        REDIS_CONTAINER_ID=$(docker-compose ps -q redis)
 
-docker cp -a $REDIS_CONTAINER_ID:/data/dump.rdb $TMP_BACKUP_DIRECTORY/redis.rdb
+        docker cp -a $REDIS_CONTAINER_ID:/data/dump.rdb $TMP_BACKUP_DIRECTORY/redis.rdb
 
-# media files
-MEDIA_ROOT=$(docker-compose exec dashboard sh -c """
+        echo_step "5" "Extract Dashboards's media files"
+        MEDIA_ROOT=$(docker-compose exec -T dashboard sh -c """
 echo '''from django.conf import settings
 print(settings.MEDIA_ROOT, end=\"\")
-''' | python manage.py shell""")
-
-# fix MEDIA_ROOT if a relative path is returned
-case $MEDIA_ROOT in
-    "/"*) ;;
-    *)
-        MEDIA_ROOT="/app/$MEDIA_ROOT"
-        ;;
-esac
-
-echo "copy media files to backup folder"
-DASHBOARDS_CONTAINER_ID=$(docker-compose ps -q dashboard)
-docker cp -a $DASHBOARDS_CONTAINER_ID:$MEDIA_ROOT $TMP_BACKUP_DIRECTORY
-
-# compression
-COMPRESSED_FILE_PATH=/tmp/$(date +"%Y%m%d%H%M%S").tar.xz
-echo "compressing files"
-tar -C /tmp -cJf $COMPRESSED_FILE_PATH $BACKUP_DIRECTORY_NAME
-
-# send to server
-echo "sending to backup server"
-backup_uploader $APP_NAME $SERVER $CREDENTIALS_FILE_PATH $BACKUP_CHAIN_CONFIG $COMPRESSED_FILE_PATH
-
-# remove temporaty files
-echo "removing backup temporary files"
-rm -r $TMP_BACKUP_DIRECTORY
-rm $COMPRESSED_FILE_PATH
+''' | python manage.py shell 2> /dev/null""")
+
+        # fix MEDIA_ROOT if a relative path is returned
+        case $MEDIA_ROOT in
+            "/"*) ;;
+            *)
+                MEDIA_ROOT="/app/$MEDIA_ROOT"
+                ;;
+        esac
+
+        # copy media files to backup folder
+        DASHBOARDS_CONTAINER_ID=$(docker-compose ps -q dashboard)
+        docker cp -a $DASHBOARDS_CONTAINER_ID:$MEDIA_ROOT $TMP_BACKUP_DIRECTORY
+
+        echo_step "6" "Compress gathered data"
+        COMPRESSED_FILE_PATH=$TMP_DIRECTORY/${APP_NAME}_$(date +"%Y%m%d%H%M%S").tar.xz
+        (
+            cd $TMP_DIRECTORY
+            zip -q -r $COMPRESSED_FILE_PATH $BACKUP_DIRECTORY_NAME
+            #tar -C $TMP_DIRECTORY -cJf $COMPRESSED_FILE_PATH $BACKUP_DIRECTORY_NAME
+
+            echo_step "7" "Send to $SERVER"
+            #backup_uploader $APP_NAME $SERVER $CREDENTIALS_FILE_PATH $BACKUP_CHAIN_CONFIG $COMPRESSED_FILE_PATH
+        ) || EXIT_STATUS=$?
+        #rm -f $COMPRESSED_FILE_PATH
+
+        exit $EXIT_STATUS
+    ) || EXIT_STATUS=$?
+    cd $PREVIOUS_PWD
+
+    exit $EXIT_STATUS
+) || EXIT_STATUS=$?
+
+rm -rf $TMP_BACKUP_DIRECTORY
+
+if [ $EXIT_STATUS -ne 0 ] ; then
+    echo "Failed with exit code $EXIT_STATUS"
+    exit $EXIT_STATUS
+fi
diff --git a/backups/dashboards_backups.conf.example b/backups/dashboards_backups.conf.example
index 14a006d8..3f3b9d7f 100644
--- a/backups/dashboards_backups.conf.example
+++ b/backups/dashboards_backups.conf.example
@@ -1,6 +1,7 @@
 RUN=1
+TMP_DIRECTORY=/tmp
 CONSTANCE_REDIS_DB=2
 APP_NAME=dashboards_backups
 SERVER=dropbox
-BACKUP_CHAIN_CONFIG="week:%H%M%S%f:4:2;month:%9M%S%f:4:2;:%H%M%S%f"
+BACKUP_CHAIN_CONFIG="week:%y-%m-%d_%H-%M:7:7;month:%y-%m-%d_week%V:4:4;year:%y-%m-%d_month%m"
 CREDENTIALS_FILE_PATH=$HOME/NetworkDashboards/backups/.credentials
diff --git a/dashboard_viewer/Dockerfile b/dashboard_viewer/Dockerfile
index ab460c33..5c345770 100644
--- a/dashboard_viewer/Dockerfile
+++ b/dashboard_viewer/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.8.6
+FROM python:3.9.8
 
 # Install Debian packages
 RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
diff --git a/dashboard_viewer/README.md b/dashboard_viewer/README.md
deleted file mode 100644
index 8267c7a4..00000000
--- a/dashboard_viewer/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Dashboard Viewer
-
-Django app that displays dashboards in a vertical tab layout
-
-
diff --git a/dashboard_viewer/dashboard_viewer/runners.py b/dashboard_viewer/dashboard_viewer/runners.py
index 1c575db0..ac5356da 100644
--- a/dashboard_viewer/dashboard_viewer/runners.py
+++ b/dashboard_viewer/dashboard_viewer/runners.py
@@ -16,4 +16,4 @@ def _set_eager():
 class CeleryTestSuiteRunner(DiscoverRunner):
     def setup_test_environment(self, **kwargs):
         _set_eager()
-        super(CeleryTestSuiteRunner, self).setup_test_environment(**kwargs)
+        super().setup_test_environment(**kwargs)
diff --git a/dashboard_viewer/dashboard_viewer/settings.py b/dashboard_viewer/dashboard_viewer/settings.py
index a187eaf4..d1a60a76 100644
--- a/dashboard_viewer/dashboard_viewer/settings.py
+++ b/dashboard_viewer/dashboard_viewer/settings.py
@@ -11,8 +11,10 @@
 """
 import os
+from distutils.util import strtobool
 
 from constance.signals import config_updated
+from django.core.validators import _lazy_re_compile, URLValidator
 from django.dispatch import receiver
 
 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
@@ -107,6 +109,7 @@
                 "django.template.context_processors.request",
                 "django.contrib.auth.context_processors.auth",
                 "django.contrib.messages.context_processors.messages",
+                "django.template.context_processors.media",
             ],
         },
     },
@@ -313,4 +316,28 @@ def constance_updated(key, old_value, **_):
     "hljs": "true",  # to enable/disable hljs highlighting in preview
 }
 
+
 TEST_RUNNER = "dashboard_viewer.runners.CeleryTestSuiteRunner"
+
+
+# Variables that allow restricting the access to the uploader app if this Django
+# app is being used as a third-party tool and is being iframed.
+SINGLE_APPLICATION_MODE = strtobool(os.environ.get("SINGLE_APPLICATION_MODE", "y")) == 1
+MAIN_APPLICATION_HOST = os.environ.get("MAIN_APPLICATION_HOST")
+
+if not SINGLE_APPLICATION_MODE:
+    if MAIN_APPLICATION_HOST is None:
+        raise ValueError(
+            "If the application is not running on single application mode then the "
+            "MAIN_APPLICATION_HOST variable must be defined."
+        )
+    if not _lazy_re_compile(URLValidator.host_re).fullmatch(MAIN_APPLICATION_HOST):
+        raise ValueError(
+            "The variable MAIN_APPLICATION_HOST contains an invalid hostname. "
+            "Only include the hostname part of the URL."
+        )
+
+    X_FRAME_OPTIONS = f"ALLOW-FROM https://{MAIN_APPLICATION_HOST}/"
+
+# required since django 3.2
+DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
diff --git a/dashboard_viewer/dashboard_viewer/urls.py b/dashboard_viewer/dashboard_viewer/urls.py
index bd1ecc5d..38b022f1 100644
--- a/dashboard_viewer/dashboard_viewer/urls.py
+++ b/dashboard_viewer/dashboard_viewer/urls.py
@@ -13,12 +13,20 @@
     1. Import the include() function: from django.urls import include, path
     2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
 """
+import re
+
 from django.conf import settings
-from django.conf.urls import static
 from django.contrib import admin
-from django.urls import include, path
+from django.urls import include, path, re_path
+from django.views.static import serve
 
-from .views import bad_request, forbidden, not_found, server_error
+from .views import (
+    bad_request,
+    forbidden,
+    not_found,
+    production_media_files,
+    server_error,
+)
 
 handler400 = bad_request
 handler403 = forbidden
@@ -30,4 +38,11 @@
     path("admin/", admin.site.urls),
     path("martor/", include("martor.urls")),
     path("uploader/", include("uploader.urls")),
-] + static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
+    re_path(
+        fr'^{re.escape(settings.MEDIA_URL.lstrip("/"))}(?P<path>.*)$',
+        serve,
+        kwargs={"document_root": settings.MEDIA_ROOT},
+    )
+    if settings.DEBUG
+    else re_path(r"^media/(?P<path>.*)$", production_media_files),
+]
diff --git a/dashboard_viewer/dashboard_viewer/views.py b/dashboard_viewer/dashboard_viewer/views.py
index ee7d2ed7..a943f6b2 100644
--- a/dashboard_viewer/dashboard_viewer/views.py
+++ b/dashboard_viewer/dashboard_viewer/views.py
@@ -1,5 +1,6 @@
 import constance
 from django.http import (
+    HttpResponse,
     HttpResponseBadRequest,
     HttpResponseForbidden,
     HttpResponseNotFound,
@@ -8,7 +9,7 @@
 from django.template import loader
 
 
-def server_error(request):
+def server_error(_request):
     template = loader.get_template("500.html")
     context = {
         "constance_config": constance.config,
@@ -38,3 +39,21 @@ def bad_request(request, exception):  # noqa
         "constance_config": constance.config,
     }
     return HttpResponseBadRequest(template.render(context))
+
+
+def production_media_files(request, path):
+    response = HttpResponse()
+    del response["Content-Type"]
+
+    if not path.startswith("achilles_results_files"):
+        response["X-Accel-Redirect"] = f"/normal_media/{path}"
+    else:
+        if not request.user.is_staff:
+            return HttpResponseForbidden()
+
+        if len(path[23:].split("/")) != 3:  # only allow serving files, not directories
+            return HttpResponseForbidden()
+
+        response["X-Accel-Redirect"] = f"/{path}"
+
+    return response
diff --git a/dashboard_viewer/docker-entrypoint.sh b/dashboard_viewer/docker-entrypoint.sh
index 6a7ca7bc..b20c7430 100755
--- a/dashboard_viewer/docker-entrypoint.sh
+++ b/dashboard_viewer/docker-entrypoint.sh
@@ -3,12 +3,12 @@
 set -ex
 
 python manage.py migrate
-python manage.py migrate --database=achilles uploader
-python manage.py migrate --database=achilles materialized_queries_manager
+python manage.py migrate --database=achilles
+
+python manage.py compilescss
+python manage.py collectstatic --noinput --ignore="*.scss"
 
 if [ "${DASHBOARD_VIEWER_ENV}" = "production" ]; then
-    python manage.py compilescss
-    python manage.py collectstatic --noinput --ignore="*.scss"
     exec gunicorn dashboard_viewer.wsgi:application --bind 0.0.0.0:8000 --workers 5
 else
     python manage.py runserver 0.0.0.0:8000
diff --git a/dashboard_viewer/docker-init.sh b/dashboard_viewer/docker-init.sh
index c1a55455..f5e48939 100755
--- a/dashboard_viewer/docker-init.sh
+++ b/dashboard_viewer/docker-init.sh
@@ -8,7 +8,7 @@ wait-for-it "$POSTGRES_ACHILLES_HOST:$POSTGRES_ACHILLES_PORT"
 # Apply django migrations
 echo "Applying migrations"
 python manage.py migrate
-python manage.py migrate --database=achilles uploader
+python manage.py migrate --database=achilles
 
 # Load countries data
 echo "Loading initial data"
diff --git a/dashboard_viewer/materialized_queries_manager/actions.py b/dashboard_viewer/materialized_queries_manager/actions.py
index bd77024a..6ca73100 100644
--- a/dashboard_viewer/materialized_queries_manager/actions.py
+++ b/dashboard_viewer/materialized_queries_manager/actions.py
@@ -1,12 +1,12 @@
 from django.contrib import messages
-from django.core import serializers
+from django.http import HttpResponseRedirect
 from django.utils.translation import gettext as _, gettext_lazy
 
 from .tasks import refresh_materialized_views_task
 
 
 def refresh_materialized_views_action(model_admin, request, queryset):
-    refresh_materialized_views_task.delay(serializers.serialize("json", queryset))
+    refresh_materialized_views_task.delay([obj.matviewname for obj in queryset])
 
     model_admin.message_user(
         request,
@@ -14,6 +14,8 @@ def refresh_materialized_views_action(model_admin, request, queryset):
         messages.SUCCESS,
     )
 
+    return HttpResponseRedirect("/admin/")
+
 
 refresh_materialized_views_action.short_description = gettext_lazy(
     "Refresh selected %(verbose_name_plural)s"
diff --git a/dashboard_viewer/materialized_queries_manager/admin.py b/dashboard_viewer/materialized_queries_manager/admin.py
index a0c7500f..08d03cc3 100644
--- a/dashboard_viewer/materialized_queries_manager/admin.py
+++ b/dashboard_viewer/materialized_queries_manager/admin.py
@@ -43,7 +43,7 @@ def _changeform_view(self, request, object_id, form_url, extra_context):  # noqa
         to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))
         if to_field and not self.to_field_allowed(request, to_field):
             raise DisallowedModelAdminToField(
-                "The field %s cannot be referenced." % to_field
+                f"The field {to_field} cannot be referenced."
             )
 
         opts = self.model._meta
@@ -202,7 +202,7 @@ def response_change(self, request, obj):
             msg = format_html(_(self._get_first_phrase()), **msg_dict)
             self.message_user(request, msg, messages.SUCCESS)
             redirect_url = reverse(
-                "admin:%s_%s_add" % (opts.app_label, opts.model_name),
+                f"admin:{opts.app_label}_{opts.model_name}_add",
                 current_app=self.admin_site.name,
             )
             redirect_url = add_preserved_filters(
@@ -228,8 +228,7 @@ def _get_first_phrase(self):
         background_task_id = getattr(self, "background_task").id
         try:
             task_url = reverse(
-                "admin:%s_%s_change"
-                % (TaskResult._meta.app_label, TaskResult._meta.model_name),
+                f"admin:{TaskResult._meta.app_label}_{TaskResult._meta.model_name}_change",
                 args=(
                     quote(TaskResult.objects.get(task_id=background_task_id).pk),
                 ),
diff --git a/dashboard_viewer/materialized_queries_manager/tasks.py b/dashboard_viewer/materialized_queries_manager/tasks.py
index b87432a7..a8f5e937 100644
--- a/dashboard_viewer/materialized_queries_manager/tasks.py
+++ b/dashboard_viewer/materialized_queries_manager/tasks.py
@@ -70,6 +70,7 @@ def create_materialized_view(  # noqa
                 f"CREATE MATERIALIZED VIEW {new_obj.matviewname} AS {new_obj.definition}"
             )
         except ProgrammingError as e:
+            # no need to rename back the materialized view since the transaction will rollback
            self.update_state(
                 state=states.FAILURE,
                 meta={
@@ -131,6 +132,5 @@ def create_materialized_view(  # noqa
 
 
 @shared_task
-def refresh_materialized_views_task(query_set):
-    query_set = serializers.deserialize("json", query_set)
-    refresh(logger, query_set=[mat_query.object for mat_query in query_set])
+def refresh_materialized_views_task(names):
+    refresh(logger, query_set=MaterializedQuery.objects.filter(matviewname__in=names))
diff --git a/dashboard_viewer/materialized_queries_manager/tests.py b/dashboard_viewer/materialized_queries_manager/tests.py
index 26e8e0ac..162c68b4 100644
--- a/dashboard_viewer/materialized_queries_manager/tests.py
+++ b/dashboard_viewer/materialized_queries_manager/tests.py
@@ -109,7 +109,7 @@ def test_change_view(self, create_task):
                 "json",
                 [MaterializedQuery(matviewname="outlier", definition="SELECT 2")],
             ),
-            [{"changed": {"fields": ["definition"]}}],
+            [{"changed": {"fields": ["Definition"]}}],
         )
diff --git a/dashboard_viewer/materialized_queries_manager/utils.py b/dashboard_viewer/materialized_queries_manager/utils.py
index 9d9fb0ee..c8509d12 100644
--- a/dashboard_viewer/materialized_queries_manager/utils.py
+++ b/dashboard_viewer/materialized_queries_manager/utils.py
@@ -16,13 +16,16 @@ def refresh(logger, db_id=None, query_set=None):
     )
 
     with connections["achilles"].cursor() as cursor:
-        for materialized_query in (
-            MaterializedQuery.objects.all() if not query_set else query_set
-        ):
+        to_refresh = MaterializedQuery.objects.all() if not query_set else query_set
+        total = len(to_refresh)
+
+        for i, materialized_query in enumerate(to_refresh):
             try:
                 logger.info(
-                    "Refreshing materialized view %s [%s]",
+                    "Refreshing materialized view %s (%d/%d) [%s]",
                     materialized_query.matviewname,
+                    i + 1,
+                    total,
                     "command" if not db_id else f"datasource {db_id}",
                 )
                 cursor.execute(
diff --git a/dashboard_viewer/package-lock.json b/dashboard_viewer/package-lock.json
index 176bb5e0..59b868d9 100644
--- a/dashboard_viewer/package-lock.json
+++ b/dashboard_viewer/package-lock.json
@@ -5,6 +5,7 @@
   "requires": true,
   "packages": {
     "": {
+      "name": "dashboard_viewer",
       "version": "0.0.1",
       "license": "ISC",
       "dependencies": {
diff --git a/dashboard_viewer/requirements.in b/dashboard_viewer/requirements.in
new file mode 100644
index 00000000..597fb0cb
--- /dev/null
+++ b/dashboard_viewer/requirements.in
@@ -0,0 +1,22 @@
+celery==5.2.1  # asynchronous task on uploader app
+django-bootstrap-datepicker-plus==3.0.5  # release date on create data source form on uploader app
+django-bootstrap4==3.0.1  # forms beautifier
+django-celery-results==2.2.0  # reports results from workers
+django-compressor==2.4.1  # combines and minifies linked and inline Javascript or CSS in a Django template into cacheable static files
+django-constance==2.8.0  # to implement settings that can change at runtime
+django-markdownify==0.9.0  # markdown to html
+django-model-utils==4.2.0  # get specific type of subclasses after requesting buttons on tabsManager app
+django-sass-processor==1.1  # automate scss development
+django-redis==5.1.0  # access redis through a programmatic API
+django==3.2.10
+djangorestframework==3.13.1  # expose tabs content through an API
+libsass==0.21.0  # to compile scss files into css
+gunicorn==20.1.0  # for production deployment
+martor==1.6.7  # markdown editor in admin app
+pandas==1.3.5  # to handle achilles results files and their data
+Pillow==8.4.0  # image fields (App Logo)
+psycopg2-binary==2.9.2  # communicate with postgres
+redis==3.5.3  # communicate with redis (celery)
+git+https://github.com/bioinformatics-ua/redis-rw-lock.git#egg=redis-rw-lock
+    # ensure that only one thread updates records associated with a given datasource
+SQLAlchemy==1.4.28  # used by pandas to load achilles results data into the database
diff --git a/dashboard_viewer/requirements.txt b/dashboard_viewer/requirements.txt
index 52910ea2..3bda8cc2 100644
--- a/dashboard_viewer/requirements.txt
+++ b/dashboard_viewer/requirements.txt
@@ -1,55 +1,156 @@
-celery==5.0.5 # asynchronous task on uploader app
-django-bootstrap-datepicker-plus==3.0.5 # realease date on create data source form on uploader app
-django-bootstrap4==2.3.1 # forms beautifier
-django-celery-results==2.0.1 # reports results from workers
-django-compressor==2.4 # combines and minifies linked and inline Javascript or CSS in a Django template into cacheable static files
-django-constance==2.8.0 # to implement settings that can change at runtime
-django-markdownify==0.8.2 # markdown to html
-django-model-utils==4.1.1 # get specific type of subclasses after requesting buttons on tabsManager app
-django-redis==4.12.1 # acess redis through a programmatic API
-django-sass-processor==0.8.2 # automate scss devolopment
-django==2.2.17
-djangorestframework==3.12.2 # expose tabs content through an API
-libsass==0.20.1 # to compile scss files into css
-gunicorn==20.0.4 # for production deployment
-martor==1.5.8 # markdown editor in admin app
-pandas==1.2.1 # to handle achilles results files and their data
-Pillow==8.1.0 # image fields (App Logo)
-psycopg2-binary==2.8.6 # communicate with postgres
-redis==3.5.3 # comunicate with redis (celery)
-git+https://github.com/bioinformatics-ua/redis-rw-lock.git#egg=redis-rw-lock
-    # ensure that only one thread updates records associated with a given datasource
-SQLAlchemy==1.3.23 # used by pandas to load achilles results data into the database
-
-#### depencies of the packages above
-amqp==5.0.5
-beautifulsoup4==4.9.3
-billiard==3.6.3.0
-bleach==3.3.0
-certifi==2020.12.5
-chardet==4.0.0
-click==7.1.2
-click-didyoumean==0.0.3
+#
+# This file is autogenerated by pip-compile with python 3.9
+# To update, run:
+#
+#    pip-compile requirements.in
+#
+amqp==5.0.9
+    # via kombu
+asgiref==3.4.1
+    # via django
+beautifulsoup4==4.10.0
+    # via django-bootstrap4
+billiard==3.6.4.0
+    # via celery
+bleach==4.1.0
+    # via django-markdownify
+celery==5.2.1
+    # via
+    #   -r requirements.in
+    #   django-celery-results
+certifi==2021.10.8
+    # via requests
+charset-normalizer==2.0.9
+    # via requests
+click==8.0.3
+    # via
+    #   celery
+    #   click-didyoumean
+    #   click-plugins
+    #   click-repl
+click-didyoumean==0.3.0
+    # via celery
 click-plugins==1.1.1
-click-repl==0.1.6
-django-appconf==1.0.4
-idna==2.10
-kombu==5.0.2
-Markdown==3.3.3
-numpy==1.20.0
-packaging==20.9
-prompt-toolkit==3.0.14
-pyparsing==2.4.7
-python-dateutil==2.8.1
+    # via celery
+click-repl==0.2.0
+    # via celery
+django==3.2.10
+    # via
+    #   -r requirements.in
+    #   django-appconf
+    #   django-bootstrap-datepicker-plus
+    #   django-bootstrap4
+    #   django-markdownify
+    #   django-model-utils
+    #   django-redis
+    #   djangorestframework
+    #   martor
+django-appconf==1.0.5
+    # via django-compressor
+django-bootstrap-datepicker-plus==3.0.5
+    # via -r requirements.in
+django-bootstrap4==3.0.1
+    # via -r requirements.in
+django-celery-results==2.2.0
+    # via -r requirements.in
+django-compressor==2.4.1
+    # via -r requirements.in
+django-constance==2.8.0
+    # via -r requirements.in
+django-markdownify==0.9.0
+    # via -r requirements.in
+django-model-utils==4.2.0
+    # via -r requirements.in
+django-redis==5.1.0
+    # via -r requirements.in
+django-sass-processor==1.1
+    # via -r requirements.in
+djangorestframework==3.13.1
+    # via -r requirements.in
+greenlet==1.1.2
+    # via sqlalchemy
+gunicorn==20.1.0
+    # via -r requirements.in
+idna==3.3
+    # via requests
+importlib-metadata==4.10.0
+    # via
+    #   markdown
+    #   martor
+kombu==5.2.2
+    # via celery
+libsass==0.21.0
+    # via -r requirements.in
+markdown==3.3.6
+    # via
+    #   django-markdownify
+    #   martor
+martor==1.6.7
+    # via -r requirements.in
+numpy==1.21.5
+    # via pandas
+packaging==21.3
+    # via bleach
+pandas==1.3.5
+    # via -r requirements.in
+pillow==8.4.0
+    # via -r requirements.in
+prompt-toolkit==3.0.24
+    # via click-repl
+psycopg2-binary==2.9.2
+    # via -r requirements.in
+pyparsing==3.0.6
+    # via packaging
+python-dateutil==2.8.2
+    # via pandas
 python-redis-lock==3.7.0
-pytz==2021.1
+    # via redis-rw-lock
+pytz==2021.3
+    # via
+    #   celery
+    #   django
+    #   djangorestframework
+    #   pandas
 rcssmin==1.0.6
-requests==2.25.1
+    # via django-compressor
+redis==3.5.3
+    # via
+    #   -r requirements.in
+    #   django-redis
+    #   python-redis-lock
+    #   redis-rw-lock
+redis-rw-lock @ git+https://github.com/bioinformatics-ua/redis-rw-lock.git
+    # via -r requirements.in
+requests==2.26.0
+    # via martor
 rjsmin==1.1.0
-six==1.15.0
-soupsieve==2.1
-sqlparse==0.4.1
-urllib3==1.26.3
+    # via django-compressor
+six==1.16.0
+    # via
+    #   bleach
+    #   click-repl
+    #   django-compressor
+    #   libsass
+    #   python-dateutil
+soupsieve==2.3.1
+    # via beautifulsoup4
+sqlalchemy==1.4.28
+    # via -r requirements.in
+sqlparse==0.4.2
+    # via django
+urllib3==1.26.7
+    # via requests
 vine==5.0.0
+    # via
+    #   amqp
+    #   celery
+    #   kombu
 wcwidth==0.2.5
-webencodings==0.5.1
\ No newline at end of file
+    # via prompt-toolkit
+webencodings==0.5.1
+    # via bleach
+zipp==3.6.0
+    # via importlib-metadata
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/dashboard_viewer/screenshot.png b/dashboard_viewer/screenshot.png
deleted file mode 100644
index bc2495b0..00000000
Binary files a/dashboard_viewer/screenshot.png and /dev/null differ
diff --git a/dashboard_viewer/tabsManager/views.py b/dashboard_viewer/tabsManager/views.py
index d139699f..d2c97b27 100644
--- a/dashboard_viewer/tabsManager/views.py
+++ b/dashboard_viewer/tabsManager/views.py
@@ -91,7 +91,7 @@ def get_menu():
 
 
 class APITabsView(rest_views.APIView):
-    def get(self, request):
+    def get(self, _request):
         return Response(get_menu())
 
 
diff --git a/dashboard_viewer/uploader/admin.py b/dashboard_viewer/uploader/admin.py
index eb7ea8f6..9a71fdc4 100644
--- a/dashboard_viewer/uploader/admin.py
+++ b/dashboard_viewer/uploader/admin.py
@@ -81,9 +81,8 @@ def response_delete(self, request, obj_display, obj_id):
                 request,
                 self.popup_response_template
                 or [
-                    "admin/%s/%s/popup_response.html"
-                    % (opts.app_label, opts.model_name),
-                    "admin/%s/popup_response.html" % opts.app_label,
+                    f"admin/{opts.app_label}/{opts.model_name}/popup_response.html",
+                    f"admin/{opts.app_label}/popup_response.html",
                     "admin/popup_response.html",
                 ],
                 {
@@ -107,7 +106,7 @@ def response_delete(self, request, obj_display, obj_id):
 
         if self.has_change_permission(request, None):
             post_url = reverse(
-                "admin:%s_%s_changelist" % (opts.app_label, opts.model_name),
+                f"admin:{opts.app_label}_{opts.model_name}_changelist",
                 current_app=self.admin_site.name,
             )
             preserved_filters = self.get_preserved_filters(request)
diff --git a/dashboard_viewer/uploader/decorators.py b/dashboard_viewer/uploader/decorators.py
new file mode 100644
index 00000000..bbdf95f8
--- /dev/null
+++ b/dashboard_viewer/uploader/decorators.py
@@ -0,0 +1,25 @@
+from functools import wraps
+
+from django.conf import settings
+from django.http import HttpResponseForbidden
+from django.views.decorators.csrf import csrf_exempt
+
+
+def uploader_decorator(view_func):
+    """
+    If not in single application mode,
+    check if the request is being sent from the main application.
+    If it is not, respond with 403.
+    Otherwise don't do any verification.
+    """
+    wrapped_view = csrf_exempt(view_func)
+    if not settings.SINGLE_APPLICATION_MODE:
+
+        def check_host(request, *args, **kwargs):
+            if request.get_host() != settings.MAIN_APPLICATION_HOST:
+                return HttpResponseForbidden()
+            return view_func(request, *args, **kwargs)
+
+        wrapped_view = wraps(wrapped_view)(check_host)
+
+    return wrapped_view
diff --git a/dashboard_viewer/uploader/file_handler/updates.py b/dashboard_viewer/uploader/file_handler/updates.py
index ddc9126f..df0ed447 100644
--- a/dashboard_viewer/uploader/file_handler/updates.py
+++ b/dashboard_viewer/uploader/file_handler/updates.py
@@ -98,42 +98,42 @@ def move_achilles_results_records(
         cursor.execute(
             f"""
             INSERT INTO {destination_model._meta.db_table} (
-                {destination_model.analysis_id.field_name},
-                {destination_model.stratum_1.field_name},
-                {destination_model.stratum_2.field_name},
-                {destination_model.stratum_3.field_name},
-                {destination_model.stratum_4.field_name},
-                {destination_model.stratum_5.field_name},
-                {destination_model.count_value.field_name},
-                {destination_model.min_value.field_name},
-                {destination_model.max_value.field_name},
-                {destination_model.avg_value.field_name},
-                {destination_model.stdev_value.field_name},
-                {destination_model.median_value.field_name},
-                {destination_model.p10_value.field_name},
-                {destination_model.p25_value.field_name},
-                {destination_model.p75_value.field_name},
-                {destination_model.p90_value.field_name},
+                {destination_model.analysis_id.field.name},
+                {destination_model.stratum_1.field.name},
+                {destination_model.stratum_2.field.name},
+                {destination_model.stratum_3.field.name},
+                {destination_model.stratum_4.field.name},
+                {destination_model.stratum_5.field.name},
+                {destination_model.count_value.field.name},
+                {destination_model.min_value.field.name},
+                {destination_model.max_value.field.name},
+                {destination_model.avg_value.field.name},
+                {destination_model.stdev_value.field.name},
+                {destination_model.median_value.field.name},
+                {destination_model.p10_value.field.name},
+                {destination_model.p25_value.field.name},
+                {destination_model.p75_value.field.name},
+                {destination_model.p90_value.field.name},
                 {destination_model.data_source.field.column},
                 {destination_model.upload_info.field.column}
             )
             SELECT
-                {origin_model.analysis_id.field_name},
-                {origin_model.stratum_1.field_name},
-                {origin_model.stratum_2.field_name},
-                {origin_model.stratum_3.field_name},
-                {origin_model.stratum_4.field_name},
-                {origin_model.stratum_5.field_name},
-                {origin_model.count_value.field_name},
-                {origin_model.min_value.field_name},
-                {origin_model.max_value.field_name},
-                {origin_model.avg_value.field_name},
-                {origin_model.stdev_value.field_name},
-                {origin_model.median_value.field_name},
-                {origin_model.p10_value.field_name},
-                {origin_model.p25_value.field_name},
-                {origin_model.p75_value.field_name},
-                {origin_model.p90_value.field_name},
+                {origin_model.analysis_id.field.name},
+                {origin_model.stratum_1.field.name},
+                {origin_model.stratum_2.field.name},
+                {origin_model.stratum_3.field.name},
+                {origin_model.stratum_4.field.name},
+                {origin_model.stratum_5.field.name},
+                {origin_model.count_value.field.name},
+                {origin_model.min_value.field.name},
+                {origin_model.max_value.field.name},
+                {origin_model.avg_value.field.name},
+                {origin_model.stdev_value.field.name},
+                {origin_model.median_value.field.name},
+                {origin_model.p10_value.field.name},
+                {origin_model.p25_value.field.name},
+                {origin_model.p75_value.field.name},
+                {origin_model.p90_value.field.name},
                 {origin_model.data_source.field.column},
                 %s
             FROM {origin_model._meta.db_table}
diff --git a/dashboard_viewer/uploader/migrations/0013_auto_20210721_1715.py b/dashboard_viewer/uploader/migrations/0013_auto_20210721_1715.py
new file mode 100644
index 00000000..33892b2a
--- /dev/null
+++ b/dashboard_viewer/uploader/migrations/0013_auto_20210721_1715.py
@@ -0,0 +1,83 @@
+# Generated by Django 2.2.17 on 2021-07-21 17:15
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("uploader", "0012_auto_20210615_1828"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="max_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="median_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="min_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="p10_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="p25_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="p75_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresults",
+            name="p90_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="max_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="median_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="min_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="p10_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="p25_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="p75_value",
+            field=models.FloatField(null=True),
+        ),
+        migrations.AlterField(
+            model_name="achillesresultsarchive",
+            name="p90_value",
+            field=models.FloatField(null=True),
+        ),
+    ]
diff --git a/dashboard_viewer/uploader/models.py b/dashboard_viewer/uploader/models.py
index 51337874..fc247d35 100644
--- a/dashboard_viewer/uploader/models.py
+++ b/dashboard_viewer/uploader/models.py
@@ -229,15 +229,15 @@ class Meta:
     stratum_4 = models.TextField(null=True)
     stratum_5 = models.TextField(null=True)
     count_value = models.BigIntegerField()
-    min_value = models.BigIntegerField(null=True)
-    max_value = models.BigIntegerField(null=True)
+    min_value = models.FloatField(null=True)
+    max_value = models.FloatField(null=True)
     avg_value = models.FloatField(null=True)
     stdev_value = models.FloatField(null=True)
-    median_value = models.BigIntegerField(null=True)
-    p10_value = models.BigIntegerField(null=True)
-    p25_value = models.BigIntegerField(null=True)
-    p75_value = models.BigIntegerField(null=True)
-    p90_value = models.BigIntegerField(null=True)
+    median_value = models.FloatField(null=True)
+    p10_value = models.FloatField(null=True)
+    p25_value = models.FloatField(null=True)
+    p75_value = models.FloatField(null=True)
+    p90_value = models.FloatField(null=True)
 
 
 class AchillesResultsArchive(models.Model):
@@ -257,12 +257,12 @@ class Meta:
     stratum_4 = models.TextField(null=True)
     stratum_5 = models.TextField(null=True)
     count_value = models.BigIntegerField()
-    min_value = models.BigIntegerField(null=True)
-    max_value = models.BigIntegerField(null=True)
+    min_value = models.FloatField(null=True)
+    max_value = models.FloatField(null=True)
     avg_value = models.FloatField(null=True)
     stdev_value = models.FloatField(null=True)
-    median_value = models.BigIntegerField(null=True)
-    p10_value = models.BigIntegerField(null=True)
-    p25_value = models.BigIntegerField(null=True)
-    p75_value = models.BigIntegerField(null=True)
-    p90_value = models.BigIntegerField(null=True)
+    median_value = models.FloatField(null=True)
+    p10_value = models.FloatField(null=True)
+    p25_value = models.FloatField(null=True)
+    p75_value = models.FloatField(null=True)
+    p90_value = models.FloatField(null=True)
diff --git a/dashboard_viewer/uploader/tests.py b/dashboard_viewer/uploader/tests.py
index 00b0110f..d4bf83ba 100644
--- a/dashboard_viewer/uploader/tests.py
+++ b/dashboard_viewer/uploader/tests.py
@@ -4,7 +4,7 @@
 import numpy
 from django.core.cache import cache
 from django.core.files.uploadedfile import SimpleUploadedFile
-from django.test import TestCase, TransactionTestCase
+from django.test import override_settings, tag, TestCase, TransactionTestCase
 
 from .file_handler.checks import (
     DuplicatedMetadataRow,
@@ -26,6 +26,58 @@
 from .tasks import upload_results_file
 
 
+@tag("third-party-app")
+@override_settings(ALLOWED_HOSTS=["thisapp.host.com", "mainapp.host.com"])
+class UploaderRestrictedAccess(TestCase):
+    """
+    Assumes the following environment variable values:
+      SINGLE_APPLICATION_MODE=n
+      MAIN_APPLICATION_HOST=mainapp.host.com
+    """
+
+    databases = "__all__"
+
+    def test_block_if_wrong_host(self):
+        response = self.client.get("/uploader/test/", HTTP_HOST="thisapp.host.com")
+
+        self.assertEqual(403, response.status_code)
+
+    def test_not_block_if_correct_host(self):
+        response = self.client.get("/uploader/test/", HTTP_HOST="mainapp.host.com")
+
+        self.assertEqual(200, response.status_code)
+        self.assertTrue(response.has_header("X-Frame-Options"))
+        self.assertEqual(
+            "ALLOW-FROM HTTPS://MAINAPP.HOST.COM/", response["X-Frame-Options"]
+        )
+
+    def test_not_block_other_urls(self):
+        response = self.client.get("/admin/login/", HTTP_HOST="thisapp.host.com")
+
+        self.assertEqual(200, response.status_code)
+        self.assertTrue(response.has_header("X-Frame-Options"))
+        self.assertEqual(
+            "ALLOW-FROM HTTPS://MAINAPP.HOST.COM/", response["X-Frame-Options"]
+        )
+
+
+class UploaderNonRestrictedAccess(TestCase):
+    """
+    Assumes the following environment variable values:
+      SINGLE_APPLICATION_MODE=y
+    """
+
+    databases = "__all__"
+
+    @override_settings(ALLOWED_HOSTS=["some.domain.com"])
+    def test_not_block_if_single_application(self):
+        response = self.client.get("/uploader/test/", HTTP_HOST="some.domain.com")
+
+        self.assertEqual(200, response.status_code)
+        if response.has_header("X-Frame-Options"):
+            self.assertNotIn("ALLOW-FROM ", response.get("X-Frame-Options"))
+
+
 class DataSourceCreator:
     def __init__(self):
         self._counter = 0
diff --git a/dashboard_viewer/uploader/views.py b/dashboard_viewer/uploader/views.py
index dddaac85..1dde5f4d 100644
--- a/dashboard_viewer/uploader/views.py
+++ b/dashboard_viewer/uploader/views.py
@@ -6,10 +6,10 @@
 from django.http import JsonResponse
 from django.shortcuts import get_object_or_404, redirect, render
 from django.utils.html import format_html, mark_safe
-from django.views.decorators.csrf import csrf_exempt
 from rest_framework.response import Response
 from rest_framework.viewsets import GenericViewSet
 
+from .decorators import uploader_decorator
 from .forms import AchillesResultsForm, EditSourceForm, SourceForm
 from .models import Country, DataSource, PendingUpload, UploadHistory
 from .serializers import DataSourceSerializer
@@ -18,7 +18,7 @@
 PAGE_TITLE = "Dashboard Data Upload"
 
 
-@csrf_exempt
+@uploader_decorator
 def upload_achilles_results(request, *args, **kwargs):
     data_source = kwargs.get("data_source")
     try:
@@ -72,7 +72,7 @@ def upload_achilles_results(request, *args, **kwargs):
     )
 
 
-def get_upload_task_status(request, data_source, upload_id):
+def get_upload_task_status(_request, data_source, upload_id):
     data_source = get_object_or_404(DataSource, hash=data_source)
 
     try:
@@ -128,7 +128,7 @@ def _get_fields_initial_values(request, initial):
 def _leave_valid_fields_values_only(request, initial, aux_form):
     for field_name, field in SourceForm.base_fields.items():
         if isinstance(field, fields.MultiValueField):
-            decompressed = list()
+            decompressed = []
 
             for i in range(len(field.widget.widgets)):
                 generated_field_name = f"{field_name}_{i}"
@@ -137,7 +137,7 @@ def _leave_valid_fields_values_only(request, initial, aux_form):
                     del initial[generated_field_name]
                     decompressed.append(value)
                 else:
-                    decompressed = list()
+                    decompressed = []
                     break
 
             if decompressed:
@@ -152,11 +152,11 @@ def _leave_valid_fields_values_only(request, initial, aux_form):
             del initial[field_name]
 
 
-@csrf_exempt
+@uploader_decorator
 def create_data_source(request, *_, **kwargs):
     data_source = kwargs.get("data_source")
     if request.method == "GET":
-        initial = dict()
+        initial = {}
 
         if data_source is not None:
             initial["hash"] = data_source
@@ -172,7 +172,7 @@ def create_data_source(request, *_, **kwargs):
                 obj.data_source = data_source
                 obj.save()
 
-            return redirect("/uploader/{}".format(obj.hash))
+            return redirect(f"/uploader/{obj.hash}")
 
         # since the form isn't valid, lets maintain only the valid fields
         _leave_valid_fields_values_only(request, initial, aux_form)
@@ -216,7 +216,7 @@ def create_data_source(request, *_, **kwargs):
                     obj.name,
                 ),
             )
-            return redirect("/uploader/{}".format(obj.hash))
+            return redirect(f"/uploader/{obj.hash}")
 
     return render(
         request,
@@ -233,7 +233,7 @@ def create_data_source(request, *_, **kwargs):
     )
 
 
-@csrf_exempt
+@uploader_decorator
 def edit_data_source(request, *_, **kwargs):
     data_source = kwargs.get("data_source")
     try:
@@ -270,7 +270,7 @@ def edit_data_source(request, *_, **kwargs):
             request,
             format_html("Data source {} edited with success.", obj.name),
         )
-        return redirect("/uploader/{}".format(obj.hash))
+        return redirect(f"/uploader/{obj.hash}")
 
     return render(
         request,
diff --git a/dashboard_viewer/uploader/widgets.py b/dashboard_viewer/uploader/widgets.py
index bc6c08e0..0a14d5c5 100644
--- a/dashboard_viewer/uploader/widgets.py
+++ b/dashboard_viewer/uploader/widgets.py
@@ -12,12 +12,12 @@ class ListTextWidget(forms.TextInput):
     """
 
     def __init__(self, query_obj, *args, **kwargs):
-        super(ListTextWidget, self).__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)
        self.query_obj = query_obj
 
     def render(self, name, value, attrs=None, renderer=None):
         attrs.update({"list": f"{name}_list", "autocomplete": "off"})
-        text_html = super(ListTextWidget, self).render(name, value, attrs=attrs)
+        text_html = super().render(name, value, attrs=attrs)
         data_list = f'<datalist id="{name}_list">'
         for item in self.query_obj.all():
             data_list += f'<option value="{item}">'