diff --git a/.github/workflows/code_analysis.yml b/.github/workflows/code_analysis.yml
index e36a2f2f..57faee90 100644
--- a/.github/workflows/code_analysis.yml
+++ b/.github/workflows/code_analysis.yml
@@ -86,5 +86,4 @@ jobs:
pip install --upgrade pip
pip install -r requirements-dev/requirements-prospector.txt -r dashboard_viewer/requirements.txt
- name: prospector
- run: |
- prospector dashboard_viewer
+ run: DJANGO_SETTINGS_MODULE=dashboard_viewer.settings prospector dashboard_viewer
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index decab2c3..289d0527 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -39,4 +39,4 @@ jobs:
export $(grep -v '^#' tests/.env | xargs -d '\n')
cd dashboard_viewer
python manage.py test --exclude-tag third-party-app
- SINGLE_APPLICATION_MODE=n MAIN_APPLICATION_HOST=mainapp.host.com python manage.py test --tag third-party-app
+ #SINGLE_APPLICATION_MODE=n MAIN_APPLICATION_HOST=mainapp.host.com python manage.py test --tag third-party-app
diff --git a/.gitignore b/.gitignore
index c3bd3f72..84add08f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,15 +1,15 @@
# documentation temporary files
docs/src/_book
-# dashboard_viewer django app ERRORs logs
-logs
-
# custom file created at docker/superset
docker-init.sh
# Docker volumes
volumes/
+# backups configuration file
+backups.conf
+
# Retrieved from https://github.com/github/gitignore/blob/21419e391a78f2487340b3b1240b988aaf15b54f/Node.gitignore
# Logs
logs
diff --git a/backups/backup.sh b/backups/backup.sh
index 026343c4..abdb87cb 100755
--- a/backups/backup.sh
+++ b/backups/backup.sh
@@ -8,9 +8,9 @@ echo_step() {
set -e
-. $HOME/.dashboards_backups.conf
+. "$(dirname "$0")/backups.conf"
-if [ $RUN -eq 0 ] ; then
+if [ "$RUN" -eq 0 ] ; then
echo "run was 0. exitting"
exit 0
fi
@@ -19,13 +19,13 @@ echo_step "1" "Create temporary directory"
BACKUP_DIRECTORY_NAME=dashboards_backups_$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 40)
TMP_BACKUP_DIRECTORY=$TMP_DIRECTORY/$BACKUP_DIRECTORY_NAME
-mkdir $TMP_BACKUP_DIRECTORY
+mkdir "$TMP_BACKUP_DIRECTORY"
EXIT_STATUS=0
(
echo_step "2" "Get into the docker directory"
PREVIOUS_PWD=$(pwd)
- cd $(dirname "$0")
+ cd "$(dirname "$0")"
(
cd ../docker
@@ -45,7 +45,7 @@ EXIT_STATUS=0
REDIS_CONTAINER_ID=$(docker-compose ps -q redis)
- docker cp -a $REDIS_CONTAINER_ID:/data/dump.rdb $TMP_BACKUP_DIRECTORY/redis.rdb
+ docker cp -a $REDIS_CONTAINER_ID:/data/dump.rdb "$TMP_BACKUP_DIRECTORY/redis.rdb"
echo_step "5" "Extract Dashboards's media files"
MEDIA_ROOT=$(docker-compose exec -T dashboard sh -c """
@@ -63,28 +63,28 @@ print(settings.MEDIA_ROOT, end=\"\")
# copy media files to backup folder
DASHBOARDS_CONTAINER_ID=$(docker-compose ps -q dashboard)
- docker cp -a $DASHBOARDS_CONTAINER_ID:$MEDIA_ROOT $TMP_BACKUP_DIRECTORY
+ docker cp -a $DASHBOARDS_CONTAINER_ID:$MEDIA_ROOT "$TMP_BACKUP_DIRECTORY"
echo_step "6" "Compress gathered data"
COMPRESSED_FILE_PATH=$TMP_DIRECTORY/${APP_NAME}_$(date +"%Y%m%d%H%M%S").zip
(
- cd $TMP_DIRECTORY
- zip -q -r $COMPRESSED_FILE_PATH $BACKUP_DIRECTORY_NAME
- #tar -C $TMP_DIRECTORY -cJf $COMPRESSED_FILE_PATH $BACKUP_DIRECTORY_NAME
+ cd "$TMP_DIRECTORY"
+ zip -q -r "$COMPRESSED_FILE_PATH" $BACKUP_DIRECTORY_NAME
+ #tar -C "$TMP_DIRECTORY" -cJf "$COMPRESSED_FILE_PATH" $BACKUP_DIRECTORY_NAME
echo_step "7" "Send to $SERVER"
- backup_uploader $APP_NAME $SERVER $CREDENTIALS_FILE_PATH $BACKUP_CHAIN_CONFIG $COMPRESSED_FILE_PATH
+ backup_uploader "$APP_NAME" "$SERVER" "$CREDENTIALS_FILE_PATH" "$BACKUP_CHAIN_CONFIG" "$COMPRESSED_FILE_PATH"
) || EXIT_STATUS=$?
- rm -f $COMPRESSED_FILE_PATH
+ rm -f "$COMPRESSED_FILE_PATH"
exit $EXIT_STATUS
) || EXIT_STATUS=$?
- cd $PREVIOUS_PWD
+ cd "$PREVIOUS_PWD"
exit $EXIT_STATUS
) || EXIT_STATUS=$?
-rm -rf $TMP_BACKUP_DIRECTORY
+rm -rf "$TMP_BACKUP_DIRECTORY"
if [ $EXIT_STATUS -ne 0 ] ; then
echo "Failed with exit code $EXIT_STATUS"
diff --git a/backups/dashboards_backups.conf.example b/backups/backups.conf.example
similarity index 100%
rename from backups/dashboards_backups.conf.example
rename to backups/backups.conf.example
diff --git a/backups/restore.sh b/backups/restore.sh
new file mode 100755
index 00000000..28e0332d
--- /dev/null
+++ b/backups/restore.sh
@@ -0,0 +1,96 @@
+#!/bin/sh
+
+set -e
+
+if [ $# -ne 1 ] ; then
+ 1>&2 echo "Usage: restore.sh [zip backup file]"
+ exit 1
+fi
+
+STEP_COUNT=5
+
+echo_step() {
+ printf "%3s/%s %s\n" "$1" "$STEP_COUNT" "$2"
+}
+
+. "$(dirname "$0")/backups.conf"
+
+echo_step 1 "Extracting backup file"
+
+BACKUP_ROOT_DIRECTORY=$(unzip -qql "$1" | head -n1 | sed -r '1 {s/([ ]+[^ ]+){3}\s+//;q}')
+unzip "$1" -d "$TMP_DIRECTORY" >/dev/null
+BACKUP_ROOT_DIRECTORY="$TMP_DIRECTORY/$BACKUP_ROOT_DIRECTORY"
+
+EXIT_STATUS=0
+
+(
+ echo_step 2 "Get into the docker directory"
+ PREVIOUS_PWD=$(pwd)
+ cd "$(dirname "$0")"
+ (
+ cd ../docker
+
+ echo_step 3 "Restoring postgres"
+ docker-compose stop dashboard dashboard_worker superset superset-worker superset-worker-beat >/dev/null 2>&1
+ docker-compose up -d postgres >/dev/null 2>&1
+
+ until docker-compose exec -T postgres sh -c "pg_isready" >/dev/null 2>&1 ; do
+ sleep 2
+ done
+
+ CONTAINER_ID=$(docker-compose ps -q postgres)
+ docker cp "$BACKUP_ROOT_DIRECTORY/postgres.sql" $CONTAINER_ID:/tmp
+ docker-compose exec -T postgres sh -c """
+psql -f /tmp/postgres.sql -U \$POSTGRES_USER -d postgres
+rm /tmp/postgres.sql
+"""
+
+ echo_step 4 "Restoring redis"
+ docker-compose up -d redis >/dev/null 2>&1
+ CONTAINER_ID=$(docker-compose ps -q redis)
+ docker cp "$BACKUP_ROOT_DIRECTORY/redis.rdb" $CONTAINER_ID:/
+ docker-compose exec -T redis sh -c """
+mv /redis.rdb /data
+"""
+ docker-compose restart redis >/dev/null 2>&1
+
+ echo_step 5 "Restoring Dashboards' media files"
+ docker-compose up -d dashboard >/dev/null 2>&1
+ MEDIA_ROOT=$(docker-compose exec -T dashboard sh -c """
+echo '''from django.conf import settings
+print(settings.MEDIA_ROOT, end=\"\")
+''' | python manage.py shell 2>/dev/null""")
+
+ # fix MEDIA_ROOT if a relative path is returned
+ case $MEDIA_ROOT in
+ "/"*) ;;
+ *)
+ MEDIA_ROOT="/app/$MEDIA_ROOT"
+ ;;
+ esac
+
+ CONTAINER_ID=$(docker-compose ps -q dashboard)
+
+ docker-compose exec -T dashboard sh -c """
+cd $MEDIA_ROOT
+find . -mindepth 1 -maxdepth 1 -exec rm -r {} +
+"""
+ docker cp -a "$BACKUP_ROOT_DIRECTORY/$(basename "$MEDIA_ROOT")" $CONTAINER_ID:"$MEDIA_ROOT"
+ docker-compose exec -T dashboard sh -c """
+cd $MEDIA_ROOT
+find $(basename "$MEDIA_ROOT") -mindepth 1 -maxdepth 1 -exec mv -t . -- {} +
+rmdir $(basename "$MEDIA_ROOT")
+"""
+
+ docker-compose up -d
+ ) || EXIT_STATUS=$?
+ cd "$PREVIOUS_PWD"
+
+ exit $EXIT_STATUS
+) || EXIT_STATUS=$?
+rm -rf "$BACKUP_ROOT_DIRECTORY"
+
+if [ $EXIT_STATUS -ne 0 ] ; then
+ echo "Failed with exit code $EXIT_STATUS"
+ exit $EXIT_STATUS
+fi
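
Before running a restore it can help to confirm the archive has the layout the script expects: a single top-level directory holding `postgres.sql`, `redis.rdb`, and the media folder. A minimal sketch of such a check (not part of the repository; file names taken from the scripts above):

```python
import sys
import zipfile

# quick sanity check of a backup archive before handing it to restore.sh
with zipfile.ZipFile(sys.argv[1]) as zf:
    names = zf.namelist()
    root = names[0].split("/")[0]  # the random dashboards_backups_* directory
    for expected in (f"{root}/postgres.sql", f"{root}/redis.rdb"):
        if expected not in names:
            sys.exit(f"missing {expected} in backup archive")
print(f"backup looks complete; root directory: {root}")
```
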
diff --git a/dashboard_viewer/dashboard_viewer/settings.py b/dashboard_viewer/dashboard_viewer/settings.py
index d1a60a76..2ebac98e 100644
--- a/dashboard_viewer/dashboard_viewer/settings.py
+++ b/dashboard_viewer/dashboard_viewer/settings.py
@@ -9,8 +9,9 @@
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
-
+import logging
import os
+from collections import OrderedDict
from distutils.util import strtobool
from constance.signals import config_updated
@@ -24,43 +25,43 @@
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
-# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = os.environ["SECRET_KEY"]
-
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get("DASHBOARD_VIEWER_ENV", "development") == "development"
-_LOGS_DIR = os.path.join(BASE_DIR, "logs")
-if not os.path.exists(_LOGS_DIR):
- os.makedirs(_LOGS_DIR, exist_ok=True)
-elif not os.path.isdir(_LOGS_DIR):
- raise TypeError('file "logs" is not a directory.')
-
LOGGING = {
"version": 1,
- "filters": {
- "require_debug_true": {
- "()": "django.utils.log.RequireDebugTrue",
+ "disable_existing_loggers": False,
+ "formatters": {
+ "custom": {
+ "format": "%(asctime)s %(levelname)s %(name)s:%(lineno)s %(message)s",
},
},
"handlers": {
- "file": {
- "level": "ERROR",
- "filters": ["require_debug_true"],
- "class": "logging.FileHandler",
- "filename": os.path.join(_LOGS_DIR, "errors.log"),
- }
+ "console": {
+ "class": "logging.StreamHandler",
+ "formatter": "custom",
+ },
},
"loggers": {
- "django": {
- "handlers": ["file"],
- "level": "ERROR",
- "propagate": True,
+ "root": {
+ "handlers": ["console"],
+ "level": "DEBUG" if DEBUG else "INFO",
},
},
}
+
+_DEFAULT_SECRET_KEY = "CHANGE_ME" # noqa
+
+# SECURITY WARNING: keep the secret key used in production secret!
+SECRET_KEY = os.environ.get("SECRET_KEY", _DEFAULT_SECRET_KEY)
+if not DEBUG and SECRET_KEY == _DEFAULT_SECRET_KEY:
+ logging.getLogger(__name__).warning(
+ "Using the default secret key. If this is a production environment please change it.",
+ )
+
+
ALLOWED_HOSTS = ["*"]
@@ -275,6 +276,21 @@
"If a Data Source owner can change the draft status when editing its details",
bool,
),
+ "SUPERSET_HOST": (
+ "https://superset.ehden.eu",
+ "Host of the target superset installation. Used to redirect to the dashboard of a database",
+ str,
+ ),
+ "DATABASE_DASHBOARD_IDENTIFIER": (
+ "database-level-dashboard",
+ "Identifier of the database dashboard on the Superset installation.",
+ str,
+ ),
+ "DATABASE_FILTER_ID": (
+ 69,
+ "Id of the database filter present in the Database Dashboard",
+ int,
+ ),
"TABS_LOGO_CONTAINER_CSS": (
"padding: 5px 5px 5px 5px;\nheight: 100px;\nmargin-bottom: 10px;\n",
"Css for the div container of the logo image",
@@ -295,6 +311,26 @@
),
}
+CONSTANCE_CONFIG_FIELDSETS = OrderedDict(
+ [
+ ("Application Attributes", ("APP_LOGO_IMAGE", "APP_LOGO_URL", "APP_TITLE")),
+ (
+ "Uploader Texts",
+ (
+ "UPLOADER_EXECUTE_EXPORT_PACKAGE",
+ "UPLOADER_UPLOAD",
+ "UPLOADER_AUTO_UPDATE",
+ ),
+ ),
+ ("Uploader Settings", ("UPLOADER_ALLOW_EDIT_DRAFT_STATUS",)),
+ (
+ "Superset",
+ ("SUPERSET_HOST", "DATABASE_DASHBOARD_IDENTIFIER", "DATABASE_FILTER_ID"),
+ ),
+ ("Tabs (Deprecated)", ("TABS_LOGO_CONTAINER_CSS", "TABS_LOGO_IMG_CSS")),
+ ]
+)
+
@receiver(config_updated)
def constance_updated(key, old_value, **_):
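
With the file handler gone, all records now flow to stdout through the `custom` formatter, which fits `docker logs` better than a file inside the container. Roughly what a record looks like under this configuration (illustrative snippet, not repository code):

```python
import logging

# mirrors the "custom" formatter defined in LOGGING above
logging.basicConfig(
    format="%(asctime)s %(levelname)s %(name)s:%(lineno)s %(message)s",
    level=logging.INFO,
)
logging.getLogger("uploader.tasks").info("results file processed")
# e.g. 2022-06-01 10:30:00,123 INFO uploader.tasks:9 results file processed
```
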
diff --git a/dashboard_viewer/dashboard_viewer/urls.py b/dashboard_viewer/dashboard_viewer/urls.py
index 38b022f1..8315b5bf 100644
--- a/dashboard_viewer/dashboard_viewer/urls.py
+++ b/dashboard_viewer/dashboard_viewer/urls.py
@@ -39,7 +39,7 @@
path("martor/", include("martor.urls")),
path("uploader/", include("uploader.urls")),
re_path(
- fr'^{re.escape(settings.MEDIA_URL.lstrip("/"))}(?P<path>.*)$',
+ rf'^{re.escape(settings.MEDIA_URL.lstrip("/"))}(?P<path>.*)$',
serve,
kwargs={"document_root": settings.MEDIA_ROOT},
)
diff --git a/dashboard_viewer/docker-entrypoint.sh b/dashboard_viewer/docker-entrypoint.sh
index b20c7430..8f619da0 100755
--- a/dashboard_viewer/docker-entrypoint.sh
+++ b/dashboard_viewer/docker-entrypoint.sh
@@ -9,7 +9,7 @@ python manage.py compilescss
python manage.py collectstatic --noinput --ignore="*.scss"
if [ "${DASHBOARD_VIEWER_ENV}" = "production" ]; then
- exec gunicorn dashboard_viewer.wsgi:application --bind 0.0.0.0:8000 --workers 5
+ exec gunicorn dashboard_viewer.wsgi:application --bind 0.0.0.0:8000 --workers 1 --worker-class gthread --threads 16
else
python manage.py runserver 0.0.0.0:8000
fi
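
The production server drops from five sync workers to a single `gthread` worker with 16 threads, trading per-worker memory for shared in-process state. The same setup expressed as a gunicorn config file, an equivalent alternative rather than what the entrypoint actually uses:

```python
# hypothetical gunicorn.conf.py mirroring the CLI flags above
bind = "0.0.0.0:8000"
workers = 1
worker_class = "gthread"
threads = 16
```
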
diff --git a/dashboard_viewer/materialized_queries_manager/management/commands/refresh_mat_views.py b/dashboard_viewer/materialized_queries_manager/management/commands/refresh_mat_views.py
index 53ca5148..c3b497a6 100644
--- a/dashboard_viewer/materialized_queries_manager/management/commands/refresh_mat_views.py
+++ b/dashboard_viewer/materialized_queries_manager/management/commands/refresh_mat_views.py
@@ -2,6 +2,7 @@
from django.core.cache import cache
from django.core.management.base import BaseCommand
+
from materialized_queries_manager.utils import refresh
logger = logging.getLogger(__name__)
diff --git a/dashboard_viewer/materialized_queries_manager/tasks.py b/dashboard_viewer/materialized_queries_manager/tasks.py
index a8f5e937..aed85222 100644
--- a/dashboard_viewer/materialized_queries_manager/tasks.py
+++ b/dashboard_viewer/materialized_queries_manager/tasks.py
@@ -9,6 +9,7 @@
from django.core import serializers
from django.core.cache import cache
from django.db import connections, ProgrammingError, router, transaction
+
from materialized_queries_manager.models import MaterializedQuery
from materialized_queries_manager.utils import refresh
diff --git a/dashboard_viewer/materialized_queries_manager/utils.py b/dashboard_viewer/materialized_queries_manager/utils.py
index c8509d12..857ea4e7 100644
--- a/dashboard_viewer/materialized_queries_manager/utils.py
+++ b/dashboard_viewer/materialized_queries_manager/utils.py
@@ -1,8 +1,9 @@
from django.core.cache import cache
from django.db import connections
-from materialized_queries_manager.models import MaterializedQuery
from redis_rw_lock import RWLock
+from materialized_queries_manager.models import MaterializedQuery
+
def refresh(logger, db_id=None, query_set=None):
# Only one worker can update the materialized views at the same time -> same as -> only one thread
diff --git a/dashboard_viewer/requirements.in b/dashboard_viewer/requirements.in
index 4192af15..e8fd2b76 100644
--- a/dashboard_viewer/requirements.in
+++ b/dashboard_viewer/requirements.in
@@ -8,13 +8,13 @@ django-markdownify==0.9.0 # markdown to html
django-model-utils==4.2.0 # get specific type of subclasses after requesting buttons on tabsManager app
django-sass-processor==1.1 # automate scss devolopment
django-redis==5.1.0 # acess redis through a programmatic API
-django==3.2.10
+django==3.2.13
djangorestframework==3.13.1 # expose tabs content through an API
libsass==0.21.0 # to compile scss files into css
gunicorn==20.1.0 # for production deployment
martor==1.6.7 # markdown editor in admin app
pandas==1.3.5 # to handle achilles results files and their data
-Pillow==8.4.0 # image fields (App Logo)
+Pillow==9.0.1 # image fields (App Logo)
psycopg2-binary==2.9.2 # communicate with postgres
redis==3.5.3 # comunicate with redis (celery)
git+https://github.com/bioinformatics-ua/redis-rw-lock.git#egg=redis-rw-lock
diff --git a/dashboard_viewer/requirements.txt b/dashboard_viewer/requirements.txt
index ed0067b9..07a87d53 100644
--- a/dashboard_viewer/requirements.txt
+++ b/dashboard_viewer/requirements.txt
@@ -34,7 +34,7 @@ click-plugins==1.1.1
# via celery
click-repl==0.2.0
# via celery
-django==3.2.10
+django==3.2.13
# via
# -r requirements.in
# django-appconf
@@ -73,6 +73,10 @@ gunicorn==20.1.0
# via -r requirements.in
idna==3.3
# via requests
+importlib-metadata==4.11.4
+ # via
+ # markdown
+ # martor
kombu==5.2.2
# via celery
libsass==0.21.0
@@ -89,7 +93,7 @@ packaging==21.3
# via bleach
pandas==1.3.5
# via -r requirements.in
-pillow==8.4.0
+pillow==9.0.1
# via -r requirements.in
prompt-toolkit==3.0.24
# via click-repl
@@ -145,6 +149,8 @@ wcwidth==0.2.5
# via prompt-toolkit
webencodings==0.5.1
# via bleach
+zipp==3.8.0
+ # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools
diff --git a/dashboard_viewer/uploader/admin.py b/dashboard_viewer/uploader/admin.py
index 9a71fdc4..50056310 100644
--- a/dashboard_viewer/uploader/admin.py
+++ b/dashboard_viewer/uploader/admin.py
@@ -46,7 +46,8 @@ def has_change_permission(self, request, obj=None):
@admin.register(DataSource)
class DataSourceAdmin(admin.ModelAdmin):
- list_display = ("name", "acronym", "database_type", "country")
+ list_display = ("acronym", "name", "draft", "database_type", "country")
+ list_filter = ("draft",)
actions = [custom_delete_selected]
diff --git a/dashboard_viewer/uploader/file_handler/checks.py b/dashboard_viewer/uploader/file_handler/checks.py
index 42fe57f3..c19556da 100644
--- a/dashboard_viewer/uploader/file_handler/checks.py
+++ b/dashboard_viewer/uploader/file_handler/checks.py
@@ -9,6 +9,10 @@ class FileChecksException(Exception):
pass
+class InvalidCSVFile(FileChecksException):
+ pass
+
+
class InvalidFileFormat(FileChecksException):
pass
@@ -25,6 +29,78 @@ class MissingFieldValue(FileChecksException):
pass
+def _generate_file_reader(uploaded_file):
+ """
+ Receives a python file pointer and returns a pandas csv file reader, along with the columns
+ present in the file
+ :param uploaded_file: python file pointer of the uploaded file
+ """
+ columns = [
+ "analysis_id",
+ "stratum_1",
+ "stratum_2",
+ "stratum_3",
+ "stratum_4",
+ "stratum_5",
+ "count_value",
+ ]
+
+ wrapper = io.TextIOWrapper(uploaded_file)
+ csv_reader = csv.reader(wrapper)
+
+ try:
+ first_row = next(csv_reader)
+ except: # noqa
+ raise InvalidCSVFile(
+ "There was an error parsing the provided file. "
+ "Uploaded files must be comma-separated values (CSV) files. "
+ "If you think this is an error, please contact the system administrator."
+ )
+
+ wrapper.detach()
+
+ if len(first_row) == 16:
+ columns.extend(
+ (
+ "min_value",
+ "max_value",
+ "avg_value",
+ "stdev_value",
+ "median_value",
+ "p10_value",
+ "p25_value",
+ "p75_value",
+ "p90_value",
+ )
+ )
+ elif len(first_row) != 7:
+ raise InvalidFileFormat(
+ "The provided file has an invalid number of columns. "
+ "Make sure you uploaded a valid comma-separated values (CSV) file."
+ )
+
+ uploaded_file.seek(0)
+
+ try:
+ file_reader = pandas.read_csv(
+ uploaded_file,
+ header=0,
+ dtype=str,
+ skip_blank_lines=False,
+ index_col=False,
+ names=columns,
+ chunksize=100,
+ )
+ except: # noqa
+ raise InvalidCSVFile(
+ "There was an error parsing the provided file. "
+ "Uploaded files must be comma-separated values (CSV) files. "
+ "If you think this is an error, please contact the system administrator."
+ )
+ else:
+ return file_reader, columns
+
+
def _check_correct(names, values, check, transform=None):
"""
Transforms the values of given fields from the uploaded file
@@ -62,50 +138,7 @@ def _check_correct(names, values, check, transform=None):
def extract_data_from_uploaded_file(uploaded_file):
- columns = [
- "analysis_id",
- "stratum_1",
- "stratum_2",
- "stratum_3",
- "stratum_4",
- "stratum_5",
- "count_value",
- ]
-
- wrapper = io.TextIOWrapper(uploaded_file)
- csv_reader = csv.reader(wrapper)
-
- first_row = next(csv_reader)
- wrapper.detach()
-
- if len(first_row) == 16:
- columns.extend(
- (
- "min_value",
- "max_value",
- "avg_value",
- "stdev_value",
- "median_value",
- "p10_value",
- "p25_value",
- "p75_value",
- "p90_value",
- )
- )
- elif len(first_row) != 7:
- raise InvalidFileFormat("The provided file has an invalid number of columns.")
-
- uploaded_file.seek(0)
-
- file_reader = pandas.read_csv(
- uploaded_file,
- header=0,
- dtype=str,
- skip_blank_lines=False,
- index_col=False,
- names=columns,
- chunksize=100,
- )
+ file_reader, columns = _generate_file_reader(uploaded_file)
types = {
"analysis_id": numpy.int64,
@@ -144,6 +177,12 @@ def extract_data_from_uploaded_file(uploaded_file):
)
except StopIteration:
break
+ except: # noqa
+ raise InvalidCSVFile(
+ "There was an error parsing the provided file. "
+ "Uploaded files must be comma-separated values (CSV) files. "
+ "If you think this is an error, please contact the system administrator."
+ )
if chunk[["analysis_id", "count_value"]].isna().values.any():
raise InvalidFieldValue(
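
The net effect is that any parsing failure (while sniffing the header, inside `pandas.read_csv`, or while iterating chunks) now surfaces as `InvalidCSVFile` with a user-facing message instead of an unhandled exception. A minimal sketch, using an in-memory stand-in for Django's `UploadedFile`:

```python
import io

from uploader.file_handler.checks import InvalidCSVFile, extract_data_from_uploaded_file

fake_upload = io.BytesIO(b"\x00\x01 definitely not a CSV")  # hypothetical upload
try:
    extract_data_from_uploaded_file(fake_upload)
except InvalidCSVFile as exc:
    print(exc)  # "There was an error parsing the provided file. ..."
```
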
diff --git a/dashboard_viewer/uploader/file_handler/updates.py b/dashboard_viewer/uploader/file_handler/updates.py
index df0ed447..d14ba639 100644
--- a/dashboard_viewer/uploader/file_handler/updates.py
+++ b/dashboard_viewer/uploader/file_handler/updates.py
@@ -2,6 +2,7 @@
from django.conf import settings
from django.db import connections
from sqlalchemy import create_engine
+
from uploader.models import (
AchillesResults,
AchillesResultsArchive,
diff --git a/dashboard_viewer/uploader/tasks.py b/dashboard_viewer/uploader/tasks.py
index 87e5dd6a..8e8e898f 100644
--- a/dashboard_viewer/uploader/tasks.py
+++ b/dashboard_viewer/uploader/tasks.py
@@ -3,9 +3,9 @@
from django.core import serializers
from django.core.cache import cache
from django.db import router, transaction
-from materialized_queries_manager.utils import refresh
from redis_rw_lock import RWLock
+from materialized_queries_manager.utils import refresh
from .file_handler.checks import extract_data_from_uploaded_file
from .file_handler.updates import update_achilles_results_data
from .models import AchillesResults, PendingUpload, UploadHistory
diff --git a/dashboard_viewer/uploader/templates/no_uploads_dashboard.html b/dashboard_viewer/uploader/templates/no_uploads_dashboard.html
new file mode 100644
index 00000000..eed08266
--- /dev/null
+++ b/dashboard_viewer/uploader/templates/no_uploads_dashboard.html
@@ -0,0 +1,22 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="utf-8">
+    <title>Database Dashboard</title>
+</head>
+<body>
+    <h1>No data available: This database was not yet mapped into the CDM</h1>
+</body>
+</html>
\ No newline at end of file
diff --git a/dashboard_viewer/uploader/urls.py b/dashboard_viewer/uploader/urls.py
index 07dc9584..bf2236a1 100644
--- a/dashboard_viewer/uploader/urls.py
+++ b/dashboard_viewer/uploader/urls.py
@@ -18,4 +18,9 @@
"/upload//status/", views.get_upload_task_status
),
path("", views.create_data_source, name="create_data_source"),
+ path(
+ "/dashboard/",
+ views.data_source_dashboard,
+ name="data_source_dashboard",
+ ),
]
diff --git a/dashboard_viewer/uploader/views.py b/dashboard_viewer/uploader/views.py
index 78ea20f3..584eba88 100644
--- a/dashboard_viewer/uploader/views.py
+++ b/dashboard_viewer/uploader/views.py
@@ -2,20 +2,21 @@
import constance
from django.contrib import messages
+from django.db import router, transaction
from django.forms import fields
-from django.http import JsonResponse
+from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.html import format_html, mark_safe
from django.views.decorators.clickjacking import xframe_options_exempt
+from rest_framework import viewsets
from rest_framework.response import Response
-from rest_framework.viewsets import GenericViewSet
-from materialized_queries_manager.tasks import refresh_materialized_views_task
from materialized_queries_manager.models import MaterializedQuery
+from materialized_queries_manager.tasks import refresh_materialized_views_task
+from . import serializers
from .decorators import uploader_decorator
from .forms import AchillesResultsForm, EditSourceForm, SourceForm
from .models import Country, DataSource, PendingUpload, UploadHistory
-from .serializers import DataSourceSerializer
from .tasks import upload_results_file
PAGE_TITLE = "Dashboard Data Upload"
@@ -291,23 +292,61 @@ def edit_data_source(request, *_, **kwargs):
)
-class DataSourceUpdate(GenericViewSet):
- # since the edit and upload views have not authentication, also disable
+@uploader_decorator
+@xframe_options_exempt
+def data_source_dashboard(request, data_source):
+ try:
+ data_source = DataSource.objects.get(hash=data_source)
+ except DataSource.DoesNotExist:
+ return render(request, "no_uploads_dashboard.html")
+
+ if data_source.uploadhistory_set.exists():
+ config = constance.config
+ return HttpResponseRedirect(
+ f"{config.SUPERSET_HOST}/superset/dashboard/{config.DATABASE_DASHBOARD_IDENTIFIER}/"
+ "?standalone=1"
+ f'&preselect_filters={{"{config.DATABASE_FILTER_ID}":{{"acronym":["{data_source.acronym}"]}}}}'
+ )
+
+ return render(request, "no_uploads_dashboard.html")
+
+
+class DataSourceUpdate(viewsets.GenericViewSet):
+ # since the edit and upload views don't have authentication, also disable
# authentication from this
authentication_classes = ()
permission_classes = ()
lookup_field = "hash"
- serializer_class = DataSourceSerializer
+ serializer_class = serializers.DataSourceSerializer
queryset = DataSource.objects.all()
- def partial_update(self, request, *_, **__):
- instance = self.get_object()
- serializer = self.get_serializer(instance, data=request.data, partial=True)
- serializer.is_valid(raise_exception=True)
- serializer.save()
+ def get_object_for_patch(self):
+ # here we get the queryset with select_for_update so it locks updates on that record
+ queryset = self.filter_queryset(self.get_queryset().select_for_update())
+
+ assert (
+ self.lookup_field and not self.lookup_url_kwarg
+ ), "Expected lookup_field to be defined and not lookup_url_kwarg."
+
+ filter_kwargs = {self.lookup_field: self.kwargs[self.lookup_field]}
+ obj = get_object_or_404(queryset, **filter_kwargs)
- refresh_materialized_views_task.delay([obj.matviewname for obj in MaterializedQuery.objects.all()])
+ # May raise a permission denied
+ self.check_object_permissions(self.request, obj)
+
+ return obj
+
+ def partial_update(self, request, *_, **__):
+ with transaction.atomic(using=router.db_for_write(DataSource)):
+ instance = self.get_object_for_patch()
+ serializer = self.get_serializer(instance, data=request.data, partial=True)
+ serializer.is_valid(raise_exception=True)
+ serializer.save()
+
+ refresh_materialized_views_task.delay(
+ [obj.matviewname for obj in MaterializedQuery.objects.all()],
+ )
if getattr(instance, "_prefetched_objects_cache", None):
# If 'prefetch_related' has been applied to a queryset, we need to
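
For reference, with the default constance values above and a hypothetical data source whose acronym is `DEMO`, `data_source_dashboard` redirects to a URL of this shape (a sketch of the string the view builds, not additional repository code):

```python
superset_host = "https://superset.ehden.eu"  # config.SUPERSET_HOST
dashboard = "database-level-dashboard"       # config.DATABASE_DASHBOARD_IDENTIFIER
filter_id = 69                               # config.DATABASE_FILTER_ID
acronym = "DEMO"                             # hypothetical acronym

url = (
    f"{superset_host}/superset/dashboard/{dashboard}/"
    "?standalone=1"
    f'&preselect_filters={{"{filter_id}":{{"acronym":["{acronym}"]}}}}'
)
# -> https://superset.ehden.eu/superset/dashboard/database-level-dashboard/
#    ?standalone=1&preselect_filters={"69":{"acronym":["DEMO"]}}
```
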
diff --git a/dashboard_viewer/utils/__init__.py b/dashboard_viewer/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/dashboard_viewer/utils/fix_dates.py b/dashboard_viewer/utils/fix_dates.py
new file mode 100644
index 00000000..94790fa0
--- /dev/null
+++ b/dashboard_viewer/utils/fix_dates.py
@@ -0,0 +1,140 @@
+import datetime
+import re
+
+from django.db import transaction
+
+from uploader.models import AchillesResults
+
+MONTHS = dict(
+ map(
+ lambda t: (t[1], f"{t[0] + 1:02}"),
+ enumerate(
+ (
+ "JAN",
+ "FEV",
+ "MAR",
+ "APR",
+ "MAY",
+ "JUN",
+ "JUL",
+ "AUG",
+ "SEP",
+ "OCT",
+ "NOV",
+ "DEC",
+ )
+ ),
+ )
+)
+
+with_month = re.compile(r"(\d{2})-([A-Z]{3})-(\d{2})") # 02-MAY-20
+
+
+def with_month_extractor(match: re.Match):
+ day, month, year = match.groups()
+ return f"20{year}", MONTHS[month], day
+
+
+with_hiphen = re.compile(r"(\d{4})-(\d{2})-(\d{2})") # 2020-12-02
+
+
+def with_hiphen_extractor(match: re.Match):
+ return match.groups()
+
+
+no_hiphen = re.compile(r"(\d{4})(\d{2})(\d{2})") # 20201202
+no_hiphen_extractor = with_hiphen_extractor
+
+no_hiphen_float = re.compile(r"(\d{4})(\d{2})(\d{2})\.\d") # 20201202.0
+no_hiphen_float_extractor = with_hiphen_extractor
+
+with_slash_one_digit = re.compile(
+ r"(\d{1,2})/(\d{1,2})/(\d{4})"
+) # 1/31/2020 or 12/1/2020 or 1/1/2020 or 12/31/2020
+
+
+def with_slash_one_digit_extractor(match: re.Match):
+ month, day, year = match.groups()
+ return year, f"{int(month):02}", f"{int(day):02}"
+
+
+with_slash_two_digits = re.compile(
+ r"(\d{2})/(\d{2})/(\d{2,4})"
+) # 31/01/20 or 31/01/2020
+
+
+def with_slash_two_digits_extractor(match: re.Match):
+ day, month, year = match.groups()
+ return ("" if len(year) == 4 else "20") + year, month, day
+
+
+PATTERNS = (
+ (with_month, with_month_extractor),
+ (with_hiphen, with_hiphen_extractor),
+ (no_hiphen, no_hiphen_extractor),
+ (no_hiphen_float, no_hiphen_float_extractor),
+ (
+ with_slash_two_digits,
+ with_slash_two_digits_extractor,
+ ), # try first with two digits then with one
+ (with_slash_one_digit, with_slash_one_digit_extractor),
+)
+
+
+def _convert_value(valid_pattern: re.Pattern, value: str):
+ for pattern, extractor in PATTERNS:
+ if pattern == valid_pattern:
+ continue
+ match = pattern.fullmatch(value)
+ if match:
+ date_tuple = extractor(match)
+ try:
+ datetime.date(*map(int, date_tuple))
+ except ValueError:
+ continue
+ return date_tuple
+ if valid_pattern.fullmatch(value):
+ return None
+ # print(f"unexpected date format -{value}-")
+ # return None
+ raise Exception(f"unexpected date format -{value}-")
+
+
+@transaction.atomic
+def apply_changes():
+ # fix generation_date
+ for ar in AchillesResults.objects.filter(analysis_id=0):
+ # upload = ar.data_source.uploadhistory_set.latest()
+ # print("upload", upload.pk)
+ if (
+ ar.stratum_3
+ and (new_date := _convert_value(with_hiphen, ar.stratum_3)) is not None
+ ):
+ year, month, day = new_date
+ # old = ar.stratum_3
+ ar.stratum_3 = f"{year}-{month}-{day}"
+ # print("from", old, "to", ar.stratum_3)
+ ar.save()
+ for ar in AchillesResults.objects.filter(analysis_id=5000):
+ # upload = ar.data_source.uploadhistory_set.latest()
+ # print("upload", upload.pk)
+ # fix source release date
+ if (
+ ar.stratum_2
+ and (new_date := _convert_value(with_hiphen, ar.stratum_2)) is not None
+ ):
+ year, month, day = new_date
+ # old = ar.stratum_2
+ ar.stratum_2 = f"{year}-{month}-{day}"
+ # print("source release from", old, "to", ar.stratum_2)
+ ar.save()
+ # fix cdm release date
+ if (
+ ar.stratum_3
+ and (new_date := _convert_value(with_hiphen, ar.stratum_3)) is not None
+ ):
+ year, month, day = new_date
+ # old = ar.stratum_3
+ ar.stratum_3 = f"{year}-{month}-{day}"
+ # print("cdm release from", old, "to", ar.stratum_3)
+ ar.save()
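
A few illustrative calls (`_convert_value` returns the `(year, month, day)` tuple that `apply_changes` joins with hyphens, or `None` when the value already matches the target format):

```python
from utils.fix_dates import _convert_value, with_hiphen

print(_convert_value(with_hiphen, "02-MAY-20"))   # ('2020', '05', '02')
print(_convert_value(with_hiphen, "20201202.0"))  # ('2020', '12', '02')
print(_convert_value(with_hiphen, "2020-12-02"))  # None -- already normalised
```
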
diff --git a/dashboard_viewer/utils/get_uploads_information.py b/dashboard_viewer/utils/get_uploads_information.py
new file mode 100644
index 00000000..0d48ed7e
--- /dev/null
+++ b/dashboard_viewer/utils/get_uploads_information.py
@@ -0,0 +1,10 @@
+from uploader.models import DataSource, UploadHistory
+
+for db in DataSource.objects.all():
+ print(f"{db.acronym},{db.name},{db.hash}", end="")
+ try:
+ last_upload = db.uploadhistory_set.latest()
+ except UploadHistory.DoesNotExist:
+ print()
+ else:
+ print(f",{last_upload.upload_date},{last_upload.generation_date}")
diff --git a/docker/.env-example b/docker/.env-example
index 73dbc6c3..29c5de64 100644
--- a/docker/.env-example
+++ b/docker/.env-example
@@ -18,14 +18,15 @@ SUPERSET_MAPBOX_API_KEY=...
POSTGRES_DASHBOARD_VIEWER_USER=cdm
POSTGRES_DASHBOARD_VIEWER_PASSWORD=cdm
POSTGRES_DASHBOARD_VIEWER_DB=cdm
-# vv CHANGE VARIABLE BELOW vv
-DASHBOARD_VIEWER_SECRET_KEY=secret
-
POSTGRES_ACHILLES_USER=achilles
POSTGRES_ACHILLES_PASSWORD=achilles
POSTGRES_ACHILLES_DB=achilles
+# vv CHANGE VARIABLES BELOW vv
+DASHBOARD_VIEWER_SECRET_KEY=secret
+SUPERSET_SECRET_KEY=secret
+
VOLUMES_PATH=./volumes
#production or development
diff --git a/docker/docker-compose-dev.yml b/docker/docker-compose-dev.yml
index dd998383..baea5c2c 100644
--- a/docker/docker-compose-dev.yml
+++ b/docker/docker-compose-dev.yml
@@ -22,6 +22,7 @@ x-superset-environment: &superset-environment
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_RESULTS_DB: 3
+ SECRET_KEY: $SUPERSET_SECRET_KEY
SUPERSET_ENV: production
x-dashboard-build: &dashboard-build
@@ -43,7 +44,7 @@ x-dashboard-environment: &dashboard-environment
REDIS_CELERY_DB: 1
REDIS_CONSTANCE_DB: 2
SECRET_KEY: ${DASHBOARD_VIEWER_SECRET_KEY}
- DASHBOARD_VIEWER_ENV: ${INSTALLATION_ENV}
+ DASHBOARD_VIEWER_ENV: development
version: "3.7"
services:
@@ -110,11 +111,12 @@ services:
dashboard_worker:
build: *dashboard-build
environment: *dashboard-environment
- command: celery -A dashboard_viewer worker -Ofair -l INFO
+ command: celery -A dashboard_viewer worker -Ofair -l DEBUG
restart: unless-stopped
depends_on: *depends-on
volumes:
- ../dashboard_viewer/media:/app/media
+ - ../dashboard_viewer:/app
dashboard:
build: *dashboard-build
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 655fc32b..71686796 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -19,6 +19,7 @@ x-superset-environment: &superset-environment
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_RESULTS_DB: 3
+ SECRET_KEY: $SUPERSET_SECRET_KEY
SUPERSET_ENV: production
x-dashboard-environment: &dashboard-environment
diff --git a/docs/02-installation.md b/docs/02-installation.md
index ec3c7548..9f074de1 100644
--- a/docs/02-installation.md
+++ b/docs/02-installation.md
@@ -12,9 +12,9 @@ Currently, we use docker to deploy our environment
1. Clone the repository with the command `git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards`. If you already cloned the repository without the `--recurse-submodules` option, run `git submodule update --init` to fetch the superset submodule.
-2. Create a `.env` file on the `docker` directory, using `.env-example` as a reference, setting all necessary environment variables (`SUPERSET\_MAPBOX\_API\_KEY` and `DASHBOARD\_VIEWER\_SECRET\_KEY`).
+2. Create a `.env` file on the `docker` directory, using `.env-example` as a reference, setting all necessary environment variables (`SUPERSET_MAPBOX_API_KEY` and `DASHBOARD_VIEWER_SECRET_KEY`).
- 2.1 If you will use this application as a third-party application and will iframe it, set the variable `SINGLE\_APPLICATION\_MODE` to `False` and define the host of the main application on the variable `MAIN\_APPLICATION\_HOST`. Also make sure to add this last host to the list of `ALLOWED\_HOSTS`.
+ 2.1 If you will use this application as a third-party application and will iframe it, set the variable `SINGLE_APPLICATION_MODE` to `False` and define the host of the main application on the variable `MAIN_APPLICATION_HOST`. Also make sure to add this last host to the list of `ALLOWED_HOSTS`.
### Dashboard Viewer setup {-}
@@ -105,7 +105,7 @@ The concepts table is not in the repository due to its dimension, therefore we u
4. By default Superset's admin user credentials are admin/admin.
It is recommended that you change the password if you will use this in a production environment.
-5. To any anonymous user view dashboards, add the following:
+5. To allow any anonymous user to view dashboards, add the following permissions to the public role:
- all datasource access on all_datasource_access
- can csrf token on Superset
@@ -115,6 +115,8 @@ The concepts table is not in the repository due to its dimension, therefore we u
- can read on CssTemplate
- can read on Dashboard
+6. For each dashboard you want anonymous users to be able to access, on the dashboard list page click edit (the pencil on the right) and add the "Admin" and "Public" roles to the "Roles with access" field.
+
### Dummy data {-}
On a fresh installation, there are no achilles_results data, so Superset's dashboards will display "No results". On the root of this repository, you can find the `demo` directory where we have an ACHILLES results file with synthetic data that you can upload to a data source on the uploader app of the dashboard viewer (http://localhost/uploader). If you wish to compare multiple data sources, the `demo` directory also contains a Python script that generates new ACHILLES results files, producing random count values based on the ranges observed for each set of analysis_id and stratum values in a base ACHILLES results file. So, from the one ACHILLES results file we provided, you can create multiple data sources with different data.
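
The gist of that generator script (a simplified illustration, not the actual code in `demo/`) is to jitter each base count so every generated file looks like a distinct data source:

```python
import numpy as np
import pandas as pd

# simplified illustration only; the real script works per
# (analysis_id, stratum) group rather than per row
base = pd.read_csv("achilles_results.csv")  # hypothetical base file
factors = np.random.uniform(0.8, 1.2, size=len(base))
base["count_value"] = (base["count_value"] * factors).round().astype(int)
base.to_csv("achilles_results_variant.csv", index=False)
```
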
diff --git a/docs/03-general.md b/docs/03-general.md
index 248d83f2..edb6db3d 100644
--- a/docs/03-general.md
+++ b/docs/03-general.md
@@ -40,7 +40,8 @@ SELECT source.name,
source.database_type,
source.acronym
FROM public.data_source AS source
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
```
#### Chart settings {-}
@@ -110,8 +111,10 @@ SELECT source.name AS source,
concepts.concept_name AS gender,
achilles.count_value as count
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
JOIN (
SELECT '8507' AS concept_id, 'Male' AS concept_name
UNION
@@ -147,15 +150,17 @@ WHERE analysis_id = 2
(\#fig:patientsPerCountry)Settings for creating the Patients per Country chart
-#### SQL query {#patientsPerCountryQuery} {-}
+#### SQL query {-#patientsPerCountryQuery}
```sql
SELECT country.country,
source.database_type,
count_value
-FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.country AS country ON source.country_id=country.id
+FROM public.achilles_results AS achilles
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
WHERE analysis_id = 1
```
@@ -221,7 +226,8 @@ SELECT name,
longitude,
country
FROM public.data_source AS source
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
```
#### Chart settings {-}
@@ -257,7 +263,8 @@ SELECT
stratum_4 as "cdm_version",
stratum_5 as "vocabulary_version"
FROM achilles_results
-JOIN data_source ON achilles_results.data_source_id = data_source.id
+JOIN data_source
+ ON achilles_results.data_source_id = data_source.id
JOIN country ON data_source.country_id = country.id
WHERE analysis_id=5000
```
diff --git a/docs/04-person.md b/docs/04-person.md
index 59058e78..6ba33981 100644
--- a/docs/04-person.md
+++ b/docs/04-person.md
@@ -69,19 +69,39 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
```sql
SELECT source.name,
source.acronym,
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10 THEN count_value END) AS "0-10",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10 AND CAST(stratum_2 AS INTEGER) < 20 THEN count_value END) AS "10-20",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20 AND CAST(stratum_2 AS INTEGER) < 30 THEN count_value END) AS "20-30",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30 AND CAST(stratum_2 AS INTEGER) < 40 THEN count_value END) AS "30-40",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40 AND CAST(stratum_2 AS INTEGER) < 50 THEN count_value END) AS "40-50",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50 AND CAST(stratum_2 AS INTEGER) < 60 THEN count_value END) AS "50-60",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60 AND CAST(stratum_2 AS INTEGER) < 70 THEN count_value END) AS "60-70",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70 AND CAST(stratum_2 AS INTEGER) < 80 THEN count_value END) AS "70-80",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80 AND CAST(stratum_2 AS INTEGER) < 90 THEN count_value END) AS "80-90",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90 THEN count_value END) AS "90+"
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10
+ THEN count_value END) AS "0-10",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10
+ AND CAST(stratum_2 AS INTEGER) < 20
+ THEN count_value END) AS "10-20",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20
+ AND CAST(stratum_2 AS INTEGER) < 30
+ THEN count_value END) AS "20-30",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30
+ AND CAST(stratum_2 AS INTEGER) < 40
+ THEN count_value END) AS "30-40",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40
+ AND CAST(stratum_2 AS INTEGER) < 50
+ THEN count_value END) AS "40-50",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50
+ AND CAST(stratum_2 AS INTEGER) < 60
+ THEN count_value END) AS "50-60",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60
+ AND CAST(stratum_2 AS INTEGER) < 70
+ THEN count_value END) AS "60-70",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70
+ AND CAST(stratum_2 AS INTEGER) < 80
+ THEN count_value END) AS "70-80",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80
+ AND CAST(stratum_2 AS INTEGER) < 90
+ THEN count_value END) AS "80-90",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90
+ THEN count_value END) AS "90+"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 102
GROUP BY name, acronym
```
@@ -115,7 +135,8 @@ SELECT source.name,
count_value AS count,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 101
```
@@ -154,7 +175,8 @@ SELECT source.name,
stratum_1 AS "Birth_year",
count_value AS count
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 3
```
@@ -195,7 +217,8 @@ SELECT source.name,
count_value AS Number_of_persons,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
JOIN (
SELECT '8507' AS concept_id, 'Male' AS concept_name
UNION
diff --git a/docs/05-observation_period.md b/docs/05-observation_period.md
index f3273d5a..598d54a7 100644
--- a/docs/05-observation_period.md
+++ b/docs/05-observation_period.md
@@ -47,7 +47,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Number of Patients in Observation Period {#numInObservationPeriod} {-}
+### Number of Patients in Observation Period {-#numInObservationPeriod}
The Number of Patients in Observation Period plot shows the number of patients that contribute at least one day in a specific month.
@@ -64,7 +64,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') as Date,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 110
```
@@ -103,7 +104,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') AS year_month,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 111
```
@@ -142,7 +144,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') AS year_month,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 112
```
diff --git a/docs/06-visit.md b/docs/06-visit.md
index 589e3e7e..4408015c 100644
--- a/docs/06-visit.md
+++ b/docs/06-visit.md
@@ -49,7 +49,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Visit Type Table {#visitTypeTable} {-}
+### Visit Type Table {-#visitTypeTable}
@@ -64,8 +64,10 @@ SELECT source.name,
concept_name AS "Type",
MAX(count_value) AS "Count"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 201
GROUP BY name, acronym, "Type"
ORDER BY "Count" DESC
@@ -97,8 +99,10 @@ SELECT source.name,
concept_name AS "Observation",
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 201
```
diff --git a/docs/07-death.md b/docs/07-death.md
index 02555010..06a0ff45 100644
--- a/docs/07-death.md
+++ b/docs/07-death.md
@@ -61,7 +61,8 @@ SELECT source.name,
count_value,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 501
```
@@ -97,7 +98,8 @@ SELECT source.name,
EXTRACT(year FROM TO_DATE(stratum_1, 'YYYYMM')) AS Date,
count_value
FROM public.achilles_results as achilles
-INNER JOIN public.data_source as source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source as source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 502
```
diff --git a/docs/08-concepts_browser.md b/docs/08-concepts_browser.md
index 6f794ae3..04183f5a 100644
--- a/docs/08-concepts_browser.md
+++ b/docs/08-concepts_browser.md
@@ -37,10 +37,15 @@ SELECT concept_name,
domain_id,
source.name AS source_name,
source.acronym
-FROM achilles_results
-JOIN concept ON cast(stratum_1 AS BIGINT) = concept_id
-INNER JOIN public.data_source AS source ON data_source_id=source.id
-WHERE analysis_id in (201, 401, 601, 701, 801, 901, 1001, 1801, 200, 400, 600, 700, 800, 1800)
+FROM achilles_results JOIN concept
+ ON cast(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON data_source_id=source.id
+WHERE
+ analysis_id in (
+ 201, 401, 601, 701, 801, 901, 1001, 1801,
+ 200, 400, 600, 700, 800, 1800
+ )
```
#### Chart settings {-}
@@ -98,21 +103,33 @@ SELECT
sum(q1.count_value) as "Occurrence_count",
sum(q1.count_person) as "Person_count",
CASE
- WHEN sum(q1.count_value)<=10 THEN '<=10'
- WHEN sum(q1.count_value)<=100 THEN '11-10ˆ2'
- WHEN sum(q1.count_value)<=1000 THEN '10ˆ2-10ˆ3'
- WHEN sum(q1.count_value)<=10000 THEN '10ˆ3-10ˆ4'
- WHEN sum(q1.count_value)<=100000 THEN '10ˆ4-10ˆ5'
- WHEN sum(q1.count_value)<=1000000 THEN '10ˆ5-10ˆ6'
+ WHEN sum(q1.count_value)<=10
+ THEN '<=10'
+ WHEN sum(q1.count_value)<=100
+ THEN '11-10ˆ2'
+ WHEN sum(q1.count_value)<=1000
+ THEN '10ˆ2-10ˆ3'
+ WHEN sum(q1.count_value)<=10000
+ THEN '10ˆ3-10ˆ4'
+ WHEN sum(q1.count_value)<=100000
+ THEN '10ˆ4-10ˆ5'
+ WHEN sum(q1.count_value)<=1000000
+ THEN '10ˆ5-10ˆ6'
ELSE '>10ˆ6'
END as "magnitude_occurrences",
CASE
- WHEN sum(q1.count_person)<=10 THEN '<=10'
- WHEN sum(q1.count_person)<=100 THEN '11-10ˆ2'
- WHEN sum(q1.count_person)<=1000 THEN '10ˆ2-10ˆ3'
- WHEN sum(q1.count_person)<=10000 THEN '10ˆ3-10ˆ4'
- WHEN sum(q1.count_person)<=100000 THEN '10ˆ4-10ˆ5'
- WHEN sum(q1.count_person)<=1000000 THEN '10ˆ5-10ˆ6'
+ WHEN sum(q1.count_person)<=10
+ THEN '<=10'
+ WHEN sum(q1.count_person)<=100
+ THEN '11-10ˆ2'
+ WHEN sum(q1.count_person)<=1000
+ THEN '10ˆ2-10ˆ3'
+ WHEN sum(q1.count_person)<=10000
+ THEN '10ˆ3-10ˆ4'
+ WHEN sum(q1.count_person)<=100000
+ THEN '10ˆ4-10ˆ5'
+ WHEN sum(q1.count_person)<=1000000
+ THEN '10ˆ5-10ˆ6'
ELSE '>10ˆ6'
END AS "magnitude_persons"
FROM (SELECT analysis_id,
@@ -122,8 +139,13 @@ FROM (SELECT analysis_id,
domain_id,
count_value, 0 as count_person
FROM achilles_results
- JOIN concept ON cast(stratum_1 AS BIGINT)=concept_id
- WHERE analysis_id in (201, 301, 401, 601, 701, 801, 901, 1001, 1801)
+ JOIN concept
+ ON cast(stratum_1 AS BIGINT)=concept_id
+ WHERE
+ analysis_id in (
+ 201, 301, 401, 601, 701, 801, 901, 1001,
+ 1801
+ )
UNION (SELECT analysis_id,
stratum_1 concept_id,
data_source_id,
@@ -132,11 +154,27 @@ FROM (SELECT analysis_id,
0 as count_value,
sum(count_value) as count_person
FROM achilles_results
- JOIN concept on cast(stratum_1 as BIGINT)=concept_id
- WHERE analysis_id in (202, 401, 601, 701, 801, 901, 1001, 1801)
- GROUP BY analysis_id,stratum_1,data_source_id,concept_name,domain_id) ) as q1
- INNER JOIN public.data_source AS source ON q1.data_source_id=source.id
-GROUP BY q1.concept_id,q1.concept_name,q1.domain_id,source.name, acronym
+ JOIN concept
+ ON cast(stratum_1 as BIGINT)=concept_id
+ WHERE
+ analysis_id in (
+ 202, 401, 601, 701, 801, 901, 1001, 1801
+ )
+ GROUP BY
+ analysis_id,
+ stratum_1,
+ data_source_id,
+ concept_name,
+ domain_id
+ )) as q1
+ INNER JOIN public.data_source AS source
+ ON q1.data_source_id=source.id
+GROUP BY
+ q1.concept_id,
+ q1.concept_name,
+ q1.domain_id,
+ source.name,
+ acronym
ORDER BY "Person_count" desc
```
diff --git a/docs/09-provenance.md b/docs/09-provenance.md
index 76123c50..ab009bc1 100644
--- a/docs/09-provenance.md
+++ b/docs/09-provenance.md
@@ -61,18 +61,26 @@ All 6 charts use the same sql query.
```sql
SELECT source.name,
source.acronym,
- CASE WHEN analysis_id = 405 THEN 'Condition'
- WHEN analysis_id = 605 THEN 'Procedure'
- WHEN analysis_id = 705 THEN 'Drug'
- WHEN analysis_id = 805 THEN 'Observation'
- WHEN analysis_id = 1805 THEN 'Measurement'
- WHEN analysis_id = 2105 THEN 'Device'
+ CASE WHEN analysis_id = 405
+ THEN 'Condition'
+ WHEN analysis_id = 605
+ THEN 'Procedure'
+ WHEN analysis_id = 705
+ THEN 'Drug'
+ WHEN analysis_id = 805
+ THEN 'Observation'
+ WHEN analysis_id = 1805
+ THEN 'Measurement'
+ WHEN analysis_id = 2105
+ THEN 'Device'
ELSE 'Other' END AS domain_name,
concept_name,
SUM(count_value) AS num_records
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept AS c1 ON CAST(stratum_2 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept AS c1
+ ON CAST(stratum_2 AS BIGINT) = concept_id
WHERE analysis_id IN (405,605,705,805,1805,2105)
GROUP BY source.name, source.acronym, concept_name,
CASE WHEN analysis_id = 405 THEN 'Condition'
diff --git a/docs/10-data_domains.md b/docs/10-data_domains.md
index ec38a952..65daf265 100644
--- a/docs/10-data_domains.md
+++ b/docs/10-data_domains.md
@@ -47,7 +47,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Average Number of Records per Person {#avgRecordsPerPerson} {-}
+### Average Number of Records per Person {-#avgRecordsPerPerson}
@@ -61,25 +61,41 @@ SELECT
source.name,
source.acronym,
CASE
- WHEN analysis_id = 201 THEN 'Visit'
- WHEN analysis_id = 401 THEN 'Condition'
- WHEN analysis_id = 501 THEN 'Death'
- WHEN analysis_id = 601 THEN 'Procedure'
- WHEN analysis_id = 701 THEN 'Drug Exposure'
- WHEN analysis_id = 801 THEN 'Observation'
- WHEN analysis_id = 1801 THEN 'Measurement'
- WHEN analysis_id = 2101 THEN 'Device'
- WHEN analysis_id = 2201 THEN 'Note'
+ WHEN analysis_id = 201
+ THEN 'Visit'
+ WHEN analysis_id = 401
+ THEN 'Condition'
+ WHEN analysis_id = 501
+ THEN 'Death'
+ WHEN analysis_id = 601
+ THEN 'Procedure'
+ WHEN analysis_id = 701
+ THEN 'Drug Exposure'
+ WHEN analysis_id = 801
+ THEN 'Observation'
+ WHEN analysis_id = 1801
+ THEN 'Measurement'
+ WHEN analysis_id = 2101
+ THEN 'Device'
+ WHEN analysis_id = 2201
+ THEN 'Note'
END AS Data_Domain,
- SUM(count_value) /AVG(num_persons) AS "records_per_person"
+ SUM(count_value) /AVG(num_persons)
+ AS "records_per_person"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
INNER JOIN (
SELECT data_source_id , count_value as num_persons
FROM achilles_results
- WHERE analysis_id = 1) counts ON achilles.data_source_id = counts.data_source_id
+ WHERE analysis_id = 1) counts
+ ON achilles.data_source_id = counts.data_source_id
GROUP BY analysis_id, source.name, source.acronym
-HAVING analysis_id IN (201, 401, 501, 601, 701, 801, 1801, 2101, 2201)
+HAVING
+ analysis_id IN (
+ 201, 401, 501, 601, 701, 801, 1801, 2101,
+ 2201
+ )
```
#### Chart settings {-}
diff --git a/docs/404.html b/docs/404.html
index 53be2143..e2042a2d 100644
--- a/docs/404.html
+++ b/docs/404.html
(The hunks that followed here are regenerated bookdown HTML for docs/404.html and docs/backups.html whose markup was lost in extraction. Their substantive changes mirror the docs/backups.md diff below: the configuration file moves from $HOME/.dashboards_backups.conf to backups/backups.conf, and new "Restore" and "Useful stuff" sections are added.)
diff --git a/docs/backups.md b/docs/backups.md
index 82c6c507..94914c9f 100644
--- a/docs/backups.md
+++ b/docs/backups.md
@@ -2,7 +2,7 @@
1. Create a credentials file (the structure of the file depends on the target cloud server)
-2. Create a `.dashboards_backups.conf` file under your home directory (variable `$HOME`) using `dashboards_backups.conf.example` as base, setting the appropriate value for the several variables.
+2. Create a `backups.conf` file under the `backups` directory, using `backups.conf.example` as a base and setting appropriate values for the variables.
For variables associated with files and directories always use *absolute* paths.
@@ -50,56 +50,16 @@
2. Add entry `0 3 * * * $HOME/NetworkDashboards/backups/backup.sh` (The path to the backup script might be different)
-## Restore
+### Restore {-}
-1. Select the compressed backup you want to restore and decompress it:
-
- `tar -xJf BACKUP_FILE.tar.xz`.
+1. Select the compressed backup you want to restore.
-2. 1. **Redis**
-
- 1. Make sure the redis docker container is down.
-
- 2. (Re)place the file `dump.rdb` on the redis volume by the file `redis.rdb`. By default the redis volume is located where this repository was cloned on the directory `docker/volumes/redis`.
-
- 3. Change its permissions, owner and group:
-
- ```shell
- chmod 0644 docker/volumes/redis/dump.rdb
- sudo chown -R 999:999 docker/volumes/redis
- ```
-
- 2. **Postgres**
-
- 1. Make sure all containers that make changes on the database are stopped.
-
- 2. Copy the file `postgres_backup.sql` into the postgres container
-
- `docker cp postgres.sql [CONTAINER_ID]:/tmp`.
-
- 5. Execute the backup script:
-
- `docker exec -u root dashboard_viewer_postgres_1 psql -f /tmp/postgres_backup.sql -U \$POSTGRES_USER -d \$POSTGRES_DB`.
-
- 3. **Media Files** If you have a volume pointing to where the media files are stored, replace all files with the ones present on the downloaded backup file. Else:
-
- 1. Bring the dashoard container up `docker-compose up -d dashboard`
-
- 2. Enter in the container `docker exec -it [CONTAINER_ID] bash`
-
- 3. If you don't know where the media files are stored you can check the value of the MEDIA_ROOT variable
-
- 1. `python manage.py shell`
-
- 2. `from django.conf import settings`
-
- 3. `print(settings.MEDIA_ROOT)`
-
- 4. Remove the entire MEDIA_ROOT directory and exit the container
-
- 5. Copy the media directory present on the backup file to the catalogue container `docker cp -a collected-media [CONTAINER_ID]:[MEDIA_ROOT_PARENT_PATH]`
+2. Make sure that all the environment variables are the same as the ones that were used for the chosen backup file.
+ Additionally, the `backups.conf` file must also be set up, since its `TMP_DIRECTORY` variable is used during the restore.
+
+3. Run the `backups/restore.sh` script.
-## Useful stuff
+### Useful stuff {-}
- How to create a shared link to a dropbox directory using its python's API:
@@ -113,8 +73,14 @@
# create a shared link for a directory
from dropbox.sharing import SharedLinkSettings
- sharing_settings = SharedLinkSettings(require_password=True, link_password=DIRECTORY_PASSWORD)
- d.sharing_create_shared_link_with_settings(DIRECTORY_PATH, sharing_settings)
+ sharing_settings = SharedLinkSettings(
+ require_password=True,
+ link_password=DIRECTORY_PASSWORD,
+ )
+ d.sharing_create_shared_link_with_settings(
+ DIRECTORY_PATH,
+ sharing_settings,
+ )
# get all links
for link in d.sharing_get_shared_links().links:
diff --git a/docs/code-documentation.html b/docs/code-documentation.html
index 2144ec5b..92095ee4 100644
--- a/docs/code-documentation.html
+++ b/docs/code-documentation.html
@@ -24,7 +24,7 @@
-
+
@@ -162,8 +162,8 @@
Currently, this app is not being used and the URL mapping was deleted.
To use it again uncomment the tabsManager line on the dashboard_viewer/dashboard_viewer/urls.py file.
-Then you can access the tabs page through the [BASE_RUL]/tabs/ URL.
+Then you can access the tabs page through the [BASE_URL]/tabs/ URL.
Views
diff --git a/docs/code-documentation.md b/docs/code-documentation.md
index 896a05c4..19fb782f 100644
--- a/docs/code-documentation.md
+++ b/docs/code-documentation.md
@@ -42,7 +42,7 @@ Once again to avoid timeouts, such operations are executed on a background task.
Currently, this app is not being used and the URL mapping was deleted.
To use it again uncomment the tabsManager [line](https://github.com/EHDEN/NetworkDashboards/blob/master/dashboard_viewer/dashboard_viewer/urls.py#L29) on the dashboard_viewer/dashboard_viewer/urls.py file.
-Then you can access the tabs page through the `[BASE_RUL]/tabs/` URL.
+Then you can access the tabs page through the `[BASE_URL]/tabs/` URL.
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -346,14 +346,19 @@
Data Source and Domain Filters
For the filters to work the name of the fields to filter should match in all tables used on the charts of this dashboard.
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -374,8 +374,8 @@
Chart settings
-
-Average Number of Records per Person {#avgRecordsPerPerson}
+
+Average Number of Records per Person
@@ -384,29 +384,45 @@
Average Number of Records per Person {#avgRecordsPerPerson}
In order to obtain the colors blue and rose in the chart representing the gender distribution,
add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page:
To hide the dashboard header insert the following css code to the CSS field on the edit page:
-
-/* hides the filter badges on right side of charts */
-.dashboard-filter-indicators-container {
-    display: none;
-}
-
-/* hides the acronym filter */
-.grid-content>.dragdroppable.dragdroppable-row>.with-popover-menu {
-    display: none;
-}
-
-/*
- * WARNING panel 1 id hardcoded
- * Hides the X Axis Label of the heatmap on the Data Domains tab
- */
-#TABS-nlIU6H5mcT-pane-1 g.x.axis> g.tick text {
-    display: none;
-}
-
-/*
- * WARNING panel 2 id hardcoded
- * Hides the X Axis Labels of the bar charts on the Data Provenance tab
- */
-#TABS-nlIU6H5mcT-pane-2 g.nv-x.nv-axis.nvd3-svg> g.nvd3.nv-wrap.nv-axis> g > g.tick.zero> text {
-    display: none;
-}
+
+/* hides the filter badges on right side of charts */
+.dashboard-filter-indicators-container {
+    display: none;
+}
+
+/* hides the acronym filter */
+.grid-content>.dragdroppable.dragdroppable-row>.with-popover-menu {
+    display: none;
+}
+
+/*
+ * WARNING panel 1 id hardcoded
+ * Hides the X Axis Label of the heatmap on the Data Domains tab
+ */
+#TABS-nlIU6H5mcT-pane-1 g.x.axis> g.tick text {
+    display: none;
+}
+
+/*
+ * WARNING panel 2 id hardcoded
+ * Hides the X Axis Labels of the bar charts on the Data Provenance tab
+ */
+#TABS-nlIU6H5mcT-pane-2 g.nv-x.nv-axis.nvd3-svg> g.nvd3.nv-wrap.nv-axis> g > g.tick.zero> text {
+    display: none;
+}
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
-Currently, we have a custom chart plugin on our superset installation which doesn’t allow us to use superset’s pre-built images available on their docker hub, since we have to call npm’s build procedures on the front-end code.
-To build our custom docker image we used superset’s Dockerfile as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
-Also, to make Superset import our custom chart plugins, some changes have to be made to the superset-frontend/src/visualizations/presets/MainPreset.js file.
-The changes made to the Dockerfile to install the chart plugins are in this area:
+Currently, we have made some modifications to the box plot visualization on our superset installation, which doesn’t allow us to use superset’s pre-built images available on their docker hub, since we have to call npm’s build procedures on the front-end code.
+To build our custom docker image we used superset’s Dockerfile as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
+The changes made to the Dockerfile to install the chart plugins are in this area:
-L44: First we copy the superset/plugins directory into the container, which contains all the extra and custom chart plugins.
-L48-51: Then we iterate over the chart plugins and execute npm install ... on each of them.
-This will make changes to both the package.json and package-lock.json files and for that, we copy them into a temporary directory package_json_files.
-L54: Then all superset’s front-end code is copied into the container, which will override the package*.json files.
-L56: After this, we copy our custom MainPresets.js file.
-L60-L63: Finally, we replace the package*.json files with the ones that we saved earlier and then run the npm build command.
+L46: Replace some boxplot files with ours;
+L47: Superset’s original version of the controlPanel.ts file is a .ts version; however, ours is a .tsx. For that, we have to remove the .ts version to properly override this file.
Update Superset
cd into superset’s submodule directory.
-Get the latest tags: git fetch.
+Get the latest tags: git fetch -t.
Checkout to the new desired release tag.
Check if there are any changes made to superset’s Dockerfile (on the root of the repository for the current latest release), adapt them, and insert them on our custom Dockerfile under the docker/superset directory.
-Check if there are any changes made to superset’s superset-frontend/src/visualizations/presets/MainPreset.js file.
-You can use the script mainpreset_has_changes.py under the plugins directory to check that.
-Apply the new changes, if any, and remember to keep our chart plugins imported and registered (Currently we only have the Box plot plugin).
-If the version of the frontend package @superset-ui/plugin-chart-echarts changed it’s necessary to update our box plot plugin.
-Follow the instructions present here, also take into account the instruction of the next section.
+If the version of the plugin package plugin-chart-echarts changed, it’s necessary to update our box plot plugin. If it is greater than 0.18.25, go to the history (https://github.com/apache/superset/commits/[RELEASE-TAG]/superset-frontend/plugins/plugin-chart-echarts) of commits made to the plugin-chart-echarts plugin and update to the most recent commit, applying their changes to the files in the superset/box-plot-overrides directory. A fast way to check the changes done between two commits: git diff [old_commit_hash] [recent_commit_hash] -- superset-frontend/plugins/plugin-chart-echarts
Chart Plugin Development
-Instructions on how you can set up your development environment to develop on a custom superset chart plugin:
-Clone the superset repository.
-IMPORTANT NOTE: Since we build the superset’s docker image using the existing superset’s submodule, it’s better not to use it to develop the plugins.
-If you decide to use it anyways, remember this and this steps.
-They might override directories (superset-frontend/node_modules and superset/static/assets) that are generated during the build process, which can cause frontend compilation errors or the app can serve outdated static files.
-Clone the superset-ui repository into the directory superset-frontend of superset’s repository.
-Follow the instructions of this tutorial to create the necessary base files of your plugin.
-Copy the file MainPreset.js present on this directory into the superset repository into the superset-frontend/src/visualizations/presets/ directory.
-Add the line npm install -f --no-optional --save ./superset-frontend/superset-ui/plugins/plugin-chart-[your-chart-name] into the file docker/docker-frontend.sh of the superset repository before the existing npm install ... commands.
-When the development is finished, on the root of the superset-ui repository run yarn install and then yarn build [your-chart-name].
-Copy the directory of your plugin (including its sub-directory esm), within the superset-ui repository within the directory plugins, into the sub-directory plugins this directory.
-Make sure to run the command yarn build [your-chart-name] before doing this step.
+Follow the instructions of this tutorial to create the necessary base files of your plugin.
+To deploy, you can either use the DYNAMIC_PLUGINS feature flag or add and build your plugins in superset/Dockerfile.
diff --git a/docs/development.md b/docs/development.md
index df5ecc67..7aca10fd 100644
--- a/docs/development.md
+++ b/docs/development.md
@@ -33,57 +33,31 @@
### Superset {-}
-Currently, we have a custom chart plugin on our superset installation which doesn't allow us to use superset's pre-built images available on their docker hub, since we have to call npm's build procedures on the front-end code.
-To build our custom docker image we used superset's [Dockerfile](https://github.com/apache/superset/blob/1.0.1/Dockerfile) as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
-Also, to make Superset import our custom chart plugins, some changes have to be made to the [superset-frontend/src/visualizations/presets/MainPreset.js](https://github.com/apache/superset/blob/1.0.1/superset-frontend/src/visualizations/presets/MainPreset.js) file.
+Currently, we have made some modifications to the box plot visualization on our superset installation, which doesn't allow us to use superset's pre-built images available on their docker hub, since we have to call npm's build procedures on the front-end code.
+To build our custom docker image we used superset's [Dockerfile](https://github.com/apache/superset/blob/1.5.0/Dockerfile) as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
-The changes made to the Dockerfile to install the chart plugins are in [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L44-L63) area:
+The changes made to the Dockerfile to install the chart plugins are in [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L47-L49) area:
-1. L44: First we copy the `superset/plugins` directory into the container, which contains all the extra and custom chart plugins.
-2. L48-51: Then we iterate over the chart plugins and execute `npm install ...` on each of them.
- This will make changes to both the package.json and package-lock.json files and for that, we copy them into a temporary directory `package_json_files`.
-3. L54: Then all superset's front-end code is copied into the container, which will override the package*.json files.
-4. L56: After this, we copy our custom MainPresets.js file.
-5. L60-L63: Finally, we replace the package*.json files with the ones that we saved earlier and then run the npm build command.
+1. L46: Replace some boxplot files with ours;
+2. L47: Superset's original version of the controlPanel.ts file is a `.ts` version; however, ours is a `.tsx`. For that, we have to remove the `.ts` version to properly override this file.
#### Update Superset {-}
1. `cd` into superset's submodule directory.
-2. Get the latest tags: `git fetch`.
+2. Get the latest tags: `git fetch -t`.
3. Checkout to the new desired release tag.
4. Check if there are any changes made to superset's Dockerfile (on the root of the repository for the current latest release), adapt them, and insert them on our custom Dockerfile under the `docker/superset` directory.
-5. Check if there are any changes made to superset's `superset-frontend/src/visualizations/presets/MainPreset.js` file.
- You can use the script `mainpreset_has_changes.py` under the `plugins` directory to check that.
- Apply the new changes, if any, and remember to keep our chart plugins imported and registered (Currently we only have the *Box plot* plugin).
-
-6. If the version of the frontend package `@superset-ui/plugin-chart-echarts` changed it's necessary to update our box plot plugin.
- Follow the instructions present [here](https://github.com/EHDEN/NetworkDashboards/tree/master/superset/plugins/plugins/plugin-chart-box-plot#how-to-update), also take into account the instruction of the next section.
+5. If the version of the plugin package `plugin-chart-echarts` changed, it's necessary to update our box plot plugin. If it is greater than 0.18.25, go to the history (`https://github.com/apache/superset/commits/[RELEASE-TAG]/superset-frontend/plugins/plugin-chart-echarts`) of commits made to the plugin-chart-echarts plugin and update to the most recent commit, applying their changes to the files in the `superset/box-plot-overrides` directory. A fast way to check the changes done between two commits (see the sketch below): `git diff [old_commit_hash] [recent_commit_hash] -- superset-frontend/plugins/plugin-chart-echarts`.
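Assuming the submodule checkout lives at `superset/` and keeping the document's own bracketed placeholders, the update flow above condenses to this sketch:

```shell
cd superset                         # superset's submodule directory
git fetch -t                        # fetch the latest release tags
git checkout [RELEASE-TAG]          # checkout the desired release tag
# compare the echarts plugin between two upstream commits:
git diff [old_commit_hash] [recent_commit_hash] -- superset-frontend/plugins/plugin-chart-echarts
```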
#### Chart Plugin Development {-}
-Instructions on how you can set up your development environment to develop on a custom superset chart plugin:
-
-1. Clone the [superset](https://github.com/apache/superset) repository.
- **IMPORTANT NOTE**: Since we build the superset's docker image using the existing superset's submodule, it's better not to use it to develop the plugins.
- If you decide to use it anyways, remember [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L54) and [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L99) steps.
- They might override directories (`superset-frontend/node_modules` and `superset/static/assets`) that are generated during the build process, which can cause frontend compilation errors or the app can serve outdated static files.
-
-2. Clone the [superset-ui](https://github.com/apache-superset/superset-ui) repository into the directory superset-frontend of superset's repository.
-
-1. Follow the instructions of [this tutorial](https://superset.apache.org/docs/installation/building-custom-viz-plugins) to create the necessary base files of your plugin.
+1. Follow the instructions of [this tutorial](https://superset.apache.org/docs/contributing/creating-viz-plugins) to create the necessary base files of your plugin.
-2. Copy the file `MainPreset.js` present on this directory into the superset repository into the `superset-frontend/src/visualizations/presets/` directory.
-
-3. Add the line `npm install -f --no-optional --save ./superset-frontend/superset-ui/plugins/plugin-chart-[your-chart-name]` into the file `docker/docker-frontend.sh` of the superset repository before the existing `npm install ...` commands.
-
-4. When the development is finished, on the root of the superset-ui repository run `yarn install` and then `yarn build [your-chart-name]`.
-
-5. Copy the directory of your plugin (including its sub-directory `esm`), within the superset-ui repository within the directory `plugins`, into the sub-directory `plugins` this directory.
- Make sure to run the command `yarn build [your-chart-name]` before doing this step.
+2. To deploy, you can either use the `DYNAMIC_PLUGINS` feature flag or add and build your plugins in `superset/Dockerfile`.
#### Important features {-}
@@ -93,6 +67,7 @@ Instructions on how you can set up your development environment to develop on a
2. Filters:
- check [this](https://superset.apache.org/docs/frequently-asked-questions#how-to-add-dynamic-filters-to-a-dashboard) faq entry
- Append `?preselect_filters={"chartId":{"columnToFilterBy":["value1", "value2"]}}` to the dashboard URL to apply a filter once the dashboard is loaded. E.g. `?preselect_filters={"13":{"name":["Demo University of Aveiro"]}}`
+
3. Custom label colors: check [this](https://superset.apache.org/docs/frequently-asked-questions#is-there-a-way-to-force-the-use-specific-colors) faq entry
### Github Actions {-}
diff --git a/docs/general-deprecated.html b/docs/general-deprecated.html
index e5124b03..d401b242 100644
--- a/docs/general-deprecated.html
+++ b/docs/general-deprecated.html
@@ -24,7 +24,7 @@
-
+
@@ -162,8 +162,8 @@
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -347,12 +347,13 @@
Database Type and Country Filter
For the filters to work the name of the fields to filter should match in all tables used on the charts of this dashboard.
SQL query
-SELECT source.name,
-       country.country,
-       source.database_type,
-       source.acronym
-FROM public.data_source AS source
-INNER JOIN public.country AS country ON source.country_id = country.id
+SELECT source.name,
+       country.country,
+       source.database_type,
+       source.acronym
+FROM public.data_source AS source
+INNER JOIN public.country AS country
+ON source.country_id = country.id
Chart settings
@@ -390,15 +391,15 @@
Total Number of Patients
SQL query
-SELECT
-    country,
-    database_type,
-    release_date,
-    SUM(count_value) OVER (ORDER BY release_date ASC)
-FROM achilles_results
-JOIN data_source ON data_source_id = data_source.id
-JOIN country ON data_source.country_id = country.id
-WHERE analysis_id = 1
+SELECT
+    country,
+    database_type,
+    release_date,
+    SUM(count_value) OVER (ORDER BY release_date ASC)
+FROM achilles_results
+JOIN data_source ON data_source_id = data_source.id
+JOIN country ON data_source.country_id = country.id
+WHERE analysis_id = 1
Chart settings
@@ -441,21 +442,23 @@
Network Growth by Date
SQL query
-SELECT source.name AS source,
-       country.country,
-       source.database_type,
-       source.release_date,
-       concepts.concept_name AS gender,
-       achilles.count_value as count
-FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id = source.id
-INNER JOIN public.country AS country ON source.country_id = country.id
-JOIN (
-    SELECT '8507' AS concept_id, 'Male' AS concept_name
-    UNION
-    SELECT '8532', 'Female'
-) AS concepts ON achilles.stratum_1 = concept_id
-WHERE analysis_id = 2
+SELECT source.name AS source,
+       country.country,
+       source.database_type,
+       source.release_date,
+       concepts.concept_name AS gender,
+       achilles.count_value as count
+FROM public.achilles_results AS achilles
+INNER JOIN public.data_source AS source
+ON achilles.data_source_id = source.id
+INNER JOIN public.country AS country
+ON source.country_id = country.id
+JOIN (
+    SELECT '8507' AS concept_id, 'Male' AS concept_name
+    UNION
+    SELECT '8532', 'Female'
+) AS concepts ON achilles.stratum_1 = concept_id
+WHERE analysis_id = 2
Chart settings
@@ -501,15 +504,17 @@
Patients per Country
Figure 9.4: Settings for creating the Patients per Country chart
-
-
-SQL query {#patientsPerCountryQuery}
-SELECT country.country,
-       source.database_type,
-       count_value
-FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id = source.id
-INNER JOIN public.country AS country ON source.country_id = country.id
-WHERE analysis_id = 1
+
+
+SQL query
+SELECT country.country,
+       source.database_type,
+       count_value
+FROM public.achilles_results AS achilles
+INNER JOIN public.data_source AS source
+ON achilles.data_source_id = source.id
+INNER JOIN public.country AS country
+ON source.country_id = country.id
+WHERE analysis_id = 1
Clone the repository with the command git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards. If you already cloned the repository without the --recurse-submodules option, run git submodule update --init to fetch the superset submodule.
-Create a .env file on the docker directory, using .env-example as a reference, setting all necessary environment variables (SUPERSET\_MAPBOX\_API\_KEY and DASHBOARD\_VIEWER\_SECRET\_KEY).
-2.1 If you will use this application as a third-party application and will iframe it, set the variable SINGLE\_APPLICATION\_MODE to False and define the host of the main application on the variable MAIN\_APPLICATION\_HOST. Also make sure to add this last host to the list of ALLOWED\_HOSTS.
+Create a .env file on the docker directory, using .env-example as a reference, setting all necessary environment variables (SUPERSET_MAPBOX_API_KEY and DASHBOARD_VIEWER_SECRET_KEY).
+2.1 If you will use this application as a third-party application and will iframe it, set the variable SINGLE_APPLICATION_MODE to False and define the host of the main application on the variable MAIN_APPLICATION_HOST. Also make sure to add this last host to the list of ALLOWED_HOSTS.
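For illustration, a `docker/.env` covering the variables above could look like the following; every value is a placeholder and `.env-example` is the real reference:

```shell
SUPERSET_MAPBOX_API_KEY=your-mapbox-api-key
DASHBOARD_VIEWER_SECRET_KEY=a-long-random-string
# only for third-party (iframed) deployments:
SINGLE_APPLICATION_MODE=False
MAIN_APPLICATION_HOST=mainapp.host.com   # also add this host to ALLOWED_HOSTS
```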
@@ -391,7 +391,7 @@
Superset setup
By default Superset’s admin user credentials are admin/admin.
It is recommended that you change the password if you will use this in a production environment.
-To any anonymous user view dashboards, add the following:
+To let any anonymous user view dashboards, add the following permissions to the public role:
all datasource access on all_datasource_access
can csrf token on Superset
@@ -401,6 +401,7 @@
Superset setup
can read on CssTemplate
can read on Dashboard
+
For each dashboard you want anonymous users to be able to access, on the dashboard list page click edit (the pencil on the right) and add the “Admin” and “Public” roles to the “Roles with access” field.
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -374,8 +374,8 @@
Chart settings
-
-Number of Patients in Observation Period {#numInObservationPeriod}
+
+Number of Patients in Observation Period
The Number of Patients in Observation Period plot shows the number of patients that contribute at least one day in a specific month.
@@ -385,13 +385,14 @@
Number of Patients in Observation Period {#numInObservationPeriod}
In order to obtain the colors blue and rose in the chart representing the gender distribution,
add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page:
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -392,23 +392,43 @@
Age at first observation - Table {#age1ObservationTable}
diff --git a/docs/processes.md b/docs/processes.md
index b2667c20..415ee269 100644
--- a/docs/processes.md
+++ b/docs/processes.md
@@ -113,8 +113,9 @@ While parsing the uploaded file, some data is extracted to then present on the U
The next table is presented where the previous data is stored on the rows with analysis id 0 and 5000:
+
| Analysis Id | Stratum 1 | Stratum 2 | Stratum 3 | Stratum 4 | Stratum 5 |
-| ----------- | --------- | ------------------- | ---------------- | ----------- | ------------------ |
+| -----------:|:--------- | ------------------- | ---------------- | ----------- | ------------------ |
| 0 | | R Package Version | Generation Date | | |
| 5000 | | Source Release Date | CDM Release Date | CDM Version | Vocabulary Version |
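As a purely hypothetical illustration of those two rows in an uploaded CSV (7-column layout: analysis_id, stratum_1 through stratum_5, count_value; all versions and dates below are invented):

```shell
# Writes two made-up metadata rows; real values come from the CatalogueExport run.
cat > metadata_rows.csv <<'EOF'
0,,1.0.0,2021-01-01,,,0
5000,,2020-12-01,2020-10-01,5.3.1,v5.0,0
EOF
```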
diff --git a/docs/provenance-deprecated.html b/docs/provenance-deprecated.html
index 9ff1c8f6..075f8a0c 100644
--- a/docs/provenance-deprecated.html
+++ b/docs/provenance-deprecated.html
@@ -24,7 +24,7 @@
-
+
@@ -162,8 +162,8 @@
With this every time you want to edit the dashboard layout you have to either comment the CSS inserted
or remove it so the “Edit Dashboard” button can show again.
@@ -385,29 +385,37 @@
Condition & Drug & Procedure & Device & Measurement & Ob
-Select the compressed backup you want to restore and decompress it:
-tar -xJf BACKUP_FILE.tar.xz.
-Redis
-Make sure the redis docker container is down.
-(Re)place the file dump.rdb on the redis volume by the file redis.rdb. By default the redis volume is located where this repository was cloned on the directory docker/volumes/redis.
-Media Files If you have a volume pointing to where the media files are stored, replace all files with the ones present on the downloaded backup file. Else:
-Bring the dashoard container up docker-compose up -d dashboard
-Enter in the container docker exec -it [CONTAINER_ID] bash
-If you don’t know where the media files are stored you can check the value of the MEDIA_ROOT variable
-python manage.py shell
-from django.conf import settings
-print(settings.MEDIA_ROOT)
-Remove the entire MEDIA_ROOT directory and exit the container
-Copy the media directory present on the backup file to the catalogue container docker cp -a collected-media [CONTAINER_ID]:[MEDIA_ROOT_PARENT_PATH]
+Select the compressed backup you want to restore.
+Make sure that all the environment variables are the same as the ones that were used for the chosen backup file.
+Additionally, the backups.conf file must be set up, since its TMP_DIRECTORY variable is used.
+Run the backups/restore.sh script.
diff --git a/docs/search_index.json b/docs/search_index.json
index ced15d4f..719c4f4a 100644
--- a/docs/search_index.json
+++ b/docs/search_index.json
@@ -1 +1 @@
-[["index.html", "Data Network Dashboards Chapter 1 Preface", " Data Network Dashboards This document is currently under construction 2022-01-04 Chapter 1 Preface Automated Characterization of Health Information at Large-scale Longitudinal Evidence Systems (ACHILLES) is a profiling tool developed by the OHDSI community to provide descriptive statistics of databases standardized to the OMOP Common Data Model. These characteristics are presented graphically in the ATLAS tool. However, this solution does not allow for database comparison across the data network. The Data Network Dashboards aggregates ACHILLES results files from databases in the network and displays the descriptive statistics through graphical dashboards. This tool is helpful to gain insight in the growth of the data network and is useful for the selection of databases for specific research questions. In the software demonstration we show a first version of this tool that will be further developed in EHDEN in close collaboration with all our stakeholders, including OHDSI. Contributors To develop this tool, EHDEN organized a hack-a-thon (Aveiro, December 2-3, 2019), where we defined and implemented a series of charts and dashboards containing the most relevant information about the OMOP CDM databases. The team involved in this task were composed by the following members: Joo Rafael Almeida1 Andr Pedrosa1 Peter R. Rijnbeek2 Marcel de Wilde2 Michel Van Speybroeck3 Maxim Moinat4 Pedro Freire1 Alina Trifan1 Srgio Matos1 Jos Lus Oliveira1 1 - Institute of Electronics and Informatics Engineering of Aveiro, Department of Electronics and Telecommunication, University of Aveiro, Aveiro, Portugal 2 - Erasmus MC, Rotterdam, Netherlands 3 - Janssen Pharmaceutica NV, Beerse, Belgium 4 - The Hyve, Utrecht, Netherlands Considerations This manual was written to be a guide for a clean installation of this system with all the dashboards that we defined during the project. The first chapter describes the goal of the system and the second how to install the system. The remaining chapters are dedicated to the dashboards, in which chapters describes one dashboard and all its charts. To simplify the representation of the dashboards layout, we used similar schemas as it is presented in Figure 1.1. The white box is the dashboard and the inside boxes are charts. The colour changes in relation to the type of chart. Figure 1.1: Example of a dashboards tool presenting the databases available in the network (simulated data) License The system is open-source and this manual was written in RMarkdown using the bookdown package. Acknowledges This work has been conducted in the context of EHDEN, a project that receives funding from the European Unions Horizon 2020 and EFPIA through IMI2 Joint Undertaking initiative, under grant agreement No 806968. "],["introduction.html", "Chapter 2 Introduction", " Chapter 2 Introduction The OHDSI research network has been growing steadily which results in an increasing number of healthcare databases standardized to the OMOP CDM format. The OHDSI community created the ACHILLES tool (Automated Characterization of Health Information at Large-scale Longitudinal Exploration System) to characterize those databases. The results are available to the data custodian in their local ATLAS tool and helps them to gain insights in their data and helps in assessing the feasibility of a particular research questions. 
ACHILLES was designed to extract the metadata from a single database, which by itself does not allow the comparison with the remaining databases in the network. However, we believe there is even more value in sharing this information with others to enable network research in a Data Network Dashboard. Data Network Dashboard The European Health Data and Evidence Network (EHDEN) project therefore designed a Data Network Dashboard tool, a web application to aggregate information from distributed OMOP CDM databases. It uses the ACHILLES results files to construct graphical dashboards and enables database comparison (Figure 2.1). The tool is built on Apache Superset, which is an open-source enterprise-ready business intelligence web application that can provide powerful and fully customizable graphical representations of data. Achilles results can be uploaded through the EHDEN Database Catalogue using the dashboards plugin but can also be directly uploaded in the tool. Figure 1. Example of a dashboards tool presenting age and gender distributions (simulated data). Figure 2.1: Example of a dashboards tool presenting the databases available in the network (simulated data) In this tools, we defined and implemented a series of charts and dashboards containing the most relevant information about the databases, such as: General: dashboards that shows the databases types per country, the distribution of data source types, the growth of the Network including the number of database and the number of patients in the databases over time; Person: representing the number of patients per country, age distribution at first observation, year of birth distribution and normalized gender distribution; Population characteristics: dashboard with the cumulative patient time, persons with continuous observation per month, and the start and end dates of those periods; Visit: chart to compare the number and type of visit occurrence records; Death: information about the number of death records by month, and the patient age at time of death; Concepts: bubble chart which shows the number of patients and records per concept over the databases; Data domains: heat map visualization of the major data domains in each database. "],["installation.html", "Chapter 3 Installation", " Chapter 3 Installation Currently, we use docker to deploy our environment First Steps Clone the repository with the command git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards. If you already cloned the repository without the --recurse-submodules option, run git submodule update --init to fetch the superset submodule. Create a .env file on the docker directory, using .env-example as a reference, setting all necessary environment variables (SUPERSET\\_MAPBOX\\_API\\_KEY and DASHBOARD\\_VIEWER\\_SECRET\\_KEY). 2.1 If you will use this application as a third-party application and will iframe it, set the variable SINGLE\\_APPLICATION\\_MODE to False and define the host of the main application on the variable MAIN\\_APPLICATION\\_HOST. Also make sure to add this last host to the list of ALLOWED\\_HOSTS. Dashboard Viewer setup If you wish to expose the dashboard viewer app through a specific domain(s) you must add it/them to the ALLOWED_HOSTS list on file dashboard_viewer/dashboard_viewer/settings.py and remove the '*' entry. Build containers images: docker-compose build. This might take several minutes. Set up the database and create an admin account for the dashboard viewer app: docker-compose run --rm dashboard ./docker-init.sh. 
Insert Concepts The concepts table is not in the repository due to its dimension, therefore we use directly the Postgres console to insert this table in the installation. Get your concept csv file from Athena Copy the file into postgres container docker cp concept.csv dashboard_viewer_postgres_1:/tmp/ Enter in the postgres container: docker exec -it dashboard_viewer_postgres_1 bash Enter in the achilles database (value of the variable POSTGRES_ACHILLES_DB on the .env file) with the root user (value of the variable POSTGRES_ROOT_USER on the .env file): psql achilles root Create the concept table CREATE TABLE concept ( concept_id INTEGER NOT NULL, concept_name VARCHAR(255) NOT NULL, domain_id VARCHAR(20) NOT NULL, vocabulary_id VARCHAR(20) NOT NULL, concept_class_id VARCHAR(20) NOT NULL, standard_concept VARCHAR(1) NULL, concept_code VARCHAR(50) NOT NULL, valid_start_date DATE NOT NULL, valid_end_date DATE NOT NULL, invalid_reason VARCHAR(1) NULL ); Copy the CSV file content to the table (this could take a while) To get both ' (single quotes) and \" (double quotes) on the concept_name column we use a workaround by setting the quote character to one that should never be in the text. Here we used \\b (backslash). COPY public.concept FROM '/tmp/concept.csv' WITH CSV HEADER DELIMITER E'\\t' QUOTE E'\\b'; Create index in table (this could take a while): CREATE INDEX concept_concept_id_index ON concept (concept_id); CREATE INDEX concept_concept_name_index ON concept (concept_name); Set the owner of the concept table to the achilles user (value of the variable POSTGRES_ACHILLES_USER on the .env file): ALTER TABLE concept OWNER TO achiller Bring up the containers: docker-compose up -d. Run the command docker-compose run --rm dashboard python manage.py generate_materialized_views to create the materialized views on Postgres. Superset setup Bring up the containers: docker-compose up -d. Make sure that the container superset-init has finished before continuing. It is creating the necessary tables on the database and creating permissions and roles. If you used the default ports: Go to http://localhost to access the dashboard viewer app. Go to http://localhost:8088 to access superset. By default Supersets admin user credentials are admin/admin. It is recommended that you change the password if you will use this in a production environment. To any anonymous user view dashboards, add the following: all datasource access on all_datasource_access can csrf token on Superset can dashboard on Superset can explore json on Superset can read on Chart can read on CssTemplate can read on Dashboard Dummy data On a fresh installation, there are no achilles_results data so Supersets dashboards will display No results. On the root of this repository, you can find the demo directory where we have an ACHILLES results file with synthetic data that you can upload to a data source on the uploader app of the dashboard viewer (http://localhost/uploader). If you wish to compare multiple data sources, on the demo directory there is also a python script that allows you to generate new ACHILLES results files, where it generates random count values based on the ranges of values for each set of analysis_id and stratums present on a base ACHILLES results file. So, from the one ACHILLES results fill we provided, you can have multiple data sources with different data. 
"],["processes.html", "Chapter 4 Processes", " Chapter 4 Processes Data Sources Target: platform user Before uploading any data to this platform, a data source owner has to create a data source instance to then associated the upload data with. The creation of data source is done through the [BASE_URL]/uploader/ URL, where 7 fields are expected: name: an extensive name acronym: a short name country: where is the data source localized link (Optional): web page of the data source database type: type of OMOP database coordinates: a more accurate representation of the data sources localization hash (Optional): the internal unique identifier of a data source If you access [BASE_URL]/uploader/ the 7th field (hash) is set automatically for something random, however, if you want to set it use the [BASE_URL]/uploader/[HASH]/ URL. To avoid duplication on the database type field, this field is transformed (use title case and trimmed) and then is checked there is already a record (Database Type) with the same value. There are several ways to create a data source: Create through a web form By accessing the [BASE_URL]/uploader/ URL, you will get a form where you can field the fields, where the country field is a dropdown and the coordinates field is set through a map widget. Automatically create when performing a GET to the [BASE_URL]/uploader/ URL If the Network Dashboards platform is being used as a third-party application and the main application has all the data for the required fields, the data source can be automatically created and the user is redirected directly to the upload files page. To perform this, each field should be provided as a URL parameter when accessing the [BASE_URL]/uploader/ URL. If all required fields are provided and are valid the data source is created and the user is redirected to the upload files page. If a required field is missing or is not valid the webform is presented to the user so it can manually fill those fields. Automatically create by performing a POST to the [BASE_URL]/uploader/ URL Since the creation URL does not have csrf cookie protection, you can perform a POST request as you were submitting a form. Notes For the automatic options: Since the coordinates field is composed of two fields (latitude, longitude), it should be submitted as coordinates_0=[latitude] and coordinates_1=[longitude] The country field should match one of the available on the dropdown of the webform. Draft Status After a data owner uploads data into his data source, he might not want to make it public right away. To achieve this a data source has a boolean field telling whether if the data source is in draft mode. This then also allows creating dashboards with data of non-draft data sources only. There are three ways to change the value of this draft status field: Through the Django admin app ([BASE_URL]/admin/) Accessing the respective edit page of the data source. This requires a feature to be enabled, which is more detailed on the Allow Draft Status Updates section of the Customization chapter. Perform a PATCH request to the [BASE_URL]/uploader/[HASH]/ URL. On this request, other fields, other than the draft status, can be changed. The body of the request must be a JSON object with the fields that will suffer changes and their new values. Catalogue Results Files Target: platform user Once a data source is created you can access its upload page by accessing the [BASE_URL]/uploader/[HASH]/. 
If no data source has the provided hash you will be redirected back to the data source creation form. On the upload page you can: Go to the edit page of your data source Upload a catalogue results file Check the upload history A catalogue results file is a CSV file, the result obtained after running the EHDEN/CatalogueExport R package on an OMOP database. It is a variant of the OHDSI/Achilles where it only extracts a subset of analyses of the ACHILLES original set. The upload form expects a CSV file with the following columns: Name Type Required/Non-Nullable/Non-Empty analysis_id int Yes stratum_1 string No stratum_2 string No stratum_3 string No stratum_4 string No stratum_5 string No count_value int Yes min_value double No max_value double No avg_value double No stdev_value double No median_value double No p10_value double No p25_value double No p75_value double No p90_value double No The uploaded file must: either contain the first 7 columns OR all 16 columns contain the columns in the same order as presented in the table above While parsing the uploaded file, some data is extracted to then present on the Upload history and to update data source information. This data is extracted from the record with analysis id 0, which is required to be present on the file, and 5000, which is optional. Next is presented the data extracted and their description: R Package Version: the version of CatalogueExport R package used Generation Date: date at which the CatalogueExport was executed on the OMOP database Source Release Date: date at which the OMOP database was released CDM Release Date: date at which the used CDM version was released CDM Version: version of the CDM used Vocabulary Version: version of the vocabulary used The next table is presented where the previous data is stored on the rows with analysis id 0 and 5000: Analysis Id Stratum 1 Stratum 2 Stratum 3 Stratum 4 Stratum 5 0 R Package Version Generation Date 5000 Source Release Date CDM Release Date CDM Version Vocabulary Version Materialized Views Target: admin user For each chart, Superset has an underlying SQL query which in our case is run every time a chart is rendered. If one of these queries takes too long to execute the charts will also take too long until they are rendered and eventually users might get timeout messages given a bad user experience. To avoid this problem, instead of executing the raw SQL query we create a postgres materialized view of the query, which is then used to feed the data to the chart. So only a simple SELECT x FROM x query is executed when a chart is rendered. So whenever I create a chart I have to access the Postgres console? No, we created an unmanaged Materialized Queries model that maps to the materialized views on Postgres. With it you can create new materialized views through the Django admin app, by accessing the [BASE_URL]/admin/ URL. You have to provide the materialized view name and its query, which will then be used to execute the query CREATE MATERIALIZED VIEW [name] AS [query], which will be executed on a background task so the browser doesnt hang and times out, in case of complicated queries. Taking this into account, the record associated will not appear on the Django admin app until the CREATE MATERIALIZED VIEW query finishes. 
To give feedback on the background task we use celery/django-celery-results, so you can check the status of a task on the Task Results model of the Celery Results app After the creation of a Materialized Query, the will be a message telling the id of the task which is executing the CREATE MATERIALIZED VIEW query. You can then check for the record associated with the task, click on the id to get more details. If something went wrong check the error message either on Result Data or Traceback fields under the Result section After all this, the final step is to add the materialized view as a Dataset. Login into Superset, then go to Data -> Datasets and create a new one. Select the Achilles database, the public schema, then the created materialized view and click ADD. After this, the materialized view can be used as a data source for a new chart. Tabs View [Deprecated] Note: This app is no longer maintaned and the associated urls were unlinked. Target: admin user Once there are data sources on the platform, data was uploaded to them and there are dashboards created on Superset, researchers can now browse through the dashboards and analyze and compare the data of the different data sources. One way to allow this would be to let them browse through the dashboard list on Superset. However, if there was some dashboards not ready to show to the public users, they could still access them. For that, it was created a page, with a sidebar, where public users could browse through the available and ready dashboards. It can be accessed through the URL [BASE_URL]/tabs/ The sidebar entries can be configured through the Django admin app, accessing the Tabsmanager app section. Here two models are available to create: Tab Groups: They allow to groups several sidebar entries within a collapsable group. Tabs: Will create a clickable entry on the sidebar that can be presented within a group. When a tab is clicked the associated dashboard will be displayed on the page. Each entry, tab, or group of them, expects: Title/Name Icon: Name of a font awesome version 5 icon Position: Allows to order entries along the sidebar. If a Tab has a group, then this field will order the tabs within that group only. Visible: If whether or not this tab or group should be visible. The goal of this field is to avoid having to delete the record from the database just because a certain tab is not ready and later on created it from scratch. Tabs additionally expect an URL, which will be used to display a Superset dashboard in an iframe. To hide Supersets menu bar, an additional standalone URL parameter should be appended to the provided URL of a tab. The value of the standalone arguments depends on the expected result: 1: menu bar is hidden. the bar where the dashboard title, publish status, and three dots option menu are present will still appear 2: both the menu bar and the dashboard title bar are hidden. By default, the dashboard of the first tab is displayed on the page, however, if one wants a specific tab to be displayed when the page is opened, its title should be present in the hash part of the URL. For example, if there is a tab called People, to make that tab selected at the start the following URL should be used [BASE_URL]/tabs/#People. 
"],["backups.html", "Chapter 5 Backups", " Chapter 5 Backups Create a credentials file (the structure of the file depends on the target cloud server) Create a .dashboards_backups.conf file under your home directory (variable $HOME) using dashboards_backups.conf.example as base, setting the appropriate value for the several variables. For variables associated with files and directories always use absolute paths. Variables: RUN: Set it to 0 if you dont want the next scheduled backup to run. This variable allows you to cancel any backup runs while you are doing some maintenance on the application. CONSTANCE_REDIS_DB: Number of the Redis database where the django constance config is stored. The default value is 2. This value should be the same as the environment variable REDIS_CONSTANCE_DB of the dashboard container. The following variables are associated with the arguemtns of the backup_uploader python package. Check its usage for more details: APP_NAME: The backup process will generate some directories with this name in places that are shared with other applications. SERVER: The name of the target cloud server to where backups should be uploaded (dropbox or mega). BACKUP_CHAIN_CONFIG: Allows having different directories with backups of different ages. CREDENTIALS_FILE_PATH: File containing the credentials to access the server to upload the backup file. Install the backup_uploader python package by following its install instructions. Schedule your backups * * * * * Command_to_execute | | | | | | | | | Day of the Week ( 0 - 6 ) ( Sunday = 0 ) | | | | | | | Month ( 1 - 12 ) | | | | | Day of Month ( 1 - 31 ) | | | Hour ( 0 - 23 ) | Min ( 0 - 59 ) (Retrived from: Tutorialspoint) Ex: To run every day at 3:00 am crontab -e Add entry 0 3 * * * $HOME/NetworkDashboards/backups/backup.sh (The path to the backup script might be different) "],["restore.html", "5.1 Restore", " 5.1 Restore Select the compressed backup you want to restore and decompress it: tar -xJf BACKUP_FILE.tar.xz. Redis Make sure the redis docker container is down. (Re)place the file dump.rdb on the redis volume by the file redis.rdb. By default the redis volume is located where this repository was cloned on the directory docker/volumes/redis. Change its permissions, owner and group: chmod 0644 docker/volumes/redis/dump.rdb sudo chown -R 999:999 docker/volumes/redis Postgres Make sure all containers that make changes on the database are stopped. Copy the file postgres_backup.sql into the postgres container docker cp postgres.sql [CONTAINER_ID]:/tmp. Execute the backup script: docker exec -u root dashboard_viewer_postgres_1 psql -f /tmp/postgres_backup.sql -U \\$POSTGRES_USER -d \\$POSTGRES_DB. Media Files If you have a volume pointing to where the media files are stored, replace all files with the ones present on the downloaded backup file. 
Else: Bring the dashoard container up docker-compose up -d dashboard Enter in the container docker exec -it [CONTAINER_ID] bash If you dont know where the media files are stored you can check the value of the MEDIA_ROOT variable python manage.py shell from django.conf import settings print(settings.MEDIA_ROOT) Remove the entire MEDIA_ROOT directory and exit the container Copy the media directory present on the backup file to the catalogue container docker cp -a collected-media [CONTAINER_ID]:[MEDIA_ROOT_PARENT_PATH] "],["useful-stuff.html", "5.2 Useful stuff", " 5.2 Useful stuff How to create a shared link to a dropbox directory using its pythons API: pip install dropbox import dropbox d = dropbox.Dropbox(API_TOKEN) # create a shared link for a directory from dropbox.sharing import SharedLinkSettings sharing_settings = SharedLinkSettings(require_password=True, link_password=DIRECTORY_PASSWORD) d.sharing_create_shared_link_with_settings(DIRECTORY_PATH, sharing_settings) # get all links for link in d.sharing_get_shared_links().links: print(f"{link.path} -> {link.url}") "],["customizations.html", "Chapter 6 Customizations", " Chapter 6 Customizations This platform is currently being used within the scope of the European Health Data & Evidence Network (EHDEN) project. To allow the dashboard viewer Django application to be easily used by another project or company, several components support customization in runtime, removing the need to change such things directly on the source code. To achieve this we make use of Constance that allows configuring several fields which then can be changed through the Django admin app. Platform Logo It is visible both in the Tabs Manager and the Catalogue Results Uploader URLs. The platform allows two possible ways to choose a logo: upload a file or provide an URL to an image. If both fields are provided, the URL one will be used. On the tabs manager app, we also allow customization of the CSS associated both with the image itself and its container. Platform Title All pages of the uploader app use the same base HTML file which contains a header with the platform logo, page title, and platform title. The first was already mentioned before, the second cant be changed. The last can be altered using a Constance field. Uploader Page Texts The data source creation page has three columns with some text providing some instructions for the creation of a data source and the upload of catalogue results. The text of these three columns is customizable, where markdown can be used, which is then transformed into HTML before rending the page. Allow Draft Status Updates In the section Draft Status of the Processes chapter, it was already explained the concept around draft data sources. By default, a user can NOT change the data source status on the edit page of a data source, only being allowed to do it through a PATCH request. Changes through the web edit form can be allowed by changing a Constance field. Them an additional draft field will be available on the edit data source form. "],["development-instructions.html", "Chapter 7 Development Instructions", " Chapter 7 Development Instructions Repository Structure Description backups: Scripts and configuration files to perform backups of all the data involved in the Network Dashboards applications (Dashboard viewer + Superset) dashboard_viewer: The Dashboard Viewer Django application to manage and upload catalogue results data. More detail can be found in the Code Documentation chapter. 
demo: Files that can be used to test some processes of the platform (Upload catalogue results data and import a simple dashboard) docker: Docker-compose stack-related directories. Environment file Configuration directories (Nginx and Postgres) Custom Superest Dockerfile For more information about docker deployment consult the Installation chapter. docs: Where the files of this gitbook are hosted. Other output formats can also be obtained here. Consult the Documentation section of this chapter for more details. superset: contains a submodule to the latest supported version of Supersets repository and our custom chart plugins tests: contains files to launch a docker-compose stack specific to run tests. requirements-dev: python requirements files to the several tools to either perform code style checks or to run Django tests .pre-commit-config.yaml: configuration for the pre-commit tool. This is not mandatory to use but is a good tool to automatically fix problems related to code style on staged files setup.cfg: configurations for the several code style tools tox.ini: configuration for the tox tool. It helps automate the process to check if the code style is correct and if the Django tests are passing Its extremely useful in this context since different code style check tools that we use have some conflicts with python dependencies. It creates a virtual environment for each tox environment, in our case, for each code style check tool plus Django tests Superset Currently, we have a custom chart plugin on our superset installation which doesnt allow us to use supersets pre-built images available on their docker hub, since we have to call npms build procedures on the front-end code. To build our custom docker image we used supersets Dockerfile as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code. Also, to make Superset import our custom chart plugins, some changes have to be made to the superset-frontend/src/visualizations/presets/MainPreset.js file. The changes made to the Dockerfile to install the chart plugins are in this area: L44: First we copy the superset/plugins directory into the container, which contains all the extra and custom chart plugins. L48-51: Then we iterate over the chart plugins and execute npm install ... on each of them. This will make changes to both the package.json and package-lock.json files and for that, we copy them into a temporary directory package_json_files. L54: Then all supersets front-end code is copied into the container, which will override the package*.json files. L56: After this, we copy our custom MainPresets.js file. L60-L63: Finally, we replace the package*.json files with the ones that we saved earlier and then run the npm build command. Update Superset cd into supersets submodule directory. Get the latest tags: git fetch. Checkout to the new desired release tag. Check if there are any changes made to supersets Dockerfile (on the root of the repository for the current latest release), adapt them, and insert them on our custom Dockerfile under the docker/superset directory. Check if there are any changes made to supersets superset-frontend/src/visualizations/presets/MainPreset.js file. You can use the script mainpreset_has_changes.py under the plugins directory to check that. Apply the new changes, if any, and remember to keep our chart plugins imported and registered (Currently we only have the Box plot plugin). 
If the version of the frontend package @superset-ui/plugin-chart-echarts changed its necessary to update our box plot plugin. Follow the instructions present here, also take into account the instruction of the next section. Chart Plugin Development Instructions on how you can set up your development environment to develop on a custom superset chart plugin: Clone the superset repository. IMPORTANT NOTE: Since we build the supersets docker image using the existing supersets submodule, its better not to use it to develop the plugins. If you decide to use it anyways, remember this and this steps. They might override directories (superset-frontend/node_modules and superset/static/assets) that are generated during the build process, which can cause frontend compilation errors or the app can serve outdated static files. Clone the superset-ui repository into the directory superset-frontend of supersets repository. Follow the instructions of this tutorial to create the necessary base files of your plugin. Copy the file MainPreset.js present on this directory into the superset repository into the superset-frontend/src/visualizations/presets/ directory. Add the line npm install -f --no-optional --save ./superset-frontend/superset-ui/plugins/plugin-chart-[your-chart-name] into the file docker/docker-frontend.sh of the superset repository before the existing npm install ... commands. When the development is finished, on the root of the superset-ui repository run yarn install and then yarn build [your-chart-name]. Copy the directory of your plugin (including its sub-directory esm), within the superset-ui repository within the directory plugins, into the sub-directory plugins this directory. Make sure to run the command yarn build [your-chart-name] before doing this step. Important features Standalone Mode: by appending ?standalone=true to the URL of a dashboard supersets menu bar wont show. New versions support ?standalone=1 or ?standalone=2 where the first does the same as ?standalone=true and the second also hides the bar containing the name of the dashboard, leaving just the charts. Filters: check this faq entry Append ?preselect_filters={\"chartId\":{\"columnToFilterBy\":[\"value1\", \"value2\"]}} to the dashboard URL to apply a filter once the dashboard is loaded. E.g. ?preselect_filters={\"13\":{\"name\":[\"Demo University of Aveiro\"]}} Custom label colors: check this faq entry Github Actions Github has a feature that allows performing automatic actions after a certain event happens on the repository. We use this feature to execute to check if everything is alright with new PR before merging them to dev. Github calls a job a set of steps that are executed after a certain event. Then several jobs can be groups in workflows. Events are defined at the workflow level, so all the jobs in a workflow will execute at the same time. We have two workflows: Code analysis checks Django tests The first has three jobs black: ensures that pythons code format is consistent throughout the project isort: sorts and organizes import statements prospector: executes a set of tools that perform some code analysis The second has just one job that executes the Django tests. Both workflows execute on commits of pull requests that will be merged into the dev branch. Regarding the code analysis workflow, the three tools used have requirements that conflict with each other, for that there is a requirements file for each tool on the requirement-dev directory of the repository. 
## Github Actions

Github has a feature that allows performing automatic actions after a certain event happens on the repository. We use this feature to check that everything is alright with new PRs before merging them into dev. Github calls a job a set of steps that are executed after a certain event, and several jobs can be grouped into workflows. Events are defined at the workflow level, so all the jobs in a workflow are triggered by the same events.

We have two workflows:

1. Code analysis checks
2. Django tests

The first has three jobs:

- black: ensures that python's code format is consistent throughout the project
- isort: sorts and organizes import statements
- prospector: executes a set of tools that perform some code analysis

The second has just one job, which executes the Django tests. Both workflows execute on commits of pull requests that will be merged into the dev branch.

Regarding the code analysis workflow, the three tools used have requirements that conflict with each other, so there is a requirements file for each tool in the requirements-dev directory of the repository. To avoid having three different virtual environments, one per tool, you can use tox. You just need to install the development requirements (pip install -r requirements-dev/requirements-dev.txt) and then run tox. It will manage the necessary virtual environments and install the requirements for each tool. If, however, you want to run a specific tool manually, you can check the tox configuration file (tox.ini). For example, for the prospector tool the tox configuration is the following:

```ini
[testenv:prospector]
basepython = python3.8
deps =
    -r{toxinidir}/requirements-dev/requirements-prospector.txt
    -r{toxinidir}/dashboard_viewer/requirements.txt
commands =
    prospector dashboard_viewer
    prospector docker/superset
```

We can see that it installs the requirements for the prospector tool, plus the requirements of the Dashboard Viewer Django app, and then runs two commands.

For both the black and isort tools, when you run tox, it will show the changes required to make the code conform. You can apply the changes automatically by executing the tools manually without the --check and --check-only options, respectively.

Sometimes prospector can be a pain, complaining about too much stuff. You can make prospector ignore a specific line by appending the comment # noqa to the end of the line it is complaining about.

## Tests

Our tests use Django's built-in testing features, which use unittest under the hood. Not all features have tests associated; however, there are already some test scenarios in mind, written as issues on the repository with the tag Test Use Case.

To run the tests we set up a docker-compose stack, under the tests directory, which has just the necessary data containers (Redis and Postgres). This avoids having to make changes to the development/production docker-compose stack. Once the stack is up, it is only necessary to run SECRET_KEY=secret python manage.py test to execute the tests.

If you are developing any tests that involve celery, there is no need to have a celery process running, since in Django's settings.py we set the test runner to the celery one. This way python manage.py test is enough to test the whole application.
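For reference, Django associates tags with tests through the tag decorator, which is what makes commands like python manage.py test --tag ... and --exclude-tag ... possible. A minimal sketch; the tag and test names here are hypothetical, not taken from the repository:

```python
from django.test import TestCase, tag


@tag("test-use-case")  # hypothetical tag name, for illustration only
class UploadValidationTests(TestCase):
    def test_rejects_files_with_wrong_column_count(self):
        # a real test would post a malformed file to the upload view
        # and assert that the upload ends up marked as failed
        self.assertEqual(1 + 1, 2)
```

With that in place, python manage.py test --tag test-use-case would run only the tests tagged this way.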
## Python Requirements

The python requirements for the Dashboard Viewer Django app are present in the requirements.txt file of the dashboard_viewer directory. The file is divided into two sections. First come the direct dependencies: dependencies that are directly used or imported by the Dashboard Viewer Django app. For better maintainability, every direct dependency has a small description in front of it, so any developer knows why it is mentioned in the requirements file. The second part of the file contains the indirect dependencies: basically, dependencies of our direct dependencies.

After any update is made to the direct dependencies, the following procedure should be followed:

1. Create a new virtual environment just for the dependencies of this file.
2. Delete the indirect dependencies section of the file.
3. Install all the direct dependencies: pip install -r requirements.txt.
4. Append the result of pip's freeze to the requirements file: pip freeze >> requirements.txt.
5. Remove from the second section of the file the entries duplicated from the first section; in other words, remove the direct dependencies from the indirect dependencies section.

With #185 we intend to start using the pip-compile tool. With it, we can have a file with just the direct dependencies (requirements.in); pip-compile then reads that file and automatically creates a requirements.txt file with all the dependencies, annotated with which package requires each specific dependency. The update process of dependencies will then just be:

1. Install the pip-compile tool: pip install pip-tools.
2. Make the change to the direct dependencies in the requirements.in file (no need for a virtual environment).
3. Call the pip-compile tool on the requirements.in file: pip-compile requirements.in.

## Documentation

The plan is to have all the documentation on this GitBook; any other place that might require some description/information should point to this GitBook, so we maintain a common place for all the documentation. This way we can make sure that the code and the documentation stay together, since on a pull request for a specific feature or a bug fix, the associated documentation should be changed with it.

The manual was written in RMarkdown using the bookdown package. All the code is stored in the docs/src directory, as well as the script to build all the documentation. Do not change the files in the root of the docs directory, because those files will be removed during the build process and replaced by new ones. Therefore, to update this documentation, apply the changes to the files in the docs/src directory. To build the documentation you need to have R installed; on UNIX-based systems you only need to run sh _build.sh in the docs/src directory.

In this documentation we also describe all the settings around the dashboards that are used on the EHDEN project. To avoid an extensive table of contents, and also to avoid having a big chapter page for dashboards, we configured this GitBook to split different sections into different pages. A section on the GitBook is mapped to markdown heading elements of level 2 (H2 or ##). This is, however, inconvenient for small chapters like the preface (index.Rmd). To make a chapter render all its sections on the same page, instead of using headings of level 2 (##) you should use level 3 (###). Although this makes the section numeration start at 0 (e.g. 1.0.1, 1.0.2), we appended {-} to the section titles so that the numeration does not show. If a new file is created with more documentation, its name should be placed, including extension, in the desired location in this list of the docs/src/_bookdown.yml file.

# Chapter 8 Code Documentation

## Apps

### Materialized Queries Manager

#### Models

This app has only one model, MaterializedQuery, which maps to a Postgres materialized view. To avoid having to maintain consistency between the records of this Django app and the Postgres materialized views:

- the managed Meta flag was set to False, so Django does not create migrations for the model;
- the db_table Meta flag was set to the name of the table where Postgres stores the information about the existing materialized views (pg_matviews);
- the fields of the model, matviewname and definition, use the same name and type as the ones of the pg_matviews Postgres table.
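Putting the three points above together, the unmanaged model can look roughly like this (a sketch: the field types and the primary key choice are assumptions, check the app's models.py for the real definition):

```python
from django.db import models


class MaterializedQuery(models.Model):
    # same names and types as the columns of Postgres's pg_matviews view;
    # using matviewname as primary key is an assumption of this sketch
    matviewname = models.CharField(max_length=63, primary_key=True)
    definition = models.TextField()

    class Meta:
        managed = False            # Django generates no migrations for it
        db_table = "pg_matviews"   # read straight from the Postgres catalog
```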
#### Views

This app has no view exposed, since all operations on the MaterializedQuery model are expected to be performed in the Django admin app. However, we had to change Django's default behavior for the create, update and delete operations of the model.

For the delete operation, we overrode the delete method of the MaterializedQuery Django model to just execute a DROP MATERIALIZED VIEW SQL statement.

For creation and update, we had to change some internal methods of the ModelAdmin base class of Django's admin app:

- _changeform_view: where model records were being created. Instead, CREATE MATERIALIZED VIEW and ALTER MATERIALIZED VIEW SQL statements are executed. However, since some materialized views might take some time to build, creating a record like this could lead to a browser timeout. We therefore decided to execute these statements in a celery background task. The main changes were made here, where we launch the background task.
- response_add: since the materialized view might not be created the right way, saying "A record was created successfully" is not adequate. We therefore changed the message presented after the creation to tell the id of the background task that is creating the materialized query. The result of the query can then be consulted on the associated Task Results record, in the Celery Results section of the Django admin console.
- response_change: changes here with the same ideas as response_add.

If any catalogue results files are being uploaded to the platform, any worker attempting to create or change a materialized view will block until that data is uploaded. Also, if any worker is creating materialized views, no other worker can upload catalogue results data.

Through the admin console there is also the possibility to refresh a specific MaterializedQuery. To do so, on the list view, select the MaterializedQueries to refresh, then on the actions dropdown select "Refresh selected materialized queries". Once again, to avoid timeouts, such operations are executed on a background task.
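The real changes live in the app's admin and tasks modules; as a rough sketch of the background-task idea described above (the task name, signature, and the omitted lock handling are assumptions of this sketch, not the project's actual API):

```python
from celery import shared_task
from django.db import connection


@shared_task
def create_materialized_view(matviewname: str, definition: str):
    # DDL identifiers can't be passed as query parameters, so this sketch
    # assumes both values were validated by the admin form beforehand
    with connection.cursor() as cursor:
        cursor.execute(f"CREATE MATERIALIZED VIEW {matviewname} AS {definition}")
```

response_add and response_change can then report this task's id, and the outcome ends up on the Celery Results backend as described above.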
### Tabs Manager

Currently this app is not being used and its URL mapping was deleted. To use it again, uncomment the tabsManager line in the dashboard_viewer/dashboard_viewer/urls.py file. Then you can access the tabs page through the [BASE_URL]/tabs/ URL.

#### Views

In this app there is only one view. It is a simple page with just a sidebar to choose which dashboard to display on an iframe. Despite the simplicity, all the animations around the sidebar buttons are handled by CSS, with some JS that adds and removes classes on HTML elements as events (hover and click) happen. To facilitate the development process of the CSS, SCSS was used to build the styling of the view. It prevents duplication with the addition of variables, and adds the possibility to express parent classes by nesting their declarations.

In cases where there are a lot of buttons on the sidebar, some buttons might become impossible to reach, since they are out of the field of view. To avoid this we make use of SimpleBar, which makes the sidebar scrollable, displaying a scroll bar on the right side of the sidebar whenever there are elements outside of the field of view.

#### API

There is one endpoint, [BASE_URL]/api/, where a JSON object of tabs, and groups of tabs and their sub-tabs, is returned.

#### Models

### Uploader

#### Views

This app exposes three views:

1. Creation of a data source
2. Editing of a data source
3. Upload, or consult the history of uploads of, catalogue results files

The first one can be accessed through the [BASE_URL]/uploader/[DATA_SOURCE_HASH]/ URL. If no hash is provided on the URL, a random one will be assigned on the creation of the data source. If there is already a data source with the provided hash, the user is redirected to the upload page of that data source.

This view also allows creating data sources without displaying the webform, redirecting directly to the uploader page. This can be achieved by providing the data for several fields of the form as URL arguments, e.g. [BASE_URL]/uploader/[DATA_SOURCE_HASH]/?acronym=test.... This is implemented in such a way that whenever a GET is performed, the view checks the URL arguments and tries to submit the data source form. If it is valid, meaning all the required fields were provided and are valid, the user is redirected to the upload page. Otherwise, all the valid values are set in the form, the invalid ones are discarded, and the data source creation page is presented with no error messages. The country field should contain a value from the ones available on the dropdown presented in the webform, and since coordinates is a two-component value it should be provided as coordinates_0=[LATITUDE]&coordinates_1=[LONGITUDE]. It is important to note that this view does not require a CSRF token, so a normal POST form submission can be performed to create a data source.

The second one can be accessed through the [BASE_URL]/uploader/[DATA_SOURCE_HASH]/edit/ URL, or by clicking the Edit button on the data source upload page.

Finally, on the upload page, a data owner can consult the history of uploads, their state, and eventual error messages if something went wrong. Whenever an upload is made its state will be Pending. After the upload, with a 5-second interval, a request is made to the backend to check if the status of the upload changed. If it failed, an error message is provided in a tooltip above a button with a message icon. Otherwise the state changes to Done and the information about the upload retrieved from the uploaded file, present on the check status request, is filled in.

Related to file uploading: after the file form is submitted, no validation is made and a message is presented to the user telling them that the file is being processed in the background; then the fetch-status process mentioned before starts. If validations were performed before returning a message to the user and they took too much time, the browser could time out. Also, if some unexpected error happened during the insertion process performed in the background, the user would not get any feedback.

Related to the background task that validates and uploads the data, the validation can fail with the following errors:

| Error | Message |
|---|---|
| Invalid Number of Columns | The provided file has an invalid number of columns |
| Invalid CSV File Format | The provided file has an invalid CSV format. Make sure is a text file separated by commas and you either have 7 (regular results file) or 13 (results file with dist columns) columns. |
| Missing Required Fields | Some rows have null values either on the column analysis_id or count_value |
| Invalid Field Types | The provided file has invalid values on some columns. Remember that only the "stratum_*" columns accept strings, all the other fields expect numeric types. |
| Duplicated Metadata Rows | Analysis id[output] duplicated on multiple rows. Try (re)running the plugin CatalogueExport on your database. |
| Missing Required Metadata Row | Analysis id 0 is missing. Try (re)running the plugin CatalogueExport on your database. |

Any other error is considered an unexpected error and the following message will be presented: "An unexpected error occurred while processing your file. Please contact the system administrator for more details.".
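As a rough illustration of the first check in the table (the function and constant names here are hypothetical; the real background task performs all six validations):

```python
import csv

REGULAR_COLUMNS = 7   # regular results file
DIST_COLUMNS = 13     # results file with dist columns


def has_valid_column_count(path):
    # checks every row against the two accepted layouts
    with open(path, newline="") as csv_file:
        return all(
            len(row) in (REGULAR_COLUMNS, DIST_COLUMNS)
            for row in csv.reader(csv_file)
        )
```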
If the file passes all validations, it goes to the upload data phase. Here, if workers are creating or refreshing materialized queries, the worker blocks. If there are other workers inserting data for the same data source, it will also block. However, several workers for different data sources can insert data at the same time. After inserting the data, each worker checks if it is the only worker inserting data. If so, it refreshes the existing materialized queries; otherwise the next worker to finish inserting data will do the same check.

#### Widgets

For the data source form, two custom widgets were created for the following fields:

- Database Type: to avoid having duplicate entries with the same meaning (e.g. Hospital, hospital), the input of this field has an autocomplete list where existing values are suggested to the user. Also, before the field is saved to the database, spaces are trimmed and the values are transformed into title case here.
- Coordinates: (1) this is a two-component field; (2) inserting coordinates by hand is tedious. Considering the previous points, we created a widget with a map built with leaflet, where the user just needs to click on the map.

#### API

This app provides two API endpoints:

- Update data source information: a PATCH request with a JSON object on the body of the request, with the fields and their new values.
- Pending upload status: a GET request that returns JSON data where there is always a status field, which can have three values, each of which can lead to additional data also being present:
  - Pending: the upload in question hasn't finished.
  - Done: the upload finished and there was nothing wrong with the uploaded file. Along with the status, there will be a data field with a JSON object with the fields r_package_version, generation_date, cdm_version, and vocabulary_version, which are data source information extracted from the uploaded file.
  - Failed: the upload finished but there was something wrong with the uploaded file. Along with the status, there will be a failure_msg field telling the reason for the failure.

#### Models

Country data is loaded on a fresh installation through the docker-init.sh script, if no records are present in the Country table.

The DataSource model doesn't have a foreign key to the DatabaseType model, to facilitate the creation of the SQL queries that feed Superset's dashboards. The DatabaseType model is used anyway to have a faster way of checking if a certain database type already exists on the database, avoiding going through every DataSource record.

The same situation as with the DatabaseType model also happens between the UploadHistory and PendingUpload models: there is no foreign key between an UploadHistory and a PendingUpload. This is because PendingUpload records are deleted once an upload is successful. When the upload view requests the status of a certain upload, it uses the id of the pending upload. If no pending upload is found, it is assumed that the upload was successful, and uploads are searched on the UploadHistory model with the pending_upload_id field equal to that upload id.

Related to where the uploaded files are stored: within the media directory there will be an ACHILLES_RESULTS_STORAGE_PATH directory, which will have a directory for each data source. Within this last directory, files are first uploaded to a failure directory. If the upload is successful, the file is moved to a success directory. In both cases, the file name will be the date of when the file is saved to disk, plus its original extension.
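To make the layout concrete, a hypothetical helper reproducing the path scheme just described; everything here, apart from the directory names taken from the text, is illustrative:

```python
import os
from datetime import datetime


# Hypothetical helper illustrating the layout described above:
# <MEDIA_ROOT>/<ACHILLES_RESULTS_STORAGE_PATH>/<data source>/{failure,success}/<timestamp><ext>
def uploaded_file_path(media_root, storage_path, data_source, original_name, succeeded):
    state_dir = "success" if succeeded else "failure"
    _, ext = os.path.splitext(original_name)
    file_name = datetime.now().strftime("%Y%m%d%H%M%S") + ext
    return os.path.join(media_root, storage_path, data_source, state_dir, file_name)
```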
## JavaScript Packages

While developing the templates for Django views, if a certain javascript library is required, like jquery, one option is to insert script tags on the templates and point the src to a CDN. However, this makes the process of maintaining the libraries tedious, since a developer has to search for and change all the script tags if, for example, they want to update a library's version. To avoid this problem we have a package.json file where we define all the libraries that we use and their versions, and we add the node_modules directory as a static files directory. With this alternative, updating a library is as simple as changing a version number in the package.json file, running npm install, and collecting the static files again.

# Chapter 9 Dashboards

## 9.1 Network Dashboard

### Label Colors

In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page:

```json
"label_colors": {
    "Male": "#3366FF",
    "Female": "#FF3399"
}
```

### CSS

To hide the dashboard header, insert the following css code into the CSS field on the edit page:

```css
.dashboard > div:not(.dashboard-content) { /* dashboard header */
    display: none;
}
```

With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so the Edit Dashboard button can show again.

### Filters

- Country Filter. Dataset: Materialized View meta_data_table
- Database Type Filter. Dataset: Materialized View meta_data_table
- Data Source Filter. Dataset: Materialized View meta_data_table

### Overview Tab

- Countries. Dataset: Materialized View meta_data_table
- Data Sources. Dataset: Materialized View meta_data_table
- Datasource Types. Dataset: Materialized View meta_data_table
- Patients. Dataset: Materialized View meta_data_table
- Patients. Dataset: Materialized View meta_data_table
- Patients by Country. Dataset: Materialized View meta_data_table
- Database Types per Country. Dataset: Materialized View meta_data_table
- Meta Data. Dataset: Materialized View meta_data_table

### Demographics Tab

- Number of Patients. Dataset: Materialized View number_of_patients
- Gender Table. Dataset: Materialized View gender
- Gender Pie. Dataset: Materialized View gender
- Age at first observation Table. Dataset: Materialized View age1observation_table
- Age at first observation Bar Chart. Dataset: Materialized View age1observation_bar_chart
- Distribution of age at first observation period. Dataset: Materialized View distribution_of_age_at_first_observation_period
- Year of Birth. Dataset: Materialized View year_of_birth

### Data Domains Tab

- Average number of records per person. Dataset: Materialized View avg_num_of_records_per_person
- Total number of records. Dataset: Materialized View data_domain_total_num_of_records
- Number of distinct visit occurrence concepts per person. Dataset: Materialized View number_of_distinct_per_person
- Number of distinct condition occurrence concepts per person. Dataset: Materialized View number_of_distinct_per_person
- Number of distinct procedure occurrence concepts per person. Dataset: Materialized View number_of_distinct_per_person
- Number of distinct drug exposure concepts per person. Dataset: Materialized View number_of_distinct_per_person
- Number of distinct observation occurrence concepts per person. Dataset: Materialized View number_of_distinct_per_person
- Number of distinct measurement occurrence concepts per person. Dataset: Materialized View
number_of_distinct_per_person

### Data Provenance Tab

- Dataset: Materialized View data_provenance

### Observation Period Tab

- Number of Patients in Observation Period. Dataset: Materialized View num_of_patients_in_observation_period
- Cumulative Observation Period. Dataset: Materialized View cumulative_observation_time
- Number of Observation Periods. Dataset: Materialized View number_of_observation_periods
- Length of observation (days) of first observation period. Dataset: Materialized View length_of_observation_of_first_observation_period

### Visit Tab

- Visit Type Graph. Dataset: Materialized View visit_type_bar_chart
- Visit Type. Dataset: Materialized View visit_type_table

### Concept Browser Tab

- Domain Filter. Dataset: Materialized View domain_filter
- Concept Browser. Dataset: Materialized View concept_browser_table3
- Concept Network Coverage. Dataset: Materialized View concept_coverage2

### About Tab

Markdown dashboard components

## 9.2 Database-Level Dashboard

This dashboard is an exact copy of the Network Dashboard, but several legends and fields displayed on the original are hidden, either through CSS or by changing some chart settings. In the following sections we only present the things to change on the original charts.

### Label Colors

In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page:

```json
"label_colors": {
    "Male": "#3366FF",
    "Female": "#FF3399"
}
```

### CSS

To hide the dashboard header, insert the following css code into the CSS field on the edit page:

```css
/* hides the filter badges on right side of charts */
.dashboard-filter-indicators-container {
    display: none;
}

/* hides the acronym filter */
.grid-content > .dragdroppable.dragdroppable-row > .with-popover-menu {
    display: none;
}

/*
 * WARNING panel 1 id hardcoded
 * Hides the X Axis Label of the heatmap on the Data Domains tab
 */
#TABS-nlIU6H5mcT-pane-1 g.x.axis > g.tick text {
    display: none;
}

/*
 * WARNING panel 2 id hardcoded
 * Hides the X Axis Labels of the bar charts on the Data Provenance tab
 */
#TABS-nlIU6H5mcT-pane-2 g.nv-x.nv-axis.nvd3-svg > g.nvd3.nv-wrap.nv-axis > g > g.tick.zero > text {
    display: none;
}
```

With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so the Edit Dashboard button can show again.

### Data Source Filter - hidden

Dataset: data_source table of the achilles database. For the filter to work, the name of the fields to filter should match in all tables used on the charts of this dashboard.

### Demographics Tab

- Number of Patients: no changes
- Gender Table: no changes
- Gender Pie: no changes
- Age at first observation - Bars: remove legend (Customize Tab, Chart Options, Legend: off)
- Distribution of age at first observation period: no changes
- Year of Birth: remove legend (Customize Tab, Chart Options, Legend: off)

### Data Domains Tab

- Average number of records per person: no changes
- Total number of records: no changes
- Data Density Plot. Dataset: Materialized View data_density
- Records per person. Dataset: Materialized View records_per_person
- Concepts per person. Dataset: Materialized View number_of_distinct_per_person

### Data Provenance Tab

- Type Concepts. Dataset: Materialized View data_provenance

### Observation Period Tab

- Number of Patients in Observation Period: remove legend
(Customize Tab, Chart Options, Legend: off)
- Length of observation (days) of first observation period: no changes
- Cumulative Observation Period: remove legend (Customize Tab, Chart Options, Legend: off)
- Number of Observation Periods: no changes

### Visit Tab

- Visit Type Graph: remove legend (Customize Tab, Chart Options, Legend: off)
- Visit Type Table: remove the name field from the columns to display (Data Tab, Query, Columns: visit_type, num_persons, percent_persons with label persons (%), records_per_person)
- Visit Age Distribution. Dataset: Materialized View visit_age_distribution

### Concept Browser Tab

- Domain Filter: no changes
- Concept Browser. Dataset: Materialized View concept_browser_table2

### Meta Data Tab

- Meta Data. Dataset: Materialized View meta_data_table

## 9.3 Materialized views

**meta_data_table**

```sql
SELECT data_source.acronym, data_source.name, data_source.database_type, country.country,
    p.count_value AS number_of_patients, a.stratum_2 AS source_release_date,
    a.stratum_3 AS cdm_release_date, a.stratum_4 AS cdm_version,
    a.stratum_5 AS vocabulary_version, p.stratum_3 AS execution_date, p.stratum_2 AS package_version
FROM (((achilles_results a
    JOIN data_source ON ((a.data_source_id = data_source.id)))
    JOIN country ON ((data_source.country_id = country.id)))
    JOIN ( SELECT achilles_results.count_value, achilles_results.data_source_id,
            achilles_results.stratum_2, achilles_results.stratum_3
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 0)) p ON ((p.data_source_id = data_source.id)))
WHERE (a.analysis_id = 5000);
```

**patients_per_country_and_database_type**

```sql
SELECT country.country, source.database_type, achilles.count_value
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = 1);
```

**number_of_patients**

```sql
SELECT achilles_results.count_value, data_source.name, data_source.acronym, data_source.database_type, country.country
FROM ((achilles_results
    JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
    JOIN country ON ((data_source.country_id = country.id)))
WHERE (achilles_results.analysis_id = 1);
```

**gender**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    concept.concept_name AS gender, achilles.count_value
FROM (((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
    JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text)))
WHERE (achilles.analysis_id = 2);
```

**age1observation_table**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    sum( CASE WHEN ((achilles.stratum_2)::integer < 10) THEN achilles.count_value ELSE NULL::bigint END) AS "0-10",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 10) AND ((achilles.stratum_2)::integer < 20)) THEN achilles.count_value ELSE NULL::bigint END) AS "10-20",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 20) AND ((achilles.stratum_2)::integer < 30)) THEN achilles.count_value ELSE NULL::bigint END) AS "20-30",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 30) AND ((achilles.stratum_2)::integer < 40)) THEN achilles.count_value ELSE NULL::bigint END) AS "30-40",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 40) AND ((achilles.stratum_2)::integer < 50)) THEN achilles.count_value ELSE NULL::bigint END) AS "40-50",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 50) AND ((achilles.stratum_2)::integer < 60)) THEN achilles.count_value ELSE NULL::bigint END) AS "50-60",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 60) AND ((achilles.stratum_2)::integer < 70)) THEN achilles.count_value ELSE NULL::bigint END) AS "60-70",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 70) AND ((achilles.stratum_2)::integer < 80)) THEN achilles.count_value ELSE NULL::bigint END) AS "70-80",
    sum( CASE WHEN (((achilles.stratum_2)::integer >= 80) AND ((achilles.stratum_2)::integer < 90)) THEN achilles.count_value ELSE NULL::bigint END) AS "80-90",
    sum( CASE WHEN ((achilles.stratum_2)::integer >= 90) THEN achilles.count_value ELSE NULL::bigint END) AS "90+"
FROM (((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
    JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text)))
WHERE (achilles.analysis_id = 102)
GROUP BY source.name, source.acronym, source.database_type, country.country;
```

**age1observation_bar_chart**

```sql
SELECT source.name, (achilles.stratum_1)::integer AS age, achilles.count_value AS count,
    source.acronym, source.database_type, country.country
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 101);
```

**distribution_of_age_at_first_observation_period**

```sql
SELECT source.name, source.acronym, country.country, achilles.count_value,
    achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median,
    achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value, achilles.min_value
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = 103)
ORDER BY source.name;
```

**year_of_birth**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    achilles.stratum_1 AS "Birth_year", achilles.count_value AS count
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 3);
```

**avg_num_of_records_per_person**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    CASE
        WHEN (achilles.analysis_id = 201) THEN 'Visit'::text
        WHEN (achilles.analysis_id = 401) THEN 'Condition'::text
        WHEN (achilles.analysis_id = 501) THEN 'Death'::text
        WHEN (achilles.analysis_id = 601) THEN 'Procedure'::text
        WHEN (achilles.analysis_id = 701) THEN 'Drug Exposure'::text
        WHEN (achilles.analysis_id = 801) THEN 'Observation'::text
        WHEN (achilles.analysis_id = 1801) THEN 'Measurement'::text
        WHEN (achilles.analysis_id = 2101) THEN 'Device'::text
        WHEN (achilles.analysis_id = 2201) THEN 'Note'::text
        ELSE NULL::text
    END AS data_domain,
    (sum(achilles.count_value) / avg(counts.num_persons)) AS records_per_person
FROM (((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
    JOIN ( SELECT achilles_results.data_source_id, achilles_results.count_value AS num_persons
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 1)) counts ON ((achilles.data_source_id = counts.data_source_id)))
GROUP BY achilles.analysis_id, source.name, source.acronym, source.database_type, country.country
HAVING (achilles.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (501)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint, (2201)::bigint]));
```

**data_domain_total_num_of_records**

```sql
SELECT data_source.name, data_source.acronym, data_source.database_type, country.country,
    CASE
        WHEN (achilles_results.analysis_id = 201) THEN 'Visit'::text
        WHEN (achilles_results.analysis_id = 401) THEN 'Condition'::text
        WHEN (achilles_results.analysis_id = 501) THEN 'Death'::text
        WHEN (achilles_results.analysis_id = 601) THEN 'Procedure'::text
        WHEN (achilles_results.analysis_id = 701) THEN 'Drug Exposure'::text
        WHEN (achilles_results.analysis_id = 801) THEN 'Observation'::text
        WHEN (achilles_results.analysis_id = 1801) THEN 'Measurement'::text
        WHEN (achilles_results.analysis_id = 2101) THEN 'Device'::text
        WHEN (achilles_results.analysis_id = 2201) THEN 'Note'::text
        ELSE NULL::text
    END AS data_domain,
    sum(achilles_results.count_value) AS count
FROM ((achilles_results
    JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
    JOIN country ON ((country.id = data_source.country_id)))
GROUP BY data_source.name, data_source.acronym, data_source.database_type, country.country, achilles_results.analysis_id
HAVING (achilles_results.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (501)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint, (2201)::bigint]));
```

**number_of_distinct_per_person**

```sql
SELECT source.name, source.acronym, country.country, achilles.analysis_id,
    CASE
        WHEN (achilles.analysis_id = 203) THEN 'Visit'::text
        WHEN (achilles.analysis_id = 403) THEN 'Condition'::text
        WHEN (achilles.analysis_id = 603) THEN 'Procedure'::text
        WHEN (achilles.analysis_id = 703) THEN 'Drug Exposure'::text
        WHEN (achilles.analysis_id = 803) THEN 'Observation'::text
        WHEN (achilles.analysis_id = 1803) THEN 'Measurement'::text
        ELSE NULL::text
    END AS data_domain,
    achilles.count_value, achilles.min_value, achilles.p10_value AS p10, achilles.p25_value AS p25,
    achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = ANY (ARRAY[(203)::bigint, (403)::bigint, (603)::bigint, (703)::bigint, (803)::bigint, (1803)::bigint]))
ORDER BY source.name;
```

**data_provenance**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    CASE
        WHEN (achilles.analysis_id = 405) THEN 'Condition'::text
        WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text
        WHEN (achilles.analysis_id = 705) THEN 'Drug'::text
        WHEN (achilles.analysis_id = 805) THEN 'Observation'::text
        WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text
        WHEN (achilles.analysis_id = 2105) THEN 'Device'::text
        ELSE 'Other'::text
    END AS domain_name,
    c1.concept_name, sum(achilles.count_value) AS num_records
FROM (((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
    JOIN concept c1 ON ((achilles.stratum_2 = (c1.concept_id)::text)))
WHERE (achilles.analysis_id = ANY (ARRAY[(405)::bigint, (605)::bigint, (705)::bigint, (805)::bigint, (1805)::bigint, (2105)::bigint]))
GROUP BY source.name, source.acronym, source.database_type, country.country, c1.concept_name,
    CASE
        WHEN (achilles.analysis_id = 405) THEN 'Condition'::text
        WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text
        WHEN (achilles.analysis_id = 705) THEN 'Drug'::text
        WHEN (achilles.analysis_id = 805) THEN 'Observation'::text
        WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text
        WHEN (achilles.analysis_id = 2105) THEN 'Device'::text
        ELSE 'Other'::text
    END;
```

**num_of_patients_in_observation_period**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    to_date(achilles.stratum_1, 'YYYYMM'::text) AS date, achilles.count_value AS "Nr_patients"
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 110);
```

**cumulative_observation_time**

```sql
SELECT data_source.name, data_source.acronym, data_source.database_type, country.country,
    cumulative_sums.xlengthofobservation,
    round((cumulative_sums.cumulative_sum / (totals.total)::numeric), 5) AS ypercentpersons
FROM (((( SELECT achilles_results.data_source_id,
            ((achilles_results.stratum_1)::integer * 30) AS xlengthofobservation,
            sum(achilles_results.count_value) OVER (PARTITION BY achilles_results.data_source_id ORDER BY (achilles_results.stratum_1)::integer DESC) AS cumulative_sum
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 108)) cumulative_sums
    JOIN ( SELECT achilles_results.data_source_id, achilles_results.count_value AS total
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 1)) totals ON ((cumulative_sums.data_source_id = totals.data_source_id)))
    JOIN data_source ON ((cumulative_sums.data_source_id = data_source.id)))
    JOIN country ON ((country.id = data_source.country_id)))
ORDER BY data_source.name, cumulative_sums.xlengthofobservation;
```

**number_of_observation_periods**

```sql
SELECT ar.data_source_id AS id, ds.acronym, ds.name, country.country, ar.stratum_1, ar.count_value,
    pa.nrpatients AS patients,
    round((((100)::numeric * (ar.count_value)::numeric) / (pa.nrpatients)::numeric), 2) AS percentage
FROM (((achilles_results ar
    JOIN data_source ds ON ((ds.id = ar.data_source_id)))
    JOIN country ON ((ds.country_id = country.id)))
    JOIN ( SELECT achilles_results.count_value AS nrpatients, achilles_results.data_source_id
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 0)) pa ON ((pa.data_source_id = ds.id)))
WHERE (ar.analysis_id = 113);
```

**length_of_observation_of_first_observation_period**

```sql
SELECT source.name, source.acronym, country.country, achilles.count_value, achilles.min_value,
    achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median,
    achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value
FROM ((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = 105)
ORDER BY source.name;
```

**visit_type_bar_chart**

```sql
SELECT data_source.name, data_source.acronym, data_source.database_type, country.country,
    concept.concept_name, achilles_results.count_value AS num_persons
FROM (((( SELECT achilles_results_1.id, achilles_results_1.analysis_id, achilles_results_1.stratum_1,
            achilles_results_1.stratum_2, achilles_results_1.stratum_3, achilles_results_1.stratum_4,
            achilles_results_1.stratum_5, achilles_results_1.count_value, achilles_results_1.data_source_id,
            achilles_results_1.avg_value, achilles_results_1.max_value, achilles_results_1.median_value,
            achilles_results_1.min_value, achilles_results_1.p10_value, achilles_results_1.p25_value,
            achilles_results_1.p75_value, achilles_results_1.p90_value, achilles_results_1.stdev_value
        FROM achilles_results achilles_results_1
        WHERE (achilles_results_1.analysis_id = 200)) achilles_results
    JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
    JOIN country ON ((country.id = data_source.country_id)))
    JOIN concept ON (((achilles_results.stratum_1)::integer = concept.concept_id)));
```

**visit_type_table**

```sql
SELECT data_source.name, data_source.acronym, data_source.database_type, country.country,
    concept.concept_name, ar1.count_value AS num_persons,
    round(((100.0 * (ar1.count_value)::numeric) / (denom.count_value)::numeric), 2) AS percent_persons,
    round(((1.0 * (ar2.count_value)::numeric) / (ar1.count_value)::numeric), 2) AS records_per_person
FROM (((((( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1,
            achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4,
            achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id,
            achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value,
            achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value,
            achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 200)) ar1
    JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1,
            achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4,
            achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id,
            achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value,
            achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value,
            achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 201)) ar2 ON (((ar1.stratum_1 = ar2.stratum_1) AND (ar1.data_source_id = ar2.data_source_id))))
    JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1,
            achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4,
            achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id,
            achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value,
            achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value,
            achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 1)) denom ON ((ar1.data_source_id = denom.data_source_id)))
    JOIN data_source ON ((data_source.id = ar1.data_source_id)))
    JOIN country ON ((country.id = data_source.country_id)))
    JOIN concept ON (((ar1.stratum_1)::integer = concept.concept_id)))
ORDER BY ar1.data_source_id, ar1.count_value DESC;
```

**domain_filter**

```sql
SELECT concept.concept_name, concept.domain_id, source.name, source.acronym, source.database_type, country.country
FROM (((achilles_results
    JOIN concept ON (((achilles_results.stratum_1)::bigint = concept.concept_id)))
    JOIN data_source source ON ((achilles_results.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
WHERE (achilles_results.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (901)::bigint, (1001)::bigint, (1801)::bigint, (200)::bigint, (400)::bigint, (600)::bigint, (700)::bigint, (800)::bigint, (1800)::bigint]));
```

**concept_browser_table3**

```sql
SELECT source.name, source.acronym, source.database_type, country.country,
    (((('<a href="https://athena.ohdsi.org/search-terms/terms/'::text || ar1.concept_id) || '" target="_blank">'::text) || ar1.concept_id) || '</a>'::text) AS concept_id,
    concept.concept_name, concept.domain_id, (ar1.rc)::integer AS rc, (ar2.drc)::integer AS drc
FROM ((((( SELECT achilles_results.data_source_id, achilles_results.analysis_id,
            achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc
        FROM achilles_results
        WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
    JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc
        FROM achilles_results ar
        WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
    JOIN data_source source ON ((ar1.data_source_id = source.id)))
    JOIN country ON ((source.country_id = country.id)))
    JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
ORDER BY ((ar2.drc)::integer) DESC;
```

**concept_coverage2**

```sql
SELECT source.name AS source_name, source.database_type, country.country,
    (((('<a href="https://athena.ohdsi.org/search-terms/terms/'::text || concept.concept_id) || '" target="_blank">'::text) || concept.concept_id) || '</a>'::text) AS concept_id,
    concept.concept_name, concept.domain_id, sum((ar1.rc)::integer) AS rc, sum((ar2.drc)::integer) AS drc
FROM ((((( SELECT achilles_results.data_source_id, achilles_results.analysis_id,
            achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc
        FROM achilles_results
        WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
    JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc
        FROM achilles_results ar
        WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
    JOIN data_source source ON ((ar1.data_source_id = source.id)))
    JOIN country ON ((country.id = source.country_id)))
    JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
GROUP BY source.name, source.database_type, country.country, concept.domain_id, concept.concept_id, concept.concept_name;
```

**data_density**

```sql
SELECT source.acronym, t1.table_name AS series_name,
    to_date(t1.stratum_1, 'YYYYMM'::text) AS x_calendar_month, t1.count_value AS y_record_count
FROM (( SELECT achilles_results.data_source_id AS id, 'Visit occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 220)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Condition occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 420)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Death'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 502)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Procedure occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 620)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Drug exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 720)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Observation'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 820)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Drug era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 920)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Device Exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 2120)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Condition era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 1020)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Observation period'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 111)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Measurement'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 1820)) t1
    JOIN data_source source ON ((source.id = t1.id)))
ORDER BY t1.table_name, ( CASE WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1 ELSE NULL::text END)::integer;
```

**records_per_person**

```sql
SELECT source.acronym, t1.table_name AS series_name,
    to_date(t1.stratum_1, 'YYYYMM'::text) AS x_calendar_month,
    round(((1.0 * (t1.count_value)::numeric) / (denom.count_value)::numeric), 5) AS y_record_count
FROM ((( SELECT achilles_results.data_source_id AS id, 'Visit occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 220)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Condition occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 420)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Death'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 502)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Procedure occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 620)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Drug exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 720)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Observation'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 820)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Device exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 2120)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Drug era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 920)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Condition era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 1020)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Observation period'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 111)
    UNION ALL
        SELECT achilles_results.data_source_id AS id, 'Measurement'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value
        FROM achilles_results WHERE (achilles_results.analysis_id = 1820)) t1
    JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1,
            achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4,
            achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id,
            achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value,
            achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value,
            achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value
        FROM achilles_results
        WHERE (achilles_results.analysis_id = 117)) denom ON (((t1.stratum_1 = denom.stratum_1) AND (t1.id = denom.data_source_id))))
    JOIN data_source source ON ((source.id = t1.id)))
ORDER BY t1.table_name, ( CASE WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1 ELSE NULL::text END)::integer;
```

**visit_age_distribution**

```sql
SELECT source.name, source.acronym, c1.concept_name, c2.concept_name AS gender,
    achilles.count_value, achilles.p10_value AS p10, achilles.p25_value AS p25,
    achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90
FROM (((achilles_results achilles
    JOIN data_source source ON ((achilles.data_source_id = source.id)))
    JOIN concept c1 ON ((achilles.stratum_1 = (c1.concept_id)::text)))
    JOIN concept c2 ON ((achilles.stratum_2 = (c2.concept_id)::text)))
WHERE (achilles.analysis_id = 206)
ORDER BY source.name, c1.concept_name, c2.concept_name;
```

**concept_browser_table2**

```sql
SELECT source.acronym,
    (((('<a href="https://athena.ohdsi.org/search-terms/terms/'::text || ar1.concept_id) || '" target="_blank">'::text) || ar1.concept_id) || '</a>'::text) AS concept_id,
    concept.concept_name, concept.domain_id, (ar1.rc)::integer AS rc, (ar2.drc)::integer AS drc
FROM (((( SELECT achilles_results.data_source_id, achilles_results.analysis_id,
            achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc
        FROM achilles_results
        WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
    JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc
        FROM achilles_results ar
        WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
    JOIN data_source source ON ((ar1.data_source_id = source.id)))
    JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
ORDER BY ((ar2.drc)::integer) DESC;
```

## 9.4 General [Deprecated]

### CSS

To hide the dashboard header, insert the following css code into the CSS field on the edit page:

```css
.dashboard > div:not(.dashboard-content) { /* dashboard header */
    display: none;
}
```

With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so the Edit Dashboard button can show again.
Database Type and Country Filter Figure 9.1: Settings for creating filters charts Theses filter were designed to be used in the dashboard aiming the filtering of the data based on the field database_type and country from the table data_source. For the filters to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query SELECT source.name, country.country, source.database_type, source.acronym FROM public.data_source AS source INNER JOIN public.country AS country ON source.country_id=country.id Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: database_type or country Date Filter: off Instant Filtering: on Total Number of Patients Figure 9.2: Settings for creating the Total Number of Patients chart SQL query SELECT country, database_type, release_date, SUM(count_value) OVER (ORDER BY release_date ASC) FROM achilles_results JOIN data_source ON data_source_id = data_source.id JOIN country ON data_source.country_id = country.id WHERE analysis_id = 1 Chart settings Data Tab Datasource & Chart Type Visualization Type: Big Number with Trendline Time Time range: No filter Query Metrics: MAX(sum) Series: release_date Breakdowns: source Customize Tab Chart Options Big Number Font Size: Small Subheader Font Size: Tiny Network Growth by Date Figure 9.3: Settings for creating the Network Growth by Date chart SQL query SELECT source.name AS source, country.country, source.database_type, source.release_date, concepts.concept_name AS gender, achilles.count_value as count FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.country AS country ON source.country_id=country.id JOIN ( SELECT '8507' AS concept_id, 'Male' AS concept_name UNION SELECT '8532', 'Female' ) AS concepts ON achilles.stratum_1 = concept_id WHERE analysis_id = 2 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) Series: release_date Breakdowns: source Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis Reduce X ticks: on Patients per Country Figure 9.4: Settings for creating the Patients per Country chart SQL query {#patientsPerCountryQuery} SELECT country.country, source.database_type, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.country AS country ON source.country_id=country.id WHERE analysis_id = 1 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) Series: country Customize Tab Chart Options Legend: off Y Axis Label: N of Patients X Axis X Axis Label: Country Database Types per Country Figure 9.5: Settings for creating the Database Type per Country chart SQL query Same as Patients per Country query Chart settings Data Tab Datasource & Chart Type Visualization Type: Heatmap Time Time range: No filter Query X: country Y: database_type Metric: SUM(countr_value) Heatmap Options Left Margin: 75 Show Percentage: off World Map Figure 9.6: Settings for creating the World Map chart SQL query SELECT name, acronym, database_type, latitude, longitude, country FROM public.data_source AS source INNER JOIN public.country AS country ON source.country_id=country.id Chart settings Data Tab Datasource & Chart Type Visualization 
Type: MapBox Time Time range: No filter Query Longitude: longitude Latitude: latitude Visual Tweaks Map Style: Streets or Light or Outdoors Meta Data Figure 9.7: Settings for creating the Meta Data chart SQL query SELECT acronym, stratum_1 as "name", database_type, country, stratum_2 as "source_release_date", stratum_3 as "cdm_release_date", stratum_4 as "cdm_version", stratum_5 as "vocabulary_version" FROM achilles_results JOIN data_source ON achilles_results.data_source_id = data_source.id JOIN country ON data_source.country_id = country.id WHERE analysis_id=5000 Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: name, source_release_date, cdm_release_date, cdm_version, vocabulary_version "],["person-deprecated.html", "9.5 Person [Deprecated]", " 9.5 Person [Deprecated] Label Colors In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page: "label_colors": { "Male": "#3366FF", "Female": "#FF3399" } CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the Edit Dashboard button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Age at first observation - Table {#age1ObservationTable} Figure 9.9: Settings for creating the Age at First Observation Table chart SQL query SELECT source.name, source.acronym, SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10 THEN count_value END) AS "0-10", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10 AND CAST(stratum_2 AS INTEGER) < 20 THEN count_value END) AS "10-20", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20 AND CAST(stratum_2 AS INTEGER) < 30 THEN count_value END) AS "20-30", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30 AND CAST(stratum_2 AS INTEGER) < 40 THEN count_value END) AS "30-40", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40 AND CAST(stratum_2 AS INTEGER) < 50 THEN count_value END) AS "40-50", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50 AND CAST(stratum_2 AS INTEGER) < 60 THEN count_value END) AS "50-60", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60 AND CAST(stratum_2 AS INTEGER) < 70 THEN count_value END) AS "60-70", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70 AND CAST(stratum_2 AS INTEGER) < 80 THEN count_value END) AS "70-80", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80 AND CAST(stratum_2 AS INTEGER) < 90 THEN count_value END) AS "80-90", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90 THEN count_value END) AS "90+" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 102 GROUP BY name, acronym Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter 
Query Query Mode: Raw Records Columns: name, 0-10, 10-20, 20-30, 30-40, 40-50, 50-60, 60-70, 70-80, 80-90, 90+ Customize Tab Options Show Cell Bars: off Age at first observation - Bars {#age1ObservationBars} Figure 9.10: Settings for creating the Age at First Observation Bar chart SQL query SELECT source.name, cast(stratum_1 AS int) AS Age, count_value AS count, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 101 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count) Series: age Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Count X Axis X Axis Label: Age Reduce X ticks: on Year of Birth {#yearOfBirth} Figure 9.11: Settings for creating the Year of Birth chart SQL query SELECT source.name, source.acronym, stratum_1 AS "Birth_year", count_value AS count FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 3 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count) Series: Birth_year Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Count Extra Controls: on X Axis X Axis Label: Year Reduce X ticks: on Gender Figure 9.12: Settings for creating the Gender chart SQL query SELECT source.name, concept_name AS Gender, count_value AS Number_of_persons, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id JOIN ( SELECT '8507' AS concept_id, 'Male' AS concept_name UNION SELECT '8532' AS concept_id, 'Female' AS concept_name ) AS concepts ON achilles.stratum_1 = concept_id WHERE analysis_id = 2 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(Number_of_persons) Series: acronym Breakdowns: gender Contribution: on Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis Reduce X ticks: on "],["observation-period-deprecated.html", "9.6 Observation Period [Deprecated]", " 9.6 Observation Period [Deprecated] CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. SQL query No SQL query; use the data_source table of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Number of Patients in Observation Period {#numInObservationPeriod} The Number of Patients in Observation Period plot shows the number of patients that contribute at least one day in a specific month. 
Figure 9.13: Settings for creating the Number of Patients in Observation Period chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') as Date, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 110 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Num of Patients Series: date Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Dates Reduce X ticks: on Observation Period Start Dates Figure 9.14: Settings for creating the Observation Period Start Dates chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') AS year_month, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 111 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) with label Patients Series: year_month Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Year Reduce X ticks: on Observation Period End Dates Figure 9.15: Settings for creating the Observation Period End Dates chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') AS year_month, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 112 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) with label Patients Series: year_month Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Year Reduce X ticks: on "],["visit-deprecated.html", "9.7 Visit [Deprecated]", " 9.7 Visit [Deprecated] This dashboard shows the different types of visits per data source (see the Visit Occurrence Table). CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. SQL query No SQL query; use the data_source table of the achilles database. 
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Visit Type Table {#visitTypeTable} Figure 9.16: Settings for creating the Visit Type Table chart SQL query SELECT source.name, source.acronym, concept_name AS "Type", MAX(count_value) AS "Count" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 201 GROUP BY name, acronym, "Type" ORDER BY "Count" DESC Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: name with label Data Source, Type, Count Visit Types Bars Figure 9.17: Settings for creating the Visit Types bar chart SQL query SELECT source.name, source.acronym, concept_name AS "Observation", count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 201 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Observations Series: name Breakdowns: Observation Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis X Axis Label: Databases Reduce X ticks: on "],["death-deprecated.html", "9.8 Death [Deprecated]", " 9.8 Death [Deprecated] CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. SQL query No SQL query; use the data_source table of the achilles database. 
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Number of Records Figure 9.18: Settings for creating the Number of Records chart SQL query SELECT source.name, count_value, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 501 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Count Series: name Customize Tab Chart Options Y Axis Label: Number of Patients X Axis X Axis Label: Databases Reduce X ticks: on Death By Year per Thousand People Figure 9.19: Settings for creating the Death by Year per Thousand People chart SQL query SELECT source.name, source.acronym, EXTRACT(year FROM TO_DATE(stratum_1, 'YYYYMM')) AS Date, count_value FROM public.achilles_results as achilles INNER JOIN public.data_source as source ON achilles.data_source_id=source.id WHERE analysis_id = 502 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Count Series: date Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients (in thousands) X Axis X Axis Label: Years Reduce X ticks: on "],["concepts-browser-deprecated.html", "9.9 Concepts Browser [Deprecated]", " 9.9 Concepts Browser [Deprecated] The concepts browser allows you to search for concepts by name or concept_id in all the data sources you select. No exact numbers of patients or occurrences are provided, only their orders of magnitude. CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. Data Source and Domain Filters Figure 9.1: Settings for creating the Data Source and Domain filter charts For the filters to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. 
SQL query SELECT concept_name, domain_id, source.name AS source_name, source.acronym FROM achilles_results JOIN concept ON cast(stratum_1 AS BIGINT) = concept_id INNER JOIN public.data_source AS source ON data_source_id=source.id WHERE analysis_id in (201, 401, 601, 701, 801, 901, 1001, 1801, 200, 400, 600, 700, 800, 1800) Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: source_name or domain_id Date Filter: off Instant Filtering: on Number of Concepts Figure 9.20: Settings for creating the Number of Concepts chart SQL Query Same as Data Source and Domain filters query Chart settings Data Tab Datasource & Chart Type Visualization Type: Big Number Time Time range: No filter Query Metric: COUNT_DISTINCT(concept_name) with label Concepts Customize Tab Big Number Font Size: Small Subheader Font Size: Tiny Concept Browser Table {#conceptBrowserTable} Figure 9.21: Settings for creating the Concepts Table chart SQL query SELECT q1.concept_id AS concept_id, q1.concept_name AS concept_name, q1.domain_id, source.name AS source_name, source.acronym, sum(q1.count_value) as "Occurrence_count", sum(q1.count_person) as "Person_count", CASE WHEN sum(q1.count_value)<=10 THEN '<=10' WHEN sum(q1.count_value)<=100 THEN '11-10^2' WHEN sum(q1.count_value)<=1000 THEN '10^2-10^3' WHEN sum(q1.count_value)<=10000 THEN '10^3-10^4' WHEN sum(q1.count_value)<=100000 THEN '10^4-10^5' WHEN sum(q1.count_value)<=1000000 THEN '10^5-10^6' ELSE '>10^6' END as "magnitude_occurrences", CASE WHEN sum(q1.count_person)<=10 THEN '<=10' WHEN sum(q1.count_person)<=100 THEN '11-10^2' WHEN sum(q1.count_person)<=1000 THEN '10^2-10^3' WHEN sum(q1.count_person)<=10000 THEN '10^3-10^4' WHEN sum(q1.count_person)<=100000 THEN '10^4-10^5' WHEN sum(q1.count_person)<=1000000 THEN '10^5-10^6' ELSE '>10^6' END AS "magnitude_persons" FROM (SELECT analysis_id, stratum_1 concept_id, data_source_id, concept_name, domain_id, count_value, 0 as count_person FROM achilles_results JOIN concept ON cast(stratum_1 AS BIGINT)=concept_id WHERE analysis_id in (201, 301, 401, 601, 701, 801, 901, 1001, 1801) UNION (SELECT analysis_id, stratum_1 concept_id, data_source_id, concept_name, domain_id, 0 as count_value, sum(count_value) as count_person FROM achilles_results JOIN concept on cast(stratum_1 as BIGINT)=concept_id WHERE analysis_id in (202, 401, 601, 701, 801, 901, 1001, 1801) GROUP BY analysis_id,stratum_1,data_source_id,concept_name,domain_id) ) as q1 INNER JOIN public.data_source AS source ON q1.data_source_id=source.id GROUP BY q1.concept_id,q1.concept_name,q1.domain_id,source.name, acronym ORDER BY "Person_count" desc Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: source_name, concept_id, concept_name, domain_id, magnitude_persons, magnitude_occurrences Customize Tab Options Table Timestamps Format: %Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10 Page Length: 50 Search Box: on Emit Filter Events: on "],["provenance-deprecated.html", "9.10 Provenance [Deprecated]", " 9.10 Provenance [Deprecated] This dashboard shows the provenance of the data in the different data domains. CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. 
Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. SQL query No SQL query; use the data_source table of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Condition & Drug & Procedure & Device & Measurement & Observation Types {#dataProvenanceCharts} Figure 9.22: Settings for creating the Condition, Drug, Procedure, Device, Measurement and Observation charts SQL query All six charts use the same SQL query. SELECT source.name, source.acronym, CASE WHEN analysis_id = 405 THEN 'Condition' WHEN analysis_id = 605 THEN 'Procedure' WHEN analysis_id = 705 THEN 'Drug' WHEN analysis_id = 805 THEN 'Observation' WHEN analysis_id = 1805 THEN 'Measurement' WHEN analysis_id = 2105 THEN 'Device' ELSE 'Other' END AS domain_name, concept_name, SUM(count_value) AS num_records FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept AS c1 ON CAST(stratum_2 AS BIGINT) = concept_id WHERE analysis_id IN (405,605,705,805,1805,2105) GROUP BY source.name, source.acronym, concept_name, CASE WHEN analysis_id = 405 THEN 'Condition' WHEN analysis_id = 605 THEN 'Procedure' WHEN analysis_id = 705 THEN 'Drug' WHEN analysis_id = 805 THEN 'Observation' WHEN analysis_id = 1805 THEN 'Measurement' WHEN analysis_id = 2105 THEN 'Device' ELSE 'Other' END Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(num_records) with label Nr Records Filters: domain_name=Condition or domain_name=Drug or domain_name=Procedure or domain_name=Device or domain_name=Measurement or domain_name=Observation Series: name Breakdowns: concept_name Contribution: on Customize Tab Chart Options Stacked Bars: on "],["data-domains-deprecated.html", "9.11 Data Domains [Deprecated]", " 9.11 Data Domains [Deprecated] CSS To hide the dashboard header, insert the following CSS code into the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this, every time you want to edit the dashboard layout you have to either comment out or remove the inserted CSS so that the Edit Dashboard button shows again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work, the names of the fields to filter must match across all tables used by the charts of this dashboard. SQL query No SQL query; use the data_source table of the achilles database. 
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Average Number of Records per Person {#avgRecordsPerPerson} Figure 9.23: Settings for creating the Average Number of Records per Person chart SQL query SELECT source.name, source.acronym, CASE WHEN analysis_id = 201 THEN 'Visit' WHEN analysis_id = 401 THEN 'Condition' WHEN analysis_id = 501 THEN 'Death' WHEN analysis_id = 601 THEN 'Procedure' WHEN analysis_id = 701 THEN 'Drug Exposure' WHEN analysis_id = 801 THEN 'Observation' WHEN analysis_id = 1801 THEN 'Measurement' WHEN analysis_id = 2101 THEN 'Device' WHEN analysis_id = 2201 THEN 'Note' END AS Data_Domain, SUM(count_value) / AVG(num_persons) AS "records_per_person" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN ( SELECT data_source_id, count_value as num_persons FROM achilles_results WHERE analysis_id = 1) counts ON achilles.data_source_id = counts.data_source_id GROUP BY analysis_id, source.name, source.acronym HAVING analysis_id IN (201, 401, 501, 601, 701, 801, 1801, 2101, 2201) Chart settings Data Tab Datasource & Chart Type Visualization Type: Heatmap Time Time range: No filter Query X: name Y: data_domain Metric: AVG(records_per_person) with label avg records per person Row limit: None Heatmap Options Left Margin: 100 Show Percentage: off "],["404.html", "Page not found", " Page not found The page you requested cannot be found (perhaps it was moved or renamed). You may want to try searching to find the page's new location, or use the table of contents to find the page you are looking for. "]]
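All of the charts in this chapter key on specific analysis_id values of the achilles_results table, so when a chart unexpectedly shows "No results" a quick sanity check is to confirm that the uploaded catalogue results actually contain rows for those analyses. A minimal sketch, run in psql against the achilles database (table names as used in the queries above):

```sql
-- Illustrative check: list which analyses are present and how many rows each has.
-- The chart queries above rely on ids such as 0, 1, 2, 101, 102, 110, 201, 501 and 5000.
SELECT analysis_id, COUNT(*) AS n_rows
FROM public.achilles_results
GROUP BY analysis_id
ORDER BY analysis_id;
```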
+[["index.html", "Data Network Dashboards Chapter 1 Preface", " Data Network Dashboards This document is currently under construction 2022-06-18 Chapter 1 Preface Automated Characterization of Health Information at Large-scale Longitudinal Evidence Systems (ACHILLES) is a profiling tool developed by the OHDSI community to provide descriptive statistics of databases standardized to the OMOP Common Data Model. These characteristics are presented graphically in the ATLAS tool. However, this solution does not allow for database comparison across the data network. The Data Network Dashboards aggregates ACHILLES results files from databases in the network and displays the descriptive statistics through graphical dashboards. This tool is helpful to gain insight in the growth of the data network and is useful for the selection of databases for specific research questions. In the software demonstration we show a first version of this tool that will be further developed in EHDEN in close collaboration with all our stakeholders, including OHDSI. Contributors To develop this tool, EHDEN organized a hack-a-thon (Aveiro, December 2-3, 2019), where we defined and implemented a series of charts and dashboards containing the most relevant information about the OMOP CDM databases. The team involved in this task were composed by the following members: João Rafael Almeida1 André Pedrosa1 Peter R. Rijnbeek2 Marcel de Wilde2 Michel Van Speybroeck3 Maxim Moinat4 Pedro Freire1 Alina Trifan1 Sérgio Matos1 José Luís Oliveira1 1 - Institute of Electronics and Informatics Engineering of Aveiro, Department of Electronics and Telecommunication, University of Aveiro, Aveiro, Portugal 2 - Erasmus MC, Rotterdam, Netherlands 3 - Janssen Pharmaceutica NV, Beerse, Belgium 4 - The Hyve, Utrecht, Netherlands Considerations This manual was written to be a guide for a clean installation of this system with all the dashboards that we defined during the project. The first chapter describes the goal of the system and the second how to install the system. The remaining chapters are dedicated to the dashboards, in which chapters describes one dashboard and all its charts. To simplify the representation of the dashboard’s layout, we used similar schemas as it is presented in Figure 1.1. The white box is the dashboard and the inside boxes are charts. The colour changes in relation to the type of chart. Figure 1.1: Example of a dashboards tool presenting the databases available in the network (simulated data) License The system is open-source and this manual was written in RMarkdown using the bookdown package. Acknowledges This work has been conducted in the context of EHDEN, a project that receives funding from the European Union’s Horizon 2020 and EFPIA through IMI2 Joint Undertaking initiative, under grant agreement No 806968. "],["introduction.html", "Chapter 2 Introduction", " Chapter 2 Introduction The OHDSI research network has been growing steadily which results in an increasing number of healthcare databases standardized to the OMOP CDM format. The OHDSI community created the ACHILLES tool (Automated Characterization of Health Information at Large-scale Longitudinal Exploration System) to characterize those databases. The results are available to the data custodian in their local ATLAS tool and helps them to gain insights in their data and helps in assessing the feasibility of a particular research questions. 
ACHILLES was designed to extract the metadata from a single database, which by itself does not allow the comparison with the remaining databases in the network. However, we believe there is even more value in sharing this information with others to enable network research in a Data Network Dashboard. Data Network Dashboard The European Health Data and Evidence Network (EHDEN) project therefore designed a Data Network Dashboard tool, a web application to aggregate information from distributed OMOP CDM databases. It uses the ACHILLES results files to construct graphical dashboards and enables database comparison (Figure 2.1). The tool is built on Apache Superset, which is an open-source enterprise-ready business intelligence web application that can provide powerful and fully customizable graphical representations of data. Achilles results can be uploaded through the EHDEN Database Catalogue using the dashboards plugin but can also be uploaded directly in the tool. Figure 2.1: Example of a dashboards tool presenting the databases available in the network (simulated data) In this tool, we defined and implemented a series of charts and dashboards containing the most relevant information about the databases, such as: General: dashboards that show the database types per country, the distribution of data source types, the growth of the Network, including the number of databases and the number of patients in the databases over time; Person: representing the number of patients per country, age distribution at first observation, year of birth distribution and normalized gender distribution; Population characteristics: dashboard with the cumulative patient time, persons with continuous observation per month, and the start and end dates of those periods; Visit: chart to compare the number and type of visit occurrence records; Death: information about the number of death records by month, and the patient age at time of death; Concepts: bubble chart which shows the number of patients and records per concept over the databases; Data domains: heat map visualization of the major data domains in each database. "],["installation.html", "Chapter 3 Installation", " Chapter 3 Installation Currently, we use docker to deploy our environment. First Steps Clone the repository with the command git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards. If you already cloned the repository without the --recurse-submodules option, run git submodule update --init to fetch the superset submodule. Create a .env file on the docker directory, using .env-example as a reference, setting all necessary environment variables (SUPERSET_MAPBOX_API_KEY and DASHBOARD_VIEWER_SECRET_KEY). If you will use this application as a third-party application and will iframe it, set the variable SINGLE_APPLICATION_MODE to False and define the host of the main application on the variable MAIN_APPLICATION_HOST. Also make sure to add this last host to the list of ALLOWED_HOSTS. Dashboard Viewer setup If you wish to expose the dashboard viewer app through a specific domain (or domains), you must add it/them to the ALLOWED_HOSTS list on the file dashboard_viewer/dashboard_viewer/settings.py and remove the '*' entry. Build containers’ images: docker-compose build. This might take several minutes. Set up the database and create an admin account for the dashboard viewer app: docker-compose run --rm dashboard ./docker-init.sh. 
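Condensed, the first steps above amount to a shell session along these lines (a sketch; copying the example environment file and the values inside .env are placeholders you must fill in yourself):

```bash
# Sketch of the installation steps above; edit the .env values before building.
git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards.git
cd NetworkDashboards/docker
cp .env-example .env      # then set SUPERSET_MAPBOX_API_KEY, DASHBOARD_VIEWER_SECRET_KEY, ...
docker-compose build      # might take several minutes
docker-compose run --rm dashboard ./docker-init.sh   # database setup + admin account
```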
Insert Concepts The concepts table is not in the repository due to its size, therefore we use the Postgres console directly to insert this table during the installation. Get your concept CSV file from Athena Copy the file into the postgres container docker cp concept.csv dashboard_viewer_postgres_1:/tmp/ Enter the postgres container: docker exec -it dashboard_viewer_postgres_1 bash Enter the achilles database (value of the variable POSTGRES_ACHILLES_DB on the .env file) with the root user (value of the variable POSTGRES_ROOT_USER on the .env file): psql achilles root Create the concept table CREATE TABLE concept ( concept_id INTEGER NOT NULL, concept_name VARCHAR(255) NOT NULL, domain_id VARCHAR(20) NOT NULL, vocabulary_id VARCHAR(20) NOT NULL, concept_class_id VARCHAR(20) NOT NULL, standard_concept VARCHAR(1) NULL, concept_code VARCHAR(50) NOT NULL, valid_start_date DATE NOT NULL, valid_end_date DATE NOT NULL, invalid_reason VARCHAR(1) NULL ); Copy the CSV file content to the table (this could take a while) To get both ' (single quotes) and \" (double quotes) on the concept_name column we use a workaround by setting the quote character to one that should never be in the text. Here we used \\b (backspace). COPY public.concept FROM '/tmp/concept.csv' WITH CSV HEADER DELIMITER E'\\t' QUOTE E'\\b'; Create indexes on the table (this could take a while): CREATE INDEX concept_concept_id_index ON concept (concept_id); CREATE INDEX concept_concept_name_index ON concept (concept_name); Set the owner of the concept table to the achilles user (value of the variable POSTGRES_ACHILLES_USER on the .env file): ALTER TABLE concept OWNER TO achilles; Bring up the containers: docker-compose up -d. Run the command docker-compose run --rm dashboard python manage.py generate_materialized_views to create the materialized views on Postgres. Superset setup Bring up the containers: docker-compose up -d. Make sure that the container superset-init has finished before continuing. It is creating the necessary tables on the database and creating permissions and roles. If you used the default ports: Go to http://localhost to access the dashboard viewer app. Go to http://localhost:8088 to access superset. By default Superset’s admin user credentials are admin/admin. It is recommended that you change the password if you will use this in a production environment. To let anonymous users view dashboards, add the following permissions to the public role: all datasource access on all_datasource_access can csrf token on Superset can dashboard on Superset can explore json on Superset can read on Chart can read on CssTemplate can read on Dashboard For each dashboard you want anonymous users to be able to access, on the dashboard list page click edit (the pencil on the right) and add the “Admin” and “Public” roles to the “Roles with access” field. Dummy data On a fresh installation, there are no achilles_results data so Superset’s dashboards will display “No results”. On the root of this repository, you can find the demo directory where we have an ACHILLES results file with synthetic data that you can upload to a data source on the uploader app of the dashboard viewer (http://localhost/uploader). If you wish to compare multiple data sources, on the demo directory there is also a python script that allows you to generate new ACHILLES results files, where it generates random count values based on the ranges of values for each set of analysis_id and stratums present on a base ACHILLES results file. 
So, from the one ACHILLES results file we provided, you can have multiple data sources with different data. "],["processes.html", "Chapter 4 Processes", " Chapter 4 Processes Data Sources Target: platform user Before uploading any data to this platform, a data source owner has to create a data source instance to then associate the uploaded data with. The creation of a data source is done through the [BASE_URL]/uploader/ URL, where 7 fields are expected: name: an extensive name acronym: a short name country: where the data source is located link (Optional): web page of the data source database type: type of OMOP database coordinates: a more accurate representation of the data source’s location hash (Optional): the internal unique identifier of a data source If you access [BASE_URL]/uploader/ the 7th field (hash) is automatically set to something random; however, if you want to set it, use the [BASE_URL]/uploader/[HASH]/ URL. To avoid duplication on the database type field, this field is transformed (converted to title case and trimmed) and then it is checked whether there is already a record (Database Type) with the same value. There are several ways to create a data source: Create through a web form By accessing the [BASE_URL]/uploader/ URL, you will get a form where you can fill the fields, where the country field is a dropdown and the coordinates field is set through a map widget. Automatically create when performing a GET to the [BASE_URL]/uploader/ URL If the Network Dashboards platform is being used as a third-party application and the main application has all the data for the required fields, the data source can be automatically created and the user is redirected directly to the upload files page. To perform this, each field should be provided as a URL parameter when accessing the [BASE_URL]/uploader/ URL. If all required fields are provided and are valid, the data source is created and the user is redirected to the upload files page. If a required field is missing or is not valid, the webform is presented so the user can manually fill those fields. Automatically create by performing a POST to the [BASE_URL]/uploader/ URL Since the creation URL does not have csrf cookie protection, you can perform a POST request as if you were submitting a form. Notes For the automatic options: Since the coordinates field is composed of two fields (latitude, longitude), it should be submitted as coordinates_0=[latitude] and coordinates_1=[longitude] The country field should match one of the values available on the dropdown of the webform. Draft Status After data owners upload data into their data source, they might not want to make it public right away. To achieve this a data source has a boolean field telling whether the data source is in draft mode. This then also allows creating dashboards with data of non-draft data sources only. There are three ways to change the value of this draft status field: Through the Django admin app ([BASE_URL]/admin/) Accessing the respective edit page of the data source. This requires a feature to be enabled, which is detailed in the Allow Draft Status Updates section of the Customization chapter. Perform a PATCH request to the [BASE_URL]/uploader/[HASH]/ URL. In this request, fields other than the draft status can also be changed. The body of the request must be a JSON object with the fields that will suffer changes and their new values. 
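As an illustration of the PATCH option, a request along these lines would take a data source out of draft mode (the hash, the host, and the exact name of the draft field are assumptions for the sake of the example):

```bash
# Hypothetical example: clear the draft status of the data source with hash "abc123".
curl -X PATCH "http://localhost/uploader/abc123/" \
     -H "Content-Type: application/json" \
     -d '{"draft": false}'
```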
Catalogue Results Files Target: platform user Once a data source is created, you can access its upload page by accessing the [BASE_URL]/uploader/[HASH]/. If no data source has the provided hash, you will be redirected back to the data source creation form. On the upload page you can: Go to the edit page of your data source Upload a catalogue results file Check the upload history A catalogue results file is a CSV file, the result obtained after running the EHDEN/CatalogueExport R package on an OMOP database. It is a variant of OHDSI/Achilles that extracts only a subset of the original ACHILLES analyses. The upload form expects a CSV file with the following columns: Name Type Required/Non-Nullable/Non-Empty analysis_id int Yes stratum_1 string No stratum_2 string No stratum_3 string No stratum_4 string No stratum_5 string No count_value int Yes min_value double No max_value double No avg_value double No stdev_value double No median_value double No p10_value double No p25_value double No p75_value double No p90_value double No The uploaded file must: either contain the first 7 columns OR all 16 columns contain the columns in the same order as presented in the table above While parsing the uploaded file, some data is extracted to then be presented on the Upload history and to update data source information. This data is extracted from the record with analysis id 0, which is required to be present on the file, and 5000, which is optional. The extracted data and their descriptions are presented next: R Package Version: the version of CatalogueExport R package used Generation Date: date at which the CatalogueExport was executed on the OMOP database Source Release Date: date at which the OMOP database was released CDM Release Date: date at which the used CDM version was released CDM Version: version of the CDM used Vocabulary Version: version of the vocabulary used The next table shows where the previous data is stored on the rows with analysis ids 0 and 5000: Analysis Id Stratum 1 Stratum 2 Stratum 3 Stratum 4 Stratum 5 0 R Package Version Generation Date 5000 Source Release Date CDM Release Date CDM Version Vocabulary Version Materialized Views Target: admin user For each chart, Superset has an underlying SQL query which in our case is run every time a chart is rendered. If one of these queries takes too long to execute, the charts will also take too long to render and eventually users might get timeout messages, giving a bad user experience. To avoid this problem, instead of executing the raw SQL query we create a postgres materialized view of the query, which is then used to feed the data to the chart. So only a simple SELECT x FROM x query is executed when a chart is rendered. So whenever I create a chart I have to access the Postgres console? No, we created an unmanaged Materialized Queries model that maps to the materialized views on Postgres. With it you can create new materialized views through the Django admin app, by accessing the [BASE_URL]/admin/ URL. You have to provide the materialized view name and its query, which will then be used to execute the query CREATE MATERIALIZED VIEW [name] AS [query], which will be executed on a background task so the browser doesn’t hang and time out in case of complicated queries. Taking this into account, the associated record will not appear on the Django admin app until the CREATE MATERIALIZED VIEW query finishes. 
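For instance, if a Materialized Query named patients_per_country (an illustrative name) were created with the Patients per Country chart query from the dashboards chapter, the background task would end up executing something like:

```sql
-- What the background task runs for a Materialized Query named "patients_per_country".
CREATE MATERIALIZED VIEW patients_per_country AS
SELECT country.country, source.database_type, count_value
FROM public.achilles_results AS achilles
INNER JOIN public.data_source AS source ON achilles.data_source_id = source.id
INNER JOIN public.country AS country ON source.country_id = country.id
WHERE analysis_id = 1;

-- A chart render then only needs the simple query mentioned above:
SELECT country, database_type, count_value FROM patients_per_country;
```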
To give feedback on the background task we use celery/django-celery-results, so you can check the status of a task on the Task Results model of the Celery Results app. After the creation of a Materialized Query, there will be a message telling the id of the task that is executing the CREATE MATERIALIZED VIEW query. You can then look for the record associated with the task and click on the id to get more details. If something went wrong, check the error message either on the Result Data or Traceback fields under the Result section. After all this, the final step is to add the materialized view as a Dataset. Log into Superset, then go to Data -> Datasets and create a new one. Select the Achilles database, the public schema, then the created materialized view and click “ADD”. After this, the materialized view can be used as a data source for a new chart. Tabs View [Deprecated] Note: This app is no longer maintained and the associated URLs were unlinked. Target: admin user Once there are data sources on the platform, data has been uploaded to them, and dashboards have been created on Superset, researchers can browse through the dashboards and analyze and compare the data of the different data sources. One way to allow this would be to let them browse through the dashboard list on Superset. However, if some dashboards were not ready to show to the public users, they could still access them. For that, a page with a sidebar was created, where public users could browse through the available and ready dashboards. It can be accessed through the URL [BASE_URL]/tabs/. The sidebar entries can be configured through the Django admin app, accessing the Tabsmanager app section. Here two models are available to create: Tab Groups: They allow grouping several sidebar entries within a collapsible group. Tabs: Will create a clickable entry on the sidebar that can be presented within a group. When a tab is clicked, the associated dashboard will be displayed on the page. Each entry, tab, or group of them, expects: Title/Name Icon: Name of a font awesome version 5 icon Position: Allows ordering entries along the sidebar. If a Tab has a group, then this field will order the tabs within that group only. Visible: Whether or not this tab or group should be visible. The goal of this field is to avoid having to delete the record from the database just because a certain tab is not ready, and later having to create it from scratch. Tabs additionally expect a URL, which will be used to display a Superset dashboard in an iframe. To hide Superset’s menu bar, an additional standalone URL parameter should be appended to the provided URL of a tab. The value of the standalone argument depends on the expected result: 1: the menu bar is hidden; the bar where the dashboard title, publish status, and three-dots option menu are present will still appear. 2: both the menu bar and the dashboard title bar are hidden. By default, the dashboard of the first tab is displayed on the page; however, if one wants a specific tab to be displayed when the page is opened, its title should be present in the hash part of the URL. For example, if there is a tab called People, to make that tab selected at the start, the following URL should be used: [BASE_URL]/tabs/#People. "],["backups.html", "Chapter 5 Backups", " Chapter 5 Backups Create a credentials file (the structure of the file depends on the target cloud server) Create a backups.conf under the backups directory using backups.conf.example as base, setting the appropriate value for the several variables. 
For variables associated with files and directories, always use absolute paths. Variables: RUN: Set it to 0 if you don’t want the next scheduled backup to run. This variable allows you to cancel any backup runs while you are doing some maintenance on the application. CONSTANCE_REDIS_DB: Number of the Redis database where the django constance config is stored. The default value is 2. This value should be the same as the environment variable REDIS_CONSTANCE_DB of the dashboard container. The following variables are associated with the arguments of the backup_uploader python package. Check its usage for more details: APP_NAME: The backup process will generate some directories with this name in places that are shared with other applications. SERVER: The name of the target cloud server to where backups should be uploaded (dropbox or mega). BACKUP_CHAIN_CONFIG: Allows having different directories with backups of different ages. CREDENTIALS_FILE_PATH: File containing the credentials to access the server to upload the backup file. Install the backup_uploader python package by following its install instructions. Schedule your backups with a crontab entry of the form * * * * * Command_to_execute, where the five fields are, in order: minute (0-59), hour (0-23), day of month (1-31), month (1-12) and day of the week (0-6, Sunday = 0) (Retrieved from: Tutorialspoint) Ex: To run every day at 3:00 am: crontab -e Add the entry 0 3 * * * $HOME/NetworkDashboards/backups/backup.sh (The path to the backup script might be different) Restore Select the compressed backup you want to restore. Make sure that all the environment variables are the same as the ones that were used for the chosen backup file. Additionally, the backups.conf file also needs to be set up, since the TMP_DIRECTORY variable will be used. Run the backups/restore.sh script. Useful stuff How to create a shared link to a dropbox directory using its Python API: pip install dropbox import dropbox d = dropbox.Dropbox(API_TOKEN) # create a shared link for a directory from dropbox.sharing import SharedLinkSettings sharing_settings = SharedLinkSettings( require_password=True, link_password=DIRECTORY_PASSWORD, ) d.sharing_create_shared_link_with_settings( DIRECTORY_PATH, sharing_settings, ) # get all links for link in d.sharing_get_shared_links().links: print(f"{link.path} -> {link.url}") "],["customizations.html", "Chapter 6 Customizations", " Chapter 6 Customizations This platform is currently being used within the scope of the European Health Data & Evidence Network (EHDEN) project. To allow the dashboard viewer Django application to be easily used by another project or company, several components support customization at runtime, removing the need to change such things directly on the source code. To achieve this we make use of Constance, which allows configuring several fields that can then be changed through the Django admin app. Platform Logo It is visible both in the Tabs Manager and the Catalogue Results Uploader URLs. The platform allows two possible ways to choose a logo: upload a file or provide a URL to an image. If both fields are provided, the URL one will be used. On the tabs manager app, we also allow customization of the CSS associated both with the image itself and its container. Platform Title All pages of the uploader app use the same base HTML file which contains a header with the platform logo, page title, and platform title. The first was already mentioned before, the second can’t be changed. 
The last can be altered using a Constance field. Uploader Page Texts The data source creation page has three columns with text providing instructions for the creation of a data source and the upload of catalogue results. The text of these three columns is customizable, where markdown can be used, which is then transformed into HTML before rendering the page. Allow Draft Status Updates In the section Draft Status of the Processes chapter, the concept of draft data sources was already explained. By default, a user can NOT change the data source status on the edit page of a data source, only being allowed to do it through a PATCH request. Changes through the web edit form can be allowed by changing a Constance field. Then an additional draft field will be available on the edit data source form. "],["development-instructions.html", "Chapter 7 Development Instructions", " Chapter 7 Development Instructions Repository Structure Description backups: Scripts and configuration files to perform backups of all the data involved in the Network Dashboards applications (Dashboard viewer + Superset) dashboard_viewer: The Dashboard Viewer Django application to manage and upload catalogue results data. More detail can be found in the Code Documentation chapter. demo: Files that can be used to test some processes of the platform (Upload catalogue results data and import a simple dashboard) docker: Docker-compose stack-related directories. Environment file Configuration directories (Nginx and Postgres) Custom Superset Dockerfile For more information about docker deployment consult the Installation chapter. docs: Where the files of this gitbook are hosted. Other output formats can also be obtained here. Consult the Documentation section of this chapter for more details. superset: contains a submodule to the latest supported version of Superset’s repository and our custom chart plugins tests: contains files to launch a docker-compose stack specific to running tests. requirements-dev: python requirements files for the several tools to either perform code style checks or to run Django tests .pre-commit-config.yaml: configuration for the pre-commit tool. This is not mandatory to use but is a good tool to automatically fix problems related to code style on staged files setup.cfg: configurations for the several code style tools tox.ini: configuration for the tox tool. It helps automate the process of checking if the code style is correct and if the Django tests are passing It’s extremely useful in this context since different code style check tools that we use have some conflicts with python dependencies. It creates a virtual environment for each tox environment, in our case, for each code style check tool plus Django tests Superset Currently, we have made some modifications to the box plot visualization on our superset installation, which doesn’t allow us to use superset’s pre-built images available on their docker hub, since we have to call npm’s build procedures on the front-end code. To build our custom docker image we used superset’s Dockerfile as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code. The changes made to the Dockerfile to install the chart plugins are in this area: L46: Replace some boxplot files with ours; L47: Superset’s original version of the controlPanel.ts file is a .ts file; however, ours is a .tsx. For that, we have to remove the .ts version to properly override this file. 
Update Superset cd into superset’s submodule directory. Get the latest tags: git fetch -t. Check out the new desired release tag. Check if there are any changes made to superset’s Dockerfile (on the root of the repository for the current latest release), adapt them, and insert them on our custom Dockerfile under the docker/superset directory. If the version of the plugin package plugin-chart-echarts changed, it’s necessary to update our box plot plugin. If it is greater than 0.18.25, go to the history (https://github.com/apache/superset/commits/[RELEASE-TAG]/superset-frontend/plugins/plugin-chart-echarts) of commits done to the plugin-chart-echarts plugin and update to the most recent commit, applying their changes to the files in the superset/box-plot-overrides directory. A fast way to check the changes done between two commits: git diff [old_commit_hash] [recent_commit_hash] -- superset-frontend/plugins/plugin-chart-echarts Chart Plugin Development Follow the instructions of this tutorial to create the necessary base files of your plugin. To deploy you can either use the DYNAMIC_PLUGINS feature flag or you can add and build your plugins in superset/Dockerfile. Important features Standalone Mode: by appending ?standalone=true to the URL of a dashboard, Superset’s menu bar won’t show. New versions support ?standalone=1 or ?standalone=2 where the first does the same as ?standalone=true and the second also hides the bar containing the name of the dashboard, leaving just the charts. Filters: check this faq entry Append ?preselect_filters={\"chartId\":{\"columnToFilterBy\":[\"value1\", \"value2\"]}} to the dashboard URL to apply a filter once the dashboard is loaded. E.g. ?preselect_filters={\"13\":{\"name\":[\"Demo University of Aveiro\"]}} Custom label colors: check this faq entry Github Actions Github has a feature that allows performing automatic actions after a certain event happens on the repository. We use this feature to check if everything is alright with new PRs before merging them into dev. Github calls a job a set of steps that are executed after a certain event. Then several jobs can be grouped into workflows. Events are defined at the workflow level, so all the jobs in a workflow will execute at the same time. We have two workflows: Code analysis checks Django tests The first has three jobs. black: ensures that python’s code format is consistent throughout the project isort: sorts and organizes import statements prospector: executes a set of tools that perform some code analysis The second has just one job that executes the Django tests. Both workflows execute on commits of pull requests that will be merged into the dev branch. Regarding the code analysis workflow, the three tools used have requirements that conflict with each other, so there is a requirements file for each tool on the requirements-dev directory of the repository. To avoid having three different virtual environments for each tool, you can use tox. You just need to install the development requirements (pip install -r requirements-dev/requirements-dev.txt) and then just run tox. It will manage the necessary virtual environments and install the requirements for each tool. If you, however, want to run a specific tool manually you can check the tox configuration file (tox.ini). 
For example, for the prospector tool the tox configuration is the following: [testenv:prospector] basepython = python3.8 deps = -r{toxinidir}/requirements-dev/requirements-prospector.txt -r{toxinidir}/dashboard_viewer/requirements.txt commands = prospector dashboard_viewer prospector docker/superset We can see that it installs the requirements for the prospector tool and also the requirements of the Dashboard Viewer Django app and then runs two commands. For both black and isort tools, when you run tox, it will show the necessary changes that are required to make the code good. You can apply the changes automatically by executing the tools manually without the --check and --check-only options respectively. Sometimes prospector can be a pain in the boot, complaining about too much stuff. You can make prospector ignore some bad stuff by adding the comment, # noqa, to the end of the specific line where it is complaining. Tests Our tests use Django’s built-in testing features, which use unittest under the hood. Not all features have tests associated; however, there are already some test scenarios in mind, written as issues on the repository, which have the tag Test Use Case. To run the tests we set up a docker-compose stack, under the tests directory, which has just the necessary data containers (Redis and Postgres) to avoid having to make changes on the development/production docker-compose stack. Once the stack is up, it is only necessary to run SECRET_KEY=secret python manage.py test to execute the tests. If you are developing any tests that involve celery, there is no need to have a celery process running, since on Django’s settings.py we set the test runner to the celery one. This way python manage.py test is enough to test the whole application. Python Requirements The python requirements for the Dashboard Viewer Django app are present on the requirements.txt file of the dashboard_viewer directory. The file is divided into two sections. First are the direct dependencies. Dependencies that are directly used or imported by the Dashboard Viewer Django app. For better maintainability, every direct dependency has a small description in front of it, so any developer knows why it is being mentioned in the requirements file. The second part of the file contains the indirect dependencies. Basically dependencies of our direct dependencies. After any update is made to the direct dependencies, the following procedure should be followed: Create a new virtual environment just for the dependencies of this file Delete the indirect dependencies section of the file Install all the direct dependencies pip install -r requirements.txt Append the result of pip’s freeze to the requirements file pip freeze >> requirements.txt Remove from the second section of the file the duplicated entries of the first section; in other words, remove from the indirect dependencies section the direct dependencies. With #185 we intend to start using the pip-compile tool. With it, we can have a file with the direct dependencies (requirements.in), and then pip-compile reads that file and automatically creates a requirements.txt file with all the dependencies and which package requires that specific dependency. 
The update process of dependencies will then just be: Install the pip-compile tool pip install pip-tools Make the change to the direct dependencies on the requirements.in file (No need for a virtual environment) Call the pip-compile tool on the requirements.in file pip-compile requirements.in Documentation The plan is to have all the documentation on this GitBook, and any other places that might require some description/information should point to this GitBook so we maintain a common place for all the documentation. This way we can make sure that the code and the documentation are in the same place, since on a pull request for a specific feature or a bug fix, associated documentation should also be changed with it. The manual was written in RMarkdown using the bookdown package. All the code is stored in the docs/src directory as well as the script to build all the documentation. Do not change the files in the root of the docs directory, because those files will be removed during the build process and replaced by the new ones. Therefore, to update this documentation, apply the changes to the files in the directory docs/src. To build the documentation, you need to have R installed, and if you are using UNIX-based systems, you only need to run sh _build.sh in the docs/src directory. In this documentation, we also describe all the settings around the dashboards that are used on the EHDEN project. To avoid an extensive table of contents and also to avoid having a big chapter page for dashboards, we configured this GitBook to split different sections into different pages. A section on the GitBook is mapped to markdown heading elements of level 2 (H2 or ##). This is, however, inconvenient for small chapters like the preface (index.Rmd). To make it render all the sections on the same page, instead of using headings of level 2 (##) you should use level 3 (###). Although this will make the section numeration start at 0, e.g. 1.0.1, 1.0.2, … To avoid this we appended {-} to the section titles so that the numeration does not show. If a new file is created with more documentation, its name should be placed, including extension, in the desired location in this list of the docs/src/_bookdown.yml file. "],["code-documentation.html", "Chapter 8 Code Documentation", " Chapter 8 Code Documentation Apps Materialized Queries Manager Models This app has only one model, MaterializedQuery, which maps to a Postgres materialized view. To avoid having to maintain the consistency between both the records of this Django app and the Postgres materialized views: the managed Meta flag was set to False to avoid Django creating migrations for the model the db_table Meta flag was set to the name of the table where Postgres stores the information about the existing materialized views (pg_matviews). the fields of the model, matviewname and definition, use the same name and type as the ones of the pg_matviews Postgres table. Views This app has no view exposed since all operations on the MaterializedQuery model are expected to be performed in the Django admin app. However, we had to change Django’s default behaviors for the create, update and delete operations of the model. For the delete operation, we overrode the delete method of the MaterializedQuery Django model to just execute a DROP MATERIALIZED VIEW SQL statement. Related to creation and update we had to change some internal methods of Django’s admin app ModelAdmin base class. _changeform_view: where Model records were being created. 
Instead, CREATE MATERIALIZED VIEW and ALTER MATERIALIZED VIEW SQL statements are executed. However, since some materialized views might take some time to build, creating a record like this could lead to a browser timeout. We then decided to execute these statements in a celery background task. The main changes were made here where we launch the background task. response_add: Since the materialized view might not be created the right way, saying “A record was created successfully” is not adequate. We then changed the message that is presented after the creation to tell the id of the background task that is creating the materialized query. The result of the query can then be consulted on the associated Task Results record on the Celery Results app section of the Django admin console. response_change: the changes here follow the same ideas as response_add. If any catalogue results files are being uploaded to the platform, any worker attempting to create or change a materialized view will block until such data is uploaded. Also if any worker is creating materialized views, no other worker can upload catalogue results data. Through the admin console, there is also the possibility to refresh a specific MaterializedQuery. To do so, on the list view, select the MaterializedQueries to refresh, then on the actions dropdown select “Refresh selected materialized queries”. Once again to avoid timeouts, such operations are executed on a background task. Tabs Manager Currently, this app is not being used and the URL mapping was deleted. To use it again, uncomment the tabsManager line on the dashboard_viewer/dashboard_viewer/urls.py file. Then you can access the tabs page through the [BASE_URL]/tabs/ URL. Views On this app, there is only one view. It is a simple page with just a sidebar to choose which dashboard to display on an iframe. Besides the simplicity, all the animations around the sidebar buttons are handled by CSS with some JS that adds and removes classes to HTML elements, as events (hover and click) happen. To facilitate the development process of CSS, SCSS was used to build the styling of the view. It prevents duplication with the addition of variables and adds the possibility to express parent classes by nesting their declaration. In cases where there are a lot of buttons on the sidebar, some buttons might get impossible to reach since they are out of the field of view. To avoid this we make use of SimpleBar, which makes the sidebar scrollable, displaying a scroll bar on the right side of the sidebar whenever there are elements outside of the field of view. API There is one endpoint, [BASE_URL]/api/, where a JSON object of tabs and groups of tabs and their sub-tabs is returned. Models Uploader Views This app exposes three views: 1. Creation of a data source 2. Editing of a data source 3. Uploading or consulting the history of uploads of catalogue results files. The first one can be accessed through the [BASE_URL]/uploader/[DATA_SOURCE_HASH]/ URL. If no hash is provided on the URL then on the creation of the data source a random one will be assigned. If there is already a data source with the provided hash then the user is redirected to the upload page of that data source. This view also allows creating data sources without displaying the webform, redirecting directly to the uploader page. This can be achieved by providing the data of several fields of the form as URL arguments. E.g. [BASE_URL]/uploader/[DATA_SOURCE_HASH]/?acronym=test.... 
Tabs Manager Currently, this app is not being used and its URL mapping was deleted. To use it again, uncomment the tabsManager line on the dashboard_viewer/dashboard_viewer/urls.py file. Then you can access the tabs page through the [BASE_URL]/tabs/ URL. Views On this app, there is only one view. It is a simple page with just a sidebar to choose which dashboard to display on an iframe. Despite its simplicity, all the animations around the sidebar buttons are handled by CSS, with some JS that adds and removes classes on HTML elements as events (hover and click) happen. To facilitate the development process of the CSS, SCSS was used to build the styling of the view. It prevents duplication with the addition of variables and adds the possibility to express parent classes by nesting their declarations. In cases where there are a lot of buttons on the sidebar, some buttons might become impossible to reach since they are out of the field of view. To avoid this we make use of SimpleBar, which makes the sidebar scrollable, displaying a scroll bar on the right side of the sidebar whenever there are elements outside of the field of view. API There is one endpoint, [BASE_URL]/api/, where a JSON object of tabs and groups of tabs and their sub-tabs is returned. Models Uploader Views This app exposes three views: 1. Creation of a data source; 2. Editing of a data source; 3. Uploading, or consulting the history of uploads of, catalogue results files. The first one can be accessed through the [BASE_URL]/uploader/[DATA_SOURCE_HASH]/ URL. If no hash is provided on the URL, then on the creation of the data source a random one will be assigned. If there is already a data source with the provided hash, then the user is redirected to the upload page of that data source. This view also allows creating data sources without displaying the webform, redirecting directly to the uploader page. This can be achieved by providing the data of several fields of the form as URL arguments. E.g. [BASE_URL]/uploader/[DATA_SOURCE_HASH]/?acronym=test.... This is implemented in a way so that whenever a GET is performed, the view checks the URL arguments and tries to submit the data source form. If it is valid, i.e. all the required fields were provided and are valid, then the user is redirected to the upload page. Otherwise, all the valid values are set in the form, the invalid ones are discarded, and the data source creation page is presented with no error messages. The country field should contain a value from the ones available on the dropdown presented in the webform, and since the coordinates are a two-component value they should be provided as coordinates_0=[LATITUDE]&coordinates_1=[LONGITUDE]. It is important to note that this view does not require a CSRF token, so a normal POST form submission can be performed to create a data source.
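For example, a script could pre-fill the creation form like this (illustrative only: the host, hash and field values are made up, and any other field of the form can be passed the same way):

```python
import requests

BASE_URL = "https://dashboards.example.com"  # hypothetical host
data_source_hash = "test-hospital"           # hypothetical hash

response = requests.get(
    f"{BASE_URL}/uploader/{data_source_hash}/",
    params={
        "acronym": "TEST",
        "coordinates_0": 40.63,  # latitude
        "coordinates_1": -8.66,  # longitude
    },
)
# If all required fields were provided and are valid, the request is
# redirected to the upload page; otherwise the creation form is shown
# pre-filled with the valid values.
```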
The second one can be accessed through the [BASE_URL]/uploader/[DATA_SOURCE_HASH]/edit/ URL or by clicking on the Edit button on the data source upload page. Finally, on the upload page, a data owner can consult the history of uploads, their state, and eventual error messages if something went wrong. Whenever an upload is made, its state will be Pending. After the upload, with a 5-second interval, a request is made to the backend to check if the status of the upload changed. If it failed, an error message will be provided in a tooltip above a button with a message icon. Otherwise the state will change to Done and the information about the upload retrieved from the uploaded file, present on the check status response, is filled in. Related to file uploading, after the file form is submitted no validation is made and a message is presented to the user telling that the file is being processed in the background; then the fetch status process mentioned before starts. If validations were performed before returning a message to the user and the validation took too much time, the browser could time out. Also, if some unexpected error happened on the insertion process performed in the background, the user wouldn't get any feedback. Related to the background task that validates and uploads the data, the validation can fail with the following errors:

| Error | Message |
| --- | --- |
| Invalid Number of Columns | The provided file has an invalid number of columns |
| Invalid CSV File Format | The provided file has an invalid CSV format. Make sure it is a text file separated by commas and you either have 7 (regular results file) or 13 (results file with dist columns) columns. |
| Missing Required Fields | Some rows have null values either on the column “analysis_id” or “count_value” |
| Invalid Field Types | The provided file has invalid values on some columns. Remember that only the "stratum_*" columns accept strings, all the other fields expect numeric types. |
| Duplicated Metadata Rows | Analysis id [output] duplicated on multiple rows. Try (re)running the plugin CatalogueExport on your database. |
| Missing Required Metadata Row | Analysis id 0 is missing. Try (re)running the plugin CatalogueExport on your database. |

Any other error is considered an unexpected error and the following message will be presented: “An unexpected error occurred while processing your file. Please contact the system administrator for more details.”. If the file passes all validations, it goes to the upload data phase. Here, if workers are creating or refreshing materialized queries, then the worker blocks. If there are other workers inserting data for the same data source it will also block. However, several workers of different data sources can insert data at the same time. All the workers, after inserting the data, check if they are the only worker inserting data. If so, they refresh the existing materialized queries. Else the next worker to finish inserting data will do the same check. Widgets For the data source form, two custom widgets were created for the following fields: Database Type: to avoid having duplicate entries with the same meaning (e.g. Hospital, hospital), the input of this field has an autocomplete list where existing values are suggested to the user. Also, before saving the field to the database, spaces are trimmed and the values are transformed into title case here. Coordinates: 1. this is a two-component field; 2. inserting coordinates by hand is tedious. Considering the previous points, we created a widget with a map built with Leaflet where the user just needs to click on the map. API This app provides two API endpoints. Update data source information: a PATCH request with a JSON object on the body of the request with the fields and their new values. Pending upload status: a GET request that returns JSON data where there is always a status field that can have three statuses, which can then lead to additional data also being present: Pending: the upload in question hasn’t finished. Done: the upload finished and there was nothing wrong with the uploaded file. Along with the status, there will be a data field with a JSON object with the fields r_package_version, generation_date, cdm_version, and vocabulary_version, which are data source information that was extracted from the uploaded file. Failed: the upload finished but there was something wrong with the uploaded file. Along with the status, there will be a failure_msg field telling the reason for the failure.
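Putting that together, the status endpoint's responses can be pictured as follows (a reconstruction from the description above, with made-up field values; the exact serialization may differ):

```python
# Upload still being processed
{"status": "Pending"}

# Upload processed successfully
{
    "status": "Done",
    "data": {
        "r_package_version": "1.0.2",  # hypothetical values
        "generation_date": "2021-05-17",
        "cdm_version": "5.3.1",
        "vocabulary_version": "v5.0",
    },
}

# Upload rejected by one of the validations listed above
{
    "status": "Failed",
    "failure_msg": "Analysis id 0 is missing. Try (re)running the "
                   "plugin CatalogueExport on your database.",
}
```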
Models Country data is loaded in a fresh installation through the docker-init.sh script, if no records are present on the Country table. The DataSource model doesn’t have a foreign key to the DatabaseType model, to facilitate the creation of the SQL queries that feed Superset’s dashboards. The DatabaseType model is used anyway, as a faster way to check if a certain database type already exists on the database, avoiding going through every DataSource record. The same situation of the DatabaseType model also happens between the UploadHistory and PendingUpload models: there is no foreign key between an UploadHistory and a PendingUpload. This is because PendingUpload records are deleted once an upload is successful. When the upload view requests the status of a certain upload, it uses the id of the pending upload. If no pending upload is found, it is assumed that the upload was successful, and uploads are searched for on the UploadHistory model with the pending_upload_id field equal to that upload id. Related to where the uploaded files are stored, within the media directory there will be an ACHILLES_RESULTS_STORAGE_PATH directory, which will have a directory for each data source. Within this last directory, files are first uploaded to a failure directory. If the upload is successful, the file is moved to a success directory. In both cases, the file name will be the date of when the file is saved to disk plus its original extension. JavaScript Packages While developing the templates for Django views, if a certain JavaScript library is required, like jQuery, one option is to insert script tags on the templates and point the src to a CDN. However, this makes the process of maintaining the libraries tedious, since a developer has to search and change all the script tags if, for example, they want to update a library’s version. To avoid this problem we have a package.json file where we define all the libraries that we use and their versions. Then we add the node_modules directory as a static file directory. With this alternative, updating a library is as simple as changing a version number in the package.json file, running npm install, and collecting the static files again.
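Exposing node_modules to Django's static files machinery can be as simple as the following settings snippet (a sketch; the project's actual settings may organize this differently):

```python
# dashboard_viewer/settings.py (illustrative excerpt)
from pathlib import Path

BASE_DIR = Path(__file__).resolve().parent.parent

STATICFILES_DIRS = [
    # Anything installed through package.json becomes reachable as a
    # static file, e.g. {% static "jquery/dist/jquery.min.js" %}.
    BASE_DIR / "node_modules",
]
```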
"],["dashboards.html", "Chapter 9 Dashboards ", " Chapter 9 Dashboards "],["PerDatabaseDashboard.html", "9.1 Network Dashboard", " 9.1 Network Dashboard Label Colors In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page: "label_colors": { "Male": "#3366FF", "Female": "#FF3399" } CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Filters Country Filter Dataset: Materialized View meta_data_table Database Type Filter Dataset: Materialized View meta_data_table Data Source Filter Dataset: Materialized View meta_data_table Overview Tab Countries Dataset: Materialized View meta_data_table Data Sources Dataset: Materialized View meta_data_table Datasource Types Dataset: Materialized View meta_data_table Patients Dataset: Materialized View meta_data_table Patients Dataset: Materialized View meta_data_table Patients by Country Dataset: Materialized View meta_data_table Database Types per Country Dataset: Materialized View meta_data_table Meta Data Dataset: Materialized View meta_data_table Demographics Tab Number of Patients Dataset: Materialized View number_of_patients Gender Table Dataset: Materialized View gender Gender Pie Dataset: Materialized View gender Age at first observation Table Dataset: Materialized View age1observation_table Age at first observation Bar Chart Dataset: Materialized View age1observation_bar_chart Distribution of age at first observation period Dataset: Materialized View distribution_of_age_at_first_observation_period Year of Birth Dataset: Materialized View year_of_birth Data Domains Tab Average number of records per person Dataset: Materialized View avg_num_of_records_per_person Total number of records Dataset: Materialized View data_domain_total_num_of_records Number of distinct visit occurrence concepts per person Dataset: Materialized View number_of_distinct_per_person Number of distinct condition occurrence concepts per person Dataset: Materialized View number_of_distinct_per_person Number of distinct procedure occurrence concepts per person Dataset: Materialized View number_of_distinct_per_person Number of distinct drug exposure concepts per person Dataset: Materialized View number_of_distinct_per_person Number of distinct observation occurrence concepts per person Dataset: Materialized View number_of_distinct_per_person Number of distinct measurement occurrence concepts per person Dataset: Materialized View number_of_distinct_per_person Data Provenance Tab Dataset: Materialized View data_provenance Observation Period Tab Number of Patients in Observation Period Dataset: Materialized View num_of_patients_in_observation_period Cumulative Observation Period Dataset: Materialized View cumulative_observation_time Number of Observation Periods Dataset: Materialized View number_of_observation_periods Length of observation (days) of first observation period Dataset: Materialized View length_of_observation_of_first_observation_period Visit Tab Visit Type Graph Dataset: Materialized View visit_type_bar_chart Visit Type Dataset: Materialized View visit_type_table Concept Browser Tab Domain Filter Dataset: Materialized View domain_filter Concept Browser Dataset: Materialized View concept_browser_table3 Concept Network Coverage Dataset: Materialized View concept_coverage2 About Tab Markdown dashboard components "],["database-level-dashboard.html", "9.2 Database-Level Dashboard", " 9.2 Database-Level Dashboard This dashboard is an exact copy of the Network Dashboard, but several legends and fields displayed on the original are hidden, either through CSS or by changing some chart settings. In the following sections we only present the things to change on the original charts. Label Colors In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page: "label_colors": { "Male": "#3366FF", "Female": "#FF3399" } CSS To hide several elements of the dashboard insert the following css code to the CSS field on the edit page: /* hides the filter badges on right side of charts */ .dashboard-filter-indicators-container { display: none; } /* hides the acronym filter */ .grid-content > .dragdroppable.dragdroppable-row > .with-popover-menu { display: none; } /* * WARNING panel 1 id hardcoded * Hides the X Axis Label of the heatmap on the Data Domains tab */ #TABS-nlIU6H5mcT-pane-1 g.x.axis > g.tick text { display: none; } /* * WARNING panel 2 id hardcoded * Hides the X Axis Labels of the bar charts on the Data Provenance tab */ #TABS-nlIU6H5mcT-pane-2 g.nv-x.nv-axis.nvd3-svg > g.nvd3.nv-wrap.nv-axis > g > g.tick.zero > text { display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter - hidden Dataset: data_source table of the achilles database. For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. Demographics Tab Number of Patients No changes Gender Table No changes Gender Pie No changes Age at first observation - Bars Remove legend. Customize Tab Chart Options Legend: off Distribution of age at first observation period No changes Year of Birth Remove legend. Customize Tab Chart Options Legend: off Data Domains Tab Average number of records per person No changes Total number of records No changes Data Density Plot Dataset: Materialized View data_density Records per person Dataset: Materialized View records_per_person Concepts per person Dataset: Materialized View number_of_distinct_per_person Data Provenance Tab Type Concepts Dataset: Materialized View data_provenance Observation Period Tab Number of Patients in Observation Period Remove legend. Customize Tab Chart Options Legend: off Length of observation (days) of first observation period No changes Cumulative Observation Period Remove legend. Customize Tab Chart Options Legend: off Number of Observation Periods No changes Visit Tab Visit Type Graph Remove legend.
Customize Tab Chart Options Legend: off Visit Type Table Remove the name field from the columns to display. Data Tab Query Columns: visit_type, num_persons, percent_persons with label persons (%), records_per_person Visit Age Distribution Dataset: Materialized View visit_age_distribution Concept Browser Tab Domain Filter No changes Concept Browser Dataset: Materialized View concept_browser_table2 Meta Data Tab Meta Data Dataset: Materialized View meta_data_table "],["materialized-views-1.html", "9.3 Materialized views", " 9.3 Materialized views meta_data_table SELECT data_source.acronym, data_source.name, data_source.database_type, country.country, p.count_value AS number_of_patients, a.stratum_2 AS source_release_date, a.stratum_3 AS cdm_release_date, a.stratum_4 AS cdm_version, a.stratum_5 AS vocabulary_version, p.stratum_3 AS execution_date, p.stratum_2 AS package_version FROM (((achilles_results a JOIN data_source ON ((a.data_source_id = data_source.id))) JOIN country ON ((data_source.country_id = country.id))) JOIN ( SELECT achilles_results.count_value, achilles_results.data_source_id, achilles_results.stratum_2, achilles_results.stratum_3 FROM achilles_results WHERE (achilles_results.analysis_id = 0)) p ON p.data_source_id = data_source.id) WHERE (a.analysis_id = 5000); patients_per_country_and_database_type SELECT country.country, source.database_type, achilles.count_value FROM ( (achilles_results achilles JOIN data_source source ON (achilles.data_source_id = source.id)) JOIN country country ON source.country_id = country.id) WHERE (achilles.analysis_id = 1); number_of_patients SELECT achilles_results.count_value, data_source.name, data_source.acronym, data_source.database_type, country.country FROM ((achilles_results JOIN data_source ON ((achilles_results.data_source_id = data_source.id))) JOIN country ON ((data_source.country_id = country.id))) WHERE (achilles_results.analysis_id = 1); gender SELECT source.name, source.acronym, source.database_type, country.country, concept.concept_name AS gender, achilles.count_value FROM (((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text))) WHERE (achilles.analysis_id = 2); age1observation_table SELECT source.name, source.acronym, source.database_type, country.country, sum( CASE WHEN ( (achilles.stratum_2):: integer < 10 ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "0-10", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 10 ) AND ( (achilles.stratum_2):: integer < 20 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "10-20", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 20 ) AND ( (achilles.stratum_2):: integer < 30 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "20-30", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 30 ) AND ( (achilles.stratum_2):: integer < 40 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "30-40", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 40 ) AND ( (achilles.stratum_2):: integer < 50 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "40-50", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 50 ) AND ( (achilles.stratum_2):: integer < 60 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "50-60", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 60 ) AND ( (achilles.stratum_2):: integer < 70 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "60-70", sum( CASE 
WHEN ( ( (achilles.stratum_2):: integer >= 70 ) AND ( (achilles.stratum_2):: integer < 80 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "70-80", sum( CASE WHEN ( ( (achilles.stratum_2):: integer >= 80 ) AND ( (achilles.stratum_2):: integer < 90 ) ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "80-90", sum( CASE WHEN ( (achilles.stratum_2):: integer >= 90 ) THEN achilles.count_value ELSE NULL :: bigint END ) AS "90+" FROM (((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text))) WHERE (achilles.analysis_id = 102) GROUP BY source.name, source.acronym, source.database_type, country.country; age1observation_bar_chart SELECT source.name, (achilles.stratum_1)::integer AS age, achilles.count_value AS count, source.acronym, source.database_type, country.country FROM ((achilles_results achilles JOIN data_source source ON achilles.data_source_id = source.id) JOIN country ON ((country.id = source.country_id))) WHERE (achilles.analysis_id = 101); distribution_of_age_at_first_observation_period SELECT source.name, source.acronym, country.country, achilles.count_value, achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value, achilles.min_value FROM ((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((source.country_id = country.id))) WHERE (achilles.analysis_id = 103) ORDER BY source.name; year_of_birth SELECT source.name, source.acronym, source.database_type, country.country, achilles.stratum_1 AS "Birth_year", achilles.count_value AS count FROM ((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) WHERE (achilles.analysis_id = 3); avg_num_of_records_per_person SELECT source.name, source.acronym, source.database_type, country.country, CASE WHEN (achilles.analysis_id = 201) THEN 'Visit'::text WHEN (achilles.analysis_id = 401) THEN 'Condition'::text WHEN (achilles.analysis_id = 501) THEN 'Death'::text WHEN (achilles.analysis_id = 601) THEN 'Procedure'::text WHEN (achilles.analysis_id = 701) THEN 'Drug Exposure'::text WHEN (achilles.analysis_id = 801) THEN 'Observation'::text WHEN (achilles.analysis_id = 1801) THEN 'Measurement'::text WHEN (achilles.analysis_id = 2101) THEN 'Device'::text WHEN (achilles.analysis_id = 2201) THEN 'Note'::text ELSE NULL::text END AS data_domain, (sum(achilles.count_value) / avg(counts.num_persons)) AS records_per_person FROM (((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) JOIN ( SELECT achilles_results.data_source_id, achilles_results.count_value AS num_persons FROM achilles_results WHERE (achilles_results.analysis_id = 1)) counts ON achilles.data_source_id = counts.data_source_id) GROUP BY achilles.analysis_id, source.name, source.acronym, source.database_type, country.country HAVING ( achilles.analysis_id = ANY ( ARRAY[(201):: bigint, (401):: bigint, (501):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (1801):: bigint, (2101):: bigint, (2201):: bigint] ) ); data_domain_total_num_of_records SELECT data_source.name, data_source.acronym, data_source.database_type, country.country, CASE WHEN (achilles_results.analysis_id = 201) THEN 
'Visit'::text WHEN (achilles_results.analysis_id = 401) THEN 'Condition'::text WHEN (achilles_results.analysis_id = 501) THEN 'Death'::text WHEN (achilles_results.analysis_id = 601) THEN 'Procedure'::text WHEN (achilles_results.analysis_id = 701) THEN 'Drug Exposure'::text WHEN (achilles_results.analysis_id = 801) THEN 'Observation'::text WHEN (achilles_results.analysis_id = 1801) THEN 'Measurement'::text WHEN (achilles_results.analysis_id = 2101) THEN 'Device'::text WHEN (achilles_results.analysis_id = 2201) THEN 'Note'::text ELSE NULL::text END AS data_domain, sum(achilles_results.count_value) AS count FROM ((achilles_results JOIN data_source ON ((achilles_results.data_source_id = data_source.id))) JOIN country ON ((country.id = data_source.country_id))) GROUP BY data_source.name, data_source.acronym, data_source.database_type, country.country, achilles_results.analysis_id HAVING ( achilles_results.analysis_id = ANY ( ARRAY[(201):: bigint, (401):: bigint, (501):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (1801):: bigint, (2101):: bigint, (2201):: bigint] ) ); number_of_distinct_per_person SELECT source.name, source.acronym, country.country, achilles.analysis_id, CASE WHEN (achilles.analysis_id = 203) THEN 'Visit'::text WHEN (achilles.analysis_id = 403) THEN 'Condition'::text WHEN (achilles.analysis_id = 603) THEN 'Procedure'::text WHEN (achilles.analysis_id = 703) THEN 'Drug Exposure'::text WHEN (achilles.analysis_id = 803) THEN 'Observation'::text WHEN (achilles.analysis_id = 1803) THEN 'Measurement'::text ELSE NULL::text END AS data_domain, achilles.count_value, achilles.min_value, achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value FROM ((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((source.country_id = country.id))) WHERE ( achilles.analysis_id = ANY ( ARRAY[(203):: bigint, (403):: bigint, (603):: bigint, (703):: bigint, (803):: bigint, (1803):: bigint] ) ) ORDER BY source.name; data_provenance SELECT source.name, source.acronym, source.database_type, country.country, CASE WHEN (achilles.analysis_id = 405) THEN 'Condition'::text WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text WHEN (achilles.analysis_id = 705) THEN 'Drug'::text WHEN (achilles.analysis_id = 805) THEN 'Observation'::text WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text WHEN (achilles.analysis_id = 2105) THEN 'Device'::text ELSE 'Other'::text END AS domain_name, c1.concept_name, sum(achilles.count_value) AS num_records FROM (((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) JOIN concept c1 ON ((achilles.stratum_2 = (c1.concept_id)::text))) WHERE ( achilles.analysis_id = ANY ( ARRAY[(405):: bigint, (605):: bigint, (705):: bigint, (805):: bigint, (1805):: bigint, (2105):: bigint] ) ) GROUP BY source.name, source.acronym, source.database_type, country.country, c1.concept_name, CASE WHEN (achilles.analysis_id = 405) THEN 'Condition'::text WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text WHEN (achilles.analysis_id = 705) THEN 'Drug'::text WHEN (achilles.analysis_id = 805) THEN 'Observation'::text WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text WHEN (achilles.analysis_id = 2105) THEN 'Device'::text ELSE 'Other'::text END; num_of_patients_in_observation_period SELECT source.name, source.acronym,
source.database_type, country.country, to_date(achilles.stratum_1, 'YYYYMM'::text) AS date, achilles.count_value AS "Nr_patients" FROM ((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((country.id = source.country_id))) WHERE (achilles.analysis_id = 110); cumulative_observation_time SELECT data_source.name, data_source.acronym, data_source.database_type, country.country, cumulative_sums.xlengthofobservation, round( (cumulative_sums.cumulative_sum / (totals.total):: numeric), 5 ) AS ypercentpersons FROM ( ( ( ( SELECT achilles_results.data_source_id, ( (achilles_results.stratum_1):: integer * 30 ) AS xlengthofobservation, sum(achilles_results.count_value) OVER ( PARTITION BY achilles_results.data_source_id ORDER BY (achilles_results.stratum_1):: integer DESC ) AS cumulative_sum FROM achilles_results WHERE ( achilles_results.analysis_id = 108 ) ) cumulative_sums JOIN ( SELECT achilles_results.data_source_id, achilles_results.count_value AS total FROM achilles_results WHERE (achilles_results.analysis_id = 1) ) totals ON ( ( cumulative_sums.data_source_id = totals.data_source_id ) ) ) JOIN data_source ON ((cumulative_sums.data_source_id = data_source.id)) ) JOIN country ON ((country.id = data_source.country_id)) ) ORDER BY data_source.name, cumulative_sums.xlengthofobservation; number_of_observation_periods SELECT ar.data_source_id AS id, ds.acronym, ds.name, country.country, ar.stratum_1, ar.count_value, pa.nrpatients AS patients, round( ( ((100)::numeric * (ar.count_value)::numeric) / (pa.nrpatients)::numeric ), 2 ) AS percentage FROM (((achilles_results ar JOIN data_source ds ON ((ds.id = ar.data_source_id))) JOIN country ON ((ds.country_id = country.id))) JOIN ( SELECT achilles_results.count_value AS nrpatients, achilles_results.data_source_id FROM achilles_results WHERE (achilles_results.analysis_id = 0)) pa ON ((pa.data_source_id = ds.id))) WHERE (ar.analysis_id = 113); length_of_observation_of_first_observation_period SELECT source.name, source.acronym, country.country, achilles.count_value, achilles.min_value, achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90, achilles.max_value FROM ((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN country ON ((source.country_id = country.id))) WHERE (achilles.analysis_id = 105) ORDER BY source.name; visit_type_bar_chart SELECT data_source.name, data_source.acronym, data_source.database_type, country.country, concept.concept_name, achilles_results.count_value AS num_persons FROM (((( SELECT achilles_results_1.id, achilles_results_1.analysis_id, achilles_results_1.stratum_1, achilles_results_1.stratum_2, achilles_results_1.stratum_3, achilles_results_1.stratum_4, achilles_results_1.stratum_5, achilles_results_1.count_value, achilles_results_1.data_source_id, achilles_results_1.avg_value, achilles_results_1.max_value, achilles_results_1.median_value, achilles_results_1.min_value, achilles_results_1.p10_value, achilles_results_1.p25_value, achilles_results_1.p75_value, achilles_results_1.p90_value, achilles_results_1.stdev_value FROM achilles_results achilles_results_1 WHERE (achilles_results_1.analysis_id = 200) ) achilles_results JOIN data_source ON ((achilles_results.data_source_id = data_source.id))) JOIN country ON ((country.id = data_source.country_id))) JOIN concept ON (achilles_results.stratum_1)::integer = concept.concept_id ); visit_type_table 
SELECT data_source.name, data_source.acronym, data_source.database_type, country.country, concept.concept_name, ar1.count_value AS num_persons, round( ( (100.0 * (ar1.count_value)::numeric) / (denom.count_value)::numeric ), 2 ) AS percent_persons, round( ( (1.0 * (ar2.count_value)::numeric) / (ar1.count_value)::numeric ), 2 ) AS records_per_person FROM (((((( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1, achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4, achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id, achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value, achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value, achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value FROM achilles_results WHERE (achilles_results.analysis_id = 200)) ar1 JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1, achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4, achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id, achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value, achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value, achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value FROM achilles_results WHERE (achilles_results.analysis_id = 201)) ar2 ON (((ar1.stratum_1 = ar2.stratum_1) AND (ar1.data_source_id = ar2.data_source_id)))) JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1, achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4, achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id, achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value, achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value, achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value FROM achilles_results WHERE (achilles_results.analysis_id = 1)) denom ON ((ar1.data_source_id = denom.data_source_id))) JOIN data_source ON ((data_source.id = ar1.data_source_id))) JOIN country ON ((country.id = data_source.country_id))) JOIN concept ON (((ar1.stratum_1)::integer = concept.concept_id))) ORDER BY ar1.data_source_id, ar1.count_value DESC; domain_filter SELECT concept.concept_name, concept.domain_id, source.name, source.acronym, source.database_type, country.country FROM ( ( ( achilles_results JOIN concept ON ( ( (achilles_results.stratum_1):: bigint = concept.concept_id ) ) ) JOIN data_source source ON ( ( achilles_results.data_source_id = source.id ) ) ) JOIN country ON ( (country.id = source.country_id) ) ) WHERE ( achilles_results.analysis_id = ANY ( ARRAY[(201):: bigint, (401):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (901):: bigint, (1001):: bigint, (1801):: bigint, (200):: bigint, (400):: bigint, (600):: bigint, (700):: bigint, (800):: bigint, (1800):: bigint] ) ); concept_browser_table3 SELECT source.name, source.acronym, source.database_type, country.country, ( ( ( ( '<a href="https://athena.ohdsi.org/search-terms/terms/' :: text || ar1.concept_id ) || '" target="_blank">' :: text ) || ar1.concept_id ) || '</a>' :: text ) AS concept_id, concept.concept_name, concept.domain_id, (ar1.rc):: integer AS rc, (ar2.drc):: integer AS drc FROM ( ( ( (
( SELECT achilles_results.data_source_id, achilles_results.analysis_id, achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc FROM achilles_results WHERE ( achilles_results.analysis_id = ANY ( ARRAY[(401):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (1801):: bigint, (2101):: bigint] ) ) ) ar1 JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc FROM achilles_results ar WHERE ( ar.analysis_id = ANY ( ARRAY[(430):: bigint, (630):: bigint, (730):: bigint, (830):: bigint, (1830):: bigint, (2130):: bigint] ) ) ) ar2 ON ( ( (ar1.concept_id = ar2.concept_id) AND ( ar1.data_source_id = ar2.data_source_id ) ) ) ) JOIN data_source source ON ( (ar1.data_source_id = source.id) ) ) JOIN country ON ( (source.country_id = country.id) ) ) JOIN concept concept ON ( ( ar1.concept_id = (concept.concept_id):: text ) ) ) ORDER BY ( (ar2.drc):: integer ) DESC; concept_coverage2 SELECT source.name AS source_name, source.database_type, country.country, ( ( ( ( '<a href="https://athena.ohdsi.org/search-terms/terms/' :: text || concept.concept_id ) || '" target="_blank">' :: text ) || concept.concept_id ) || '</a>' :: text ) AS concept_id, concept.concept_name, concept.domain_id, sum( (ar1.rc):: integer ) AS rc, sum( (ar2.drc):: integer ) AS drc FROM ( ( ( ( ( SELECT achilles_results.data_source_id, achilles_results.analysis_id, achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc FROM achilles_results WHERE ( achilles_results.analysis_id = ANY ( ARRAY[(401):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (1801):: bigint, (2101):: bigint] ) ) ) ar1 JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc FROM achilles_results ar WHERE ( ar.analysis_id = ANY ( ARRAY[(430):: bigint, (630):: bigint, (730):: bigint, (830):: bigint, (1830):: bigint, (2130):: bigint] ) ) ) ar2 ON ( ( (ar1.concept_id = ar2.concept_id) AND ( ar1.data_source_id = ar2.data_source_id ) ) ) ) JOIN data_source source ON ( (ar1.data_source_id = source.id) ) ) JOIN country ON ( (country.id = source.country_id) ) ) JOIN concept concept ON ( ( ar1.concept_id = (concept.concept_id):: text ) ) ) GROUP BY source.name, source.database_type, country.country, concept.domain_id, concept.concept_id, concept.concept_name; data_density SELECT source.acronym, t1.table_name AS series_name, to_date(t1.stratum_1, 'YYYYMM'::text) AS x_calendar_month, t1.count_value AS y_record_count FROM (( SELECT achilles_results.data_source_id AS id, 'Visit occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 220) UNION ALL SELECT achilles_results.data_source_id AS id, 'Condition occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 420) UNION ALL SELECT achilles_results.data_source_id AS id, 'Death'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 502) UNION ALL SELECT achilles_results.data_source_id AS id, 'Procedure occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 620) UNION ALL SELECT achilles_results.data_source_id AS id, 'Drug exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE 
(achilles_results.analysis_id = 720) UNION ALL SELECT achilles_results.data_source_id AS id, 'Observation'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 820) UNION ALL SELECT achilles_results.data_source_id AS id, 'Drug era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 920) UNION ALL SELECT achilles_results.data_source_id AS id, 'Device Exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 2120) UNION ALL SELECT achilles_results.data_source_id AS id, 'Condition era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 1020) UNION ALL SELECT achilles_results.data_source_id AS id, 'Observation period'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 111) UNION ALL SELECT achilles_results.data_source_id AS id, 'Measurement'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 1820)) t1 JOIN data_source source ON ((source.id = t1.id))) ORDER BY t1.table_name, ( CASE WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1 ELSE NULL::text END)::integer; records_per_person SELECT source.acronym, t1.table_name AS series_name, to_date(t1.stratum_1, 'YYYYMM'::text) AS x_calendar_month, round( ( (1.0 * (t1.count_value)::numeric) / (denom.count_value)::numeric ), 5 ) AS y_record_count FROM ((( SELECT achilles_results.data_source_id AS id, 'Visit occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 220) UNION ALL SELECT achilles_results.data_source_id AS id, 'Condition occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 420) UNION ALL SELECT achilles_results.data_source_id AS id, 'Death'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 502) UNION ALL SELECT achilles_results.data_source_id AS id, 'Procedure occurrence'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 620) UNION ALL SELECT achilles_results.data_source_id AS id, 'Drug exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 720) UNION ALL SELECT achilles_results.data_source_id AS id, 'Observation'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 820) UNION ALL SELECT achilles_results.data_source_id AS id, 'Device exposure'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 2120) UNION ALL SELECT achilles_results.data_source_id AS id, 'Drug era'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 920) UNION ALL SELECT achilles_results.data_source_id AS id, 'Condition era'::text AS table_name,
achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 1020) UNION ALL SELECT achilles_results.data_source_id AS id, 'Observation period'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 111) UNION ALL SELECT achilles_results.data_source_id AS id, 'Measurement'::text AS table_name, achilles_results.stratum_1, achilles_results.count_value FROM achilles_results WHERE (achilles_results.analysis_id = 1820)) t1 JOIN ( SELECT achilles_results.id, achilles_results.analysis_id, achilles_results.stratum_1, achilles_results.stratum_2, achilles_results.stratum_3, achilles_results.stratum_4, achilles_results.stratum_5, achilles_results.count_value, achilles_results.data_source_id, achilles_results.avg_value, achilles_results.max_value, achilles_results.median_value, achilles_results.min_value, achilles_results.p10_value, achilles_results.p25_value, achilles_results.p75_value, achilles_results.p90_value, achilles_results.stdev_value FROM achilles_results WHERE (achilles_results.analysis_id = 117)) denom ON (((t1.stratum_1 = denom.stratum_1) AND (t1.id = denom.data_source_id)))) JOIN data_source source ON ((source.id = t1.id))) ORDER BY t1.table_name, ( CASE WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1 ELSE NULL::text END)::integer; visit_age_distribution SELECT source.name, source.acronym, c1.concept_name, c2.concept_name AS gender, achilles.count_value, achilles.p10_value AS p10, achilles.p25_value AS p25, achilles.median_value AS median, achilles.p75_value AS p75, achilles.p90_value AS p90 FROM (((achilles_results achilles JOIN data_source source ON ((achilles.data_source_id = source.id))) JOIN concept c1 ON ((achilles.stratum_1 = (c1.concept_id)::text))) JOIN concept c2 ON ((achilles.stratum_2 = (c2.concept_id)::text))) WHERE (achilles.analysis_id = 206) ORDER BY source.name, c1.concept_name, c2.concept_name; concept_browser_table2 SELECT source.acronym, ( ( ( ( '<a href="https://athena.ohdsi.org/search-terms/terms/' :: text || ar1.concept_id ) || '" target="_blank">' :: text ) || ar1.concept_id ) || '</a>' :: text ) AS concept_id, concept.concept_name, concept.domain_id, (ar1.rc):: integer AS rc, (ar2.drc):: integer AS drc FROM ( ( ( ( SELECT achilles_results.data_source_id, achilles_results.analysis_id, achilles_results.stratum_1 AS concept_id, achilles_results.count_value AS rc FROM achilles_results WHERE ( achilles_results.analysis_id = ANY ( ARRAY[(401):: bigint, (601):: bigint, (701):: bigint, (801):: bigint, (1801):: bigint, (2101):: bigint] ) ) ) ar1 JOIN ( SELECT ar.data_source_id, ar.analysis_id, ar.stratum_1 AS concept_id, ar.count_value AS drc FROM achilles_results ar WHERE ( ar.analysis_id = ANY ( ARRAY[(430):: bigint, (630):: bigint, (730):: bigint, (830):: bigint, (1830):: bigint, (2130):: bigint] ) ) ) ar2 ON ( ( (ar1.concept_id = ar2.concept_id) AND ( ar1.data_source_id = ar2.data_source_id ) ) ) ) JOIN data_source source ON ( (ar1.data_source_id = source.id) ) ) JOIN concept concept ON ( ( ar1.concept_id = (concept.concept_id):: text ) ) ) ORDER BY ((ar2.drc):: integer) DESC; "],["general-deprecated.html", "9.4 General [Deprecated]", " 9.4 General [Deprecated] CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either
comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Database Type and Country Filter Figure 9.1: Settings for creating filters charts These filters were designed to be used in the dashboard to filter the data based on the fields “database_type” and “country” from the table “data_source”. For the filters to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query SELECT source.name, country.country, source.database_type, source.acronym FROM public.data_source AS source INNER JOIN public.country AS country ON source.country_id=country.id Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: database_type or country Date Filter: off Instant Filtering: on Total Number of Patients Figure 9.2: Settings for creating the Total Number of Patients chart SQL query SELECT country, database_type, release_date, SUM(count_value) OVER (ORDER BY release_date ASC) FROM achilles_results JOIN data_source ON data_source_id = data_source.id JOIN country ON data_source.country_id = country.id WHERE analysis_id = 1 Chart settings Data Tab Datasource & Chart Type Visualization Type: Big Number with Trendline Time Time range: No filter Query Metrics: MAX(sum) Series: release_date Breakdowns: source Customize Tab Chart Options Big Number Font Size: Small Subheader Font Size: Tiny Network Growth by Date Figure 9.3: Settings for creating the Network Growth by Date chart SQL query SELECT source.name AS source, country.country, source.database_type, source.release_date, concepts.concept_name AS gender, achilles.count_value as count FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.country AS country ON source.country_id=country.id JOIN ( SELECT '8507' AS concept_id, 'Male' AS concept_name UNION SELECT '8532', 'Female' ) AS concepts ON achilles.stratum_1 = concept_id WHERE analysis_id = 2 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) Series: release_date Breakdowns: source Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis Reduce X ticks: on Patients per Country Figure 9.4: Settings for creating the Patients per Country chart SQL query SELECT country.country, source.database_type, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.country AS country ON source.country_id=country.id WHERE analysis_id = 1 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) Series: country Customize Tab Chart Options Legend: off Y Axis Label: Nº of Patients X Axis X Axis Label: Country Database Types per Country Figure 9.5: Settings for creating the Database Type per Country chart SQL query Same as Patients per Country query Chart settings Data Tab Datasource & Chart Type Visualization Type: Heatmap Time Time range: No filter Query X: country Y: database_type Metric: SUM(count_value) Heatmap Options Left Margin: 75 Show Percentage: off World Map Figure 9.6: Settings for creating the World Map chart SQL query SELECT name, acronym, database_type, latitude, longitude, country FROM public.data_source AS source INNER JOIN public.country AS country ON
source.country_id=country.id Chart settings Data Tab Datasource & Chart Type Visualization Type: MapBox Time Time range: No filter Query Longitude: longitude Latitude: latitude Visual Tweaks Map Style: Streets or Light or Outdoors Meta Data Figure 9.7: Settings for creating the Meta Data chart SQL query SELECT acronym, stratum_1 as "name", database_type, country, stratum_2 as "source_release_date", stratum_3 as "cdm_release_date", stratum_4 as "cdm_version", stratum_5 as "vocabulary_version" FROM achilles_results JOIN data_source ON achilles_results.data_source_id = data_source.id JOIN country ON data_source.country_id = country.id WHERE analysis_id=5000 Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: name, source_release_date, cdm_release_date, cdm_version, vocabulary_version "],["person-deprecated.html", "9.5 Person [Deprecated]", " 9.5 Person [Deprecated] Label Colors In order to obtain the colors blue and rose in the chart representing the gender distribution, add the following JSON entry to the JSON object of the JSON Metadata field on the edit dashboard page: "label_colors": { "Male": "#3366FF", "Female": "#FF3399" } CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. 
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Age at first observation - Table {#age1ObservationTable} Figure 9.9: Settings for creating the Age at First Observation Table chart SQL query SELECT source.name, source.acronym, SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10 THEN count_value END) AS "0-10", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10 AND CAST(stratum_2 AS INTEGER) < 20 THEN count_value END) AS "10-20", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20 AND CAST(stratum_2 AS INTEGER) < 30 THEN count_value END) AS "20-30", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30 AND CAST(stratum_2 AS INTEGER) < 40 THEN count_value END) AS "30-40", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40 AND CAST(stratum_2 AS INTEGER) < 50 THEN count_value END) AS "40-50", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50 AND CAST(stratum_2 AS INTEGER) < 60 THEN count_value END) AS "50-60", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60 AND CAST(stratum_2 AS INTEGER) < 70 THEN count_value END) AS "60-70", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70 AND CAST(stratum_2 AS INTEGER) < 80 THEN count_value END) AS "70-80", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80 AND CAST(stratum_2 AS INTEGER) < 90 THEN count_value END) AS "80-90", SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90 THEN count_value END) AS "90+" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 102 GROUP BY name, acronym Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: name, 0-10, 10-20, 20-30, 30-40, 40-50, 50-60, 60-70, 70-80, 80-90, 90+ Customize Tab Options Show Cell Bars: off Age at first observation - Bars {#age1ObservationBars} Figure 9.10: Settings for creating the Age at First Observation Bar chart SQL query SELECT source.name, cast(stratum_1 AS int) AS Age, count_value AS count, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 101 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count) Series: age Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Count X Axis X Axis Label: Age Reduce X ticks: on Year of Birth {#yearOfBirth} Figure 9.11: Settings for creating the Year of Birth chart SQL query SELECT source.name, source.acronym, stratum_1 AS "Birth_year", count_value AS count FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 3 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count) Series: Birth_year Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Count Extra Controls: on X Axis X Axis Label: Year Reduce X ticks: on Gender Figure 9.12: Settings for creating the Gender chart SQL query SELECT source.name, concept_name AS Gender, count_value AS Number_of_persons, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id JOIN ( SELECT '8507' AS concept_id, 'Male' AS 
concept_name UNION SELECT '8532' AS concept_id, 'Female' AS concept_name ) AS concepts ON achilles.stratum_1 = concept_id WHERE analysis_id = 2 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(Number_of_persons) Series: acronym Breakdowns: gender Contribution: on Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis Reduce X ticks: on "],["observation-period-deprecated.html", "9.6 Observation Period [Deprecated]", " 9.6 Observation Period [Deprecated] CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Number of Patients in Observation Period The Number of Patients in Observation Period plot shows the number of patients that contribute at least one day in a specific month. Figure 9.13: Settings for creating the Number of Patients in Observation Period chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') as Date, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 110 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label “Num of Patients” Series: date Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Dates Reduce X ticks: on Observation Period Start Dates Figure 9.14: Settings for creating the Observation Period Start Dates chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') AS year_month, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 111 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) with label “Patients” Series: year_month Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Year Reduce X ticks: on Observation Period End Dates Figure 9.15: Settings for creating the Observation Period End Dates chart SQL query SELECT source.name, source.acronym, to_date(stratum_1, 'YYYYMM') AS year_month, count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 112 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(count_value) with label “Patients” Series: year_month Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label: Number of Patients X Axis X Axis Label: Year Reduce X 
ticks: on "],["visit-deprecated.html", "9.7 Visit [Deprecated]", " 9.7 Visit [Deprecated] This dashboard shows the different types of visits per data source (see Visit Occurrence Table). CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Visit Type Table Figure 9.16: Settings for creating the Visit Type Table chart SQL query SELECT source.name, source.acronym, concept_name AS "Type", MAX(count_value) AS "Count" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 201 GROUP BY name, acronym, "Type" ORDER BY "Count" DESC Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: name with label “Data Source”, Type, Count Visit Types Bars Figure 9.17: Settings for creating the Visit Types bar chart SQL query SELECT source.name, source.acronym, concept_name AS "Observation", count_value FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id WHERE analysis_id = 201 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Observations Series: name Breakdowns: Observation Customize Tab Chart Options Stacked Bars: on Sort Bars: on Extra Controls: on X Axis X Axis Label: Databases Reduce X ticks: on "],["death-deprecated.html", "9.8 Death [Deprecated]", " 9.8 Death [Deprecated] CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database.
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Number of Records Figure 9.18: Settings for creating the Number of Records chart SQL query SELECT source.name, count_value, source.acronym FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id WHERE analysis_id = 501 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Count Series: name Customize Tab Chart Options Y Axis Label: Number of Patients X Axis X Axis Label: Databases Reduce X ticks: on Death By Year per Thousand People Figure 9.19: Settings for creating the Death by Year per Thousand People chart SQL query SELECT source.name, source.acronym, EXTRACT(year FROM TO_DATE(stratum_1, 'YYYYMM')) AS Date, count_value FROM public.achilles_results as achilles INNER JOIN public.data_source as source ON achilles.data_source_id=source.id WHERE analysis_id = 502 Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: MAX(count_value) with label Count Series: date Breakdowns: name Customize Tab Chart Options Stacked Bars: on Sort Bars: on Y Axis Label:Number of Patients (in thousands) X Axis X Axis Label: Years Reduce X ticks: on "],["concepts-browser-deprecated.html", "9.9 Concepts Browser [Deprecated]", " 9.9 Concepts Browser [Deprecated] The concepts browser allows you to search for concepts by name or concept_id in all the data sources you select. No exact number of patients or occurrences are provided but the magnitude of both. CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source and Domain Filters Figure 9.1: Settings for creating the Data Source and Domain filter charts For the filters to work the name of the fields to filter should match in all tables used on the charts of this dashboard. 
SQL query SELECT concept_name, domain_id, source.name AS source_name, source.acronym FROM achilles_results JOIN concept ON cast(stratum_1 AS BIGINT) = concept_id INNER JOIN public.data_source AS source ON data_source_id=source.id WHERE analysis_id in ( 201, 401, 601, 701, 801, 901, 1001, 1801, 200, 400, 600, 700, 800, 1800 ) Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: source_name or domain_id Date Filter: off Instant Filtering: on Number of Concepts Figure 9.20: Settings for creating the Number of Concepts chart SQL Query Same as Data Source and Domain filters query Chart settings Data Tab Datasource & Chart Type Visualization Type: Big Number Time Time range: No filter Query Metric: COUNT_DISTINCT(concept_name) with label Concepts Customize Tab Big Number Font Size: Small Subheader Font Size: Tiny Concept Browser Table {#conceptBrowserTable} Figure 9.21: Settings for creating the Concepts Table chart SELECT q1.concept_id AS concept_id, q1.concept_name AS concept_name, q1.domain_id, source.name AS source_name, source.acronym, sum(q1.count_value) as "Occurrence_count", sum(q1.count_person) as "Person_count", CASE WHEN sum(q1.count_value)<=10 THEN '<=10' WHEN sum(q1.count_value)<=100 THEN '11-10ˆ2' WHEN sum(q1.count_value)<=1000 THEN '10ˆ2-10ˆ3' WHEN sum(q1.count_value)<=10000 THEN '10ˆ3-10ˆ4' WHEN sum(q1.count_value)<=100000 THEN '10ˆ4-10ˆ5' WHEN sum(q1.count_value)<=1000000 THEN '10ˆ5-10ˆ6' ELSE '>10ˆ6' END as "magnitude_occurrences", CASE WHEN sum(q1.count_person)<=10 THEN '<=10' WHEN sum(q1.count_person)<=100 THEN '11-10ˆ2' WHEN sum(q1.count_person)<=1000 THEN '10ˆ2-10ˆ3' WHEN sum(q1.count_person)<=10000 THEN '10ˆ3-10ˆ4' WHEN sum(q1.count_person)<=100000 THEN '10ˆ4-10ˆ5' WHEN sum(q1.count_person)<=1000000 THEN '10ˆ5-10ˆ6' ELSE '>10ˆ6' END AS "magnitude_persons" FROM (SELECT analysis_id, stratum_1 concept_id, data_source_id, concept_name, domain_id, count_value, 0 as count_person FROM achilles_results JOIN concept ON cast(stratum_1 AS BIGINT)=concept_id WHERE analysis_id in ( 201, 301, 401, 601, 701, 801, 901, 1001, 1801 ) UNION (SELECT analysis_id, stratum_1 concept_id, data_source_id, concept_name, domain_id, 0 as count_value, sum(count_value) as count_person FROM achilles_results JOIN concept ON cast(stratum_1 as BIGINT)=concept_id WHERE analysis_id in ( 202, 401, 601, 701, 801, 901, 1001, 1801 ) GROUP BY analysis_id, stratum_1, data_source_id, concept_name, domain_id )) as q1 INNER JOIN public.data_source AS source ON q1.data_source_id=source.id GROUP BY q1.concept_id, q1.concept_name, q1.domain_id, source.name, acronym ORDER BY "Person_count" desc Chart settings Data Tab Datasource & Chart Type Visualization Type: Table Time Time range: No filter Query Query Mode: Raw Records Columns: source_name, concept_id, concept_name, domain_id, magnitude_persons, magnitude_occurrences Customize Tab Options Table Timestamps Format: %Y-%m-%d %H:%M:%S | 2019-01-14 01:32:10 Page Length: 50 Search Box: on Emit Filter Events: on "],["provenance-deprecated.html", "9.10 Provenance [Deprecated]", " 9.10 Provenance [Deprecated] This Dashboard shows the provenance of the data in the different data domains. 
CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Condition & Drug & Procedure & Device & Measurement & Observation Types {#dataProvenanceCharts} Figure 9.22: Settings for creating the Condition, Drug, Procedure, Device, Measurement and Observation charts SQL query All 6 charts use the same sql query. SELECT source.name, source.acronym, CASE WHEN analysis_id = 405 THEN 'Condition' WHEN analysis_id = 605 THEN 'Procedure' WHEN analysis_id = 705 THEN 'Drug' WHEN analysis_id = 805 THEN 'Observation' WHEN analysis_id = 1805 THEN 'Measurement' WHEN analysis_id = 2105 THEN 'Device' ELSE 'Other' END AS domain_name, concept_name, SUM(count_value) AS num_records FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN public.concept AS c1 ON CAST(stratum_2 AS BIGINT) = concept_id WHERE analysis_id IN (405,605,705,805,1805,2105) GROUP BY source.name, source.acronym, concept_name, CASE WHEN analysis_id = 405 THEN 'Condition' WHEN analysis_id = 605 THEN 'Procedure' WHEN analysis_id = 705 THEN 'Drug' WHEN analysis_id = 805 THEN 'Observation' WHEN analysis_id = 1805 THEN 'Measurement' WHEN analysis_id = 2105 THEN 'Device' ELSE 'Other' END Chart settings Data Tab Datasource & Chart Type Visualization Type: Bar Chart Time Time range: No filter Query Metrics: SUM(num_records) with label Nr Records Filters: domain_name=Condition or domain_name=Drug or domain_name=Procedure or domain_name=Device or domain_name=Measurement or domain_name=Observation Series: name Breakdowns: concept_name Contribution: on Customize Tab Chart Options Stacked Bars: on "],["data-domains-deprecated.html", "9.11 Data Domains [Deprecated]", " 9.11 Data Domains [Deprecated] CSS To hide the dashboard header insert the following css code to the CSS field on the edit page: .dashboard > div:not(.dashboard-content) { /* dashboard header */ display: none; } With this every time you want to edit the dashboard layout you have to either comment the CSS inserted or remove it so the “Edit Dashboard” button can show again. Data Source Filter Figure 9.8: Settings for creating the Data Source filter chart For the filter to work the name of the fields to filter should match in all tables used on the charts of this dashboard. SQL query No SQL query, use the sql table data_source of the achilles database. 
Chart settings Data Tab Datasource & Chart Type Visualization Type: Filter Box Time Time range: No filter Filters Configuration Filters: name Date Filter: off Instant Filtering: on Average Number of Records per Person Figure 9.23: Settings for creating the Data Source filter chart SQL query SELECT source.name, source.acronym, CASE WHEN analysis_id = 201 THEN 'Visit' WHEN analysis_id = 401 THEN 'Condition' WHEN analysis_id = 501 THEN 'Death' WHEN analysis_id = 601 THEN 'Procedure' WHEN analysis_id = 701 THEN 'Drug Exposure' WHEN analysis_id = 801 THEN 'Observation' WHEN analysis_id = 1801 THEN 'Measurement' WHEN analysis_id = 2101 THEN 'Device' WHEN analysis_id = 2201 THEN 'Note' END AS Data_Domain, SUM(count_value) /AVG(num_persons) AS "records_per_person" FROM public.achilles_results AS achilles INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id INNER JOIN ( SELECT data_source_id , count_value as num_persons FROM achilles_results WHERE analysis_id = 1) counts ON achilles.data_source_id = counts.data_source_id GROUP BY analysis_id, source.name, source.acronym HAVING analysis_id IN ( 201, 401, 501, 601, 701, 801, 1801, 2101, 2201 ) Chart settings Data Tab Datasource & Chart Type Visualization Type: Heatmap Time Time range: No filter Query X: name Y: data_domain Metric: AVG(records_per_person) with a label avg records per person Row limit: None Heatmap Options Left Margin: 100 Show Percentage: off "],["404.html", "Page not found", " Page not found The page you requested cannot be found (perhaps it was moved or renamed). You may want to try searching to find the page's new location, or use the table of contents to find the page you are looking for. "]]
diff --git a/docs/src/02-installation.Rmd b/docs/src/02-installation.Rmd
index 952e5806..7da06aef 100644
--- a/docs/src/02-installation.Rmd
+++ b/docs/src/02-installation.Rmd
@@ -14,9 +14,9 @@ Currently, we use docker to deploy our environment
1. Clone the repository with the command `git clone --recurse-submodules https://github.com/EHDEN/NetworkDashboards`. If you already cloned the repository without the `--recurse-submodules` option, run `git submodule update --init` to fetch the superset submodule.
-2. Create a `.env` file on the `docker` directory, using `.env-example` as a reference, setting all necessary environment variables (`SUPERSET\_MAPBOX\_API\_KEY` and `DASHBOARD\_VIEWER\_SECRET\_KEY`).
+2. Create a `.env` file on the `docker` directory, using `.env-example` as a reference, setting all necessary environment variables (`SUPERSET_MAPBOX_API_KEY` and `DASHBOARD_VIEWER_SECRET_KEY`).
- 2.1 If you will use this application as a third-party application and will iframe it, set the variable `SINGLE\_APPLICATION\_MODE` to `False` and define the host of the main application on the variable `MAIN\_APPLICATION\_HOST`. Also make sure to add this last host to the list of `ALLOWED\_HOSTS`.
+ 2.1 If you will use this application as a third-party application and will iframe it, set the variable `SINGLE_APPLICATION_MODE` to `False` and define the host of the main application on the variable `MAIN_APPLICATION_HOST`. Also make sure to add this last host to the list of `ALLOWED_HOSTS`.
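   To make step 2 concrete, here is a minimal sketch of what the relevant `.env` entries could look like for the third-party setup. The variable names come from the text above; the values and the `ALLOWED_HOSTS` separator are assumptions, so check `.env-example` for the exact expected format:

   ```shell
   # illustrative values only -- copy .env-example and adjust
   SUPERSET_MAPBOX_API_KEY=pk.your-mapbox-key        # placeholder, not a real key
   DASHBOARD_VIEWER_SECRET_KEY=a-long-random-string  # generate your own
   SINGLE_APPLICATION_MODE=False
   MAIN_APPLICATION_HOST=main-app.example.com
   ALLOWED_HOSTS=localhost,main-app.example.com      # separator format assumed
   ```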
### Dashboard Viewer setup {-}
@@ -107,7 +107,7 @@ The concepts table is not in the repository due to its dimension, therefore we u
4. By default Superset's admin user credentials are admin/admin.
It is recommended that you change the password if you will use this in a production environment.
-5. To any anonymous user view dashboards, add the following:
+5. To allow any anonymous user to view dashboards, add the following permissions to the public role:
- all datasource access on all_datasource_access
- can csrf token on Superset
@@ -117,6 +117,8 @@ The concepts table is not in the repository due to its dimension, therefore we u
- can read on CssTemplate
- can read on Dashboard
+6. For each dashboard you want anonymous users to be able to access, on the dashboard list page click edit (the pencil on the right) and add the "Admin" and "Public" roles to the "Roles with access" field.
+
### Dummy data {-}
On a fresh installation, there is no achilles_results data, so Superset's dashboards will display "No results". On the root of this repository, you can find the `demo` directory, where we have an ACHILLES results file with synthetic data that you can upload to a data source on the uploader app of the dashboard viewer (http://localhost/uploader). If you wish to compare multiple data sources, the `demo` directory also contains a python script that generates new ACHILLES results files, producing random count values based on the ranges of values for each set of analysis_id and stratums present on a base ACHILLES results file. So, from the one ACHILLES results file we provided, you can have multiple data sources with different data.
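To illustrate the generator's idea, here is a minimal sketch (not the actual `demo` script, whose layout and column names may differ); it assumes a CSV with a `count_value` column and draws a new random count per row, based on the row's base value:

```python
import csv
import random

def generate_variant(base_path, out_path, seed=None):
    """Sketch: produce a new ACHILLES results file with randomized counts.

    Assumes a CSV with a count_value column; the real demo script derives
    ranges per (analysis_id, stratums) set, which this sketch approximates
    by sampling around each row's base count_value.
    """
    rng = random.Random(seed)
    with open(base_path, newline="") as src:
        rows = list(csv.DictReader(src))

    with open(out_path, "w", newline="") as dst:
        writer = csv.DictWriter(dst, fieldnames=rows[0].keys())
        writer.writeheader()
        for row in rows:
            base = int(float(row["count_value"]))
            # sampling window is an assumption, not the demo script's rule
            row["count_value"] = str(rng.randint(max(0, base // 2), base * 2))
            writer.writerow(row)
```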
diff --git a/docs/src/03-general.Rmd b/docs/src/03-general.Rmd
index 4a664a1e..e6acd1c0 100644
--- a/docs/src/03-general.Rmd
+++ b/docs/src/03-general.Rmd
@@ -41,7 +41,8 @@ SELECT source.name,
source.database_type,
source.acronym
FROM public.data_source AS source
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
```
#### Chart settings {-}
@@ -109,8 +110,10 @@ SELECT source.name AS source,
concepts.concept_name AS gender,
achilles.count_value as count
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
JOIN (
SELECT '8507' AS concept_id, 'Male' AS concept_name
UNION
@@ -145,15 +148,17 @@ WHERE analysis_id = 2
knitr::include_graphics("images/03-general/04-patients_per_country.png")
```
-#### SQL query {#patientsPerCountryQuery} {-}
+#### SQL query {-#patientsPerCountryQuery}
```sql
SELECT country.country,
source.database_type,
count_value
-FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.country AS country ON source.country_id=country.id
+FROM public.achilles_results AS achilles
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
WHERE analysis_id = 1
```
@@ -217,7 +222,8 @@ SELECT name,
longitude,
country
FROM public.data_source AS source
-INNER JOIN public.country AS country ON source.country_id=country.id
+INNER JOIN public.country AS country
+ ON source.country_id=country.id
```
#### Chart settings {-}
@@ -252,7 +258,8 @@ SELECT
stratum_4 as "cdm_version",
stratum_5 as "vocabulary_version"
FROM achilles_results
-JOIN data_source ON achilles_results.data_source_id = data_source.id
+JOIN data_source
+ ON achilles_results.data_source_id = data_source.id
JOIN country ON data_source.country_id = country.id
WHERE analysis_id=5000
```
diff --git a/docs/src/04-person.Rmd b/docs/src/04-person.Rmd
index ee7a3456..c859ee33 100644
--- a/docs/src/04-person.Rmd
+++ b/docs/src/04-person.Rmd
@@ -69,19 +69,39 @@ knitr::include_graphics("images/04-person/02-age_at_first_observation_table.png"
```sql
SELECT source.name,
source.acronym,
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10 THEN count_value END) AS "0-10",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10 AND CAST(stratum_2 AS INTEGER) < 20 THEN count_value END) AS "10-20",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20 AND CAST(stratum_2 AS INTEGER) < 30 THEN count_value END) AS "20-30",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30 AND CAST(stratum_2 AS INTEGER) < 40 THEN count_value END) AS "30-40",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40 AND CAST(stratum_2 AS INTEGER) < 50 THEN count_value END) AS "40-50",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50 AND CAST(stratum_2 AS INTEGER) < 60 THEN count_value END) AS "50-60",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60 AND CAST(stratum_2 AS INTEGER) < 70 THEN count_value END) AS "60-70",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70 AND CAST(stratum_2 AS INTEGER) < 80 THEN count_value END) AS "70-80",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80 AND CAST(stratum_2 AS INTEGER) < 90 THEN count_value END) AS "80-90",
- SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90 THEN count_value END) AS "90+"
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) < 10
+ THEN count_value END) AS "0-10",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 10
+ AND CAST(stratum_2 AS INTEGER) < 20
+ THEN count_value END) AS "10-20",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 20
+ AND CAST(stratum_2 AS INTEGER) < 30
+ THEN count_value END) AS "20-30",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 30
+ AND CAST(stratum_2 AS INTEGER) < 40
+ THEN count_value END) AS "30-40",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 40
+ AND CAST(stratum_2 AS INTEGER) < 50
+ THEN count_value END) AS "40-50",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 50
+ AND CAST(stratum_2 AS INTEGER) < 60
+ THEN count_value END) AS "50-60",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 60
+ AND CAST(stratum_2 AS INTEGER) < 70
+ THEN count_value END) AS "60-70",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 70
+ AND CAST(stratum_2 AS INTEGER) < 80
+ THEN count_value END) AS "70-80",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 80
+ AND CAST(stratum_2 AS INTEGER) < 90
+ THEN count_value END) AS "80-90",
+ SUM(CASE WHEN CAST(stratum_2 AS INTEGER) >= 90
+ THEN count_value END) AS "90+"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 102
GROUP BY name, acronym
```
@@ -114,7 +134,8 @@ SELECT source.name,
count_value AS count,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 101
```
@@ -152,7 +173,8 @@ SELECT source.name,
stratum_1 AS "Birth_year",
count_value AS count
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 3
```
@@ -192,7 +214,8 @@ SELECT source.name,
count_value AS Number_of_persons,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
JOIN (
SELECT '8507' AS concept_id, 'Male' AS concept_name
UNION
diff --git a/docs/src/05-observation_period.Rmd b/docs/src/05-observation_period.Rmd
index a9bec3c8..cc112e2a 100644
--- a/docs/src/05-observation_period.Rmd
+++ b/docs/src/05-observation_period.Rmd
@@ -48,7 +48,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Number of Patients in Observation Period {#numInObservationPeriod} {-}
+### Number of Patients in Observation Period {-#numInObservationPeriod}
The Number of Patients in Observation Period plot shows the number of patients that contribute at least one day in a specific month.
@@ -64,7 +64,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') as Date,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 110
```
@@ -102,7 +103,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') AS year_month,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 111
```
@@ -140,7 +142,8 @@ SELECT source.name,
to_date(stratum_1, 'YYYYMM') AS year_month,
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 112
```
diff --git a/docs/src/06-visit.Rmd b/docs/src/06-visit.Rmd
index fb195841..3775fc13 100644
--- a/docs/src/06-visit.Rmd
+++ b/docs/src/06-visit.Rmd
@@ -50,7 +50,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Visit Type Table {#visitTypeTable} {-}
+### Visit Type Table {-#visitTypeTable}
```{r visitTypeTable, fig.cap="Settings for creating the Visit Type Table chart",echo=FALSE, out.width="100%"}
knitr::include_graphics("images/06-visit/02-visit_types_table.png")
@@ -64,8 +64,10 @@ SELECT source.name,
concept_name AS "Type",
MAX(count_value) AS "Count"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 201
GROUP BY name, acronym, "Type"
ORDER BY "Count" DESC
@@ -96,8 +98,10 @@ SELECT source.name,
concept_name AS "Observation",
count_value
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept ON CAST(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept
+ ON CAST(stratum_1 AS BIGINT) = concept_id
WHERE analysis_id = 201
```
diff --git a/docs/src/07-death.Rmd b/docs/src/07-death.Rmd
index 0269e2fa..9205e39c 100644
--- a/docs/src/07-death.Rmd
+++ b/docs/src/07-death.Rmd
@@ -61,7 +61,8 @@ SELECT source.name,
count_value,
source.acronym
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 501
```
@@ -96,7 +97,8 @@ SELECT source.name,
EXTRACT(year FROM TO_DATE(stratum_1, 'YYYYMM')) AS Date,
count_value
FROM public.achilles_results as achilles
-INNER JOIN public.data_source as source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source as source
+ ON achilles.data_source_id=source.id
WHERE analysis_id = 502
```
diff --git a/docs/src/08-concepts_browser.Rmd b/docs/src/08-concepts_browser.Rmd
index 35a8d84f..58257ce5 100644
--- a/docs/src/08-concepts_browser.Rmd
+++ b/docs/src/08-concepts_browser.Rmd
@@ -38,10 +38,15 @@ SELECT concept_name,
domain_id,
source.name AS source_name,
source.acronym
-FROM achilles_results
-JOIN concept ON cast(stratum_1 AS BIGINT) = concept_id
-INNER JOIN public.data_source AS source ON data_source_id=source.id
-WHERE analysis_id in (201, 401, 601, 701, 801, 901, 1001, 1801, 200, 400, 600, 700, 800, 1800)
+FROM achilles_results JOIN concept
+ ON cast(stratum_1 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON data_source_id=source.id
+WHERE
+ analysis_id in (
+ 201, 401, 601, 701, 801, 901, 1001, 1801,
+ 200, 400, 600, 700, 800, 1800
+ )
```
#### Chart settings {-}
@@ -97,21 +102,33 @@ SELECT
sum(q1.count_value) as "Occurrence_count",
sum(q1.count_person) as "Person_count",
CASE
- WHEN sum(q1.count_value)<=10 THEN '<=10'
- WHEN sum(q1.count_value)<=100 THEN '11-10ˆ2'
- WHEN sum(q1.count_value)<=1000 THEN '10ˆ2-10ˆ3'
- WHEN sum(q1.count_value)<=10000 THEN '10ˆ3-10ˆ4'
- WHEN sum(q1.count_value)<=100000 THEN '10ˆ4-10ˆ5'
- WHEN sum(q1.count_value)<=1000000 THEN '10ˆ5-10ˆ6'
+ WHEN sum(q1.count_value)<=10
+ THEN '<=10'
+ WHEN sum(q1.count_value)<=100
+ THEN '11-10ˆ2'
+ WHEN sum(q1.count_value)<=1000
+ THEN '10ˆ2-10ˆ3'
+ WHEN sum(q1.count_value)<=10000
+ THEN '10ˆ3-10ˆ4'
+ WHEN sum(q1.count_value)<=100000
+ THEN '10ˆ4-10ˆ5'
+ WHEN sum(q1.count_value)<=1000000
+ THEN '10ˆ5-10ˆ6'
ELSE '>10ˆ6'
END as "magnitude_occurrences",
CASE
- WHEN sum(q1.count_person)<=10 THEN '<=10'
- WHEN sum(q1.count_person)<=100 THEN '11-10ˆ2'
- WHEN sum(q1.count_person)<=1000 THEN '10ˆ2-10ˆ3'
- WHEN sum(q1.count_person)<=10000 THEN '10ˆ3-10ˆ4'
- WHEN sum(q1.count_person)<=100000 THEN '10ˆ4-10ˆ5'
- WHEN sum(q1.count_person)<=1000000 THEN '10ˆ5-10ˆ6'
+ WHEN sum(q1.count_person)<=10
+ THEN '<=10'
+ WHEN sum(q1.count_person)<=100
+ THEN '11-10ˆ2'
+ WHEN sum(q1.count_person)<=1000
+ THEN '10ˆ2-10ˆ3'
+ WHEN sum(q1.count_person)<=10000
+ THEN '10ˆ3-10ˆ4'
+ WHEN sum(q1.count_person)<=100000
+ THEN '10ˆ4-10ˆ5'
+ WHEN sum(q1.count_person)<=1000000
+ THEN '10ˆ5-10ˆ6'
ELSE '>10ˆ6'
END AS "magnitude_persons"
FROM (SELECT analysis_id,
@@ -121,8 +138,13 @@ FROM (SELECT analysis_id,
domain_id,
count_value, 0 as count_person
FROM achilles_results
- JOIN concept ON cast(stratum_1 AS BIGINT)=concept_id
- WHERE analysis_id in (201, 301, 401, 601, 701, 801, 901, 1001, 1801)
+ JOIN concept
+ ON cast(stratum_1 AS BIGINT)=concept_id
+ WHERE
+ analysis_id in (
+ 201, 301, 401, 601, 701, 801, 901, 1001,
+ 1801
+ )
UNION (SELECT analysis_id,
stratum_1 concept_id,
data_source_id,
@@ -131,11 +153,27 @@ FROM (SELECT analysis_id,
0 as count_value,
sum(count_value) as count_person
FROM achilles_results
- JOIN concept on cast(stratum_1 as BIGINT)=concept_id
- WHERE analysis_id in (202, 401, 601, 701, 801, 901, 1001, 1801)
- GROUP BY analysis_id,stratum_1,data_source_id,concept_name,domain_id) ) as q1
- INNER JOIN public.data_source AS source ON q1.data_source_id=source.id
-GROUP BY q1.concept_id,q1.concept_name,q1.domain_id,source.name, acronym
+ JOIN concept
+ ON cast(stratum_1 as BIGINT)=concept_id
+ WHERE
+ analysis_id in (
+ 202, 401, 601, 701, 801, 901, 1001, 1801
+ )
+ GROUP BY
+ analysis_id,
+ stratum_1,
+ data_source_id,
+ concept_name,
+ domain_id
+ )) as q1
+ INNER JOIN public.data_source AS source
+ ON q1.data_source_id=source.id
+GROUP BY
+ q1.concept_id,
+ q1.concept_name,
+ q1.domain_id,
+ source.name,
+ acronym
ORDER BY "Person_count" desc
```
diff --git a/docs/src/09-provenance.Rmd b/docs/src/09-provenance.Rmd
index a70680dc..4f3f84c0 100644
--- a/docs/src/09-provenance.Rmd
+++ b/docs/src/09-provenance.Rmd
@@ -61,18 +61,26 @@ All 6 charts use the same sql query.
```sql
SELECT source.name,
source.acronym,
- CASE WHEN analysis_id = 405 THEN 'Condition'
- WHEN analysis_id = 605 THEN 'Procedure'
- WHEN analysis_id = 705 THEN 'Drug'
- WHEN analysis_id = 805 THEN 'Observation'
- WHEN analysis_id = 1805 THEN 'Measurement'
- WHEN analysis_id = 2105 THEN 'Device'
+ CASE WHEN analysis_id = 405
+ THEN 'Condition'
+ WHEN analysis_id = 605
+ THEN 'Procedure'
+ WHEN analysis_id = 705
+ THEN 'Drug'
+ WHEN analysis_id = 805
+ THEN 'Observation'
+ WHEN analysis_id = 1805
+ THEN 'Measurement'
+ WHEN analysis_id = 2105
+ THEN 'Device'
ELSE 'Other' END AS domain_name,
concept_name,
SUM(count_value) AS num_records
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
-INNER JOIN public.concept AS c1 ON CAST(stratum_2 AS BIGINT) = concept_id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
+INNER JOIN public.concept AS c1
+ ON CAST(stratum_2 AS BIGINT) = concept_id
WHERE analysis_id IN (405,605,705,805,1805,2105)
GROUP BY source.name, source.acronym, concept_name,
CASE WHEN analysis_id = 405 THEN 'Condition'
diff --git a/docs/src/10-data_domains.Rmd b/docs/src/10-data_domains.Rmd
index 73f6bc53..8e3d312a 100644
--- a/docs/src/10-data_domains.Rmd
+++ b/docs/src/10-data_domains.Rmd
@@ -48,7 +48,7 @@ No SQL query, use the sql table `data_source` of the `achilles` database.
- Date Filter: off
- Instant Filtering: on
-### Average Number of Records per Person {#avgRecordsPerPerson} {-}
+### Average Number of Records per Person {-#avgRecordsPerPerson}
```{r , fig.cap="Settings for creating the Data Source filter chart",echo=FALSE, out.width="100%"}
knitr::include_graphics("images/10-data_domain/02-avg_records_per_person.png")
@@ -61,25 +61,41 @@ SELECT
source.name,
source.acronym,
CASE
- WHEN analysis_id = 201 THEN 'Visit'
- WHEN analysis_id = 401 THEN 'Condition'
- WHEN analysis_id = 501 THEN 'Death'
- WHEN analysis_id = 601 THEN 'Procedure'
- WHEN analysis_id = 701 THEN 'Drug Exposure'
- WHEN analysis_id = 801 THEN 'Observation'
- WHEN analysis_id = 1801 THEN 'Measurement'
- WHEN analysis_id = 2101 THEN 'Device'
- WHEN analysis_id = 2201 THEN 'Note'
+ WHEN analysis_id = 201
+ THEN 'Visit'
+ WHEN analysis_id = 401
+ THEN 'Condition'
+ WHEN analysis_id = 501
+ THEN 'Death'
+ WHEN analysis_id = 601
+ THEN 'Procedure'
+ WHEN analysis_id = 701
+ THEN 'Drug Exposure'
+ WHEN analysis_id = 801
+ THEN 'Observation'
+ WHEN analysis_id = 1801
+ THEN 'Measurement'
+ WHEN analysis_id = 2101
+ THEN 'Device'
+ WHEN analysis_id = 2201
+ THEN 'Note'
END AS Data_Domain,
- SUM(count_value) /AVG(num_persons) AS "records_per_person"
+ SUM(count_value) /AVG(num_persons)
+ AS "records_per_person"
FROM public.achilles_results AS achilles
-INNER JOIN public.data_source AS source ON achilles.data_source_id=source.id
+INNER JOIN public.data_source AS source
+ ON achilles.data_source_id=source.id
INNER JOIN (
SELECT data_source_id , count_value as num_persons
FROM achilles_results
- WHERE analysis_id = 1) counts ON achilles.data_source_id = counts.data_source_id
+ WHERE analysis_id = 1) counts
+ ON achilles.data_source_id = counts.data_source_id
GROUP BY analysis_id, source.name, source.acronym
-HAVING analysis_id IN (201, 401, 501, 601, 701, 801, 1801, 2101, 2201)
+HAVING
+ analysis_id IN (
+ 201, 401, 501, 601, 701, 801, 1801, 2101,
+ 2201
+ )
```
#### Chart settings {-}
diff --git a/docs/src/DataNetworkDashboards.rds b/docs/src/DataNetworkDashboards.rds
index 0084ee79..16311c72 100644
Binary files a/docs/src/DataNetworkDashboards.rds and b/docs/src/DataNetworkDashboards.rds differ
diff --git a/docs/src/backups.Rmd b/docs/src/backups.Rmd
index 82c6c507..94914c9f 100644
--- a/docs/src/backups.Rmd
+++ b/docs/src/backups.Rmd
@@ -2,7 +2,7 @@
1. Create a credentials file (the structure of the file depends on the target cloud server)
-2. Create a `.dashboards_backups.conf` file under your home directory (variable `$HOME`) using `dashboards_backups.conf.example` as base, setting the appropriate value for the several variables.
+2. Create a `backups.conf` file under the `backups` directory, using `backups.conf.example` as a base and setting appropriate values for the several variables.
For variables associated with files and directories always use *absolute* paths.
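As an illustration only (the authoritative template is `backups.conf.example`), a filled-in `backups.conf` could look like this; variable names besides `TMP_DIRECTORY` are assumptions about what the backup script consumes:

```shell
# illustrative backups.conf -- see backups.conf.example for the real template
RUN=1                                  # assumed: set to 0 to skip scheduled runs
TMP_DIRECTORY=/tmp                     # absolute path; also used by restore.sh
APP_NAME=dashboards                    # assumed backup file prefix
SERVER=dropbox                         # assumed target cloud server
CREDENTIALS_FILE_PATH=/home/user/.backup_credentials
```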
@@ -50,56 +50,16 @@
2. Add entry `0 3 * * * $HOME/NetworkDashboards/backups/backup.sh` (The path to the backup script might be different)
-## Restore
+### Restore {-}
-1. Select the compressed backup you want to restore and decompress it:
-
- `tar -xJf BACKUP_FILE.tar.xz`.
+1. Select the compressed backup you want to restore.
-2. 1. **Redis**
-
- 1. Make sure the redis docker container is down.
-
- 2. (Re)place the file `dump.rdb` on the redis volume by the file `redis.rdb`. By default the redis volume is located where this repository was cloned on the directory `docker/volumes/redis`.
-
- 3. Change its permissions, owner and group:
-
- ```shell
- chmod 0644 docker/volumes/redis/dump.rdb
- sudo chown -R 999:999 docker/volumes/redis
- ```
-
- 2. **Postgres**
-
- 1. Make sure all containers that make changes on the database are stopped.
-
- 2. Copy the file `postgres_backup.sql` into the postgres container
-
- `docker cp postgres.sql [CONTAINER_ID]:/tmp`.
-
- 5. Execute the backup script:
-
- `docker exec -u root dashboard_viewer_postgres_1 psql -f /tmp/postgres_backup.sql -U \$POSTGRES_USER -d \$POSTGRES_DB`.
-
- 3. **Media Files** If you have a volume pointing to where the media files are stored, replace all files with the ones present on the downloaded backup file. Else:
-
- 1. Bring the dashoard container up `docker-compose up -d dashboard`
-
- 2. Enter in the container `docker exec -it [CONTAINER_ID] bash`
-
- 3. If you don't know where the media files are stored you can check the value of the MEDIA_ROOT variable
-
- 1. `python manage.py shell`
-
- 2. `from django.conf import settings`
-
- 3. `print(settings.MEDIA_ROOT)`
-
- 4. Remove the entire MEDIA_ROOT directory and exit the container
-
- 5. Copy the media directory present on the backup file to the catalogue container `docker cp -a collected-media [CONTAINER_ID]:[MEDIA_ROOT_PARENT_PATH]`
+2. Make sure that all the environment variables are the same as the ones that were used for the chosen backup file.
+   Additionally, the `backups.conf` file must also be set up, since the `TMP_DIRECTORY` variable will be used.
+
+3. Run the `backups/restore.sh` script.
-## Useful stuff
+### Useful stuff {-}
- How to create a shared link to a dropbox directory using its Python API:
@@ -113,8 +73,14 @@
# create a shared link for a directory
from dropbox.sharing import SharedLinkSettings
- sharing_settings = SharedLinkSettings(require_password=True, link_password=DIRECTORY_PASSWORD)
- d.sharing_create_shared_link_with_settings(DIRECTORY_PATH, sharing_settings)
+ sharing_settings = SharedLinkSettings(
+ require_password=True,
+ link_password=DIRECTORY_PASSWORD,
+ )
+ d.sharing_create_shared_link_with_settings(
+ DIRECTORY_PATH,
+ sharing_settings,
+ )
# get all links
for link in d.sharing_get_shared_links().links:
diff --git a/docs/src/code-documentation.Rmd b/docs/src/code-documentation.Rmd
index 896a05c4..19fb782f 100644
--- a/docs/src/code-documentation.Rmd
+++ b/docs/src/code-documentation.Rmd
@@ -42,7 +42,7 @@ Once again to avoid timeouts, such operations are executed on a background task.
Currently, this app is not being used and the URL mapping was deleted.
To use it again, uncomment the tabsManager [line](https://github.com/EHDEN/NetworkDashboards/blob/master/dashboard_viewer/dashboard_viewer/urls.py#L29) in the dashboard_viewer/dashboard_viewer/urls.py file.
-Then you can access the tabs page through the `[BASE_RUL]/tabs/` URL.
+Then you can access the tabs page through the `[BASE_URL]/tabs/` URL.
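As a rough sketch of what that re-enabled mapping looks like (the authoritative line is the one linked above; the route below just follows standard Django URL configuration):

```python
# dashboard_viewer/dashboard_viewer/urls.py (sketch, names assumed)
from django.urls import include, path

urlpatterns = [
    # ... other routes ...
    path("tabs/", include("tabsManager.urls")),  # uncomment to re-enable the tabs app
]
```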
Views
diff --git a/docs/src/development.Rmd b/docs/src/development.Rmd
index df5ecc67..7aca10fd 100644
--- a/docs/src/development.Rmd
+++ b/docs/src/development.Rmd
@@ -33,57 +33,31 @@
### Superset {-}
-Currently, we have a custom chart plugin on our superset installation which doesn't allow us to use superset's pre-built images available on their docker hub, since we have to call npm's build procedures on the front-end code.
-To build our custom docker image we used superset's [Dockerfile](https://github.com/apache/superset/blob/1.0.1/Dockerfile) as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
-Also, to make Superset import our custom chart plugins, some changes have to be made to the [superset-frontend/src/visualizations/presets/MainPreset.js](https://github.com/apache/superset/blob/1.0.1/superset-frontend/src/visualizations/presets/MainPreset.js) file.
+Currently, we have made some modifications to the box plot visualization in our superset installation, which prevents us from using superset's pre-built images available on their docker hub, since we have to call npm's build procedures on the front-end code.
+To build our custom docker image we used superset's [Dockerfile](https://github.com/apache/superset/blob/1.5.0/Dockerfile) as a base, where we removed the Dev section and added some code to install our chart plugins before building the front-end code.
-The changes made to the Dockerfile to install the chart plugins are in [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L44-L63) area:
+The changes made to the Dockerfile to install the chart plugins are in [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L47-L49) area:
-1. L44: First we copy the `superset/plugins` directory into the container, which contains all the extra and custom chart plugins.
-2. L48-51: Then we iterate over the chart plugins and execute `npm install ...` on each of them.
- This will make changes to both the package.json and package-lock.json files and for that, we copy them into a temporary directory `package_json_files`.
-3. L54: Then all superset's front-end code is copied into the container, which will override the package*.json files.
-4. L56: After this, we copy our custom MainPresets.js file.
-5. L60-L63: Finally, we replace the package*.json files with the ones that we saved earlier and then run the npm build command.
+1. L46: Replace some box plot files with ours;
+2. L47: Superset's original version of the controlPanel.ts file is a `.ts` version, whereas ours is a `.tsx`. Because of that, we have to remove the `.ts` version to properly override this file.
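For orientation, those two Dockerfile steps could look roughly like the following; the exact paths and file list are assumptions, and the authoritative version lives in `docker/superset/Dockerfile`:

```dockerfile
# L46 (sketch): copy our modified box plot sources over the plugin's originals
COPY ./box-plot-overrides/ /app/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/
# L47 (sketch): our controlPanel is a .tsx, so the original .ts must be removed
RUN rm /app/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/controlPanel.ts
```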
#### Update Superset {-}
1. `cd` into superset's submodule directory.
-2. Get the latest tags: `git fetch`.
+2. Get the latest tags: `git fetch -t`.
3. Check out the new desired release tag.
4. Check if there are any changes made to superset's Dockerfile (on the root of the repository for the current latest release), adapt them, and insert them into our custom Dockerfile under the `docker/superset` directory.
-5. Check if there are any changes made to superset's `superset-frontend/src/visualizations/presets/MainPreset.js` file.
- You can use the script `mainpreset_has_changes.py` under the `plugins` directory to check that.
- Apply the new changes, if any, and remember to keep our chart plugins imported and registered (Currently we only have the *Box plot* plugin).
-
-6. If the version of the frontend package `@superset-ui/plugin-chart-echarts` changed it's necessary to update our box plot plugin.
- Follow the instructions present [here](https://github.com/EHDEN/NetworkDashboards/tree/master/superset/plugins/plugins/plugin-chart-box-plot#how-to-update), also take into account the instruction of the next section.
+6. If the version of the plugin package `plugin-chart-echarts` changed, it's necessary to update our box plot plugin. If it is greater than 0.18.25, go to the commit history (`https://github.com/apache/superset/commits/[RELEASE-TAG]/superset-frontend/plugins/plugin-chart-echarts`) of the plugin-chart-echarts plugin and update to the most recent commit, applying their changes to the files in the `superset/box-plot-overrides` directory. A fast way to check the changes done between two commits: `git diff [old_commit_hash] [recent_commit_hash] -- superset-frontend/plugins/plugin-chart-echarts`
#### Chart Plugin Development {-}
-Instructions on how you can set up your development environment to develop on a custom superset chart plugin:
-
-1. Clone the [superset](https://github.com/apache/superset) repository.
- **IMPORTANT NOTE**: Since we build the superset's docker image using the existing superset's submodule, it's better not to use it to develop the plugins.
- If you decide to use it anyways, remember [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L54) and [this](https://github.com/EHDEN/NetworkDashboards/blob/master/docker/superset/Dockerfile#L99) steps.
- They might override directories (`superset-frontend/node_modules` and `superset/static/assets`) that are generated during the build process, which can cause frontend compilation errors or the app can serve outdated static files.
-
-2. Clone the [superset-ui](https://github.com/apache-superset/superset-ui) repository into the directory superset-frontend of superset's repository.
-
-1. Follow the instructions of [this tutorial](https://superset.apache.org/docs/installation/building-custom-viz-plugins) to create the necessary base files of your plugin.
+1. Follow the instructions of [this tutorial](https://superset.apache.org/docs/contributing/creating-viz-plugins) to create the necessary base files of your plugin.
-2. Copy the file `MainPreset.js` present on this directory into the superset repository into the `superset-frontend/src/visualizations/presets/` directory.
-
-3. Add the line `npm install -f --no-optional --save ./superset-frontend/superset-ui/plugins/plugin-chart-[your-chart-name]` into the file `docker/docker-frontend.sh` of the superset repository before the existing `npm install ...` commands.
-
-4. When the development is finished, on the root of the superset-ui repository run `yarn install` and then `yarn build [your-chart-name]`.
-
-5. Copy the directory of your plugin (including its sub-directory `esm`), within the superset-ui repository within the directory `plugins`, into the sub-directory `plugins` this directory.
- Make sure to run the command `yarn build [your-chart-name]` before doing this step.
+2. To deploy, you can either use the `DYNAMIC_PLUGINS` feature flag or add and build your plugins in `superset/Dockerfile`.
#### Important features {-}
@@ -93,6 +67,7 @@ Instructions on how you can set up your development environment to develop on a
2. Filters:
- check [this](https://superset.apache.org/docs/frequently-asked-questions#how-to-add-dynamic-filters-to-a-dashboard) faq entry
- Append `?preselect_filters={"chartId":{"columnToFilterBy":["value1", "value2"]}}` to the dashboard URL to apply a filter once the dashboard is loaded. E.g. `?preselect_filters={"13":{"name":["Demo University of Aveiro"]}}`
+
3. Custom label colors: check [this](https://superset.apache.org/docs/frequently-asked-questions#is-there-a-way-to-force-the-use-specific-colors) faq entry
### Github Actions {-}
diff --git a/docs/src/materialized-views.Rmd b/docs/src/materialized-views.Rmd
index fb7d069f..bf3f3c71 100644
--- a/docs/src/materialized-views.Rmd
+++ b/docs/src/materialized-views.Rmd
@@ -17,11 +17,12 @@ SELECT data_source.acronym,
JOIN data_source ON ((a.data_source_id = data_source.id)))
JOIN country ON ((data_source.country_id = country.id)))
JOIN ( SELECT achilles_results.count_value,
- achilles_results.data_source_id,
- achilles_results.stratum_2,
- achilles_results.stratum_3
- FROM achilles_results
- WHERE (achilles_results.analysis_id = 0)) p ON ((p.data_source_id = data_source.id)))
+ achilles_results.data_source_id,
+ achilles_results.stratum_2,
+ achilles_results.stratum_3
+ FROM achilles_results
+ WHERE (achilles_results.analysis_id = 0)) p
+ ON p.data_source_id = data_source.id)
WHERE (a.analysis_id = 5000);
```
@@ -31,9 +32,10 @@ SELECT data_source.acronym,
SELECT country.country,
source.database_type,
achilles.count_value
- FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
- JOIN country country ON ((source.country_id = country.id)))
+ FROM (
+ (achilles_results achilles JOIN data_source source
+ ON (achilles.data_source_id = source.id))
+ JOIN country country ON source.country_id = country.id)
WHERE (achilles.analysis_id = 1);
```
@@ -45,8 +47,8 @@ SELECT achilles_results.count_value,
data_source.acronym,
data_source.database_type,
country.country
- FROM ((achilles_results
- JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
+ FROM ((achilles_results JOIN data_source
+ ON ((achilles_results.data_source_id = data_source.id)))
JOIN country ON ((data_source.country_id = country.id)))
WHERE (achilles_results.analysis_id = 1);
```
@@ -61,9 +63,11 @@ SELECT source.name,
concept.concept_name AS gender,
achilles.count_value
FROM (((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
- JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text)))
+ JOIN concept
+ ON ((achilles.stratum_1 = (concept.concept_id)::text)))
WHERE (achilles.analysis_id = 2);
```
@@ -75,61 +79,107 @@ SELECT source.name,
source.database_type,
country.country,
sum(
- CASE
- WHEN ((achilles.stratum_2)::integer < 10) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "0-10",
+ CASE WHEN (
+ (achilles.stratum_2):: integer < 10
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "0-10",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 10) AND ((achilles.stratum_2)::integer < 20)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "10-20",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 10
+ )
+ AND (
+ (achilles.stratum_2):: integer < 20
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "10-20",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 20) AND ((achilles.stratum_2)::integer < 30)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "20-30",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 20
+ )
+ AND (
+ (achilles.stratum_2):: integer < 30
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "20-30",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 30) AND ((achilles.stratum_2)::integer < 40)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "30-40",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 30
+ )
+ AND (
+ (achilles.stratum_2):: integer < 40
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "30-40",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 40) AND ((achilles.stratum_2)::integer < 50)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "40-50",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 40
+ )
+ AND (
+ (achilles.stratum_2):: integer < 50
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "40-50",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 50) AND ((achilles.stratum_2)::integer < 60)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "50-60",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 50
+ )
+ AND (
+ (achilles.stratum_2):: integer < 60
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "50-60",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 60) AND ((achilles.stratum_2)::integer < 70)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "60-70",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 60
+ )
+ AND (
+ (achilles.stratum_2):: integer < 70
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "60-70",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 70) AND ((achilles.stratum_2)::integer < 80)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "70-80",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 70
+ )
+ AND (
+ (achilles.stratum_2):: integer < 80
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "70-80",
sum(
- CASE
- WHEN (((achilles.stratum_2)::integer >= 80) AND ((achilles.stratum_2)::integer < 90)) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "80-90",
+ CASE WHEN (
+ (
+ (achilles.stratum_2):: integer >= 80
+ )
+ AND (
+ (achilles.stratum_2):: integer < 90
+ )
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "80-90",
sum(
- CASE
- WHEN ((achilles.stratum_2)::integer >= 90) THEN achilles.count_value
- ELSE NULL::bigint
- END) AS "90+"
+ CASE WHEN (
+ (achilles.stratum_2):: integer >= 90
+ ) THEN achilles.count_value ELSE NULL :: bigint END
+ ) AS "90+"
FROM (((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
- JOIN concept ON ((achilles.stratum_1 = (concept.concept_id)::text)))
+ JOIN concept
+ ON ((achilles.stratum_1 = (concept.concept_id)::text)))
WHERE (achilles.analysis_id = 102)
- GROUP BY source.name, source.acronym, source.database_type, country.country;
+ GROUP BY
+ source.name,
+ source.acronym,
+ source.database_type,
+ country.country;
```
### age1observation_bar_chart {-}
@@ -142,7 +192,8 @@ SELECT source.name,
source.database_type,
country.country
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON achilles.data_source_id = source.id)
JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 101);
```
@@ -162,7 +213,8 @@ SELECT source.name,
achilles.max_value,
achilles.min_value
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = 103)
ORDER BY source.name;
@@ -178,7 +230,8 @@ SELECT source.name,
achilles.stratum_1 AS "Birth_year",
achilles.count_value AS count
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 3);
```
@@ -191,27 +244,58 @@ SELECT source.name,
source.database_type,
country.country,
CASE
- WHEN (achilles.analysis_id = 201) THEN 'Visit'::text
- WHEN (achilles.analysis_id = 401) THEN 'Condition'::text
- WHEN (achilles.analysis_id = 501) THEN 'Death'::text
- WHEN (achilles.analysis_id = 601) THEN 'Procedure'::text
- WHEN (achilles.analysis_id = 701) THEN 'Drug Exposure'::text
- WHEN (achilles.analysis_id = 801) THEN 'Observation'::text
- WHEN (achilles.analysis_id = 1801) THEN 'Measurement'::text
- WHEN (achilles.analysis_id = 2101) THEN 'Device'::text
- WHEN (achilles.analysis_id = 2201) THEN 'Note'::text
+ WHEN (achilles.analysis_id = 201)
+ THEN 'Visit'::text
+ WHEN (achilles.analysis_id = 401)
+ THEN 'Condition'::text
+ WHEN (achilles.analysis_id = 501)
+ THEN 'Death'::text
+ WHEN (achilles.analysis_id = 601)
+ THEN 'Procedure'::text
+ WHEN (achilles.analysis_id = 701)
+ THEN 'Drug Exposure'::text
+ WHEN (achilles.analysis_id = 801)
+ THEN 'Observation'::text
+ WHEN (achilles.analysis_id = 1801)
+ THEN 'Measurement'::text
+ WHEN (achilles.analysis_id = 2101)
+ THEN 'Device'::text
+ WHEN (achilles.analysis_id = 2201)
+ THEN 'Note'::text
ELSE NULL::text
END AS data_domain,
- (sum(achilles.count_value) / avg(counts.num_persons)) AS records_per_person
+ (sum(achilles.count_value) / avg(counts.num_persons))
+ AS records_per_person
FROM (((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
JOIN ( SELECT achilles_results.data_source_id,
achilles_results.count_value AS num_persons
FROM achilles_results
- WHERE (achilles_results.analysis_id = 1)) counts ON ((achilles.data_source_id = counts.data_source_id)))
- GROUP BY achilles.analysis_id, source.name, source.acronym, source.database_type, country.country
-HAVING (achilles.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (501)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint, (2201)::bigint]));
+ WHERE (achilles_results.analysis_id = 1)) counts
+ ON achilles.data_source_id = counts.data_source_id)
+ GROUP BY
+ achilles.analysis_id,
+ source.name,
+ source.acronym,
+ source.database_type,
+ country.country
+HAVING
+ (
+ achilles.analysis_id = ANY (
+ ARRAY[(201):: bigint,
+ (401):: bigint,
+ (501):: bigint,
+ (601):: bigint,
+ (701):: bigint,
+ (801):: bigint,
+ (1801):: bigint,
+ (2101):: bigint,
+ (2201):: bigint]
+ )
+ );
+
```
### data_domain_total_num_of_records {-}
@@ -222,23 +306,51 @@ SELECT data_source.name,
data_source.database_type,
country.country,
CASE
- WHEN (achilles_results.analysis_id = 201) THEN 'Visit'::text
- WHEN (achilles_results.analysis_id = 401) THEN 'Condition'::text
- WHEN (achilles_results.analysis_id = 501) THEN 'Death'::text
- WHEN (achilles_results.analysis_id = 601) THEN 'Procedure'::text
- WHEN (achilles_results.analysis_id = 701) THEN 'Drug Exposure'::text
- WHEN (achilles_results.analysis_id = 801) THEN 'Observation'::text
- WHEN (achilles_results.analysis_id = 1801) THEN 'Measurement'::text
- WHEN (achilles_results.analysis_id = 2101) THEN 'Device'::text
- WHEN (achilles_results.analysis_id = 2201) THEN 'Note'::text
+ WHEN (achilles_results.analysis_id = 201)
+ THEN 'Visit'::text
+ WHEN (achilles_results.analysis_id = 401)
+ THEN 'Condition'::text
+ WHEN (achilles_results.analysis_id = 501)
+ THEN 'Death'::text
+ WHEN (achilles_results.analysis_id = 601)
+ THEN 'Procedure'::text
+ WHEN (achilles_results.analysis_id = 701)
+ THEN 'Drug Exposure'::text
+ WHEN (achilles_results.analysis_id = 801)
+ THEN 'Observation'::text
+ WHEN (achilles_results.analysis_id = 1801)
+ THEN 'Measurement'::text
+ WHEN (achilles_results.analysis_id = 2101)
+ THEN 'Device'::text
+ WHEN (achilles_results.analysis_id = 2201)
+ THEN 'Note'::text
ELSE NULL::text
END AS data_domain,
sum(achilles_results.count_value) AS count
FROM ((achilles_results
- JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
+ JOIN data_source
+ ON ((achilles_results.data_source_id = data_source.id)))
JOIN country ON ((country.id = data_source.country_id)))
- GROUP BY data_source.name, data_source.acronym, data_source.database_type, country.country, achilles_results.analysis_id
-HAVING (achilles_results.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (501)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint, (2201)::bigint]));
+ GROUP BY
+ data_source.name,
+ data_source.acronym,
+ data_source.database_type,
+ country.country,
+ achilles_results.analysis_id
+HAVING
+ (
+ achilles_results.analysis_id = ANY (
+ ARRAY[(201):: bigint,
+ (401):: bigint,
+ (501):: bigint,
+ (601):: bigint,
+ (701):: bigint,
+ (801):: bigint,
+ (1801):: bigint,
+ (2101):: bigint,
+ (2201):: bigint]
+ )
+ );
```
### number_of_distinct_per_person {-}
@@ -249,12 +361,18 @@ SELECT source.name,
country.country,
achilles.analysis_id,
CASE
- WHEN (achilles.analysis_id = 203) THEN 'Visit'::text
- WHEN (achilles.analysis_id = 403) THEN 'Condition'::text
- WHEN (achilles.analysis_id = 603) THEN 'Procedure'::text
- WHEN (achilles.analysis_id = 703) THEN 'Drug Exposure'::text
- WHEN (achilles.analysis_id = 803) THEN 'Observation'::text
- WHEN (achilles.analysis_id = 1803) THEN 'Measurement'::text
+ WHEN (achilles.analysis_id = 203)
+ THEN 'Visit'::text
+ WHEN (achilles.analysis_id = 403)
+ THEN 'Condition'::text
+ WHEN (achilles.analysis_id = 603)
+ THEN 'Procedure'::text
+ WHEN (achilles.analysis_id = 703)
+ THEN 'Drug Exposure'::text
+ WHEN (achilles.analysis_id = 803)
+ THEN 'Observation'::text
+ WHEN (achilles.analysis_id = 1803)
+ THEN 'Measurement'::text
ELSE NULL::text
END AS data_domain,
achilles.count_value,
@@ -266,9 +384,20 @@ SELECT source.name,
achilles.p90_value AS p90,
achilles.max_value
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((source.country_id = country.id)))
- WHERE (achilles.analysis_id = ANY (ARRAY[(203)::bigint, (403)::bigint, (603)::bigint, (703)::bigint, (803)::bigint, (183)::bigint]))
+ WHERE
+ (
+ achilles.analysis_id = ANY (
+ ARRAY[(203):: bigint,
+ (403):: bigint,
+ (603):: bigint,
+ (703):: bigint,
+ (803):: bigint,
+          (1803):: bigint]
+ )
+ )
ORDER BY source.name;
```
@@ -280,29 +409,58 @@ SELECT source.name,
source.database_type,
country.country,
CASE
- WHEN (achilles.analysis_id = 405) THEN 'Condition'::text
- WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text
- WHEN (achilles.analysis_id = 705) THEN 'Drug'::text
- WHEN (achilles.analysis_id = 805) THEN 'Observation'::text
- WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text
- WHEN (achilles.analysis_id = 2105) THEN 'Device'::text
+ WHEN (achilles.analysis_id = 405)
+ THEN 'Condition'::text
+ WHEN (achilles.analysis_id = 605)
+ THEN 'Procedure'::text
+ WHEN (achilles.analysis_id = 705)
+ THEN 'Drug'::text
+ WHEN (achilles.analysis_id = 805)
+ THEN 'Observation'::text
+ WHEN (achilles.analysis_id = 1805)
+ THEN 'Measurement'::text
+ WHEN (achilles.analysis_id = 2105)
+ THEN 'Device'::text
ELSE 'Other'::text
END AS domain_name,
c1.concept_name,
sum(achilles.count_value) AS num_records
FROM (((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
- JOIN concept c1 ON ((achilles.stratum_2 = (c1.concept_id)::text)))
- WHERE (achilles.analysis_id = ANY (ARRAY[(405)::bigint, (605)::bigint, (705)::bigint, (805)::bigint, (1805)::bigint, (2105)::bigint]))
- GROUP BY source.name, source.acronym, source.database_type, country.country, c1.concept_name,
+ JOIN concept c1
+ ON ((achilles.stratum_2 = (c1.concept_id)::text)))
+ WHERE
+ (
+ achilles.analysis_id = ANY (
+ ARRAY[(405)::bigint,
+ (605)::bigint,
+ (705)::bigint,
+ (805)::bigint,
+ (1805)::bigint,
+ (2105)::bigint]
+ )
+ )
+ GROUP BY
+ source.name,
+ source.acronym,
+ source.database_type,
+ country.country,
+ c1.concept_name,
CASE
- WHEN (achilles.analysis_id = 405) THEN 'Condition'::text
- WHEN (achilles.analysis_id = 605) THEN 'Procedure'::text
- WHEN (achilles.analysis_id = 705) THEN 'Drug'::text
- WHEN (achilles.analysis_id = 805) THEN 'Observation'::text
- WHEN (achilles.analysis_id = 1805) THEN 'Measurement'::text
- WHEN (achilles.analysis_id = 2105) THEN 'Device'::text
+ WHEN (achilles.analysis_id = 405)
+ THEN 'Condition'::text
+ WHEN (achilles.analysis_id = 605)
+ THEN 'Procedure'::text
+ WHEN (achilles.analysis_id = 705)
+ THEN 'Drug'::text
+ WHEN (achilles.analysis_id = 805)
+ THEN 'Observation'::text
+ WHEN (achilles.analysis_id = 1805)
+ THEN 'Measurement'::text
+ WHEN (achilles.analysis_id = 2105)
+ THEN 'Device'::text
ELSE 'Other'::text
END;
```
@@ -317,7 +475,8 @@ SELECT source.name,
to_date(achilles.stratum_1, 'YYYYMM'::text) AS date,
achilles.count_value AS "Nr_patients"
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((country.id = source.country_id)))
WHERE (achilles.analysis_id = 110);
```
@@ -325,24 +484,60 @@ SELECT source.name,
### cumulative_observation_time {-}
```sql
-SELECT data_source.name,
- data_source.acronym,
- data_source.database_type,
- country.country,
- cumulative_sums.xlengthofobservation,
- round((cumulative_sums.cumulative_sum / (totals.total)::numeric), 5) AS ypercentpersons
- FROM (((( SELECT achilles_results.data_source_id,
- ((achilles_results.stratum_1)::integer * 30) AS xlengthofobservation,
- sum(achilles_results.count_value) OVER (PARTITION BY achilles_results.data_source_id ORDER BY (achilles_results.stratum_1)::integer DESC) AS cumulative_sum
- FROM achilles_results
- WHERE (achilles_results.analysis_id = 108)) cumulative_sums
- JOIN ( SELECT achilles_results.data_source_id,
- achilles_results.count_value AS total
- FROM achilles_results
- WHERE (achilles_results.analysis_id = 1)) totals ON ((cumulative_sums.data_source_id = totals.data_source_id)))
- JOIN data_source ON ((cumulative_sums.data_source_id = data_source.id)))
- JOIN country ON ((country.id = data_source.country_id)))
- ORDER BY data_source.name, cumulative_sums.xlengthofobservation;
+SELECT
+ data_source.name,
+ data_source.acronym,
+ data_source.database_type,
+ country.country,
+ cumulative_sums.xlengthofobservation,
+ round(
+ (cumulative_sums.cumulative_sum / (totals.total)::numeric),
+ 5
+ ) AS ypercentpersons
+FROM
+ (
+ (
+ (
+ (
+ SELECT
+ achilles_results.data_source_id,
+ (
+ (achilles_results.stratum_1)::integer * 30
+ ) AS xlengthofobservation,
+ sum(achilles_results.count_value) OVER (
+ PARTITION BY achilles_results.data_source_id
+ ORDER BY
+ (achilles_results.stratum_1)::integer DESC
+ ) AS cumulative_sum
+ FROM
+ achilles_results
+ WHERE
+ (
+ achilles_results.analysis_id = 108
+ )
+ ) cumulative_sums
+ JOIN (
+ SELECT
+ achilles_results.data_source_id,
+ achilles_results.count_value AS total
+ FROM
+ achilles_results
+ WHERE
+ (achilles_results.analysis_id = 1)
+ ) totals ON (
+ (
+ cumulative_sums.data_source_id = totals.data_source_id
+ )
+ )
+ )
+ JOIN data_source
+ ON ((cumulative_sums.data_source_id = data_source.id))
+ )
+ JOIN country ON ((country.id = data_source.country_id))
+ )
+ORDER BY
+ data_source.name,
+ cumulative_sums.xlengthofobservation;
```
### number_of_observation_periods {-}
@@ -355,14 +550,22 @@ SELECT ar.data_source_id AS id,
ar.stratum_1,
ar.count_value,
pa.nrpatients AS patients,
- round((((100)::numeric * (ar.count_value)::numeric) / (pa.nrpatients)::numeric), 2) AS percentage
+ round(
+ (
+ ((100)::numeric * (ar.count_value)::numeric)
+ /
+ (pa.nrpatients)::numeric
+ ),
+ 2
+ ) AS percentage
FROM (((achilles_results ar
JOIN data_source ds ON ((ds.id = ar.data_source_id)))
JOIN country ON ((ds.country_id = country.id)))
JOIN ( SELECT achilles_results.count_value AS nrpatients,
achilles_results.data_source_id
FROM achilles_results
- WHERE (achilles_results.analysis_id = 0)) pa ON ((pa.data_source_id = ds.id)))
+ WHERE (achilles_results.analysis_id = 0)) pa
+ ON ((pa.data_source_id = ds.id)))
WHERE (ar.analysis_id = 113);
```
@@ -381,7 +584,8 @@ SELECT source.name,
achilles.p90_value AS p90,
achilles.max_value
FROM ((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
JOIN country ON ((source.country_id = country.id)))
WHERE (achilles.analysis_id = 105)
ORDER BY source.name;
@@ -415,10 +619,17 @@ SELECT data_source.name,
achilles_results_1.p90_value,
achilles_results_1.stdev_value
FROM achilles_results achilles_results_1
- WHERE (achilles_results_1.analysis_id = 200)) achilles_results
- JOIN data_source ON ((achilles_results.data_source_id = data_source.id)))
+ WHERE (achilles_results_1.analysis_id = 200)
+ ) achilles_results
+ JOIN data_source
+ ON ((achilles_results.data_source_id = data_source.id)))
JOIN country ON ((country.id = data_source.country_id)))
- JOIN concept ON (((achilles_results.stratum_1)::integer = concept.concept_id)));
+ JOIN concept
+ ON (
+ (achilles_results.stratum_1)::integer
+ =
+ concept.concept_id
+ ));
```
### visit_type_table {-}
@@ -430,8 +641,22 @@ SELECT data_source.name,
country.country,
concept.concept_name,
ar1.count_value AS num_persons,
- round(((100.0 * (ar1.count_value)::numeric) / (denom.count_value)::numeric), 2) AS percent_persons,
- round(((1.0 * (ar2.count_value)::numeric) / (ar1.count_value)::numeric), 2) AS records_per_person
+ round(
+ (
+ (100.0 * (ar1.count_value)::numeric)
+ /
+ (denom.count_value)::numeric
+ ),
+ 2
+ ) AS percent_persons,
+ round(
+ (
+ (1.0 * (ar2.count_value)::numeric)
+ /
+ (ar1.count_value)::numeric
+ ),
+ 2
+ ) AS records_per_person
FROM (((((( SELECT achilles_results.id,
achilles_results.analysis_id,
achilles_results.stratum_1,
@@ -471,7 +696,9 @@ SELECT data_source.name,
achilles_results.p90_value,
achilles_results.stdev_value
FROM achilles_results
- WHERE (achilles_results.analysis_id = 201)) ar2 ON (((ar1.stratum_1 = ar2.stratum_1) AND (ar1.data_source_id = ar2.data_source_id))))
+ WHERE (achilles_results.analysis_id = 201)) ar2
+ ON (((ar1.stratum_1 = ar2.stratum_1)
+ AND (ar1.data_source_id = ar2.data_source_id))))
JOIN ( SELECT achilles_results.id,
achilles_results.analysis_id,
achilles_results.stratum_1,
@@ -491,86 +718,264 @@ SELECT data_source.name,
achilles_results.p90_value,
achilles_results.stdev_value
FROM achilles_results
- WHERE (achilles_results.analysis_id = 1)) denom ON ((ar1.data_source_id = denom.data_source_id)))
+ WHERE (achilles_results.analysis_id = 1)) denom
+ ON ((ar1.data_source_id = denom.data_source_id)))
JOIN data_source ON ((data_source.id = ar1.data_source_id)))
- JOIN country ON ((country.id = data_source.country_id)))
- JOIN concept ON (((ar1.stratum_1)::integer = concept.concept_id)))
+ JOIN country
+ ON ((country.id = data_source.country_id)))
+ JOIN concept
+ ON (((ar1.stratum_1)::integer = concept.concept_id)))
ORDER BY ar1.data_source_id, ar1.count_value DESC;
```
### domain_filter {-}
```sql
-SELECT concept.concept_name,
- concept.domain_id,
- source.name,
- source.acronym,
- source.database_type,
- country.country
- FROM (((achilles_results
- JOIN concept ON (((achilles_results.stratum_1)::bigint = concept.concept_id)))
- JOIN data_source source ON ((achilles_results.data_source_id = source.id)))
- JOIN country ON ((country.id = source.country_id)))
- WHERE (achilles_results.analysis_id = ANY (ARRAY[(201)::bigint, (401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (901)::bigint, (1001)::bigint, (1801)::bigint, (200)::bigint, (400)::bigint, (600)::bigint, (700)::bigint, (800)::bigint, (1800)::bigint]));
+SELECT
+ concept.concept_name,
+ concept.domain_id,
+ source.name,
+ source.acronym,
+ source.database_type,
+ country.country
+FROM
+ (
+ (
+ (
+ achilles_results
+ JOIN concept ON (
+ (
+ (achilles_results.stratum_1)::bigint
+ =
+ concept.concept_id
+ )
+ )
+ )
+ JOIN data_source source ON (
+ (
+ achilles_results.data_source_id = source.id
+ )
+ )
+ )
+ JOIN country ON (
+ (country.id = source.country_id)
+ )
+ )
+WHERE
+ (
+ achilles_results.analysis_id = ANY (
+ ARRAY[(201)::bigint,
+ (401)::bigint,
+ (601)::bigint,
+ (701)::bigint,
+ (801)::bigint,
+ (901)::bigint,
+ (1001)::bigint,
+ (1801)::bigint,
+ (200)::bigint,
+ (400)::bigint,
+ (600)::bigint,
+ (700)::bigint,
+ (800)::bigint,
+ (1800)::bigint]
+ )
+ );
```
### concept_browser_table3 {-}
```sql
-SELECT source.name,
- source.acronym,
- source.database_type,
- country.country,
- ((((''::text) || ar1.concept_id) || ''::text) AS concept_id,
- concept.concept_name,
- concept.domain_id,
- (ar1.rc)::integer AS rc,
- (ar2.drc)::integer AS drc
- FROM ((((( SELECT achilles_results.data_source_id,
- achilles_results.analysis_id,
- achilles_results.stratum_1 AS concept_id,
- achilles_results.count_value AS rc
- FROM achilles_results
- WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
- JOIN ( SELECT ar.data_source_id,
- ar.analysis_id,
- ar.stratum_1 AS concept_id,
- ar.count_value AS drc
- FROM achilles_results ar
- WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
- JOIN data_source source ON ((ar1.data_source_id = source.id)))
- JOIN country ON ((source.country_id = country.id)))
- JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
- ORDER BY ((ar2.drc)::integer) DESC;
+SELECT
+ source.name,
+ source.acronym,
+ source.database_type,
+ country.country,
+ (
+ (
+ (
+ ''::text
+ )
+ || ar1.concept_id
+ ) || ''::text
+ ) AS concept_id,
+ concept.concept_name,
+ concept.domain_id,
+ (ar1.rc)::integer AS rc,
+ (ar2.drc)::integer AS drc
+FROM
+ (
+ (
+ (
+ (
+ (
+ SELECT
+ achilles_results.data_source_id,
+ achilles_results.analysis_id,
+ achilles_results.stratum_1 AS concept_id,
+ achilles_results.count_value AS rc
+ FROM
+ achilles_results
+ WHERE
+ (
+ achilles_results.analysis_id = ANY (
+ ARRAY[(401)::bigint,
+ (601)::bigint,
+ (701)::bigint,
+ (801)::bigint,
+ (1801)::bigint,
+ (2101)::bigint]
+ )
+ )
+ ) ar1
+ JOIN (
+ SELECT
+ ar.data_source_id,
+ ar.analysis_id,
+ ar.stratum_1 AS concept_id,
+ ar.count_value AS drc
+ FROM
+ achilles_results ar
+ WHERE
+ (
+ ar.analysis_id = ANY (
+ ARRAY[(430)::bigint,
+ (630)::bigint,
+ (730)::bigint,
+ (830)::bigint,
+ (1830)::bigint,
+ (2130)::bigint]
+ )
+ )
+ ) ar2 ON (
+ (
+ (ar1.concept_id = ar2.concept_id)
+ AND (
+ ar1.data_source_id = ar2.data_source_id
+ )
+ )
+ )
+ )
+ JOIN data_source source ON (
+ (ar1.data_source_id = source.id)
+ )
+ )
+ JOIN country ON (
+ (source.country_id = country.id)
+ )
+ )
+ JOIN concept concept ON (
+ (
+ ar1.concept_id = (concept.concept_id)::text
+ )
+ )
+ )
+ORDER BY
+ (
+ (ar2.drc)::integer
+ ) DESC;
```
### concept_coverage2 {-}
```sql
-SELECT source.name AS source_name,
- source.database_type,
- country.country,
- ((((''::text) || concept.concept_id) || ''::text) AS concept_id,
- concept.concept_name,
- concept.domain_id,
- sum((ar1.rc)::integer) AS rc,
- sum((ar2.drc)::integer) AS drc
- FROM ((((( SELECT achilles_results.data_source_id,
- achilles_results.analysis_id,
- achilles_results.stratum_1 AS concept_id,
- achilles_results.count_value AS rc
- FROM achilles_results
- WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
- JOIN ( SELECT ar.data_source_id,
- ar.analysis_id,
- ar.stratum_1 AS concept_id,
- ar.count_value AS drc
- FROM achilles_results ar
- WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
- JOIN data_source source ON ((ar1.data_source_id = source.id)))
- JOIN country ON ((country.id = source.country_id)))
- JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
- GROUP BY source.name, source.database_type, country.country, concept.domain_id, concept.concept_id, concept.concept_name;
+SELECT
+ source.name AS source_name,
+ source.database_type,
+ country.country,
+ (
+ (
+ (
+ ''::text
+ )
+ || concept.concept_id
+ ) || ''::text
+ ) AS concept_id,
+ concept.concept_name,
+ concept.domain_id,
+ sum(
+ (ar1.rc)::integer
+ ) AS rc,
+ sum(
+ (ar2.drc)::integer
+ ) AS drc
+FROM
+ (
+ (
+ (
+ (
+ (
+ SELECT
+ achilles_results.data_source_id,
+ achilles_results.analysis_id,
+ achilles_results.stratum_1 AS concept_id,
+ achilles_results.count_value AS rc
+ FROM
+ achilles_results
+ WHERE
+ (
+ achilles_results.analysis_id = ANY (
+ ARRAY[(401)::bigint,
+ (601)::bigint,
+ (701)::bigint,
+ (801)::bigint,
+ (1801)::bigint,
+ (2101)::bigint]
+ )
+ )
+ ) ar1
+ JOIN (
+ SELECT
+ ar.data_source_id,
+ ar.analysis_id,
+ ar.stratum_1 AS concept_id,
+ ar.count_value AS drc
+ FROM
+ achilles_results ar
+ WHERE
+ (
+ ar.analysis_id = ANY (
+ ARRAY[(430)::bigint,
+ (630)::bigint,
+ (730)::bigint,
+ (830)::bigint,
+ (1830)::bigint,
+ (2130)::bigint]
+ )
+ )
+ ) ar2 ON (
+ (
+ (ar1.concept_id = ar2.concept_id)
+ AND (
+ ar1.data_source_id = ar2.data_source_id
+ )
+ )
+ )
+ )
+ JOIN data_source source ON (
+ (ar1.data_source_id = source.id)
+ )
+ )
+ JOIN country ON (
+ (country.id = source.country_id)
+ )
+ )
+ JOIN concept concept ON (
+ (
+ ar1.concept_id = (concept.concept_id)::text
+ )
+ )
+ )
+GROUP BY
+ source.name,
+ source.database_type,
+ country.country,
+ concept.domain_id,
+ concept.concept_id,
+ concept.concept_name;
```
### data_density {-}
@@ -659,7 +1064,8 @@ SELECT source.acronym,
JOIN data_source source ON ((source.id = t1.id)))
ORDER BY t1.table_name, (
CASE
- WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1
+ WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text)
+ THEN t1.stratum_1
ELSE NULL::text
END)::integer;
```
@@ -670,7 +1076,14 @@ SELECT source.acronym,
SELECT source.acronym,
t1.table_name AS series_name,
to_date(t1.stratum_1, 'YYYYMM'::text) AS x_calendar_month,
- round(((1.0 * (t1.count_value)::numeric) / (denom.count_value)::numeric), 5) AS y_record_count
+ round(
+ (
+ (1.0 * (t1.count_value)::numeric)
+ /
+ (denom.count_value)::numeric
+ ),
+ 5
+ ) AS y_record_count
FROM ((( SELECT achilles_results.data_source_id AS id,
'Visit occurrence'::text AS table_name,
achilles_results.stratum_1,
@@ -766,11 +1179,14 @@ SELECT source.acronym,
achilles_results.p90_value,
achilles_results.stdev_value
FROM achilles_results
- WHERE (achilles_results.analysis_id = 117)) denom ON (((t1.stratum_1 = denom.stratum_1) AND (t1.id = denom.data_source_id))))
+ WHERE (achilles_results.analysis_id = 117)) denom
+ ON (((t1.stratum_1 = denom.stratum_1)
+ AND (t1.id = denom.data_source_id))))
JOIN data_source source ON ((source.id = t1.id)))
ORDER BY t1.table_name, (
CASE
- WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text) THEN t1.stratum_1
+ WHEN (t1.stratum_1 ~ '^\d+\.?\d+$'::text)
+ THEN t1.stratum_1
ELSE NULL::text
END)::integer;
```
@@ -789,9 +1205,12 @@ SELECT source.name,
achilles.p75_value AS p75,
achilles.p90_value AS p90
FROM (((achilles_results achilles
- JOIN data_source source ON ((achilles.data_source_id = source.id)))
- JOIN concept c1 ON ((achilles.stratum_1 = (c1.concept_id)::text)))
- JOIN concept c2 ON ((achilles.stratum_2 = (c2.concept_id)::text)))
+ JOIN data_source source
+ ON ((achilles.data_source_id = source.id)))
+ JOIN concept c1
+ ON ((achilles.stratum_1 = (c1.concept_id)::text)))
+ JOIN concept c2
+ ON ((achilles.stratum_2 = (c2.concept_id)::text)))
WHERE (achilles.analysis_id = 206)
ORDER BY source.name, c1.concept_name, c2.concept_name;
```
@@ -800,25 +1219,82 @@ SELECT source.name,
```sql
SELECT
- source.acronym,
- ((((''::text) || ar1.concept_id) || ''::text) AS concept_id,
- concept.concept_name,
- concept.domain_id,
- (ar1.rc)::integer AS rc,
- (ar2.drc)::integer AS drc
- FROM (((( SELECT achilles_results.data_source_id,
- achilles_results.analysis_id,
- achilles_results.stratum_1 AS concept_id,
- achilles_results.count_value AS rc
- FROM achilles_results
- WHERE (achilles_results.analysis_id = ANY (ARRAY[(401)::bigint, (601)::bigint, (701)::bigint, (801)::bigint, (1801)::bigint, (2101)::bigint]))) ar1
- JOIN ( SELECT ar.data_source_id,
- ar.analysis_id,
- ar.stratum_1 AS concept_id,
- ar.count_value AS drc
- FROM achilles_results ar
- WHERE (ar.analysis_id = ANY (ARRAY[(430)::bigint, (630)::bigint, (730)::bigint, (830)::bigint, (1830)::bigint, (2130)::bigint]))) ar2 ON (((ar1.concept_id = ar2.concept_id) AND (ar1.data_source_id = ar2.data_source_id))))
- JOIN data_source source ON ((ar1.data_source_id = source.id)))
- JOIN concept concept ON ((ar1.concept_id = (concept.concept_id)::text)))
- ORDER BY ((ar2.drc)::integer) DESC;
+ source.acronym,
+ (
+ (
+ (
+ ''::text
+ )
+ || ar1.concept_id
+ ) || ''::text
+ ) AS concept_id,
+ concept.concept_name,
+ concept.domain_id,
+ (ar1.rc)::integer AS rc,
+ (ar2.drc)::integer AS drc
+FROM
+ (
+ (
+ (
+ (
+ SELECT
+ achilles_results.data_source_id,
+ achilles_results.analysis_id,
+ achilles_results.stratum_1 AS concept_id,
+ achilles_results.count_value AS rc
+ FROM
+ achilles_results
+ WHERE
+ (
+ achilles_results.analysis_id = ANY (
+ ARRAY[(401)::bigint,
+ (601)::bigint,
+ (701)::bigint,
+ (801)::bigint,
+ (1801)::bigint,
+ (2101)::bigint]
+ )
+ )
+ ) ar1
+ JOIN (
+ SELECT
+ ar.data_source_id,
+ ar.analysis_id,
+ ar.stratum_1 AS concept_id,
+ ar.count_value AS drc
+ FROM
+ achilles_results ar
+ WHERE
+ (
+ ar.analysis_id = ANY (
+ ARRAY[(430)::bigint,
+ (630)::bigint,
+ (730)::bigint,
+ (830)::bigint,
+ (1830)::bigint,
+ (2130)::bigint]
+ )
+ )
+ ) ar2 ON (
+ (
+ (ar1.concept_id = ar2.concept_id)
+ AND (
+ ar1.data_source_id = ar2.data_source_id
+ )
+ )
+ )
+ )
+ JOIN data_source source ON (
+ (ar1.data_source_id = source.id)
+ )
+ )
+ JOIN concept concept ON (
+ (
+ ar1.concept_id = (concept.concept_id)::text
+ )
+ )
+ )
+ORDER BY ((ar2.drc)::integer) DESC;
```
diff --git a/docs/src/processes.Rmd b/docs/src/processes.Rmd
index b2667c20..adff7ae3 100644
--- a/docs/src/processes.Rmd
+++ b/docs/src/processes.Rmd
@@ -113,10 +113,15 @@ While parsing the uploaded file, some data is extracted to then present on the U
The next table shows how the previous data is stored in the rows with analysis ids 0 and 5000:
+```{r table2, echo=FALSE, message=FALSE, warning=FALSE, results='asis'}
+tabl <- "
| Analysis Id | Stratum 1 | Stratum 2 | Stratum 3 | Stratum 4 | Stratum 5 |
-| ----------- | --------- | ------------------- | ---------------- | ----------- | ------------------ |
+| -----------:|:--------- | ------------------- | ---------------- | ----------- | ------------------ |
| 0 | | R Package Version | Generation Date | | |
| 5000 | | Source Release Date | CDM Release Date | CDM Version | Vocabulary Version |
+"
+cat(tabl)
+```
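+
+As a quick check (a sketch only: it assumes direct SQL access to the `achilles_results` table used throughout this page, and a hypothetical data source id), these two metadata rows can be inspected with:
+
+```sql
+SELECT analysis_id, stratum_1, stratum_2, stratum_3, stratum_4, stratum_5
+  FROM achilles_results
+ WHERE analysis_id IN (0, 5000)
+   AND data_source_id = 1;  -- hypothetical data source id
+```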
### Materialized Views {-}
**Target: admin user**
diff --git a/docs/useful-stuff.html b/docs/useful-stuff.html
index 2d4de6b7..7007ac41 100644
--- a/docs/useful-stuff.html
+++ b/docs/useful-stuff.html
@@ -327,18 +327,18 @@
5.2 Useful stuff
How to create a shared link to a Dropbox directory using its Python API:
-
pip install dropbox
-
import dropbox
-d = dropbox.Dropbox(API_TOKEN)
-
-# create a shared link for a directory
-from dropbox.sharing import SharedLinkSettings
-sharing_settings = SharedLinkSettings(require_password=True, link_password=DIRECTORY_PASSWORD)
-d.sharing_create_shared_link_with_settings(DIRECTORY_PATH, sharing_settings)
-
-# get all links
-for link in d.sharing_get_shared_links().links:
-print(f"{link.path} -> {link.url}")
+
pip install dropbox
+
import dropbox
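+# NOTE: API_TOKEN, DIRECTORY_PATH and DIRECTORY_PASSWORD below are placeholders for your own values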
+d = dropbox.Dropbox(API_TOKEN)
+
+# create a shared link for a directory
+from dropbox.sharing import SharedLinkSettings
+sharing_settings = SharedLinkSettings(require_password=True, link_password=DIRECTORY_PASSWORD)
+d.sharing_create_shared_link_with_settings(DIRECTORY_PATH, sharing_settings)
+
+# get all links
+for link in d.sharing_get_shared_links().links:
+print(f"{link.path} -> {link.url}")
With this, every time you want to edit the dashboard layout, you have to either comment out
the inserted CSS or remove it so that the “Edit Dashboard” button shows again.