diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..21ec98f9 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,5 @@ +frontend/node_modules +env +*.egg-info +pkpdapp/db.sqlite3 +.docker diff --git a/.env b/.env new file mode 100644 index 00000000..cd2816a9 --- /dev/null +++ b/.env @@ -0,0 +1 @@ +DEBUG=1 \ No newline at end of file diff --git a/.env.prod b/.env.prod new file mode 100644 index 00000000..af3d7a12 --- /dev/null +++ b/.env.prod @@ -0,0 +1,26 @@ +PORT=8020 +DEBUG=1 +HOST_NAME=monkshood +SECRET_KEY=aLargeRandomSecretKey +DJANGO_SUPERUSER_USERNAME=admin +DJANGO_SUPERUSER_PASSWORD=sekret1 +DJANGO_SUPERUSER_EMAIL=admin@example.com +EMAIL_HOST=in-v3.mailjet.com +EMAIL_PORT=25 +EMAIL_HOST_USER=email_username +EMAIL_HOST_PASSWORD=email_password +DEFAULT_FROM_EMAIL=sender@mydomain.com +POSTGRES_PASSWORD=sekret2 +DATABASE_URL=postgres://postgres:sekret2@postgres:5432/postgres + +RABBITMQ_DEFAULT_USER=guest +RABBITMQ_DEFAULT_PASS=guest + +AUTH_LDAP_USE=0 +AUTH_LDAP_SERVER_URI=ldap://ldap.forumsys.com:389 +AUTH_LDAP_DIRECT_BIND=1 +AUTH_LDAP_BIND_DN_TEMPLATE=uid=%(user)s,dc=example,dc=com +AUTH_LDAP_BIND_DN=cn=read-only-admin,dc=example,dc=com +AUTH_LDAP_BIND_PASSWORD=password +AUTH_LDAP_SEARCH_BASE=ou=mathematicians,dc=example,dc=com +AUTH_LDAP_SEARCH_FILTER=(uid=%(user)s) diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..0f023914 --- /dev/null +++ b/.flake8 @@ -0,0 +1,14 @@ +[flake8] +max-line-length = 88 +extend-ignore = E203, W503 + +exclude= + .git, + venv + +ignore= + # Accept ambiguously named variables + E741 + + # Accept line breaks after binary operators + W504 diff --git a/.github/workflows/copyright-test.yml b/.github/workflows/copyright-test.yml new file mode 100644 index 00000000..e324bada --- /dev/null +++ b/.github/workflows/copyright-test.yml @@ -0,0 +1,38 @@ +name: Copyright + +on: + push: + branches: + - master + pull_request: + branches: + - '**' + +jobs: + + build-and-test: + name: copyright + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + + - name: Set up Python 3.10 + uses: actions/setup-python@v1 + with: + python-version: '3.10' + architecture: x64 + + - name: install ldap + run: | + sudo apt-get update + sudo apt-get install libsasl2-dev python3-dev libldap2-dev libssl-dev + + - name: install pkpdapp + run: | + python --version + python -m pip install --upgrade pip setuptools wheel + python -m pip install -r requirements.txt + - name: run copyright test + run: | + python run-tests.py --copyright diff --git a/.github/workflows/coverage-test.yml b/.github/workflows/coverage-test.yml new file mode 100644 index 00000000..005675f4 --- /dev/null +++ b/.github/workflows/coverage-test.yml @@ -0,0 +1,45 @@ +name: Coverage + +on: + push: + branches: + - master + pull_request: + branches: + - '**' + +jobs: + + build-and-test: + name: coverage + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + + - name: Set up Python 3.10 + uses: actions/setup-python@v1 + with: + python-version: '3.10' + architecture: x64 + + - name: install sundials + run: | + sudo apt-get update + sudo apt-get install libsundials-dev libsasl2-dev python3-dev libldap2-dev libssl-dev + + - name: install pkpdapp + run: | + python --version + python -m pip install --upgrade pip setuptools wheel + python -m pip install -r requirements.txt + python -m pip install coverage codecov + - name: run coverage + run: | + cd pkpdapp && coverage run manage.py test + - name: codecov + if: success() + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 
+ run: | + cd pkpdapp && codecov diff --git a/.github/workflows/style-test.yml b/.github/workflows/style-test.yml new file mode 100644 index 00000000..3ada3666 --- /dev/null +++ b/.github/workflows/style-test.yml @@ -0,0 +1,40 @@ + +name: Style tests (flake8) + +on: + push: + branches: + - master + pull_request: + branches: + - '**' + +jobs: + + build-and-test: + name: style test + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v1 + + - name: Set up Python 3.10 + uses: actions/setup-python@v1 + with: + python-version: '3.10' + architecture: x64 + + - name: install ldap + run: | + sudo apt-get update + sudo apt-get install libsasl2-dev python3-dev libldap2-dev libssl-dev + + - name: install pkpd + run: | + python --version + python -m pip install --upgrade pip setuptools wheel + python -m pip install -r requirements.txt + python -m pip install flake8 + - name: run style tests + run: | + python -m flake8 diff --git a/.github/workflows/unit-test-os-versions.yml b/.github/workflows/unit-test-os-versions.yml new file mode 100644 index 00000000..012da12c --- /dev/null +++ b/.github/workflows/unit-test-os-versions.yml @@ -0,0 +1,48 @@ +name: Unit tests (OS versions) + +on: + push: + branches: + - master + pull_request: + branches: + - '**' + +jobs: + + build-and-test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v1 + + - name: Set up Python 3.10 + uses: actions/setup-python@v1 + with: + python-version: '3.10' + architecture: x64 + + - name: install sundials (ubuntu) + if: ${{ matrix.os == 'ubuntu-latest' }} + run: | + sudo apt-get update + sudo apt-get install libsundials-dev libsasl2-dev python3-dev libldap2-dev libssl-dev + + - name: install sundials (macos) + if: ${{ matrix.os == 'macos-latest' }} + run: | + brew update-reset + brew install sundials + + - name: install pkpdapp + run: | + python --version + python -m pip install --upgrade pip setuptools wheel + python -m pip install -r requirements.txt + + - name: run unit tests + run: | + cd pkpdapp && python manage.py test diff --git a/.github/workflows/unit-test-python-versions.yml b/.github/workflows/unit-test-python-versions.yml new file mode 100644 index 00000000..40bb43e0 --- /dev/null +++ b/.github/workflows/unit-test-python-versions.yml @@ -0,0 +1,41 @@ +name: Unit tests (python versions) + +on: + push: + branches: + - master + pull_request: + branches: + - '**' + +jobs: + + build-and-test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.10'] + + steps: + - uses: actions/checkout@v1 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: x64 + + - name: install sundials + run: | + sudo apt-get update + sudo apt-get install libsundials-dev libsasl2-dev python3-dev libldap2-dev libssl-dev + + - name: install pkpdapp + run: | + python --version + python -m pip install --upgrade pip setuptools wheel + python -m pip install -r requirements.txt + + - name: run unit tests + run: | + cd pkpdapp && python manage.py test diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..1bedc646 --- /dev/null +++ b/.gitignore @@ -0,0 +1,26 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ + +# Virtual environments +venv +env* + +# Setuptools files +*.egg-info + +# vscode +.vscode + +# database file (not sure yet what its good for) +db.sqlite3 + +# DS +.DS_Store + +# static files +pkpdapp/static + +# nodemodules 
+frontend/node_modules + +logfile.log* diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000..5e0c5450 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,2 @@ +[MASTER] +load-plugins=pylint_django diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..884f01e7 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,20 @@ +# Contributing to the PKPDApp + +The PKPDApp is built with the Django framework and as such adheres largely to Django's philosophy: one project contains multiple smaller apps that each perform exactly one task and are as self-contained as possible. As a result, the PKPDApp is really a collection of smaller apps that take care of model building, result illustration and so on. + +## Repository Structure + +To match the modular structure of Django apps, the repository is organised into three layers: + +1) `\pkpdapp`: The top-level folder that contains administrative files, such as `setup.py`, `README.md` or this file `CONTRIBUTING.md`. +2) `\pkpdapp\pkpdapp`: The Django project folder that contains the various smaller apps and the executable `manage.py`. +3) `\pkpdapp\pkpdapp\pkpdapp`: The website application that defines the structure of the PKPDApp. + +## Summary of apps + +Apps in the PKPDApp can be broadly categorised into function and integration apps. Function apps are applications that perform a specific function, for example building a model, simulating the model or providing a plotting interface for simulation results. Those individual apps may be used on multiple occasions in the PKPDApp, e.g. for simulation or inference. The integration apps are applications that tie the various function apps together, and are in essence responsible for a good user experience. + +For an overview of apps and their purpose in the PKPDApp, please check out the list below. We will reference all apps relative to the Django project root `\pkpdapp\pkpdapp`. + +- `\pkpdapp`: The main app of the website. It defines base templates and the URL structure of the PKPDApp. + diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..b32acbe3 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,73 @@ +# Using a 2-stage build. This is the builder stage for the JavaScript frontend + +FROM node:19 as build +RUN mkdir -p /app/frontend +WORKDIR /app/frontend +COPY frontend-v2/package.json /app/frontend + +RUN npm install + +COPY frontend-v2 /app/frontend/ +RUN npm run build + +FROM python:3.10 + +# install libsundials-dev and memcached +RUN apt-get update && apt-get upgrade -y +RUN apt-get install -y libsundials-dev memcached + +# install nginx +RUN apt-get install nginx vim -y --no-install-recommends +RUN ln -sf /dev/stdout /var/log/nginx/access.log \ + && ln -sf /dev/stderr /var/log/nginx/error.log +RUN chown www-data:www-data /etc/nginx/sites-available/default + +# install envsubst +RUN apt-get install -y gettext-base + +# clean up apt +RUN apt-get clean +RUN apt-get autoclean +RUN apt-get autoremove +RUN rm -rf /var/lib/apt/lists/* + +# install dependencies +COPY ./requirements.txt / +RUN apt-get update && apt-get upgrade -y +RUN apt-get install -y build-essential libsasl2-dev python3-dev libldap2-dev libssl-dev + +RUN pip install -r requirements.txt + +# install server code +WORKDIR /app +COPY ./pkpdapp .
+ +RUN python manage.py collectstatic --noinput +RUN python manage.py migrate --noinput + +# copy the built frontend (needs to be after we install nginx) +COPY --from=build /app/frontend/build /usr/share/nginx/html + +# we're running as the www-data user, so make the files owned by this user +RUN chown -R www-data:www-data . + +# make /var/www/.config dir and make it writable (myokit writes to it) +RUN mkdir -p /var/www/.config +RUN chown -R www-data:www-data /var/www + +# gunicorn and nginx needs to write to a few places +RUN chown -R www-data:www-data /var/lib/nginx /run /tmp + +# server setup files +COPY nginx.default.template . +COPY start-server.sh . +RUN chown -R www-data:www-data nginx.default.template start-server.sh + +# run as www-data +USER www-data + +# start server using the port given by the environment variable $PORT +# nginx config files don't support env variables so have to do it manually +# using envsubst +STOPSIGNAL SIGTERM +CMD /bin/bash -c "envsubst '\$PORT' < ./nginx.default.template > /etc/nginx/sites-available/default" && "./start-server.sh" diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 00000000..603071ac --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2023, PKPDApp +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md new file mode 100644 index 00000000..91dbfb32 --- /dev/null +++ b/README.md @@ -0,0 +1,167 @@ + + PKPDApp logo + + +[![Unit tests on multiple python versions](https://github.com/pkpdapp-team/pkpdapp/workflows/Unit%20tests%20(python%20versions)/badge.svg)](https://github.com/pkpdapp-team/pkpdapp/actions) +[![Unit tests on multiple operating systems](https://github.com/pkpdapp-team/pkpdapp/workflows/Unit%20tests%20(OS%20versions)/badge.svg)](https://github.com/pkpdapp-team/pkpdapp/actions) +[![codecov](https://codecov.io/gh/pkpdapp-team/pkpdapp/branch/master/graph/badge.svg)](https://codecov.io/gh/pkpdapp-team/pkpdapp) + +# PKPDApp + +PKPDApp is an open source web-based application to explore, analyse and model the pharmacokinetics and pharmacodynamics of chemical compounds. 
The app is currently under heavy development; however, a preliminary version is deployed on Heroku and can be found at https://pkpdapp.herokuapp.com/. + +## Installation - development + +If you are interested in developing PKPDApp with us, or just want to run the app locally, you can clone the repository and follow the installation instructions below. + +### Django backend + +1. Install Sundials, the Python dev libraries and the RabbitMQ server + - Ubuntu-latest: + ```bash + apt-get install libsundials-dev python3-dev rabbitmq-server + ``` + Note: if you are in WSL then the RabbitMQ server will not start automatically; you + can start it manually using `sudo -u rabbitmq rabbitmq-server` + - MacOS-latest: + ```bash + brew install sundials rabbitmq + ``` + Note: to restart RabbitMQ after an upgrade, use `brew services restart rabbitmq` + - Windows-latest: + Sundials will be installed automatically when you install the app. + + +2. Set environment variables + + - Edit the `.env` file in the root of the repository and set the environment + variables to correspond to your particular setup. The most important + variables to alter are those corresponding to secret keys and passwords; others + you can probably leave as-is. + +3. Install requirements + + - Create a new virtual environment (optional), then install the requirements + +```bash +python3 -m venv env +source env/bin/activate +pip install -r requirements.txt +``` + +4. Create the database + +```bash +cd pkpdapp +python manage.py migrate +``` + +5. Run the Celery worker (this uses the RabbitMQ server installed in step 1) + +```bash +celery -A pkpdapp worker --loglevel=INFO +``` + +6. Run the local server + +```bash +python manage.py runserver +``` + +7. (Optional) Create an admin user + +```bash +python manage.py createsuperuser +``` + +### React frontend + +Running the frontend will require a local installation of Node.js. On Ubuntu 20.04 LTS, +for example, you can install it using `snap` + +```bash +sudo snap install node --classic +``` + +It is also useful to install the `yarn` package manager + +```bash +npm install --global yarn +``` + +Navigate to the `frontend/` directory and install the Node.js dependencies + +```bash +cd frontend +yarn install +``` + +You can run the frontend using + +```bash +yarn start +``` + +You should be able to see the PKPD web app at [127.0.0.1:3000](http://127.0.0.1:3000). + + +## Installation - production + +Alternatively, you can build a Docker image and run it in a container with the commands below. + +```bash +$ docker-compose build +$ docker-compose up +``` + +You should be able to see the web application at [127.0.0.1](http://127.0.0.1). + +## Code testing + +We run a range of tests each time a commit is pushed to an open pull request +using GitHub Actions. Passing these tests is a prerequisite for merging a pull +request. Some of these can be run locally as described below: + +- copyright tests: `python run-tests.py --copyright` +- code style: `flake8` +- unit tests: `python manage.py test` runs everything; to run a single test in +a file called (say) `test_models.py`, use +`python manage.py test pkpdapp.tests.test_models` +- code coverage tests: these cannot be run locally + +## Generating OpenAPI spec + +The front-end communicates with the back-end using a REST API. The API can be +described using the OpenAPI specification.
To generate the OpenAPI +specification, run the following command: + +```bash +python manage.py spectacular --color --file schema.yml +``` + +## Generating the RTK Query API client + +The front-end uses the Redux Toolkit RTK Query codegen tool to automatically generate a +client for the API based on the OpenAPI spec described above. To generate the +client, run the following command in the frontend directory: + +```bash +npx @rtk-query/codegen-openapi openapi-config.json +``` + +## Running the cache + +Install the cache (memcached) using + +```bash +$ sudo apt install memcached +``` + +Run memcached using + +```bash +$ memcached -p 11211 +``` + +## License +PKPDApp is fully open source. For more information about its license, see [LICENSE.md](LICENSE.md). diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..79ee686e --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,38 @@ +version: "2" +volumes: + db: +services: + postgres: + image: "postgres" + stop_signal: SIGINT # Fast Shutdown mode + command: -p 5432 + ports: + - "5432:5432" + env_file: + - ./.env.prod + restart: unless-stopped + volumes: + - db:/var/lib/postgresql/data + rabbitmq: + image: rabbitmq:3-management-alpine + env_file: + - ./.env.prod + ports: + - 5672:5672 + - 15672:15672 + restart: unless-stopped + app: + image: pkpdapp + depends_on: + - postgres + - rabbitmq + build: + dockerfile: Dockerfile + context: . + ports: + - "80:${PORT}" + restart: unless-stopped + env_file: + - ./.env.prod + + diff --git a/frontend-v2/.gitignore b/frontend-v2/.gitignore new file mode 100644 index 00000000..4d29575d --- /dev/null +++ b/frontend-v2/.gitignore @@ -0,0 +1,23 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# production +/build + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +npm-debug.log* +yarn-debug.log* +yarn-error.log* diff --git a/frontend-v2/README.md b/frontend-v2/README.md new file mode 100644 index 00000000..a88dc80c --- /dev/null +++ b/frontend-v2/README.md @@ -0,0 +1,46 @@ +# Getting Started with Create React App + +This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app), using the [Redux](https://redux.js.org/) and [Redux Toolkit](https://redux-toolkit.js.org/) TS template. + +## Available Scripts + +In the project directory, you can run: + +### `yarn start` + +Runs the app in the development mode.\ +Open [http://localhost:3000](http://localhost:3000) to view it in the browser. + +The page will reload if you make edits.\ +You will also see any lint errors in the console. + +### `yarn test` + +Launches the test runner in the interactive watch mode.\ +See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. + +### `yarn build` + +Builds the app for production to the `build` folder.\ +It correctly bundles React in production mode and optimizes the build for the best performance. + +The build is minified and the filenames include the hashes.\ +Your app is ready to be deployed! + +See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. + +### `yarn eject` + +**Note: this is a one-way operation. Once you `eject`, you can’t go back!** + +If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time.
This command will remove the single build dependency from your project. + +Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. + +You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. + +## Learn More + +You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). + +To learn React, check out the [React documentation](https://reactjs.org/). diff --git a/frontend-v2/cypress.config.ts b/frontend-v2/cypress.config.ts new file mode 100644 index 00000000..067f51cd --- /dev/null +++ b/frontend-v2/cypress.config.ts @@ -0,0 +1,18 @@ +import { defineConfig } from "cypress"; + +export default defineConfig({ + e2e: { + setupNodeEvents(on, config) { + // implement node event listeners here + }, + baseUrl: 'http://localhost:3000', + + }, + + component: { + devServer: { + framework: "next", + bundler: "webpack", + }, + }, +}); diff --git a/frontend-v2/cypress/.gitignore b/frontend-v2/cypress/.gitignore new file mode 100644 index 00000000..4c2cda08 --- /dev/null +++ b/frontend-v2/cypress/.gitignore @@ -0,0 +1,2 @@ +*.png +*.mp4 \ No newline at end of file diff --git a/frontend-v2/cypress/e2e/pkmodel.cy.js b/frontend-v2/cypress/e2e/pkmodel.cy.js new file mode 100644 index 00000000..4b69902f --- /dev/null +++ b/frontend-v2/cypress/e2e/pkmodel.cy.js @@ -0,0 +1,101 @@ +describe('landing page', () => { + beforeEach(() => { + const { username, password } = { username: 'demo', password: '12345'} + cy.login(username, password) + }) + + it('can create combined pk and pd model and simulate from it', () => { + // create a new project + cy.intercept('POST', '/api/project/').as('createProject') + cy.get('[data-cy="create-project"]').click() + cy.get('[data-cy="create-project-option-Small Molecule"]').click() + cy.wait('@createProject').then((interception) => { + const { id } = interception.response.body + + // select the species + cy.get(`[data-cy="project-${id}"]`).find('[data-cy="select-project.species"]').click() + cy.get('[data-cy="select-option-project.species-Monkey"]').click() + + // select the project + cy.get(`[data-cy="project-${id}"]`).find('[type=radio]').click() + }); + + cy.get('[data-cy="select-project.species"]').last().click() + cy.get('[data-cy="select-option-project.species-Monkey"]').last().click() + + + // go to model tab + cy.get('li').contains('Model').click() + + // select one compartment model + cy.get('[data-cy="select-pk_model"]').click() + cy.get('[data-cy="select-option-pk_model-one_compartment_preclinical"]').click() + + // select pd model + cy.get('[data-cy="select-pd_model"]').click() + cy.get('[data-cy="select-option-pd_model-indirect_effects_stimulation_elimination"]').click() + + // go to map variables tab + cy.contains('button', 'Map Variables').click() + + // dose into Aa compartment + cy.get('[data-cy="checkbox-dosing-Aa"]').click() + + // map C1 to pd effect + cy.get('[data-cy="checkbox-map-to-pd-C1"]').click() + + // go to parameters tab + cy.contains('button', 'Parameters').click() + + 
cy.get('[data-cy="parameter-CL-value"]').find('input').then(($input) => { + const old_value = $input.val() + + // reset to species defaults + cy.contains('button', 'Reset to Species Defaults').click() + cy.wait(1000) + + // check that the value has changed + cy.get('[data-cy="parameter-CL-value"]').find('input').then(($input) => { + expect($input.val()).not.to.eq(old_value) + }) + }); + + // go to trial design tab + cy.get('li').contains('Trial Design').click() + + // set the dose + cy.get('input[name="doses.0.amount"]').clear().type('1') + + // set the number of doses + cy.get('input[name="doses.0.repeats"]').clear().type('4') + + // set the duration + cy.get('input[name="doses.0.duration"]').clear().type('0.1') + + // set the interval + cy.get('input[name="doses.0.repeat_interval"]').clear().type('0.1') + + // go to simulation tab + cy.get('li').contains('Simulations').click() + + // should be no svg with class "main-svg" + cy.get('svg.main-svg').should('not.exist') + + // add a plot of Aa + cy.get('[data-cy="add-plot"]').click() + cy.get('[data-cy^="add-plot-option-Aa"]').click() + + // now there should be an svg with class "main-svg" + cy.get('svg.main-svg').should('exist') + + // should be no CL slider + cy.get('[data-cy="parameter-slider-CL"]').should('not.exist') + + // add a CL parameter slider + cy.get('[data-cy="add-parameter-slider"]').click() + cy.get('[data-cy="add-parameter-slider-option-CL"]').click() + + // CL slider should exist + cy.get('[data-cy="parameter-slider-CL"]').should('exist') + }); +}); diff --git a/frontend-v2/cypress/support/commands.ts b/frontend-v2/cypress/support/commands.ts new file mode 100644 index 00000000..59301d8b --- /dev/null +++ b/frontend-v2/cypress/support/commands.ts @@ -0,0 +1,55 @@ +/// +// *********************************************** +// This example commands.ts shows you how to +// create various custom commands and overwrite +// existing commands. +// +// For more comprehensive examples of custom +// commands please read more here: +// https://on.cypress.io/custom-commands +// *********************************************** +// +// +// -- This is a parent command -- +// Cypress.Commands.add('login', (email, password) => { ... }) +// +// +// -- This is a child command -- +// Cypress.Commands.add('drag', { prevSubject: 'element'}, (subject, options) => { ... }) +// +// +// -- This is a dual command -- +// Cypress.Commands.add('dismiss', { prevSubject: 'optional'}, (subject, options) => { ... }) +// +// +// -- This will overwrite an existing command -- +// Cypress.Commands.overwrite('visit', (originalFn, url, options) => { ... 
}) +// +// declare global { +// namespace Cypress { +// interface Chainable { +// login(email: string, password: string): Chainable +// drag(subject: string, options?: Partial): Chainable +// dismiss(subject: string, options?: Partial): Chainable +// visit(originalFn: CommandOriginalFn, url: string, options: Partial): Chainable +// } +// } +// } + +Cypress.Commands.add('login', (username, password) => { + cy.visit('/') + + cy.get('input[name=username]').type(username) + + // {enter} causes the form to submit + cy.get('input[name=password]').type(`${password}{enter}`, { log: false }) + + // we should be redirected to / + cy.url().should('not.contain', 'login') + + // our auth cookies should be present + cy.getCookie('csrftoken').should('exist') + cy.getCookie('sessionid').should('exist') +}) + +export {} \ No newline at end of file diff --git a/frontend-v2/cypress/support/e2e.ts b/frontend-v2/cypress/support/e2e.ts new file mode 100644 index 00000000..f80f74f8 --- /dev/null +++ b/frontend-v2/cypress/support/e2e.ts @@ -0,0 +1,20 @@ +// *********************************************************** +// This example support/e2e.ts is processed and +// loaded automatically before your test files. +// +// This is a great place to put global configuration and +// behavior that modifies Cypress. +// +// You can change the location of this file or turn off +// automatically serving support files with the +// 'supportFile' configuration option. +// +// You can read more here: +// https://on.cypress.io/configuration +// *********************************************************** + +// Import commands.js using ES2015 syntax: +import './commands' + +// Alternatively you can use CommonJS syntax: +// require('./commands') \ No newline at end of file diff --git a/frontend-v2/openapi-config.json b/frontend-v2/openapi-config.json new file mode 100644 index 00000000..1b493e73 --- /dev/null +++ b/frontend-v2/openapi-config.json @@ -0,0 +1,8 @@ +{ + "schemaFile": "../pkpdapp/schema.yml", + "apiFile": "./src/app/emptyApi.ts", + "apiImport": "emptySplitApi", + "outputFile": "./src/app/backendApi.ts", + "exportName": "backendApi", + "hooks": true +} \ No newline at end of file diff --git a/frontend-v2/openapi-config.ts b/frontend-v2/openapi-config.ts new file mode 100644 index 00000000..c8c53aa4 --- /dev/null +++ b/frontend-v2/openapi-config.ts @@ -0,0 +1,12 @@ +import type { ConfigFile } from '@rtk-query/codegen-openapi' + +const config: ConfigFile = { + schemaFile: '../backend/schema.yml', + apiFile: './src/store/emptyApi.ts', + apiImport: 'emptySplitApi', + outputFile: './src/store/petApi.ts', + exportName: 'petApi', + hooks: true, +} + +export default config \ No newline at end of file diff --git a/frontend-v2/package.json b/frontend-v2/package.json new file mode 100644 index 00000000..679f117d --- /dev/null +++ b/frontend-v2/package.json @@ -0,0 +1,64 @@ +{ + "name": "frontend-v2", + "version": "0.1.0", + "private": true, + "dependencies": { + "@emotion/react": "^11.10.8", + "@emotion/styled": "^11.10.8", + "@mui/icons-material": "^5.11.16", + "@mui/material": "^5.12.2", + "@reduxjs/toolkit": "^1.8.1", + "@testing-library/jest-dom": "^5.16.4", + "@testing-library/react": "^13.0.1", + "@testing-library/user-event": "^14.1.1", + "@types/jest": "^27.4.1", + "@types/node": "^17.0.25", + "@types/react": "^18.0.6", + "@types/react-dom": "^18.0.2", + "http-proxy-middleware": "^2.0.6", + "papaparse": "^5.4.1", + "plotly.js": "^2.23.2", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-hook-form": "^7.43.9", + 
"react-player": "^2.13.0", + "react-plotly.js": "^2.6.0", + "react-redux": "^8.0.1", + "react-scripts": "5.0.1", + "typescript": "^4.6.0", + "web-vitals": "^2.1.0" + }, + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject", + "cypress:open": "cypress open", + "cypress:e2e": "start-server-and-test 'yarn start' http://127.0.0.1:3000 \"cypress run --e2e\"" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + }, + "devDependencies": { + "@babel/plugin-proposal-private-property-in-object": "^7.21.11", + "@types/papaparse": "^5.3.10", + "@types/react-plotly.js": "^2.6.0", + "cypress": "^12.16.0", + "start-server-and-test": "^2.0.0" + } +} diff --git a/frontend-v2/public/favicon.ico b/frontend-v2/public/favicon.ico new file mode 100644 index 00000000..354202a4 Binary files /dev/null and b/frontend-v2/public/favicon.ico differ diff --git a/frontend-v2/public/index.html b/frontend-v2/public/index.html new file mode 100644 index 00000000..0eb2f946 --- /dev/null +++ b/frontend-v2/public/index.html @@ -0,0 +1,43 @@ + + + + + + + + + + + + + PKPD App + + + +
+ + + diff --git a/frontend-v2/public/logo_pkpdapp.svg b/frontend-v2/public/logo_pkpdapp.svg new file mode 100644 index 00000000..97fe0bbe --- /dev/null +++ b/frontend-v2/public/logo_pkpdapp.svg @@ -0,0 +1,102 @@ + + + + + + + + + + + + + + + diff --git a/frontend-v2/public/manifest.json b/frontend-v2/public/manifest.json new file mode 100644 index 00000000..7ccca6bb --- /dev/null +++ b/frontend-v2/public/manifest.json @@ -0,0 +1,14 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "logo_pkpdapp.svg", + "type": "image/svg+xml" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/frontend-v2/public/robots.txt b/frontend-v2/public/robots.txt new file mode 100644 index 00000000..e9e57dc4 --- /dev/null +++ b/frontend-v2/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/frontend-v2/src/App.css b/frontend-v2/src/App.css new file mode 100644 index 00000000..01cc5867 --- /dev/null +++ b/frontend-v2/src/App.css @@ -0,0 +1,39 @@ +.App { + text-align: center; +} + +.App-logo { + height: 40vmin; + pointer-events: none; +} + +@media (prefers-reduced-motion: no-preference) { + .App-logo { + animation: App-logo-float infinite 3s ease-in-out; + } +} + +.App-header { + min-height: 100vh; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + font-size: calc(10px + 2vmin); +} + +.App-link { + color: rgb(112, 76, 182); +} + +@keyframes App-logo-float { + 0% { + transform: translateY(0); + } + 50% { + transform: translateY(10px); + } + 100% { + transform: translateY(0px); + } +} diff --git a/frontend-v2/src/App.test.tsx b/frontend-v2/src/App.test.tsx new file mode 100644 index 00000000..659cc13d --- /dev/null +++ b/frontend-v2/src/App.test.tsx @@ -0,0 +1,15 @@ +import React from 'react'; +import { render } from '@testing-library/react'; +import { Provider } from 'react-redux'; +import { store } from './app/store'; +import App from './App'; + +test('renders learn react link', () => { + const { getByText } = render( + + + + ); + + expect(getByText(/learn/i)).toBeInTheDocument(); +}); diff --git a/frontend-v2/src/App.tsx b/frontend-v2/src/App.tsx new file mode 100644 index 00000000..926152ee --- /dev/null +++ b/frontend-v2/src/App.tsx @@ -0,0 +1,36 @@ +import React, { useEffect } from 'react'; +import './App.css'; + +import { fetchSession, isAuthenticated, login } from './features/login/loginSlice'; +import { useSelector } from 'react-redux'; +import Login from './features/login/login'; +import Sidebar from './features/main/Sidebar'; +import { useAppDispatch } from './app/hooks'; +import { RootState } from './app/store'; + +function App() { + const dispatch = useAppDispatch(); + const isAuth = useSelector(isAuthenticated); + const error = useSelector((state: RootState) => state.login.error); + + const onLogin = (username: string, password: string) => { + dispatch(login({ username, password })); + } + + useEffect(() => { + dispatch(fetchSession()); + }, [dispatch]); + + return ( + <> + { isAuth ? 
( + + ): ( + + )} + + ); +} + +export default App; + diff --git a/frontend-v2/src/app/api.ts b/frontend-v2/src/app/api.ts new file mode 100644 index 00000000..853052d8 --- /dev/null +++ b/frontend-v2/src/app/api.ts @@ -0,0 +1,222 @@ +import { backendApi } from './backendApi' + +export const api = backendApi.enhanceEndpoints({ + addTagTypes: ['Project', 'Compound', 'Dataset', 'CombinedModel', 'Variable', 'Simulation', 'Protocol', 'Unit'], + endpoints: { + // Projects + projectList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'Project' as const, id })), + { type: 'Project', id: 'LIST' }, + ] + : [{ type: 'Project', id: 'LIST' }], + }, + projectRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Project', id }], + }, + projectUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Project', id }], + }, + projectCreate: { + invalidatesTags: [{ type: 'Project', id: 'LIST' }], + }, + projectDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Project', id }, + { type: 'Project', id: 'LIST' } + ], + }, + // Compounds + compoundList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'Compound' as const, id })), + { type: 'Compound', id: 'LIST' }, + ] + : [{ type: 'Compound', id: 'LIST' }], + }, + compoundRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Compound', id }], + }, + compoundUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Compound', id }, { type: 'Unit', id: 'LIST' }], + }, + compoundCreate: { + invalidatesTags: [{ type: 'Compound', id: 'LIST' }], + }, + compoundDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Compound', id }, + { type: 'Compound', id: 'LIST' } + ], + }, + // Datasets + datasetList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'Dataset' as const, id })), + { type: 'Dataset', id: 'LIST' }, + ] + : [{ type: 'Dataset', id: 'LIST' }], + }, + datasetRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Dataset', id }], + }, + datasetUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Dataset', id }], + }, + datasetCreate: { + invalidatesTags: [{ type: 'Dataset', id: 'LIST' }], + }, + datasetDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Dataset', id }, + { type: 'Dataset', id: 'LIST' } + ], + }, + // CombinedModel + combinedModelSetParamsToDefaultsUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Variable', id: 'LIST' }], + }, + combinedModelList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'CombinedModel' as const, id })), + { type: 'CombinedModel', id: 'LIST' }, + ] + : [{ type: 'CombinedModel', id: 'LIST' }], + }, + combinedModelRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'CombinedModel', id }], + }, + combinedModelUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'CombinedModel', id }, { type: 'Variable', id: 'LIST' }, { type: 'Protocol', id: 'LIST' }], + }, + combinedModelCreate: { + invalidatesTags: [{ type: 'CombinedModel', id: 'LIST' }, { type: 'Variable', id: 'LIST' }], + }, + combinedModelDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'CombinedModel', id }, + { type: 'CombinedModel', id: 'LIST' }, + { type: 'Variable', id: 'LIST' } + ], + }, + variableList: { + providesTags: (result) => + result + ? 
[ + ...result.map(({ id }) => ({ type: 'Variable' as const, id })), + { type: 'Variable', id: 'LIST' }, + ] + : [{ type: 'Variable', id: 'LIST' }], + }, + variableRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Variable', id }], + }, + variableUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Variable', id }, { type: 'Protocol', id: 'LIST' }], + }, + variableCreate: { + invalidatesTags: [{ type: 'Variable', id: 'LIST' }], + }, + variableDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Variable', id }, + { type: 'Variable', id: 'LIST' } + ], + }, + simulationList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'Simulation' as const, id })), + { type: 'Simulation', id: 'LIST' }, + ] + : [{ type: 'Simulation', id: 'LIST' }], + }, + simulationRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Simulation', id }], + }, + simulationUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Simulation', id }], + }, + simulationCreate: { + invalidatesTags: [{ type: 'Simulation', id: 'LIST' }], + }, + simulationDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Simulation', id }, + { type: 'Simulation', id: 'LIST' } + ], + }, + protocolList: { + providesTags: (result) => + result + ? [ + ...result.map(({ id }) => ({ type: 'Protocol' as const, id })), + { type: 'Protocol', id: 'LIST' }, + ] + : [{ type: 'Protocol', id: 'LIST' }], + }, + protocolRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Protocol', id }], + }, + protocolUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Protocol', id }], + }, + protocolCreate: { + invalidatesTags: [{ type: 'Protocol', id: 'LIST' }], + }, + protocolDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Protocol', id }, + { type: 'Protocol', id: 'LIST' } + ], + }, + unitList: { + providesTags: (result) => + result + ? 
[ + ...result.map(({ id }) => ({ type: 'Unit' as const, id })), + { type: 'Unit', id: 'LIST' }, + ] + : [{ type: 'Unit', id: 'LIST' }], + }, + unitRetrieve: { + providesTags: (result, error, { id }) => [{ type: 'Unit', id }], + }, + unitUpdate: { + invalidatesTags: (result, error, { id }) => [{ type: 'Unit', id }], + }, + unitCreate: { + invalidatesTags: [{ type: 'Unit', id: 'LIST' }], + }, + unitDestroy: { + invalidatesTags: (result, error, { id }) => [ + { type: 'Unit', id }, + { type: 'Unit', id: 'LIST' } + ], + }, + + }, + //addTagTypes: ['User'], + //endpoints: { + // getUserByUserId: { + // providesTags: ['User'], + // }, + // patchUserByUserId: { + // invalidatesTags: ['User'], + // }, + // // alternatively, define a function which is called with the endpoint definition as an argument + // getUsers(endpoint) { + // endpoint.providesTags = ['User'] + // endpoint.keepUnusedDataFor = 120 + // }, + //}, +}); diff --git a/frontend-v2/src/app/backendApi.ts b/frontend-v2/src/app/backendApi.ts new file mode 100644 index 00000000..d3f07c32 --- /dev/null +++ b/frontend-v2/src/app/backendApi.ts @@ -0,0 +1,2836 @@ +import { emptySplitApi as api } from "./emptyApi"; +const injectedRtkApi = api.injectEndpoints({ + endpoints: (build) => ({ + algorithmList: build.query({ + query: () => ({ url: `/api/algorithm/` }), + }), + algorithmCreate: build.mutation< + AlgorithmCreateApiResponse, + AlgorithmCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/algorithm/`, + method: "POST", + body: queryArg.algorithm, + }), + }), + algorithmRetrieve: build.query< + AlgorithmRetrieveApiResponse, + AlgorithmRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/algorithm/${queryArg.id}/` }), + }), + algorithmUpdate: build.mutation< + AlgorithmUpdateApiResponse, + AlgorithmUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/algorithm/${queryArg.id}/`, + method: "PUT", + body: queryArg.algorithm, + }), + }), + algorithmPartialUpdate: build.mutation< + AlgorithmPartialUpdateApiResponse, + AlgorithmPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/algorithm/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedAlgorithm, + }), + }), + algorithmDestroy: build.mutation< + AlgorithmDestroyApiResponse, + AlgorithmDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/algorithm/${queryArg.id}/`, + method: "DELETE", + }), + }), + auceCreate: build.mutation({ + query: () => ({ url: `/api/auce/`, method: "POST" }), + }), + biomarkerTypeList: build.query< + BiomarkerTypeListApiResponse, + BiomarkerTypeListApiArg + >({ + query: () => ({ url: `/api/biomarker_type/` }), + }), + biomarkerTypeCreate: build.mutation< + BiomarkerTypeCreateApiResponse, + BiomarkerTypeCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/biomarker_type/`, + method: "POST", + body: queryArg.biomarkerType, + }), + }), + biomarkerTypeRetrieve: build.query< + BiomarkerTypeRetrieveApiResponse, + BiomarkerTypeRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/biomarker_type/${queryArg.id}/` }), + }), + biomarkerTypeUpdate: build.mutation< + BiomarkerTypeUpdateApiResponse, + BiomarkerTypeUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/biomarker_type/${queryArg.id}/`, + method: "PUT", + body: queryArg.biomarkerType, + }), + }), + biomarkerTypePartialUpdate: build.mutation< + BiomarkerTypePartialUpdateApiResponse, + BiomarkerTypePartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/biomarker_type/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedBiomarkerType, + }), + }), + 
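// NOTE: because `hooks: true` is set in frontend-v2/openapi-config.json, this
+    // generated module also exports a React hook for each endpoint, following
+    // the usual RTK Query naming convention (use<EndpointName>Query /
+    // use<EndpointName>Mutation). A minimal usage sketch (hypothetical
+    // component, assuming the generated `useProjectListQuery` hook):
+    //
+    //   import { useProjectListQuery } from "./backendApi";
+    //
+    //   function ProjectCount() {
+    //     const { data: projects, isLoading } = useProjectListQuery();
+    //     if (isLoading) return null;
+    //     return <span>{projects?.length ?? 0} projects</span>;
+    //   } +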
biomarkerTypeDestroy: build.mutation< + BiomarkerTypeDestroyApiResponse, + BiomarkerTypeDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/biomarker_type/${queryArg.id}/`, + method: "DELETE", + }), + }), + combinedModelList: build.query< + CombinedModelListApiResponse, + CombinedModelListApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/`, + params: { project_id: queryArg.projectId }, + }), + }), + combinedModelCreate: build.mutation< + CombinedModelCreateApiResponse, + CombinedModelCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/`, + method: "POST", + body: queryArg.combinedModel, + }), + }), + combinedModelRetrieve: build.query< + CombinedModelRetrieveApiResponse, + CombinedModelRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/combined_model/${queryArg.id}/` }), + }), + combinedModelUpdate: build.mutation< + CombinedModelUpdateApiResponse, + CombinedModelUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/`, + method: "PUT", + body: queryArg.combinedModel, + }), + }), + combinedModelPartialUpdate: build.mutation< + CombinedModelPartialUpdateApiResponse, + CombinedModelPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedCombinedModel, + }), + }), + combinedModelDestroy: build.mutation< + CombinedModelDestroyApiResponse, + CombinedModelDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/`, + method: "DELETE", + }), + }), + combinedModelSetParamsToDefaultsUpdate: build.mutation< + CombinedModelSetParamsToDefaultsUpdateApiResponse, + CombinedModelSetParamsToDefaultsUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/set_params_to_defaults/`, + method: "PUT", + body: queryArg.combinedModel, + }), + }), + combinedModelSetVariablesFromInferenceUpdate: build.mutation< + CombinedModelSetVariablesFromInferenceUpdateApiResponse, + CombinedModelSetVariablesFromInferenceUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/set_variables_from_inference/`, + method: "PUT", + body: queryArg.combinedModel, + }), + }), + combinedModelSimulateCreate: build.mutation< + CombinedModelSimulateCreateApiResponse, + CombinedModelSimulateCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/combined_model/${queryArg.id}/simulate`, + method: "POST", + body: queryArg.simulate, + }), + }), + compoundList: build.query({ + query: () => ({ url: `/api/compound/` }), + }), + compoundCreate: build.mutation< + CompoundCreateApiResponse, + CompoundCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/compound/`, + method: "POST", + body: queryArg.compound, + }), + }), + compoundRetrieve: build.query< + CompoundRetrieveApiResponse, + CompoundRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/compound/${queryArg.id}/` }), + }), + compoundUpdate: build.mutation< + CompoundUpdateApiResponse, + CompoundUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/compound/${queryArg.id}/`, + method: "PUT", + body: queryArg.compound, + }), + }), + compoundPartialUpdate: build.mutation< + CompoundPartialUpdateApiResponse, + CompoundPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/compound/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedCompound, + }), + }), + compoundDestroy: build.mutation< + CompoundDestroyApiResponse, + CompoundDestroyApiArg + >({ + query: (queryArg) => ({ + url: 
`/api/compound/${queryArg.id}/`, + method: "DELETE", + }), + }), + datasetList: build.query({ + query: () => ({ url: `/api/dataset/` }), + }), + datasetCreate: build.mutation< + DatasetCreateApiResponse, + DatasetCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/dataset/`, + method: "POST", + body: queryArg.dataset, + }), + }), + datasetRetrieve: build.query< + DatasetRetrieveApiResponse, + DatasetRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/dataset/${queryArg.id}/` }), + }), + datasetUpdate: build.mutation< + DatasetUpdateApiResponse, + DatasetUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/dataset/${queryArg.id}/`, + method: "PUT", + body: queryArg.dataset, + }), + }), + datasetPartialUpdate: build.mutation< + DatasetPartialUpdateApiResponse, + DatasetPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/dataset/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedDataset, + }), + }), + datasetDestroy: build.mutation< + DatasetDestroyApiResponse, + DatasetDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/dataset/${queryArg.id}/`, + method: "DELETE", + }), + }), + datasetCsvUpdate: build.mutation< + DatasetCsvUpdateApiResponse, + DatasetCsvUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/dataset/${queryArg.id}/csv/`, + method: "PUT", + body: queryArg.datasetCsv, + }), + }), + doseList: build.query({ + query: () => ({ url: `/api/dose/` }), + }), + doseCreate: build.mutation({ + query: (queryArg) => ({ + url: `/api/dose/`, + method: "POST", + body: queryArg.dose, + }), + }), + doseRetrieve: build.query({ + query: (queryArg) => ({ url: `/api/dose/${queryArg.id}/` }), + }), + doseUpdate: build.mutation({ + query: (queryArg) => ({ + url: `/api/dose/${queryArg.id}/`, + method: "PUT", + body: queryArg.dose, + }), + }), + dosePartialUpdate: build.mutation< + DosePartialUpdateApiResponse, + DosePartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/dose/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedDose, + }), + }), + doseDestroy: build.mutation({ + query: (queryArg) => ({ + url: `/api/dose/${queryArg.id}/`, + method: "DELETE", + }), + }), + inferenceList: build.query({ + query: () => ({ url: `/api/inference/` }), + }), + inferenceCreate: build.mutation< + InferenceCreateApiResponse, + InferenceCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference/`, + method: "POST", + body: queryArg.inference, + }), + }), + inferenceRetrieve: build.query< + InferenceRetrieveApiResponse, + InferenceRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/inference/${queryArg.id}/` }), + }), + inferenceUpdate: build.mutation< + InferenceUpdateApiResponse, + InferenceUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference/${queryArg.id}/`, + method: "PUT", + body: queryArg.inference, + }), + }), + inferencePartialUpdate: build.mutation< + InferencePartialUpdateApiResponse, + InferencePartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedInference, + }), + }), + inferenceDestroy: build.mutation< + InferenceDestroyApiResponse, + InferenceDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference/${queryArg.id}/`, + method: "DELETE", + }), + }), + inferenceStopCreate: build.mutation< + InferenceStopCreateApiResponse, + InferenceStopCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference/${queryArg.id}/stop`, + method: "POST", + }), + }), + inferenceWizardCreate: build.mutation< + 
InferenceWizardCreateApiResponse, + InferenceWizardCreateApiArg + >({ + query: () => ({ url: `/api/inference/wizard`, method: "POST" }), + }), + inferenceChainList: build.query< + InferenceChainListApiResponse, + InferenceChainListApiArg + >({ + query: () => ({ url: `/api/inference_chain/` }), + }), + inferenceChainCreate: build.mutation< + InferenceChainCreateApiResponse, + InferenceChainCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference_chain/`, + method: "POST", + body: queryArg.inferenceChain, + }), + }), + inferenceChainRetrieve: build.query< + InferenceChainRetrieveApiResponse, + InferenceChainRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/inference_chain/${queryArg.id}/` }), + }), + inferenceChainUpdate: build.mutation< + InferenceChainUpdateApiResponse, + InferenceChainUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference_chain/${queryArg.id}/`, + method: "PUT", + body: queryArg.inferenceChain, + }), + }), + inferenceChainPartialUpdate: build.mutation< + InferenceChainPartialUpdateApiResponse, + InferenceChainPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference_chain/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedInferenceChain, + }), + }), + inferenceChainDestroy: build.mutation< + InferenceChainDestroyApiResponse, + InferenceChainDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/inference_chain/${queryArg.id}/`, + method: "DELETE", + }), + }), + ncaCreate: build.mutation({ + query: () => ({ url: `/api/nca/`, method: "POST" }), + }), + pharmacodynamicList: build.query< + PharmacodynamicListApiResponse, + PharmacodynamicListApiArg + >({ + query: () => ({ url: `/api/pharmacodynamic/` }), + }), + pharmacodynamicCreate: build.mutation< + PharmacodynamicCreateApiResponse, + PharmacodynamicCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/`, + method: "POST", + body: queryArg.pharmacodynamic, + }), + }), + pharmacodynamicRetrieve: build.query< + PharmacodynamicRetrieveApiResponse, + PharmacodynamicRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/pharmacodynamic/${queryArg.id}/` }), + }), + pharmacodynamicUpdate: build.mutation< + PharmacodynamicUpdateApiResponse, + PharmacodynamicUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/`, + method: "PUT", + body: queryArg.pharmacodynamic, + }), + }), + pharmacodynamicPartialUpdate: build.mutation< + PharmacodynamicPartialUpdateApiResponse, + PharmacodynamicPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedPharmacodynamic, + }), + }), + pharmacodynamicDestroy: build.mutation< + PharmacodynamicDestroyApiResponse, + PharmacodynamicDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/`, + method: "DELETE", + }), + }), + pharmacodynamicMmtUpdate: build.mutation< + PharmacodynamicMmtUpdateApiResponse, + PharmacodynamicMmtUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/mmt/`, + method: "PUT", + body: queryArg.pharmacodynamic, + }), + }), + pharmacodynamicSbmlUpdate: build.mutation< + PharmacodynamicSbmlUpdateApiResponse, + PharmacodynamicSbmlUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/sbml/`, + method: "PUT", + body: queryArg.pharmacodynamicSbml, + }), + }), + pharmacodynamicSetVariablesFromInferenceUpdate: build.mutation< + PharmacodynamicSetVariablesFromInferenceUpdateApiResponse, 
+ PharmacodynamicSetVariablesFromInferenceUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/set_variables_from_inference/`, + method: "PUT", + body: queryArg.pharmacodynamic, + }), + }), + pharmacodynamicSimulateCreate: build.mutation< + PharmacodynamicSimulateCreateApiResponse, + PharmacodynamicSimulateCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacodynamic/${queryArg.id}/simulate`, + method: "POST", + body: queryArg.simulate, + }), + }), + pharmacokineticList: build.query< + PharmacokineticListApiResponse, + PharmacokineticListApiArg + >({ + query: () => ({ url: `/api/pharmacokinetic/` }), + }), + pharmacokineticCreate: build.mutation< + PharmacokineticCreateApiResponse, + PharmacokineticCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacokinetic/`, + method: "POST", + body: queryArg.pharmacokinetic, + }), + }), + pharmacokineticRetrieve: build.query< + PharmacokineticRetrieveApiResponse, + PharmacokineticRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/pharmacokinetic/${queryArg.id}/` }), + }), + pharmacokineticUpdate: build.mutation< + PharmacokineticUpdateApiResponse, + PharmacokineticUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacokinetic/${queryArg.id}/`, + method: "PUT", + body: queryArg.pharmacokinetic, + }), + }), + pharmacokineticPartialUpdate: build.mutation< + PharmacokineticPartialUpdateApiResponse, + PharmacokineticPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacokinetic/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedPharmacokinetic, + }), + }), + pharmacokineticDestroy: build.mutation< + PharmacokineticDestroyApiResponse, + PharmacokineticDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/pharmacokinetic/${queryArg.id}/`, + method: "DELETE", + }), + }), + projectList: build.query({ + query: () => ({ url: `/api/project/` }), + }), + projectCreate: build.mutation< + ProjectCreateApiResponse, + ProjectCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project/`, + method: "POST", + body: queryArg.project, + }), + }), + projectRetrieve: build.query< + ProjectRetrieveApiResponse, + ProjectRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/project/${queryArg.id}/` }), + }), + projectUpdate: build.mutation< + ProjectUpdateApiResponse, + ProjectUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project/${queryArg.id}/`, + method: "PUT", + body: queryArg.project, + }), + }), + projectPartialUpdate: build.mutation< + ProjectPartialUpdateApiResponse, + ProjectPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedProject, + }), + }), + projectDestroy: build.mutation< + ProjectDestroyApiResponse, + ProjectDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/project/${queryArg.id}/`, + method: "DELETE", + }), + }), + projectMonolixUpdate: build.mutation< + ProjectMonolixUpdateApiResponse, + ProjectMonolixUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project/${queryArg.id}/monolix/`, + method: "PUT", + body: queryArg.monolix, + }), + }), + projectAccessList: build.query< + ProjectAccessListApiResponse, + ProjectAccessListApiArg + >({ + query: () => ({ url: `/api/project_access/` }), + }), + projectAccessCreate: build.mutation< + ProjectAccessCreateApiResponse, + ProjectAccessCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project_access/`, + method: "POST", + body: queryArg.projectAccess, + }), + }), + 
projectAccessRetrieve: build.query< + ProjectAccessRetrieveApiResponse, + ProjectAccessRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/project_access/${queryArg.id}/` }), + }), + projectAccessUpdate: build.mutation< + ProjectAccessUpdateApiResponse, + ProjectAccessUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project_access/${queryArg.id}/`, + method: "PUT", + body: queryArg.projectAccess, + }), + }), + projectAccessPartialUpdate: build.mutation< + ProjectAccessPartialUpdateApiResponse, + ProjectAccessPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/project_access/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedProjectAccess, + }), + }), + projectAccessDestroy: build.mutation< + ProjectAccessDestroyApiResponse, + ProjectAccessDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/project_access/${queryArg.id}/`, + method: "DELETE", + }), + }), + protocolList: build.query({ + query: (queryArg) => ({ + url: `/api/protocol/`, + params: { project_id: queryArg.projectId }, + }), + }), + protocolCreate: build.mutation< + ProtocolCreateApiResponse, + ProtocolCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/protocol/`, + method: "POST", + body: queryArg.protocol, + }), + }), + protocolRetrieve: build.query< + ProtocolRetrieveApiResponse, + ProtocolRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/protocol/${queryArg.id}/` }), + }), + protocolUpdate: build.mutation< + ProtocolUpdateApiResponse, + ProtocolUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/protocol/${queryArg.id}/`, + method: "PUT", + body: queryArg.protocol, + }), + }), + protocolPartialUpdate: build.mutation< + ProtocolPartialUpdateApiResponse, + ProtocolPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/protocol/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedProtocol, + }), + }), + protocolDestroy: build.mutation< + ProtocolDestroyApiResponse, + ProtocolDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/protocol/${queryArg.id}/`, + method: "DELETE", + }), + }), + sessionRetrieve: build.query< + SessionRetrieveApiResponse, + SessionRetrieveApiArg + >({ + query: () => ({ url: `/api/session/` }), + }), + simulationList: build.query< + SimulationListApiResponse, + SimulationListApiArg + >({ + query: (queryArg) => ({ + url: `/api/simulation/`, + params: { project_id: queryArg.projectId }, + }), + }), + simulationCreate: build.mutation< + SimulationCreateApiResponse, + SimulationCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/simulation/`, + method: "POST", + body: queryArg.simulation, + }), + }), + simulationRetrieve: build.query< + SimulationRetrieveApiResponse, + SimulationRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/simulation/${queryArg.id}/` }), + }), + simulationUpdate: build.mutation< + SimulationUpdateApiResponse, + SimulationUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/simulation/${queryArg.id}/`, + method: "PUT", + body: queryArg.simulation, + }), + }), + simulationPartialUpdate: build.mutation< + SimulationPartialUpdateApiResponse, + SimulationPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/simulation/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedSimulation, + }), + }), + simulationDestroy: build.mutation< + SimulationDestroyApiResponse, + SimulationDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/simulation/${queryArg.id}/`, + method: "DELETE", + }), + }), + subjectList: build.query({ + query: () => ({ url: `/api/subject/` }), + }), + 
subjectCreate: build.mutation< + SubjectCreateApiResponse, + SubjectCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/subject/`, + method: "POST", + body: queryArg.subject, + }), + }), + subjectRetrieve: build.query< + SubjectRetrieveApiResponse, + SubjectRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/subject/${queryArg.id}/` }), + }), + subjectUpdate: build.mutation< + SubjectUpdateApiResponse, + SubjectUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/subject/${queryArg.id}/`, + method: "PUT", + body: queryArg.subject, + }), + }), + subjectPartialUpdate: build.mutation< + SubjectPartialUpdateApiResponse, + SubjectPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/subject/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedSubject, + }), + }), + subjectDestroy: build.mutation< + SubjectDestroyApiResponse, + SubjectDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/subject/${queryArg.id}/`, + method: "DELETE", + }), + }), + unitList: build.query({ + query: (queryArg) => ({ + url: `/api/unit/`, + params: { compound_id: queryArg.compoundId }, + }), + }), + unitCreate: build.mutation({ + query: (queryArg) => ({ + url: `/api/unit/`, + method: "POST", + body: queryArg.unit, + }), + }), + unitRetrieve: build.query({ + query: (queryArg) => ({ + url: `/api/unit/${queryArg.id}/`, + params: { compound_id: queryArg.compoundId }, + }), + }), + unitUpdate: build.mutation({ + query: (queryArg) => ({ + url: `/api/unit/${queryArg.id}/`, + method: "PUT", + body: queryArg.unit, + }), + }), + unitPartialUpdate: build.mutation< + UnitPartialUpdateApiResponse, + UnitPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/unit/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedUnit, + }), + }), + unitDestroy: build.mutation({ + query: (queryArg) => ({ + url: `/api/unit/${queryArg.id}/`, + method: "DELETE", + }), + }), + userList: build.query({ + query: () => ({ url: `/api/user/` }), + }), + userCreate: build.mutation({ + query: (queryArg) => ({ + url: `/api/user/`, + method: "POST", + body: queryArg.user, + }), + }), + userRetrieve: build.query({ + query: (queryArg) => ({ url: `/api/user/${queryArg.id}/` }), + }), + userUpdate: build.mutation({ + query: (queryArg) => ({ + url: `/api/user/${queryArg.id}/`, + method: "PUT", + body: queryArg.user, + }), + }), + userPartialUpdate: build.mutation< + UserPartialUpdateApiResponse, + UserPartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/user/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedUser, + }), + }), + userDestroy: build.mutation({ + query: (queryArg) => ({ + url: `/api/user/${queryArg.id}/`, + method: "DELETE", + }), + }), + variableList: build.query({ + query: (queryArg) => ({ + url: `/api/variable/`, + params: { + dosed_pk_model_id: queryArg.dosedPkModelId, + pd_model_id: queryArg.pdModelId, + project_id: queryArg.projectId, + }, + }), + }), + variableCreate: build.mutation< + VariableCreateApiResponse, + VariableCreateApiArg + >({ + query: (queryArg) => ({ + url: `/api/variable/`, + method: "POST", + body: queryArg.variable, + }), + }), + variableRetrieve: build.query< + VariableRetrieveApiResponse, + VariableRetrieveApiArg + >({ + query: (queryArg) => ({ url: `/api/variable/${queryArg.id}/` }), + }), + variableUpdate: build.mutation< + VariableUpdateApiResponse, + VariableUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/variable/${queryArg.id}/`, + method: "PUT", + body: queryArg.variable, + }), + }), + variablePartialUpdate: build.mutation< + 
VariablePartialUpdateApiResponse, + VariablePartialUpdateApiArg + >({ + query: (queryArg) => ({ + url: `/api/variable/${queryArg.id}/`, + method: "PATCH", + body: queryArg.patchedVariable, + }), + }), + variableDestroy: build.mutation< + VariableDestroyApiResponse, + VariableDestroyApiArg + >({ + query: (queryArg) => ({ + url: `/api/variable/${queryArg.id}/`, + method: "DELETE", + }), + }), + whoamiRetrieve: build.query< + WhoamiRetrieveApiResponse, + WhoamiRetrieveApiArg + >({ + query: () => ({ url: `/api/whoami/` }), + }), + }), + overrideExisting: false, +}); +export { injectedRtkApi as backendApi }; +export type AlgorithmListApiResponse = /** status 200 */ AlgorithmRead[]; +export type AlgorithmListApiArg = void; +export type AlgorithmCreateApiResponse = /** status 201 */ AlgorithmRead; +export type AlgorithmCreateApiArg = { + algorithm: Algorithm; +}; +export type AlgorithmRetrieveApiResponse = /** status 200 */ AlgorithmRead; +export type AlgorithmRetrieveApiArg = { + /** A unique integer value identifying this algorithm. */ + id: number; +}; +export type AlgorithmUpdateApiResponse = /** status 200 */ AlgorithmRead; +export type AlgorithmUpdateApiArg = { + /** A unique integer value identifying this algorithm. */ + id: number; + algorithm: Algorithm; +}; +export type AlgorithmPartialUpdateApiResponse = + /** status 200 */ AlgorithmRead; +export type AlgorithmPartialUpdateApiArg = { + /** A unique integer value identifying this algorithm. */ + id: number; + patchedAlgorithm: PatchedAlgorithm; +}; +export type AlgorithmDestroyApiResponse = unknown; +export type AlgorithmDestroyApiArg = { + /** A unique integer value identifying this algorithm. */ + id: number; +}; +export type AuceCreateApiResponse = unknown; +export type AuceCreateApiArg = void; +export type BiomarkerTypeListApiResponse = + /** status 200 */ BiomarkerTypeRead[]; +export type BiomarkerTypeListApiArg = void; +export type BiomarkerTypeCreateApiResponse = + /** status 201 */ BiomarkerTypeRead; +export type BiomarkerTypeCreateApiArg = { + biomarkerType: BiomarkerType; +}; +export type BiomarkerTypeRetrieveApiResponse = + /** status 200 */ BiomarkerTypeRead; +export type BiomarkerTypeRetrieveApiArg = { + /** A unique integer value identifying this biomarker type. */ + id: number; +}; +export type BiomarkerTypeUpdateApiResponse = + /** status 200 */ BiomarkerTypeRead; +export type BiomarkerTypeUpdateApiArg = { + /** A unique integer value identifying this biomarker type. */ + id: number; + biomarkerType: BiomarkerType; +}; +export type BiomarkerTypePartialUpdateApiResponse = + /** status 200 */ BiomarkerTypeRead; +export type BiomarkerTypePartialUpdateApiArg = { + /** A unique integer value identifying this biomarker type. */ + id: number; + patchedBiomarkerType: PatchedBiomarkerType; +}; +export type BiomarkerTypeDestroyApiResponse = unknown; +export type BiomarkerTypeDestroyApiArg = { + /** A unique integer value identifying this biomarker type. */ + id: number; +}; +export type CombinedModelListApiResponse = + /** status 200 */ CombinedModelRead[]; +export type CombinedModelListApiArg = { + /** Filter results by project ID */ + projectId?: number; +}; +export type CombinedModelCreateApiResponse = + /** status 201 */ CombinedModelRead; +export type CombinedModelCreateApiArg = { + combinedModel: CombinedModel; +}; +export type CombinedModelRetrieveApiResponse = + /** status 200 */ CombinedModelRead; +export type CombinedModelRetrieveApiArg = { + /** A unique integer value identifying this combined model. 
*/ + id: number; +}; +export type CombinedModelUpdateApiResponse = + /** status 200 */ CombinedModelRead; +export type CombinedModelUpdateApiArg = { + /** A unique integer value identifying this combined model. */ + id: number; + combinedModel: CombinedModel; +}; +export type CombinedModelPartialUpdateApiResponse = + /** status 200 */ CombinedModelRead; +export type CombinedModelPartialUpdateApiArg = { + /** A unique integer value identifying this combined model. */ + id: number; + patchedCombinedModel: PatchedCombinedModel; +}; +export type CombinedModelDestroyApiResponse = unknown; +export type CombinedModelDestroyApiArg = { + /** A unique integer value identifying this combined model. */ + id: number; +}; +export type CombinedModelSetParamsToDefaultsUpdateApiResponse = + /** status 200 */ CombinedModelRead; +export type CombinedModelSetParamsToDefaultsUpdateApiArg = { + /** A unique integer value identifying this combined model. */ + id: number; + combinedModel: CombinedModel; +}; +export type CombinedModelSetVariablesFromInferenceUpdateApiResponse = + /** status 200 */ CombinedModelRead; +export type CombinedModelSetVariablesFromInferenceUpdateApiArg = { + /** A unique integer value identifying this combined model. */ + id: number; + combinedModel: CombinedModel; +}; +export type CombinedModelSimulateCreateApiResponse = + /** status 200 */ SimulateResponse; +export type CombinedModelSimulateCreateApiArg = { + id: number; + simulate: Simulate; +}; +export type CompoundListApiResponse = /** status 200 */ CompoundRead[]; +export type CompoundListApiArg = void; +export type CompoundCreateApiResponse = /** status 201 */ CompoundRead; +export type CompoundCreateApiArg = { + compound: Compound; +}; +export type CompoundRetrieveApiResponse = /** status 200 */ CompoundRead; +export type CompoundRetrieveApiArg = { + /** A unique integer value identifying this compound. */ + id: number; +}; +export type CompoundUpdateApiResponse = /** status 200 */ CompoundRead; +export type CompoundUpdateApiArg = { + /** A unique integer value identifying this compound. */ + id: number; + compound: Compound; +}; +export type CompoundPartialUpdateApiResponse = /** status 200 */ CompoundRead; +export type CompoundPartialUpdateApiArg = { + /** A unique integer value identifying this compound. */ + id: number; + patchedCompound: PatchedCompound; +}; +export type CompoundDestroyApiResponse = unknown; +export type CompoundDestroyApiArg = { + /** A unique integer value identifying this compound. */ + id: number; +}; +export type DatasetListApiResponse = /** status 200 */ DatasetRead[]; +export type DatasetListApiArg = void; +export type DatasetCreateApiResponse = /** status 201 */ DatasetRead; +export type DatasetCreateApiArg = { + dataset: Dataset; +}; +export type DatasetRetrieveApiResponse = /** status 200 */ DatasetRead; +export type DatasetRetrieveApiArg = { + /** A unique integer value identifying this dataset. */ + id: number; +}; +export type DatasetUpdateApiResponse = /** status 200 */ DatasetRead; +export type DatasetUpdateApiArg = { + /** A unique integer value identifying this dataset. */ + id: number; + dataset: Dataset; +}; +export type DatasetPartialUpdateApiResponse = /** status 200 */ DatasetRead; +export type DatasetPartialUpdateApiArg = { + /** A unique integer value identifying this dataset. */ + id: number; + patchedDataset: PatchedDataset; +}; +export type DatasetDestroyApiResponse = unknown; +export type DatasetDestroyApiArg = { + /** A unique integer value identifying this dataset. 
*/ + id: number; +}; +export type DatasetCsvUpdateApiResponse = /** status 200 */ DatasetCsv; +export type DatasetCsvUpdateApiArg = { + /** A unique integer value identifying this dataset. */ + id: number; + datasetCsv: DatasetCsv; +}; +export type DoseListApiResponse = /** status 200 */ DoseRead[]; +export type DoseListApiArg = void; +export type DoseCreateApiResponse = /** status 201 */ DoseRead; +export type DoseCreateApiArg = { + dose: Dose; +}; +export type DoseRetrieveApiResponse = /** status 200 */ DoseRead; +export type DoseRetrieveApiArg = { + /** A unique integer value identifying this dose. */ + id: number; +}; +export type DoseUpdateApiResponse = /** status 200 */ DoseRead; +export type DoseUpdateApiArg = { + /** A unique integer value identifying this dose. */ + id: number; + dose: Dose; +}; +export type DosePartialUpdateApiResponse = /** status 200 */ DoseRead; +export type DosePartialUpdateApiArg = { + /** A unique integer value identifying this dose. */ + id: number; + patchedDose: PatchedDose; +}; +export type DoseDestroyApiResponse = unknown; +export type DoseDestroyApiArg = { + /** A unique integer value identifying this dose. */ + id: number; +}; +export type InferenceListApiResponse = /** status 200 */ InferenceRead[]; +export type InferenceListApiArg = void; +export type InferenceCreateApiResponse = /** status 201 */ InferenceRead; +export type InferenceCreateApiArg = { + inference: Inference; +}; +export type InferenceRetrieveApiResponse = /** status 200 */ InferenceRead; +export type InferenceRetrieveApiArg = { + /** A unique integer value identifying this inference. */ + id: number; +}; +export type InferenceUpdateApiResponse = /** status 200 */ InferenceRead; +export type InferenceUpdateApiArg = { + /** A unique integer value identifying this inference. */ + id: number; + inference: Inference; +}; +export type InferencePartialUpdateApiResponse = + /** status 200 */ InferenceRead; +export type InferencePartialUpdateApiArg = { + /** A unique integer value identifying this inference. */ + id: number; + patchedInference: PatchedInference; +}; +export type InferenceDestroyApiResponse = unknown; +export type InferenceDestroyApiArg = { + /** A unique integer value identifying this inference. */ + id: number; +}; +export type InferenceStopCreateApiResponse = unknown; +export type InferenceStopCreateApiArg = { + id: number; +}; +export type InferenceWizardCreateApiResponse = unknown; +export type InferenceWizardCreateApiArg = void; +export type InferenceChainListApiResponse = + /** status 200 */ InferenceChainRead[]; +export type InferenceChainListApiArg = void; +export type InferenceChainCreateApiResponse = + /** status 201 */ InferenceChainRead; +export type InferenceChainCreateApiArg = { + inferenceChain: InferenceChain; +}; +export type InferenceChainRetrieveApiResponse = + /** status 200 */ InferenceChainRead; +export type InferenceChainRetrieveApiArg = { + /** A unique integer value identifying this inference chain. */ + id: number; +}; +export type InferenceChainUpdateApiResponse = + /** status 200 */ InferenceChainRead; +export type InferenceChainUpdateApiArg = { + /** A unique integer value identifying this inference chain. */ + id: number; + inferenceChain: InferenceChain; +}; +export type InferenceChainPartialUpdateApiResponse = + /** status 200 */ InferenceChainRead; +export type InferenceChainPartialUpdateApiArg = { + /** A unique integer value identifying this inference chain. 
*/ + id: number; + patchedInferenceChain: PatchedInferenceChain; +}; +export type InferenceChainDestroyApiResponse = unknown; +export type InferenceChainDestroyApiArg = { + /** A unique integer value identifying this inference chain. */ + id: number; +}; +export type NcaCreateApiResponse = unknown; +export type NcaCreateApiArg = void; +export type PharmacodynamicListApiResponse = + /** status 200 */ PharmacodynamicRead[]; +export type PharmacodynamicListApiArg = void; +export type PharmacodynamicCreateApiResponse = + /** status 201 */ PharmacodynamicRead; +export type PharmacodynamicCreateApiArg = { + pharmacodynamic: Pharmacodynamic; +}; +export type PharmacodynamicRetrieveApiResponse = + /** status 200 */ PharmacodynamicRead; +export type PharmacodynamicRetrieveApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; +}; +export type PharmacodynamicUpdateApiResponse = + /** status 200 */ PharmacodynamicRead; +export type PharmacodynamicUpdateApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; + pharmacodynamic: Pharmacodynamic; +}; +export type PharmacodynamicPartialUpdateApiResponse = + /** status 200 */ PharmacodynamicRead; +export type PharmacodynamicPartialUpdateApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; + patchedPharmacodynamic: PatchedPharmacodynamic; +}; +export type PharmacodynamicDestroyApiResponse = unknown; +export type PharmacodynamicDestroyApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; +}; +export type PharmacodynamicMmtUpdateApiResponse = + /** status 200 */ PharmacodynamicRead; +export type PharmacodynamicMmtUpdateApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; + pharmacodynamic: Pharmacodynamic; +}; +export type PharmacodynamicSbmlUpdateApiResponse = + /** status 200 */ PharmacodynamicSbml; +export type PharmacodynamicSbmlUpdateApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; + pharmacodynamicSbml: PharmacodynamicSbmlWrite; +}; +export type PharmacodynamicSetVariablesFromInferenceUpdateApiResponse = + /** status 200 */ PharmacodynamicRead; +export type PharmacodynamicSetVariablesFromInferenceUpdateApiArg = { + /** A unique integer value identifying this pharmacodynamic model. */ + id: number; + pharmacodynamic: Pharmacodynamic; +}; +export type PharmacodynamicSimulateCreateApiResponse = + /** status 200 */ SimulateResponse; +export type PharmacodynamicSimulateCreateApiArg = { + id: number; + simulate: Simulate; +}; +export type PharmacokineticListApiResponse = + /** status 200 */ PharmacokineticRead[]; +export type PharmacokineticListApiArg = void; +export type PharmacokineticCreateApiResponse = + /** status 201 */ PharmacokineticRead; +export type PharmacokineticCreateApiArg = { + pharmacokinetic: Pharmacokinetic; +}; +export type PharmacokineticRetrieveApiResponse = + /** status 200 */ PharmacokineticRead; +export type PharmacokineticRetrieveApiArg = { + /** A unique integer value identifying this pharmacokinetic model. */ + id: number; +}; +export type PharmacokineticUpdateApiResponse = + /** status 200 */ PharmacokineticRead; +export type PharmacokineticUpdateApiArg = { + /** A unique integer value identifying this pharmacokinetic model. 
*/ + id: number; + pharmacokinetic: Pharmacokinetic; +}; +export type PharmacokineticPartialUpdateApiResponse = + /** status 200 */ PharmacokineticRead; +export type PharmacokineticPartialUpdateApiArg = { + /** A unique integer value identifying this pharmacokinetic model. */ + id: number; + patchedPharmacokinetic: PatchedPharmacokinetic; +}; +export type PharmacokineticDestroyApiResponse = unknown; +export type PharmacokineticDestroyApiArg = { + /** A unique integer value identifying this pharmacokinetic model. */ + id: number; +}; +export type ProjectListApiResponse = /** status 200 */ ProjectRead[]; +export type ProjectListApiArg = void; +export type ProjectCreateApiResponse = /** status 201 */ ProjectRead; +export type ProjectCreateApiArg = { + project: Project; +}; +export type ProjectRetrieveApiResponse = /** status 200 */ ProjectRead; +export type ProjectRetrieveApiArg = { + /** A unique integer value identifying this project. */ + id: number; +}; +export type ProjectUpdateApiResponse = /** status 200 */ ProjectRead; +export type ProjectUpdateApiArg = { + /** A unique integer value identifying this project. */ + id: number; + project: Project; +}; +export type ProjectPartialUpdateApiResponse = /** status 200 */ ProjectRead; +export type ProjectPartialUpdateApiArg = { + /** A unique integer value identifying this project. */ + id: number; + patchedProject: PatchedProject; +}; +export type ProjectDestroyApiResponse = unknown; +export type ProjectDestroyApiArg = { + /** A unique integer value identifying this project. */ + id: number; +}; +export type ProjectMonolixUpdateApiResponse = /** status 200 */ MonolixRead; +export type ProjectMonolixUpdateApiArg = { + /** A unique integer value identifying this project. */ + id: number; + monolix: MonolixWrite; +}; +export type ProjectAccessListApiResponse = + /** status 200 */ ProjectAccessRead[]; +export type ProjectAccessListApiArg = void; +export type ProjectAccessCreateApiResponse = + /** status 201 */ ProjectAccessRead; +export type ProjectAccessCreateApiArg = { + projectAccess: ProjectAccess; +}; +export type ProjectAccessRetrieveApiResponse = + /** status 200 */ ProjectAccessRead; +export type ProjectAccessRetrieveApiArg = { + /** A unique integer value identifying this project access. */ + id: number; +}; +export type ProjectAccessUpdateApiResponse = + /** status 200 */ ProjectAccessRead; +export type ProjectAccessUpdateApiArg = { + /** A unique integer value identifying this project access. */ + id: number; + projectAccess: ProjectAccess; +}; +export type ProjectAccessPartialUpdateApiResponse = + /** status 200 */ ProjectAccessRead; +export type ProjectAccessPartialUpdateApiArg = { + /** A unique integer value identifying this project access. */ + id: number; + patchedProjectAccess: PatchedProjectAccess; +}; +export type ProjectAccessDestroyApiResponse = unknown; +export type ProjectAccessDestroyApiArg = { + /** A unique integer value identifying this project access. */ + id: number; +}; +export type ProtocolListApiResponse = /** status 200 */ ProtocolRead[]; +export type ProtocolListApiArg = { + /** Filter results by project ID */ + projectId?: number; +}; +export type ProtocolCreateApiResponse = /** status 201 */ ProtocolRead; +export type ProtocolCreateApiArg = { + protocol: Protocol; +}; +export type ProtocolRetrieveApiResponse = /** status 200 */ ProtocolRead; +export type ProtocolRetrieveApiArg = { + /** A unique integer value identifying this protocol. 
*/ + id: number; +}; +export type ProtocolUpdateApiResponse = /** status 200 */ ProtocolRead; +export type ProtocolUpdateApiArg = { + /** A unique integer value identifying this protocol. */ + id: number; + protocol: Protocol; +}; +export type ProtocolPartialUpdateApiResponse = /** status 200 */ ProtocolRead; +export type ProtocolPartialUpdateApiArg = { + /** A unique integer value identifying this protocol. */ + id: number; + patchedProtocol: PatchedProtocol; +}; +export type ProtocolDestroyApiResponse = unknown; +export type ProtocolDestroyApiArg = { + /** A unique integer value identifying this protocol. */ + id: number; +}; +export type SessionRetrieveApiResponse = unknown; +export type SessionRetrieveApiArg = void; +export type SimulationListApiResponse = /** status 200 */ SimulationRead[]; +export type SimulationListApiArg = { + /** Filter results by project ID */ + projectId?: number; +}; +export type SimulationCreateApiResponse = /** status 201 */ SimulationRead; +export type SimulationCreateApiArg = { + simulation: Simulation; +}; +export type SimulationRetrieveApiResponse = /** status 200 */ SimulationRead; +export type SimulationRetrieveApiArg = { + /** A unique integer value identifying this simulation. */ + id: number; +}; +export type SimulationUpdateApiResponse = /** status 200 */ SimulationRead; +export type SimulationUpdateApiArg = { + /** A unique integer value identifying this simulation. */ + id: number; + simulation: Simulation; +}; +export type SimulationPartialUpdateApiResponse = + /** status 200 */ SimulationRead; +export type SimulationPartialUpdateApiArg = { + /** A unique integer value identifying this simulation. */ + id: number; + patchedSimulation: PatchedSimulation; +}; +export type SimulationDestroyApiResponse = unknown; +export type SimulationDestroyApiArg = { + /** A unique integer value identifying this simulation. */ + id: number; +}; +export type SubjectListApiResponse = /** status 200 */ SubjectRead[]; +export type SubjectListApiArg = void; +export type SubjectCreateApiResponse = /** status 201 */ SubjectRead; +export type SubjectCreateApiArg = { + subject: Subject; +}; +export type SubjectRetrieveApiResponse = /** status 200 */ SubjectRead; +export type SubjectRetrieveApiArg = { + /** A unique integer value identifying this subject. */ + id: number; +}; +export type SubjectUpdateApiResponse = /** status 200 */ SubjectRead; +export type SubjectUpdateApiArg = { + /** A unique integer value identifying this subject. */ + id: number; + subject: Subject; +}; +export type SubjectPartialUpdateApiResponse = /** status 200 */ SubjectRead; +export type SubjectPartialUpdateApiArg = { + /** A unique integer value identifying this subject. */ + id: number; + patchedSubject: PatchedSubject; +}; +export type SubjectDestroyApiResponse = unknown; +export type SubjectDestroyApiArg = { + /** A unique integer value identifying this subject. */ + id: number; +}; +export type UnitListApiResponse = /** status 200 */ UnitRead[]; +export type UnitListApiArg = { + /** Enable conversions based on compound information */ + compoundId?: number; +}; +export type UnitCreateApiResponse = /** status 201 */ UnitRead; +export type UnitCreateApiArg = { + unit: Unit; +}; +export type UnitRetrieveApiResponse = /** status 200 */ UnitRead; +export type UnitRetrieveApiArg = { + /** Enable conversions based on compound information */ + compoundId?: number; + /** A unique integer value identifying this unit. 
*/ + id: number; +}; +export type UnitUpdateApiResponse = /** status 200 */ UnitRead; +export type UnitUpdateApiArg = { + /** A unique integer value identifying this unit. */ + id: number; + unit: Unit; +}; +export type UnitPartialUpdateApiResponse = /** status 200 */ UnitRead; +export type UnitPartialUpdateApiArg = { + /** A unique integer value identifying this unit. */ + id: number; + patchedUnit: PatchedUnit; +}; +export type UnitDestroyApiResponse = unknown; +export type UnitDestroyApiArg = { + /** A unique integer value identifying this unit. */ + id: number; +}; +export type UserListApiResponse = /** status 200 */ UserRead[]; +export type UserListApiArg = void; +export type UserCreateApiResponse = /** status 201 */ UserRead; +export type UserCreateApiArg = { + user: User; +}; +export type UserRetrieveApiResponse = /** status 200 */ UserRead; +export type UserRetrieveApiArg = { + /** A unique integer value identifying this user. */ + id: number; +}; +export type UserUpdateApiResponse = /** status 200 */ UserRead; +export type UserUpdateApiArg = { + /** A unique integer value identifying this user. */ + id: number; + user: User; +}; +export type UserPartialUpdateApiResponse = /** status 200 */ UserRead; +export type UserPartialUpdateApiArg = { + /** A unique integer value identifying this user. */ + id: number; + patchedUser: PatchedUser; +}; +export type UserDestroyApiResponse = unknown; +export type UserDestroyApiArg = { + /** A unique integer value identifying this user. */ + id: number; +}; +export type VariableListApiResponse = /** status 200 */ VariableRead[]; +export type VariableListApiArg = { + /** Filter results by dosed_pk_model ID */ + dosedPkModelId?: number; + /** Filter results by pd_model ID */ + pdModelId?: number; + /** Filter results by project ID */ + projectId?: number; +}; +export type VariableCreateApiResponse = /** status 201 */ VariableRead; +export type VariableCreateApiArg = { + variable: Variable; +}; +export type VariableRetrieveApiResponse = /** status 200 */ VariableRead; +export type VariableRetrieveApiArg = { + /** A unique integer value identifying this variable. */ + id: number; +}; +export type VariableUpdateApiResponse = /** status 200 */ VariableRead; +export type VariableUpdateApiArg = { + /** A unique integer value identifying this variable. */ + id: number; + variable: Variable; +}; +export type VariablePartialUpdateApiResponse = /** status 200 */ VariableRead; +export type VariablePartialUpdateApiArg = { + /** A unique integer value identifying this variable. */ + id: number; + patchedVariable: PatchedVariable; +}; +export type VariableDestroyApiResponse = unknown; +export type VariableDestroyApiArg = { + /** A unique integer value identifying this variable. 
*/ + id: number; +}; +export type WhoamiRetrieveApiResponse = unknown; +export type WhoamiRetrieveApiArg = void; +export type CategoryEnum = "SA" | "OP" | "OT"; +export type Algorithm = { + name: string; + category: CategoryEnum; +}; +export type AlgorithmRead = { + id: number; + name: string; + category: CategoryEnum; +}; +export type PatchedAlgorithm = { + name?: string; + category?: CategoryEnum; +}; +export type PatchedAlgorithmRead = { + id?: number; + name?: string; + category?: CategoryEnum; +}; +export type BiomarkerType = { + name: string; + description?: string | null; + display?: boolean; + color?: number; + axis?: boolean; + stored_unit: number; + dataset: number; + display_unit: number; + stored_time_unit: number; + display_time_unit: number; +}; +export type BiomarkerTypeRead = { + id: number; + data: { + [key: string]: any[]; + } | null; + is_continuous: boolean; + is_categorical: boolean; + name: string; + description?: string | null; + display?: boolean; + color?: number; + axis?: boolean; + stored_unit: number; + dataset: number; + display_unit: number; + stored_time_unit: number; + display_time_unit: number; +}; +export type PatchedBiomarkerType = { + name?: string; + description?: string | null; + display?: boolean; + color?: number; + axis?: boolean; + stored_unit?: number; + dataset?: number; + display_unit?: number; + stored_time_unit?: number; + display_time_unit?: number; +}; +export type PatchedBiomarkerTypeRead = { + id?: number; + data?: { + [key: string]: any[]; + } | null; + is_continuous?: boolean; + is_categorical?: boolean; + name?: string; + description?: string | null; + display?: boolean; + color?: number; + axis?: boolean; + stored_unit?: number; + dataset?: number; + display_unit?: number; + stored_time_unit?: number; + display_time_unit?: number; +}; +export type PkpdMapping = { + pkpd_model: number; + pk_variable: number; + pd_variable: number; +}; +export type PkpdMappingRead = { + id: number; + datetime: string; + read_only: boolean; + pkpd_model: number; + pk_variable: number; + pd_variable: number; +}; +export type TypeEnum = "RO" | "FUP" | "BPR" | "TLG"; +export type DerivedVariable = { + read_only?: boolean; + datetime?: string | null; + type: TypeEnum; + pkpd_model: number; + pk_variable: number; +}; +export type DerivedVariableRead = { + id: number; + read_only?: boolean; + datetime?: string | null; + type: TypeEnum; + pkpd_model: number; + pk_variable: number; +}; +export type CombinedModelSpeciesEnum = "H" | "R" | "N" | "M"; +export type CombinedModel = { + mappings: PkpdMapping[]; + derived_variables: DerivedVariable[]; + read_only?: boolean; + datetime?: string | null; + name: string; + species?: CombinedModelSpeciesEnum; + has_saturation?: boolean; + has_effect?: boolean; + has_lag?: boolean; + has_bioavailability?: boolean; + has_hill_coefficient?: boolean; + time_max?: number; + project?: number | null; + pk_model?: number | null; + pd_model?: number | null; + pd_model2?: number | null; +}; +export type CombinedModelRead = { + id: number; + mappings: PkpdMappingRead[]; + derived_variables: DerivedVariableRead[]; + components: string; + variables: number[]; + mmt: string; + time_unit: number; + is_library_model: boolean; + read_only?: boolean; + datetime?: string | null; + name: string; + species?: CombinedModelSpeciesEnum; + has_saturation?: boolean; + has_effect?: boolean; + has_lag?: boolean; + has_bioavailability?: boolean; + has_hill_coefficient?: boolean; + time_max?: number; + project?: number | null; + pk_model?: number | null; 
+ pd_model?: number | null; + pd_model2?: number | null; +}; +export type PatchedCombinedModel = { + mappings?: PkpdMapping[]; + derived_variables?: DerivedVariable[]; + read_only?: boolean; + datetime?: string | null; + name?: string; + species?: CombinedModelSpeciesEnum; + has_saturation?: boolean; + has_effect?: boolean; + has_lag?: boolean; + has_bioavailability?: boolean; + has_hill_coefficient?: boolean; + time_max?: number; + project?: number | null; + pk_model?: number | null; + pd_model?: number | null; + pd_model2?: number | null; +}; +export type PatchedCombinedModelRead = { + id?: number; + mappings?: PkpdMappingRead[]; + derived_variables?: DerivedVariableRead[]; + components?: string; + variables?: number[]; + mmt?: string; + time_unit?: number; + is_library_model?: boolean; + read_only?: boolean; + datetime?: string | null; + name?: string; + species?: CombinedModelSpeciesEnum; + has_saturation?: boolean; + has_effect?: boolean; + has_lag?: boolean; + has_bioavailability?: boolean; + has_hill_coefficient?: boolean; + time_max?: number; + project?: number | null; + pk_model?: number | null; + pd_model?: number | null; + pd_model2?: number | null; +}; +export type SimulateResponse = { + time: number[]; + outputs: { + [key: string]: number[]; + }; +}; +export type ErrorResponse = { + error: string; +}; +export type Simulate = { + outputs: string[]; + variables: { + [key: string]: number; + }; + time_max?: number; +}; +export type Efficacy = { + name?: string; + c50: number; + hill_coefficient?: number; + c50_unit: number; + compound: number; +}; +export type EfficacyRead = { + id: number; + name?: string; + c50: number; + hill_coefficient?: number; + c50_unit: number; + compound: number; +}; +export type CompoundTypeEnum = "SM" | "LM"; +export type IntrinsicClearanceAssayEnum = "MS" | "HC"; +export type Compound = { + efficacy_experiments: Efficacy[]; + name: string; + description?: string; + molecular_mass?: number; + compound_type?: CompoundTypeEnum; + fraction_unbound_plasma?: number | null; + blood_to_plasma_ratio?: number | null; + intrinsic_clearance?: number | null; + intrinsic_clearance_assay?: IntrinsicClearanceAssayEnum; + fraction_unbound_including_cells?: number | null; + target_molecular_mass?: number; + target_concentration?: number | null; + dissociation_constant?: number | null; + is_soluble?: boolean; + use_efficacy?: number | null; + molecular_mass_unit?: number; + intrinsic_clearance_unit?: number; + target_molecular_mass_unit?: number; + target_concentration_unit?: number; + dissociation_unit?: number; +}; +export type CompoundRead = { + id: number; + efficacy_experiments: EfficacyRead[]; + name: string; + description?: string; + molecular_mass?: number; + compound_type?: CompoundTypeEnum; + fraction_unbound_plasma?: number | null; + blood_to_plasma_ratio?: number | null; + intrinsic_clearance?: number | null; + intrinsic_clearance_assay?: IntrinsicClearanceAssayEnum; + fraction_unbound_including_cells?: number | null; + target_molecular_mass?: number; + target_concentration?: number | null; + dissociation_constant?: number | null; + is_soluble?: boolean; + use_efficacy?: number | null; + molecular_mass_unit?: number; + intrinsic_clearance_unit?: number; + target_molecular_mass_unit?: number; + target_concentration_unit?: number; + dissociation_unit?: number; +}; +export type PatchedCompound = { + efficacy_experiments?: Efficacy[]; + name?: string; + description?: string; + molecular_mass?: number; + compound_type?: CompoundTypeEnum; + 
fraction_unbound_plasma?: number | null; + blood_to_plasma_ratio?: number | null; + intrinsic_clearance?: number | null; + intrinsic_clearance_assay?: IntrinsicClearanceAssayEnum; + fraction_unbound_including_cells?: number | null; + target_molecular_mass?: number; + target_concentration?: number | null; + dissociation_constant?: number | null; + is_soluble?: boolean; + use_efficacy?: number | null; + molecular_mass_unit?: number; + intrinsic_clearance_unit?: number; + target_molecular_mass_unit?: number; + target_concentration_unit?: number; + dissociation_unit?: number; +}; +export type PatchedCompoundRead = { + id?: number; + efficacy_experiments?: EfficacyRead[]; + name?: string; + description?: string; + molecular_mass?: number; + compound_type?: CompoundTypeEnum; + fraction_unbound_plasma?: number | null; + blood_to_plasma_ratio?: number | null; + intrinsic_clearance?: number | null; + intrinsic_clearance_assay?: IntrinsicClearanceAssayEnum; + fraction_unbound_including_cells?: number | null; + target_molecular_mass?: number; + target_concentration?: number | null; + dissociation_constant?: number | null; + is_soluble?: boolean; + use_efficacy?: number | null; + molecular_mass_unit?: number; + intrinsic_clearance_unit?: number; + target_molecular_mass_unit?: number; + target_concentration_unit?: number; + dissociation_unit?: number; +}; +export type Dataset = { + name: string; + datetime?: string | null; + description?: string; + project?: number | null; +}; +export type Dose = { + start_time: number; + amount: number; + duration?: number; + repeats?: number; + repeat_interval?: number; + read_only?: boolean; + datetime?: string | null; +}; +export type DoseRead = { + id: number; + start_time: number; + amount: number; + duration?: number; + repeats?: number; + repeat_interval?: number; + read_only?: boolean; + datetime?: string | null; +}; +export type DoseTypeEnum = "D" | "I"; +export type Protocol = { + doses: Dose[]; + read_only?: boolean; + datetime?: string | null; + name: string; + dose_type?: DoseTypeEnum; + project?: number | null; + compound?: number | null; + time_unit?: number | null; + amount_unit?: number | null; +}; +export type ProtocolRead = { + id: number; + doses: DoseRead[]; + dataset: string; + variables: number[]; + subjects: number[]; + read_only?: boolean; + datetime?: string | null; + name: string; + dose_type?: DoseTypeEnum; + project?: number | null; + compound?: number | null; + time_unit?: number | null; + amount_unit?: number | null; +}; +export type DatasetRead = { + id: number; + biomarker_types: number[]; + subjects: number[]; + protocols: ProtocolRead[]; + name: string; + datetime?: string | null; + description?: string; + project?: number | null; +}; +export type PatchedDataset = { + name?: string; + datetime?: string | null; + description?: string; + project?: number | null; +}; +export type PatchedDatasetRead = { + id?: number; + biomarker_types?: number[]; + subjects?: number[]; + protocols?: ProtocolRead[]; + name?: string; + datetime?: string | null; + description?: string; + project?: number | null; +}; +export type DatasetCsv = { + csv: string; +}; +export type PatchedDose = { + start_time?: number; + amount?: number; + duration?: number; + repeats?: number; + repeat_interval?: number; + read_only?: boolean; + datetime?: string | null; +}; +export type PatchedDoseRead = { + id?: number; + start_time?: number; + amount?: number; + duration?: number; + repeats?: number; + repeat_interval?: number; + read_only?: boolean; + datetime?: string | 
null; +}; +export type LogLikelihoodParameter = { + name: string; + parent_index?: number | null; + child_index?: number; + length?: number | null; + child: number; + variable?: number | null; +}; +export type LogLikelihoodParameterRead = { + id: number; + name: string; + parent_index?: number | null; + child_index?: number; + length?: number | null; + parent: number; + child: number; + variable?: number | null; +}; +export type FormEnum = "N" | "U" | "LN" | "F" | "S" | "E" | "M"; +export type LogLikelihood = { + parameters: LogLikelihoodParameter[]; + name: string; + description?: string | null; + value?: number | null; + time_independent_data?: boolean; + observed?: boolean; + form?: FormEnum; + variable?: number | null; + biomarker_type?: number | null; + protocol_filter?: number | null; +}; +export type LogLikelihoodRead = { + id: number; + parameters: LogLikelihoodParameterRead[]; + model: string[] | null; + dataset: number | null; + time_variable: number | null; + is_a_prior: boolean; + name: string; + description?: string | null; + value?: number | null; + time_independent_data?: boolean; + observed?: boolean; + form?: FormEnum; + inference: number; + variable?: number | null; + biomarker_type?: number | null; + protocol_filter?: number | null; + children: number[]; +}; +export type InitializationStrategyEnum = "D" | "R" | "F"; +export type Inference = { + log_likelihoods: LogLikelihood[]; + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + initialization_strategy?: InitializationStrategyEnum; + number_of_chains?: number; + max_number_of_iterations?: number; + burn_in?: number; + number_of_iterations?: number; + time_elapsed?: number; + number_of_function_evals?: number; + task_id?: string | null; + metadata?: { + [key: string]: any; + }; + project: number; + algorithm?: number; + initialization_inference?: number | null; +}; +export type InferenceRead = { + id: number; + log_likelihoods: LogLikelihoodRead[]; + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + initialization_strategy?: InitializationStrategyEnum; + number_of_chains?: number; + max_number_of_iterations?: number; + burn_in?: number; + number_of_iterations?: number; + time_elapsed?: number; + number_of_function_evals?: number; + task_id?: string | null; + metadata?: { + [key: string]: any; + }; + project: number; + algorithm?: number; + initialization_inference?: number | null; +}; +export type PatchedInference = { + log_likelihoods?: LogLikelihood[]; + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + initialization_strategy?: InitializationStrategyEnum; + number_of_chains?: number; + max_number_of_iterations?: number; + burn_in?: number; + number_of_iterations?: number; + time_elapsed?: number; + number_of_function_evals?: number; + task_id?: string | null; + metadata?: { + [key: string]: any; + }; + project?: number; + algorithm?: number; + initialization_inference?: number | null; +}; +export type PatchedInferenceRead = { + id?: number; + log_likelihoods?: LogLikelihoodRead[]; + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + initialization_strategy?: InitializationStrategyEnum; + number_of_chains?: number; + max_number_of_iterations?: number; + burn_in?: number; + number_of_iterations?: number; + time_elapsed?: number; + number_of_function_evals?: number; + task_id?: string | null; + metadata?: { + [key: string]: any; + }; + project?: number; + 
algorithm?: number; + initialization_inference?: number | null; +}; +export type InferenceChain = { + inference: number; +}; +export type InferenceChainRead = { + id: number; + data: string; + outputs: string; + inference: number; +}; +export type PatchedInferenceChain = { + inference?: number; +}; +export type PatchedInferenceChainRead = { + id?: number; + data?: string; + outputs?: string; + inference?: number; +}; +export type Pharmacodynamic = { + mmt?: string; + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + time_max?: number; + is_library_model?: boolean; + project?: number | null; +}; +export type PharmacodynamicRead = { + id: number; + components: string; + variables: number[]; + mmt?: string; + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + time_max?: number; + is_library_model?: boolean; + project?: number | null; +}; +export type PatchedPharmacodynamic = { + mmt?: string; + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + time_max?: number; + is_library_model?: boolean; + project?: number | null; +}; +export type PatchedPharmacodynamicRead = { + id?: number; + components?: string; + variables?: number[]; + mmt?: string; + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + time_max?: number; + is_library_model?: boolean; + project?: number | null; +}; +export type PharmacodynamicSbml = {}; +export type PharmacodynamicSbmlWrite = { + sbml: string; +}; +export type Pharmacokinetic = { + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + mmt?: string; + time_max?: number; + is_library_model?: boolean; +}; +export type PharmacokineticRead = { + id: number; + read_only?: boolean; + datetime?: string | null; + name: string; + description?: string; + mmt?: string; + time_max?: number; + is_library_model?: boolean; +}; +export type PatchedPharmacokinetic = { + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + mmt?: string; + time_max?: number; + is_library_model?: boolean; +}; +export type PatchedPharmacokineticRead = { + id?: number; + read_only?: boolean; + datetime?: string | null; + name?: string; + description?: string; + mmt?: string; + time_max?: number; + is_library_model?: boolean; +}; +export type ProjectAccess = { + read_only?: boolean; + user: number; +}; +export type ProjectAccessRead = { + id: number; + read_only?: boolean; + user: number; + project: number; +}; +export type ProjectSpeciesEnum = "M" | "R" | "H" | "K" | "O"; +export type Project = { + user_access: ProjectAccess[]; + name: string; + description?: string; + species?: ProjectSpeciesEnum; + compound: number; +}; +export type ProjectRead = { + id: number; + user_access: ProjectAccessRead[]; + protocols: number[]; + name: string; + description?: string; + created: string; + species?: ProjectSpeciesEnum; + compound: number; + users: number[]; +}; +export type PatchedProject = { + user_access?: ProjectAccess[]; + name?: string; + description?: string; + species?: ProjectSpeciesEnum; + compound?: number; +}; +export type PatchedProjectRead = { + id?: number; + user_access?: ProjectAccessRead[]; + protocols?: number[]; + name?: string; + description?: string; + created?: string; + species?: ProjectSpeciesEnum; + compound?: number; + users?: number[]; +}; +export type Monolix = {}; +export type MonolixRead = { + data: string; + pd_model: string; + pk_model: string; +}; 
+export type MonolixWrite = { + data_csv: string; + model_txt: string; + project_mlxtran: string; +}; +export type PatchedProjectAccess = { + read_only?: boolean; + user?: number; +}; +export type PatchedProjectAccessRead = { + id?: number; + read_only?: boolean; + user?: number; + project?: number; +}; +export type PatchedProtocol = { + doses?: Dose[]; + read_only?: boolean; + datetime?: string | null; + name?: string; + dose_type?: DoseTypeEnum; + project?: number | null; + compound?: number | null; + time_unit?: number | null; + amount_unit?: number | null; +}; +export type PatchedProtocolRead = { + id?: number; + doses?: DoseRead[]; + dataset?: string; + variables?: number[]; + subjects?: number[]; + read_only?: boolean; + datetime?: string | null; + name?: string; + dose_type?: DoseTypeEnum; + project?: number | null; + compound?: number | null; + time_unit?: number | null; + amount_unit?: number | null; +}; +export type SimulationSlider = { + variable: number; +}; +export type SimulationSliderRead = { + id: number; + variable: number; +}; +export type SimulationYAxis = { + right?: boolean; + variable: number; +}; +export type SimulationYAxisRead = { + id: number; + right?: boolean; + variable: number; +}; +export type SimulationCxLine = { + value: number; +}; +export type SimulationCxLineRead = { + id: number; + value: number; +}; +export type Y2ScaleEnum = "lin" | "lg2" | "lg10" | "ln"; +export type SimulationPlot = { + y_axes: SimulationYAxis[]; + cx_lines: SimulationCxLine[]; + index: number; + x_scale?: Y2ScaleEnum; + y_scale?: Y2ScaleEnum; + y2_scale?: Y2ScaleEnum; + min?: number | null; + max?: number | null; + min2?: number | null; + max2?: number | null; + x_unit: number; + y_unit?: number | null; + y_unit2?: number | null; +}; +export type SimulationPlotRead = { + id: number; + y_axes: SimulationYAxisRead[]; + cx_lines: SimulationCxLineRead[]; + index: number; + x_scale?: Y2ScaleEnum; + y_scale?: Y2ScaleEnum; + y2_scale?: Y2ScaleEnum; + min?: number | null; + max?: number | null; + min2?: number | null; + max2?: number | null; + x_unit: number; + y_unit?: number | null; + y_unit2?: number | null; +}; +export type Simulation = { + sliders: SimulationSlider[]; + plots: SimulationPlot[]; + name: string; + nrows?: number; + ncols?: number; + time_max?: number; + abs_tolerance?: number; + rel_tolerance?: number; + project: number; + time_max_unit: number; +}; +export type SimulationRead = { + id: number; + sliders: SimulationSliderRead[]; + plots: SimulationPlotRead[]; + name: string; + nrows?: number; + ncols?: number; + time_max?: number; + abs_tolerance?: number; + rel_tolerance?: number; + project: number; + time_max_unit: number; +}; +export type PatchedSimulation = { + sliders?: SimulationSlider[]; + plots?: SimulationPlot[]; + name?: string; + nrows?: number; + ncols?: number; + time_max?: number; + abs_tolerance?: number; + rel_tolerance?: number; + project?: number; + time_max_unit?: number; +}; +export type PatchedSimulationRead = { + id?: number; + sliders?: SimulationSliderRead[]; + plots?: SimulationPlotRead[]; + name?: string; + nrows?: number; + ncols?: number; + time_max?: number; + abs_tolerance?: number; + rel_tolerance?: number; + project?: number; + time_max_unit?: number; +}; +export type Subject = { + id_in_dataset: number; + shape?: number; + display?: boolean; + metadata?: string; + dataset: number; + protocol?: number | null; +}; +export type SubjectRead = { + id: number; + id_in_dataset: number; + shape?: number; + display?: boolean; + metadata?: string; 
+ dataset: number; + protocol?: number | null; +}; +export type PatchedSubject = { + id_in_dataset?: number; + shape?: number; + display?: boolean; + metadata?: string; + dataset?: number; + protocol?: number | null; +}; +export type PatchedSubjectRead = { + id?: number; + id_in_dataset?: number; + shape?: number; + display?: boolean; + metadata?: string; + dataset?: number; + protocol?: number | null; +}; +export type Unit = { + symbol: string; + g?: number; + m?: number; + s?: number; + A?: number; + K?: number; + cd?: number; + mol?: number; + multiplier?: number; +}; +export type UnitRead = { + id: number; + compatible_units: { + [key: string]: string; + }[]; + symbol: string; + g?: number; + m?: number; + s?: number; + A?: number; + K?: number; + cd?: number; + mol?: number; + multiplier?: number; +}; +export type PatchedUnit = { + symbol?: string; + g?: number; + m?: number; + s?: number; + A?: number; + K?: number; + cd?: number; + mol?: number; + multiplier?: number; +}; +export type PatchedUnitRead = { + id?: number; + compatible_units?: { + [key: string]: string; + }[]; + symbol?: string; + g?: number; + m?: number; + s?: number; + A?: number; + K?: number; + cd?: number; + mol?: number; + multiplier?: number; +}; +export type User = { + username: string; + first_name?: string; + last_name?: string; + email?: string; +}; +export type Profile = { + user: number; +}; +export type ProfileRead = { + id: number; + user: number; +}; +export type UserRead = { + id: number; + username: string; + first_name?: string; + last_name?: string; + email?: string; + profile: ProfileRead; + project_set: number[]; +}; +export type PatchedUser = { + username?: string; + first_name?: string; + last_name?: string; + email?: string; +}; +export type PatchedUserRead = { + id?: number; + username?: string; + first_name?: string; + last_name?: string; + email?: string; + profile?: ProfileRead; + project_set?: number[]; +}; +export type Variable = { + read_only?: boolean; + datetime?: string | null; + is_public?: boolean; + lower_bound?: number | null; + upper_bound?: number | null; + default_value?: number; + is_log?: boolean; + name: string; + description?: string | null; + binding?: string | null; + qname: string; + unit_symbol?: string | null; + constant?: boolean; + state?: boolean; + color?: number; + display?: boolean; + axis?: boolean; + unit?: number | null; + pd_model?: number | null; + pk_model?: number | null; + dosed_pk_model?: number | null; + protocol?: number | null; +}; +export type VariableRead = { + id: number; + read_only?: boolean; + datetime?: string | null; + is_public?: boolean; + lower_bound?: number | null; + upper_bound?: number | null; + default_value?: number; + is_log?: boolean; + name: string; + description?: string | null; + binding?: string | null; + qname: string; + unit_symbol?: string | null; + constant?: boolean; + state?: boolean; + color?: number; + display?: boolean; + axis?: boolean; + unit?: number | null; + pd_model?: number | null; + pk_model?: number | null; + dosed_pk_model?: number | null; + protocol?: number | null; +}; +export type PatchedVariable = { + read_only?: boolean; + datetime?: string | null; + is_public?: boolean; + lower_bound?: number | null; + upper_bound?: number | null; + default_value?: number; + is_log?: boolean; + name?: string; + description?: string | null; + binding?: string | null; + qname?: string; + unit_symbol?: string | null; + constant?: boolean; + state?: boolean; + color?: number; + display?: boolean; + axis?: boolean; + unit?: 
number | null; + pd_model?: number | null; + pk_model?: number | null; + dosed_pk_model?: number | null; + protocol?: number | null; +}; +export type PatchedVariableRead = { + id?: number; + read_only?: boolean; + datetime?: string | null; + is_public?: boolean; + lower_bound?: number | null; + upper_bound?: number | null; + default_value?: number; + is_log?: boolean; + name?: string; + description?: string | null; + binding?: string | null; + qname?: string; + unit_symbol?: string | null; + constant?: boolean; + state?: boolean; + color?: number; + display?: boolean; + axis?: boolean; + unit?: number | null; + pd_model?: number | null; + pk_model?: number | null; + dosed_pk_model?: number | null; + protocol?: number | null; +}; +export const { + useAlgorithmListQuery, + useAlgorithmCreateMutation, + useAlgorithmRetrieveQuery, + useAlgorithmUpdateMutation, + useAlgorithmPartialUpdateMutation, + useAlgorithmDestroyMutation, + useAuceCreateMutation, + useBiomarkerTypeListQuery, + useBiomarkerTypeCreateMutation, + useBiomarkerTypeRetrieveQuery, + useBiomarkerTypeUpdateMutation, + useBiomarkerTypePartialUpdateMutation, + useBiomarkerTypeDestroyMutation, + useCombinedModelListQuery, + useCombinedModelCreateMutation, + useCombinedModelRetrieveQuery, + useCombinedModelUpdateMutation, + useCombinedModelPartialUpdateMutation, + useCombinedModelDestroyMutation, + useCombinedModelSetParamsToDefaultsUpdateMutation, + useCombinedModelSetVariablesFromInferenceUpdateMutation, + useCombinedModelSimulateCreateMutation, + useCompoundListQuery, + useCompoundCreateMutation, + useCompoundRetrieveQuery, + useCompoundUpdateMutation, + useCompoundPartialUpdateMutation, + useCompoundDestroyMutation, + useDatasetListQuery, + useDatasetCreateMutation, + useDatasetRetrieveQuery, + useDatasetUpdateMutation, + useDatasetPartialUpdateMutation, + useDatasetDestroyMutation, + useDatasetCsvUpdateMutation, + useDoseListQuery, + useDoseCreateMutation, + useDoseRetrieveQuery, + useDoseUpdateMutation, + useDosePartialUpdateMutation, + useDoseDestroyMutation, + useInferenceListQuery, + useInferenceCreateMutation, + useInferenceRetrieveQuery, + useInferenceUpdateMutation, + useInferencePartialUpdateMutation, + useInferenceDestroyMutation, + useInferenceStopCreateMutation, + useInferenceWizardCreateMutation, + useInferenceChainListQuery, + useInferenceChainCreateMutation, + useInferenceChainRetrieveQuery, + useInferenceChainUpdateMutation, + useInferenceChainPartialUpdateMutation, + useInferenceChainDestroyMutation, + useNcaCreateMutation, + usePharmacodynamicListQuery, + usePharmacodynamicCreateMutation, + usePharmacodynamicRetrieveQuery, + usePharmacodynamicUpdateMutation, + usePharmacodynamicPartialUpdateMutation, + usePharmacodynamicDestroyMutation, + usePharmacodynamicMmtUpdateMutation, + usePharmacodynamicSbmlUpdateMutation, + usePharmacodynamicSetVariablesFromInferenceUpdateMutation, + usePharmacodynamicSimulateCreateMutation, + usePharmacokineticListQuery, + usePharmacokineticCreateMutation, + usePharmacokineticRetrieveQuery, + usePharmacokineticUpdateMutation, + usePharmacokineticPartialUpdateMutation, + usePharmacokineticDestroyMutation, + useProjectListQuery, + useProjectCreateMutation, + useProjectRetrieveQuery, + useProjectUpdateMutation, + useProjectPartialUpdateMutation, + useProjectDestroyMutation, + useProjectMonolixUpdateMutation, + useProjectAccessListQuery, + useProjectAccessCreateMutation, + useProjectAccessRetrieveQuery, + useProjectAccessUpdateMutation, + useProjectAccessPartialUpdateMutation, + 
useProjectAccessDestroyMutation, + useProtocolListQuery, + useProtocolCreateMutation, + useProtocolRetrieveQuery, + useProtocolUpdateMutation, + useProtocolPartialUpdateMutation, + useProtocolDestroyMutation, + useSessionRetrieveQuery, + useSimulationListQuery, + useSimulationCreateMutation, + useSimulationRetrieveQuery, + useSimulationUpdateMutation, + useSimulationPartialUpdateMutation, + useSimulationDestroyMutation, + useSubjectListQuery, + useSubjectCreateMutation, + useSubjectRetrieveQuery, + useSubjectUpdateMutation, + useSubjectPartialUpdateMutation, + useSubjectDestroyMutation, + useUnitListQuery, + useUnitCreateMutation, + useUnitRetrieveQuery, + useUnitUpdateMutation, + useUnitPartialUpdateMutation, + useUnitDestroyMutation, + useUserListQuery, + useUserCreateMutation, + useUserRetrieveQuery, + useUserUpdateMutation, + useUserPartialUpdateMutation, + useUserDestroyMutation, + useVariableListQuery, + useVariableCreateMutation, + useVariableRetrieveQuery, + useVariableUpdateMutation, + useVariablePartialUpdateMutation, + useVariableDestroyMutation, + useWhoamiRetrieveQuery, +} = injectedRtkApi; diff --git a/frontend-v2/src/app/emptyApi.ts b/frontend-v2/src/app/emptyApi.ts new file mode 100644 index 00000000..b9b2bfde --- /dev/null +++ b/frontend-v2/src/app/emptyApi.ts @@ -0,0 +1,23 @@ +// Or from '@reduxjs/toolkit/query' if not using the auto-generated hooks +import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react' +import { selectCsrf } from '../features/login/loginSlice'; +import { RootState } from './store'; + +const baseQuery = fetchBaseQuery({ + baseUrl: "/", + credentials: "include", + prepareHeaders: (headers, { getState }) => { + const csrf = selectCsrf(getState() as RootState) + if (csrf) { + headers.set("X-CSRFToken", csrf); + } + return headers; + } +}); + +// initialize an empty api service that we'll inject endpoints into later as needed +export const emptySplitApi = createApi({ + baseQuery, + endpoints: () => ({}), +}); + diff --git a/frontend-v2/src/app/hooks.ts b/frontend-v2/src/app/hooks.ts new file mode 100644 index 00000000..93bb45e0 --- /dev/null +++ b/frontend-v2/src/app/hooks.ts @@ -0,0 +1,32 @@ +import { TypedUseSelectorHook, useDispatch, useSelector } from 'react-redux'; +import type { RootState, AppDispatch } from './store'; +import { Control, FieldPath, FieldValues, useFormState } from 'react-hook-form'; +import { useEffect, useState } from 'react'; + +// Use throughout your app instead of plain `useDispatch` and `useSelector` +export const useAppDispatch = () => useDispatch(); +export const useAppSelector: TypedUseSelectorHook = useSelector; + +type Props = { + name: FieldPath; + control: Control; +}; + +export function useFieldState({ name, control}: Props) { + const { defaultValues } = useFormState({ control, name }); + const keys = name.split('.'); + const defaultValue = keys.reduce((acc, key) => { + if (acc && typeof acc === 'object') { + return acc[key]; + } + return undefined; + }, defaultValues); + + const [fieldValue, setFieldValue] = useState(defaultValue); + + useEffect(() => { + setFieldValue(defaultValue); + }, [defaultValue]); + + return [fieldValue, setFieldValue]; +} diff --git a/frontend-v2/src/app/store.ts b/frontend-v2/src/app/store.ts new file mode 100644 index 00000000..f3a3a67a --- /dev/null +++ b/frontend-v2/src/app/store.ts @@ -0,0 +1,30 @@ +import { configureStore, ThunkAction, Action } from '@reduxjs/toolkit'; +import { setupListeners } from '@reduxjs/toolkit/query' +import loginReducer from 
'../features/login/loginSlice'; +import mainReducer from '../features/main/mainSlice'; +import { api } from '../app/api' + +export const store = configureStore({ + reducer: { + main: mainReducer, + login: loginReducer, + [api.reducerPath]: api.reducer, + }, + // Adding the api middleware enables caching, invalidation, polling, + // and other useful features of `rtk-query`. + middleware: (getDefaultMiddleware) => + getDefaultMiddleware().concat(api.middleware), +}); + +// optional, but required for refetchOnFocus/refetchOnReconnect behaviors +// see `setupListeners` docs - takes an optional callback as the 2nd arg for customization +setupListeners(store.dispatch) + +export type AppDispatch = typeof store.dispatch; +export type RootState = ReturnType; +export type AppThunk = ThunkAction< + ReturnType, + RootState, + unknown, + Action +>; diff --git a/frontend-v2/src/components/Checkbox.tsx b/frontend-v2/src/components/Checkbox.tsx new file mode 100644 index 00000000..6e3029d1 --- /dev/null +++ b/frontend-v2/src/components/Checkbox.tsx @@ -0,0 +1,41 @@ +import React from 'react'; +import { Control, Controller, FieldPath, FieldValues } from 'react-hook-form'; +import * as material from '@mui/material'; + +type Props = { + label: string; + name: FieldPath; + control: Control; + rules?: Object; + checkboxFieldProps?: material.CheckboxProps; +}; + +function Checkbox({ label, name, control, rules, checkboxFieldProps }: Props): React.ReactElement { + return ( + { + return ( + + } + label={label} + /> + ) + }} + /> + ); +}; + +export default Checkbox; \ No newline at end of file diff --git a/frontend-v2/src/components/ConfirmationDialog.tsx b/frontend-v2/src/components/ConfirmationDialog.tsx new file mode 100644 index 00000000..d6c772e1 --- /dev/null +++ b/frontend-v2/src/components/ConfirmationDialog.tsx @@ -0,0 +1,29 @@ +import { Dialog, DialogTitle, DialogContent, DialogActions, Button } from '@mui/material'; +import React from 'react'; + +interface ConfirmationDialogProps { + open: boolean; + title: string; + message: string; + onConfirm: () => void; + onCancel: () => void; +} + +const ConfirmationDialog: React.FC = ({ open, title, message, onConfirm, onCancel }) => { + return ( + + {title} + {message} + + + + + + ); +}; + +export default ConfirmationDialog; \ No newline at end of file diff --git a/frontend-v2/src/components/DropdownButton.tsx b/frontend-v2/src/components/DropdownButton.tsx new file mode 100644 index 00000000..ccab023c --- /dev/null +++ b/frontend-v2/src/components/DropdownButton.tsx @@ -0,0 +1,70 @@ +import { Button, IconButton, ListItem, ListItemButton, ListItemText, Popover } from '@mui/material'; +import React, { useState } from 'react'; + +type Option = { + label: string; + value: any; +}; + +type Props = { + options: Option[]; + onOptionSelected: (value: any) => void; + children?: React.ReactNode; + disabled?: boolean; + data_cy?: string; + useIcon?: boolean; +}; + +const DropdownButton: React.FC = ({ data_cy, options, onOptionSelected, children, disabled, useIcon }) => { + const [anchorEl, setAnchorEl] = useState(null); + + if (useIcon === undefined) { + useIcon = true; + } + + const handleButtonClick = (event: React.MouseEvent) => { + setAnchorEl(event.currentTarget); + }; + + const handleOptionSelected = (option: Option) => { + setAnchorEl(null); + onOptionSelected(option.value); + }; + + const open = Boolean(anchorEl); + const isDisabled = disabled || options.length === 0; + + if (data_cy === undefined) { + data_cy = `dropdown-button`; + } + + return ( +
+ <div> + {useIcon ? ( + <IconButton onClick={handleButtonClick} disabled={isDisabled} data-cy={data_cy}> + {children} + </IconButton> + ) : ( + <Button onClick={handleButtonClick} disabled={isDisabled} data-cy={data_cy}>{children}</Button> + )} + <Popover + open={open} + anchorEl={anchorEl} + onClose={() => setAnchorEl(null)} + data-cy={`dropdown-button-popover`} + anchorOrigin={{ vertical: 'bottom', horizontal: 'left' }} + transformOrigin={{ vertical: 'top', horizontal: 'left' }} + > + {options.map((option, index) => ( + <ListItem key={index} disablePadding> + <ListItemButton onClick={() => handleOptionSelected(option)} data-cy={`${data_cy}-option-${option.label}`}> + <ListItemText primary={option.label} /> + </ListItemButton> + </ListItem> + ))} + </Popover> + </div>
+ ); +}; + +export default DropdownButton; \ No newline at end of file diff --git a/frontend-v2/src/components/DynamicTabs.tsx b/frontend-v2/src/components/DynamicTabs.tsx new file mode 100644 index 00000000..bc5750cd --- /dev/null +++ b/frontend-v2/src/components/DynamicTabs.tsx @@ -0,0 +1,69 @@ +import React, { PropsWithChildren, ReactElement, useState } from 'react'; +import Box from '@mui/material/Box'; +import Tabs from '@mui/material/Tabs'; +import Tab from '@mui/material/Tab'; +import ErrorIcon from '@mui/icons-material/Error'; +import { Tooltip } from '@mui/material'; + +interface TabContextProps { + currentTab: number; + setCurrentTab: React.Dispatch>; +} + +export const TabContext = React.createContext({ + currentTab: 0, + setCurrentTab: () => {}, +}); + +interface DynamicTabsProps { + tabNames: string[]; + tabErrors?: { [key: string]: string }; +} + +interface TabPanelProps { + index?: number; +} + +export const TabPanel: React.FC> = ({ index, children }) => { + const { currentTab } = React.useContext(TabContext); + + return ; +}; + +export const DynamicTabs: React.FC> = ({ tabNames, tabErrors, children }) => { + const [currentTab, setCurrentTab] = useState(0); + + let errors: { [key: string]: ReactElement } = {}; + for (const key in tabErrors) { + errors[key] = ( + + + + ) + } + + + const handleChange = (event: React.SyntheticEvent, newValue: number) => { + setCurrentTab(newValue); + }; + + + return ( + + + + + {tabNames.map((name, index) => ( + + ))} + + + + {React.Children.map(children, (child, index) => { + return React.cloneElement(child as React.ReactElement, { index }); + })} + + + + ); +}; diff --git a/frontend-v2/src/components/FloatField.tsx b/frontend-v2/src/components/FloatField.tsx new file mode 100644 index 00000000..3b33c259 --- /dev/null +++ b/frontend-v2/src/components/FloatField.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { Control, Controller, FieldPath, FieldValues } from 'react-hook-form'; +import * as material from '@mui/material'; +import { useFieldState } from '../app/hooks'; + +type Props = { + label?: string; + name: FieldPath; + control: Control; + rules?: Object; + textFieldProps?: material.TextFieldProps; + data_cy?: string; +}; + +function convert(value: any) { + if (typeof value === 'string') { + if (value !== '') { + return parseFloat(value); + } else { + return null; + } + } else { + return null; + } +} + +function FloatField({ label, name, control, rules, textFieldProps, data_cy }: Props): React.ReactElement { + const [fieldValue, setFieldValue] = useFieldState({ name, control }); + return ( + { + const handleBlur = (e: React.FocusEvent) => { + const updatedValue = convert(e.target.value); + console.log('handleBlur', updatedValue, value) + if (updatedValue !== value) { + e.target.value = updatedValue as any; + onChange(e); + } + onBlur(); + }; + + const handleChange = (e: React.ChangeEvent) => { + console.log('handleChage', convert(e.target.value)) + setFieldValue(convert(e.target.value)); + }; + return ( + + ); + }} + /> + ); +}; + +export default FloatField; diff --git a/frontend-v2/src/components/HelpButton.tsx b/frontend-v2/src/components/HelpButton.tsx new file mode 100644 index 00000000..a7b7679d --- /dev/null +++ b/frontend-v2/src/components/HelpButton.tsx @@ -0,0 +1,34 @@ +import React, { useState } from 'react'; +import { HelpOutline } from '@mui/icons-material'; +import HelpDialog from './HelpDialog'; +import { IconButton } from '@mui/material'; + +interface HelpButtonProps { + title: string; + children: React.ReactNode; 
+} + +const HelpButton: React.FC = ({ title, children }) => { + const [open, setOpen] = useState(false); + + const handleOpen = () => { + setOpen(true); + }; + + const handleClose = () => { + setOpen(false); + }; + + return ( + <> + + + + + {children} + + + ); +}; + +export default HelpButton; \ No newline at end of file diff --git a/frontend-v2/src/components/HelpDialog.tsx b/frontend-v2/src/components/HelpDialog.tsx new file mode 100644 index 00000000..993c7c45 --- /dev/null +++ b/frontend-v2/src/components/HelpDialog.tsx @@ -0,0 +1,25 @@ +import React from 'react'; +import { Dialog, DialogTitle, DialogContent, DialogActions, Button } from '@mui/material'; + +interface HelpDialogProps { + open: boolean; + title: string; + onClose: () => void; + children: React.ReactNode; +} + +const HelpDialog: React.FC = ({ open, title, onClose, children }) => { + return ( + + {title} + {children} + + + + + ); +}; + +export default HelpDialog; \ No newline at end of file diff --git a/frontend-v2/src/components/IntegerField.tsx b/frontend-v2/src/components/IntegerField.tsx new file mode 100644 index 00000000..439b6c95 --- /dev/null +++ b/frontend-v2/src/components/IntegerField.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import { Control, Controller, FieldPath, FieldValues } from 'react-hook-form'; +import * as material from '@mui/material'; +import { useFieldState } from '../app/hooks'; + +type Props = { + label?: string; + name: FieldPath; + control: Control; + rules?: Object; + textFieldProps?: material.TextFieldProps; +}; + +function convert(value: any) { + if (typeof value === 'string' && value !== '') { + return parseInt(value); + } else { + return value; + } +} + +function IntegerField({ label, name, control, rules, textFieldProps }: Props): React.ReactElement { + const [fieldValue, setFieldValue] = useFieldState({ name, control }); + return ( + { + const handleBlur = (e: React.FocusEvent) => { + const updatedValue = convert(e.target.value); + if (updatedValue !== value) { + e.target.value = updatedValue; + onChange(e); + } + onBlur(); + }; + + const handleChange = (e: React.ChangeEvent) => { + setFieldValue(convert(e.target.value)); + }; + return ( + + ); + }} + /> + ); +}; + +export default IntegerField; diff --git a/frontend-v2/src/components/SelectField.tsx b/frontend-v2/src/components/SelectField.tsx new file mode 100644 index 00000000..ba8e931a --- /dev/null +++ b/frontend-v2/src/components/SelectField.tsx @@ -0,0 +1,57 @@ +import React from 'react'; +import { Control, Controller, FieldPath, FieldValues } from 'react-hook-form'; +import { Select, SelectProps, MenuItem, InputLabel, FormControl, OutlinedInput, FormControlProps } from '@mui/material'; + +type Option = { + value: any; + label: string; +}; + +type Props = { + label?: string; + name: FieldPath; + options: Option[]; + control: Control; + rules?: Object; + selectProps?: SelectProps; + formControlProps?: FormControlProps; +}; + +function SelectField({ label, name, options, control, rules, selectProps, formControlProps }: Props): React.ReactElement { + const labelId = `${name}-label`; + const displayEmpty = selectProps?.displayEmpty || true; + const labelWidth = (label ? 
label.length : 0) * 9 + return ( + ( + + {label} + + + )} + /> + ); +}; + +export default SelectField; \ No newline at end of file diff --git a/frontend-v2/src/components/TextField.tsx b/frontend-v2/src/components/TextField.tsx new file mode 100644 index 00000000..ebdac031 --- /dev/null +++ b/frontend-v2/src/components/TextField.tsx @@ -0,0 +1,60 @@ +import React, { useEffect, useState } from 'react'; +import { Control, Controller, FieldPath, FieldValues, useFormState } from 'react-hook-form'; +import * as material from '@mui/material'; +import { useFieldState } from '../app/hooks'; + +type Props = { + label?: string; + name: FieldPath; + control: Control; + rules?: Object; + mode?: 'onChange' | 'onBlur'; + textFieldProps?: material.TextFieldProps; +}; + +function TextField({ label, name, control, rules, mode, textFieldProps }: Props): React.ReactElement { + const [fieldValue, setFieldValue] = useFieldState({ name, control }); + + if (mode === undefined) { + mode = 'onBlur'; + } + + return ( + { + + const handleBlur = (e: React.FocusEvent) => { + if (mode === 'onBlur' && e.target.value !== value) { + onChange(e); + } + onBlur(); + }; + + const handleChange = (e: React.ChangeEvent) => { + setFieldValue(e.target.value); + if (mode === 'onChange') { + onChange(e); + } + }; + return ( + + ); + }} + /> + ); +}; + +export default TextField; \ No newline at end of file diff --git a/frontend-v2/src/components/Title.tsx b/frontend-v2/src/components/Title.tsx new file mode 100644 index 00000000..3da912a2 --- /dev/null +++ b/frontend-v2/src/components/Title.tsx @@ -0,0 +1,13 @@ +import * as React from 'react'; +import { Typography } from '@mui/material'; + +interface Props { + title: string; +} + +export const Title = ({ title }: Props) => ( + + {title} + +); + diff --git a/frontend-v2/src/components/UnitField.tsx b/frontend-v2/src/components/UnitField.tsx new file mode 100644 index 00000000..398762c6 --- /dev/null +++ b/frontend-v2/src/components/UnitField.tsx @@ -0,0 +1,42 @@ +import React from 'react'; +import { Control, FieldPath, FieldValues } from 'react-hook-form'; +import { SelectProps } from '@mui/material'; +import { Compound, Unit, UnitRead, useUnitRetrieveQuery } from '../app/backendApi'; +import SelectField from './SelectField'; + +type Props = { + label: string; + baseUnit?: UnitRead; + name: FieldPath; + control: Control; + rules?: Object; + selectProps?: SelectProps; + compound?: Compound; + isPreclinicalPerKg?: boolean; +}; + +function UnitField({ label, name, baseUnit, control, rules, selectProps, isPreclinicalPerKg }: Props): React.ReactElement { + if (!isPreclinicalPerKg) { + isPreclinicalPerKg = false; + } + + const isDimensionless = baseUnit?.symbol === '' || false; + if (!baseUnit || isDimensionless) { + selectProps = { ...selectProps, disabled: true }; + } + + const compatibleUnits = isPreclinicalPerKg ? + baseUnit?.compatible_units.filter(unit => unit.symbol.endsWith('/kg')) : + baseUnit?.compatible_units; + + const options = compatibleUnits ? 
+ compatibleUnits.map((unit: { [key: string]: string }) => { + return { value: unit.id, label: unit.symbol } + }) : []; + + return ( + + ); +}; + +export default UnitField; \ No newline at end of file diff --git a/frontend-v2/src/features/drug/Drug.tsx b/frontend-v2/src/features/drug/Drug.tsx new file mode 100644 index 00000000..a54c4371 --- /dev/null +++ b/frontend-v2/src/features/drug/Drug.tsx @@ -0,0 +1,186 @@ +import { Button, Grid, IconButton, List, ListItem, ListItemSecondaryAction, Radio, Stack, Tooltip, Typography } from '@mui/material'; +import { useSelector } from 'react-redux'; +import { RootState } from '../../app/store'; +import { Compound, Efficacy, EfficacyRead, useCompoundRetrieveQuery, useCompoundUpdateMutation, useProjectRetrieveQuery, useUnitListQuery } from '../../app/backendApi'; +import { useFieldArray, useForm, useFormState } from 'react-hook-form'; +import FloatField from '../../components/FloatField'; +import UnitField from '../../components/UnitField'; +import SelectField from '../../components/SelectField'; +import { useEffect, useState } from 'react'; +import DeleteIcon from '@mui/icons-material/Delete'; +import TextField from '../../components/TextField'; +import useDirty from '../../hooks/useDirty'; +import ConfirmationDialog from '../../components/ConfirmationDialog'; + + +const Drug: React.FC = () => { + const projectId = useSelector((state: RootState) => state.main.selectedProject); + const { data: project, isLoading: isProjectLoading } = useProjectRetrieveQuery({id: projectId || 0}, { skip: !projectId }) + const { data: compound, isLoading: isCompoundLoading } = useCompoundRetrieveQuery({id: project?.compound || 0}, { skip: !project }) + const [ updateCompound ] = useCompoundUpdateMutation() + const { data: units, isLoading: isLoadingUnits } = useUnitListQuery({ compoundId: project?.compound}, { skip: !project?.compound}); + + const [ showConfirmDelete, setShowConfirmDelete ] = useState(false); + + + // create a form for the compound data using react-hook-form + const { reset, handleSubmit, control, setValue } = useForm({ + defaultValues: compound || { name: '', description: '', compound_type: 'SM', efficacy_experiments: [] } + }); + const { isDirty } = useFormState({ control }); + + useDirty(isDirty); + + const { fields: efficacy_experiments, append, remove } = useFieldArray({ + control, + name: "efficacy_experiments", + keyName: "theKey", + }); + + + useEffect(() => { + reset(compound); + }, [compound, reset]); + + const submit = handleSubmit((data) => { + if (data && compound && (JSON.stringify(data) !== JSON.stringify(compound))) { + // strange bug in react-hook-form is creating efficancy_experiments with undefined compounds, remove these for now. 
+ data.efficacy_experiments = data.efficacy_experiments.filter((efficacy_experiment) => efficacy_experiment.compound !== undefined); + updateCompound({ id: compound.id, compound: data }).then((result) => { + // if the compound has no efficacy experiments, but the result has, then set the first one as the use_efficacy + if ('data' in result) { + if (compound.efficacy_experiments.length === 0 && result.data.efficacy_experiments.length > 0) { + updateCompound({ id: compound.id, compound: { ...data, use_efficacy: result.data.efficacy_experiments[0].id }}); + } + } + }); + } + }); + + useEffect(() => { + const intervalId = setInterval(() => { + if (isDirty) { + submit(); + } + }, 1000); + + return () => clearInterval(intervalId); + }, [submit, isDirty]); + + useEffect(() => () => { submit(); }, []); + + const addNewEfficacyExperiment = () => { + append([{ name: '', c50: compound?.target_concentration || 0, c50_unit: compound?.target_concentration_unit || 0, hill_coefficient: 1, compound: compound?.id || 0 }]); + }; + + const deleteEfficacyExperiment = (index: number) => { + remove(index); + }; + + + if (isProjectLoading || isCompoundLoading || isLoadingUnits) { + return
<div>Loading...</div>; + } + + if (!compound || !project || !units) { + return <div>Not found</div>
; + } + + const intrinsic_clearance_assay_options = [ + { value: "MS", label: "Microsomes" }, + { value: "HC", label: "Hepatocytes" }, + ]; + + const is_soluble_options = [ + { value: false, label: "Membrane-bound" }, + { value: true, label: "Soluble" }, + ]; + + const isLM = compound.compound_type === 'LM'; + + const isEfficacySelected = (efficacy_experiment: EfficacyRead ) => { + if (compound.use_efficacy === undefined) { + return false; + } + return efficacy_experiment.id === compound.use_efficacy; + } + + const handleSelectEfficacy = (efficacy_experiment: EfficacyRead) => { + if (efficacy_experiment.id === compound.use_efficacy) { + setValue('use_efficacy', null); + submit(); + } else { + setValue('use_efficacy', efficacy_experiment.id); + submit(); + } + } + + return ( + + + + Drug Properties + + + + + u.id === compound.molecular_mass_unit)} compound={compound} /> + + + + + + Target Properties + + + + + + u.id === compound.molecular_mass_unit)} compound={compound} /> + + + + + + Efficacy-Safety Data + + + + {efficacy_experiments.map((efficacy_experiment, index) => ( + + + + + + u.id === efficacy_experiment.c50_unit)} compound={compound} /> + + + + + + handleSelectEfficacy(efficacy_experiment as unknown as EfficacyRead)}/> + + + setShowConfirmDelete(true)}> + + + + { deleteEfficacyExperiment(index); setShowConfirmDelete(false); }} + onCancel={() => setShowConfirmDelete(false)} + /> + + + ))} + + + + ); +} + +export default Drug; diff --git a/frontend-v2/src/features/help/Help.tsx b/frontend-v2/src/features/help/Help.tsx new file mode 100644 index 00000000..33da4112 --- /dev/null +++ b/frontend-v2/src/features/help/Help.tsx @@ -0,0 +1,82 @@ +import React, { useEffect } from 'react'; +import { DynamicTabs, TabPanel } from '../../components/DynamicTabs'; +import HelpTab from './HelpTab'; +import { parse } from 'papaparse'; +import { set } from 'react-hook-form'; +import { Container } from '@mui/material'; + +export type Question = { + question: string; + answer: string; +} + +export type TutorialVideo = { + title: string; + type: string; + link: string; + keywords: string[]; +} + +const tutorialVideosUrl: string = '/backend/tutorial_videos.csv'; + +const Help: React.FC = () => { + const [ tutorialVideos, setTutorialVideos ] = React.useState([]); + useEffect(() => { + parse(tutorialVideosUrl, { + download: true, + error: (err) => { + console.error('Error downloading tutorial videos:', err); + }, + complete: (results) => { + setTutorialVideos( + results.data.map((row) => { + const rowList = row as string[]; + return { + title: rowList[0], + type: rowList[1], + link: rowList[2].replace('view?usp=sharing', 'preview'), + keywords: rowList[3].split(',').map((keyword) => keyword.trim()) + }; + }) + ); + } + }) + }, []); + let generic_questions: Question[] = Array(5).fill({ + question: "Question 1?", + answer: "Answer 1" + }); + generic_questions = generic_questions.map((question, index) => { + return { + question: `Question ${index + 1}?`, + answer: `Answer ${index + 1}: Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.` + }}); + + const questions = [ + generic_questions.slice(0, 0), + generic_questions.slice(0, 0), + generic_questions.slice(0, 0), + generic_questions.slice(0, 0), + generic_questions.slice(0, 0), + ] + const tutorials = [ + tutorialVideos.filter((video) => video.type.includes('Tutorial')), + tutorialVideos.filter((video) => video.type === 'Project'), + tutorialVideos.filter((video) => video.type === 'Drug'), + tutorialVideos.filter((video) => video.type === 'Model'), + tutorialVideos.filter((video) => video.type === 'Trial Design'), + tutorialVideos.filter((video) => video.type === 'Simulation'), + ] + + return ( + + { questions.map((question, index) => ( + + + + ))} + + ); +} + +export default Help; diff --git a/frontend-v2/src/features/help/HelpTab.tsx b/frontend-v2/src/features/help/HelpTab.tsx new file mode 100644 index 00000000..0e0d2484 --- /dev/null +++ b/frontend-v2/src/features/help/HelpTab.tsx @@ -0,0 +1,59 @@ +import { Accordion, AccordionDetails, AccordionSummary, Box, Card, Chip, Grid, Stack, Typography } from "@mui/material"; +import ExpandMoreIcon from '@mui/icons-material/ExpandMore'; +import { Question, TutorialVideo } from "./Help"; +import ReactPlayer from 'react-player/youtube' + + +interface Props { + questions: Question[]; + videos: TutorialVideo[]; +} + +// tab that lists questions as mui accordian components +const HelpTab: React.FC = ({ questions, videos }) => { + return ( +
+ + {questions.map((question, index) => { + return ( + + } + > + {question.question} + + + {question.answer} + + + ) + })} + + + {videos.map((video, index) => { + return ( + + {video.title} + + { video.keywords.map((keyword, index) => { + return ( + + ) + })} + +