Skip to content

Commit

Permalink
feat: map dosing and observation variables (#351)
Browse files Browse the repository at this point in the history
* Map data to model variables

Show a table of dose amount data (and units), with select menus
to map these to model inputs.
Show a table of observation variables (and units), with select menus
to map those to model outputs.

* Add a mapped qname to biomarkers

Store the mapped variable qname on biomarker types. Read it from
OBSERVATION_VARIABLE in a dataset.

* Support ES2015

* Allow for multiple variable mappings

Map each Administration ID to a dosing compartment.
Map each Observation ID to a model output.
Add new columns to the CSV data, with mappings and optional units.

* Preview the final dataset

Add a final step to the upload, which will preview the final CSV before saving it.

* Save modified dataset to backend

- load or create a dataset when we start an upload.
- save the dataset when we finish an upload.
- modify the `/datasets/:dataset_id/csv` endpoint to accept a JSON string.

- update the dataset API to allow filtering by project ID.

* Allow for a single unit column

- When there's a single unit column, use that column for both dosing and observations.
- Split the CSV data into dosing rows and observation rows.
- Add administration route to the Map Dosing screen.
- Allow for dimensionless observation units.
- Filter mapped observation variables for compatibility with the observation unit value.
  • Loading branch information
eatyourgreens committed Mar 12, 2024
1 parent 61944c9 commit 8b918f2
Show file tree
Hide file tree
Showing 17 changed files with 665 additions and 65 deletions.
30 changes: 16 additions & 14 deletions frontend-v2/src/app/backendApi.ts
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,10 @@ const injectedRtkApi = api.injectEndpoints({
}),
}),
datasetList: build.query<DatasetListApiResponse, DatasetListApiArg>({
query: () => ({ url: `/api/dataset/` }),
query: (queryArg) => ({
url: `/api/dataset/`,
params: { project_id: queryArg.projectId },
}),
}),
datasetCreate: build.mutation<
DatasetCreateApiResponse,
Expand Down Expand Up @@ -1158,7 +1161,10 @@ export type CompoundDestroyApiArg = {
id: number;
};
export type DatasetListApiResponse = /** status 200 */ DatasetRead[];
export type DatasetListApiArg = void;
export type DatasetListApiArg = {
/** Filter results by project ID */
projectId?: number;
};
export type DatasetCreateApiResponse = /** status 201 */ DatasetRead;
export type DatasetCreateApiArg = {
dataset: Dataset;
Expand Down Expand Up @@ -1671,6 +1677,7 @@ export type BiomarkerType = {
display?: boolean;
color?: number;
axis?: boolean;
mapped_qname?: string;
stored_unit: number;
dataset: number;
display_unit: number;
Expand All @@ -1689,6 +1696,7 @@ export type BiomarkerTypeRead = {
display?: boolean;
color?: number;
axis?: boolean;
mapped_qname?: string;
stored_unit: number;
dataset: number;
display_unit: number;
Expand All @@ -1701,6 +1709,7 @@ export type PatchedBiomarkerType = {
display?: boolean;
color?: number;
axis?: boolean;
mapped_qname?: string;
stored_unit?: number;
dataset?: number;
display_unit?: number;
Expand All @@ -1719,6 +1728,7 @@ export type PatchedBiomarkerTypeRead = {
display?: boolean;
color?: number;
axis?: boolean;
mapped_qname?: string;
stored_unit?: number;
dataset?: number;
display_unit?: number;
Expand Down Expand Up @@ -2127,9 +2137,7 @@ export type Inference = {
time_elapsed?: number;
number_of_function_evals?: number;
task_id?: string | null;
metadata?: {
[key: string]: any;
};
metadata?: any;
project: number;
algorithm?: number;
initialization_inference?: number | null;
Expand All @@ -2149,9 +2157,7 @@ export type InferenceRead = {
time_elapsed?: number;
number_of_function_evals?: number;
task_id?: string | null;
metadata?: {
[key: string]: any;
};
metadata?: any;
project: number;
algorithm?: number;
initialization_inference?: number | null;
Expand All @@ -2170,9 +2176,7 @@ export type PatchedInference = {
time_elapsed?: number;
number_of_function_evals?: number;
task_id?: string | null;
metadata?: {
[key: string]: any;
};
metadata?: any;
project?: number;
algorithm?: number;
initialization_inference?: number | null;
Expand All @@ -2192,9 +2196,7 @@ export type PatchedInferenceRead = {
time_elapsed?: number;
number_of_function_evals?: number;
task_id?: string | null;
metadata?: {
[key: string]: any;
};
metadata?: any;
project?: number;
algorithm?: number;
initialization_inference?: number | null;
Expand Down
13 changes: 6 additions & 7 deletions frontend-v2/src/features/data/LoadData.tsx
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { Alert, Box, Stack, Table, TableBody, TableCell, TableContainer, TableHead, TableRow } from '@mui/material';
import Papa, { ParseError, ParseMeta } from 'papaparse'
import React, {useCallback, useState} from 'react'
import { Alert, Box, Stack } from '@mui/material';
import Papa from 'papaparse'
import { FC, useCallback, useState} from 'react'
import {useDropzone} from 'react-dropzone'
import MapHeaders from './MapHeaders';
import { manditoryHeaders, normaliseHeader, normalisedHeaders } from './normaliseDataHeaders';
import { manditoryHeaders, normaliseHeader } from './normaliseDataHeaders';
import { StepperState } from './LoadDataStepper';

export type Row = {[key: string]: string};
Expand Down Expand Up @@ -48,7 +48,7 @@ const validateNormalisedFields = (fields: Field[]) => {
return errors;
}

const LoadData: React.FC<ILoadDataProps> = ({state, firstTime}) => {
const LoadData: FC<ILoadDataProps> = ({state, firstTime}) => {
const [errors, setErrors] = useState<string[]>(firstTime ? [] : validateNormalisedFields(state.normalisedFields));
const [showData, setShowData] = useState<boolean>(state.data.length > 0 && state.fields.length > 0);

Expand All @@ -62,7 +62,6 @@ const LoadData: React.FC<ILoadDataProps> = ({state, firstTime}) => {
// Parse the CSV data
const rawCsv = reader.result as string;
const csvData = Papa.parse(rawCsv.trim(), { header: true });
const data = csvData.data as Data;
const fields = csvData.meta.fields || [];
const normalisedFields = fields.map(normaliseHeader);
const errors = csvData.errors.map((e) => e.message).concat(validateNormalisedFields(normalisedFields));
Expand All @@ -77,7 +76,7 @@ const LoadData: React.FC<ILoadDataProps> = ({state, firstTime}) => {
reader.readAsText(file)
})

}, [])
}, [state])
const {getRootProps, getInputProps} = useDropzone({onDrop})

const setNormalisedFields = (fields: Field[]) => {
Expand Down
90 changes: 78 additions & 12 deletions frontend-v2/src/features/data/LoadDataStepper.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import * as React from 'react';
import { FC, useEffect } from 'react';
import { useSelector } from "react-redux";
import Papa from 'papaparse'
import Box from '@mui/material/Box';
import Stepper from '@mui/material/Stepper';
import Step from '@mui/material/Step';
Expand All @@ -8,9 +10,18 @@ import Typography from '@mui/material/Typography';
import LoadData from './LoadData';
import { useState } from 'react';
import MapObservations from './MapObservations';
import MapDosing from './MapDosing';
import PreviewData from './PreviewData';
import { RootState } from "../../app/store";
import {
DatasetRead,
useDatasetListQuery,
useDatasetCreateMutation,
useDatasetCsvUpdateMutation,
} from '../../app/backendApi';

const stepLabels = ['Upload Data', 'Map Observations'];
const stepComponents = [LoadData, MapObservations];
const stepLabels = ['Upload Data', 'Map Dosing', 'Map Observations', 'Preview Dataset'];
const stepComponents = [LoadData, MapDosing, MapObservations, PreviewData];

type Row = {[key: string]: string};
type Data = Row[];
Expand All @@ -27,17 +38,30 @@ export type StepperState = {
setData: (data: Data) => void;
amountUnit?: string;
setAmountUnit: (amountUnit: string) => void;
observationUnits?: {[key: string]: string};
setObservationUnits: (observationUnits: {[key: string]: string}) => void;
}

const LoadDataStepper: React.FC = () => {
const LoadDataStepper: FC = () => {
const [dataset, setDataset] = useState<null | DatasetRead>(null);
const [data, setData] = useState<Data>([]);
const [fields, setFields] = useState<string[]>([]);
const [normalisedFields, setNormalisedFields] = useState<string[]>([]);
const [timeUnit, setTimeUnit] = useState<string | undefined>(undefined);
const [amountUnit, setAmountUnit] = useState<string | undefined>(undefined);
const [observationUnits, setObservationUnits] = useState<{[key: string]: string}>({});
const selectedProject = useSelector(
(state: RootState) => state.main.selectedProject,
);
const selectedProjectOrZero = selectedProject || 0;
const { data: datasets = [], isLoading: isDatasetLoading } = useDatasetListQuery(
{ projectId: selectedProjectOrZero },
{ skip: !selectedProject },
);
const [
createDataset
] = useDatasetCreateMutation();
const [
updateDataset
] = useDatasetCsvUpdateMutation();


const state = {
fields,
Expand All @@ -49,13 +73,50 @@ const LoadDataStepper: React.FC = () => {
timeUnit,
setTimeUnit,
amountUnit,
setAmountUnit,
observationUnits,
setObservationUnits,
setAmountUnit
};

const [stepState, setStepState] = useState({ activeStep: 0, maxStep: 0 });
const StepComponent = stepComponents[stepState.activeStep];
const isFinished = stepState.activeStep === stepLabels.length;

useEffect(function onDataLoad() {
async function addDataset() {
let [dataset] = datasets;
if (!dataset) {
const response = await createDataset({
dataset: {
name: 'New Dataset',
project: selectedProjectOrZero,
}
});
if ('data' in response && response.data) {
dataset = response.data;
}
}
console.log({dataset});
setDataset(dataset);
}
if (!isDatasetLoading) {
addDataset();
}
}, [datasets, createDataset, isDatasetLoading]);

useEffect(function onFinished() {
if (isFinished && dataset?.id) {
try {
const csv = Papa.unparse(data);
updateDataset({
id: dataset.id,
datasetCsv: {
csv
}
})
} catch (e) {
console.error(e);
}
}
}, [isFinished, updateDataset, dataset?.id, data])

const handleNext = () => {
setStepState((prevActiveStep) => ({
Expand All @@ -77,10 +138,15 @@ const LoadDataStepper: React.FC = () => {
</Step>
))}
</Stepper>
<Typography>{stepState.activeStep === stepLabels.length ? 'The process is completed' : <StepComponent state={state} firstTime={stepState.activeStep === stepState.maxStep}/>}</Typography>
<Typography>
{isFinished ?
'The process is completed' :
<StepComponent state={state} firstTime={stepState.activeStep === stepState.maxStep}/>
}
</Typography>
<Box sx={{ display: 'flex', justifyContent: 'space-between', marginTop: 1 }}>
<Button disabled={stepState.activeStep === 0} onClick={handleBack}>Back</Button>
<Button variant="contained" color="primary" onClick={handleNext}>
<Button disabled={isFinished} variant="contained" color="primary" onClick={handleNext}>
{stepState.activeStep === stepLabels.length - 1 ? 'Finish' : 'Next'}
</Button>
</Box>
Expand Down
14 changes: 2 additions & 12 deletions frontend-v2/src/features/data/LoadDataTab.tsx
Original file line number Diff line number Diff line change
@@ -1,17 +1,7 @@
import * as React from 'react';
import { useState } from 'react';
import Button from '@mui/material/Button';
import Avatar from '@mui/material/Avatar';
import List from '@mui/material/List';
import ListItem from '@mui/material/ListItem';
import ListItemAvatar from '@mui/material/ListItemAvatar';
import ListItemButton from '@mui/material/ListItemButton';
import ListItemText from '@mui/material/ListItemText';
import DialogTitle from '@mui/material/DialogTitle';
import Dialog from '@mui/material/Dialog';
import PersonIcon from '@mui/icons-material/Person';
import AddIcon from '@mui/icons-material/Add';
import Typography from '@mui/material/Typography';
import { blue } from '@mui/material/colors';
import LoadDataStepper from './LoadDataStepper';
import { DialogContent } from '@mui/material';

Expand All @@ -38,7 +28,7 @@ function LoadDataDialog(props: LoadDataDialogProps) {
}

export default function LoadDataTab() {
const [open, setOpen] = React.useState(false);
const [open, setOpen] = useState(false);

const handleClickOpen = () => {
setOpen(true);
Expand Down
Loading

0 comments on commit 8b918f2

Please sign in to comment.