expose prompt when applicable across contexts; support jsonpath ml ingest processor bug fix (#403)

Signed-off-by: Tyler Ohlsen <[email protected]>
ohltyler authored Oct 2, 2024
1 parent 09897ad commit a7a0264
Showing 9 changed files with 58 additions and 23 deletions.
1 change: 1 addition & 0 deletions common/constants.ts
@@ -480,3 +480,4 @@ export const EMPTY_MAP_ENTRY = { key: '', value: '' } as MapEntry;
export const MODEL_OUTPUT_SCHEMA_NESTED_PATH =
'output.properties.inference_results.items.properties.output.items.properties.dataAsMap.properties';
export const MODEL_OUTPUT_SCHEMA_FULL_PATH = 'output.properties';
export const PROMPT_FIELD = 'prompt'; // TODO: likely expand to support a pattern and/or multiple (e.g., "prompt", "prompt_template", etc.)
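
For illustration only: the TODO above hints at matching several prompt-like parameter names rather than the single PROMPT_FIELD. A hypothetical helper along those lines (neither PROMPT_FIELD_CANDIDATES nor findPromptField exists in this commit) might look like:

// Hypothetical sketch, not part of this commit: match any of several
// prompt-like connector parameter names instead of only PROMPT_FIELD.
const PROMPT_FIELD_CANDIDATES = ['prompt', 'prompt_template'];

function findPromptField(
  parameters: Record<string, unknown> | undefined
): string | undefined {
  // return the first parameter key that looks like a prompt, if any
  return Object.keys(parameters ?? {}).find((key) =>
    PROMPT_FIELD_CANDIDATES.includes(key)
  );
}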
8 changes: 7 additions & 1 deletion common/interfaces.ts
@@ -6,7 +6,12 @@
import { Node, Edge } from 'reactflow';
import { FormikValues } from 'formik';
import { ObjectSchema } from 'yup';
import { COMPONENT_CLASS, PROCESSOR_TYPE, WORKFLOW_TYPE } from './constants';
import {
COMPONENT_CLASS,
PROCESSOR_TYPE,
PROMPT_FIELD,
WORKFLOW_TYPE,
} from './constants';

export type Index = {
name: string;
@@ -401,6 +406,7 @@ export type ModelInterface = {
export type ConnectorParameters = {
model?: string;
dimensions?: number;
[PROMPT_FIELD]?: string;
};

export type Model = {
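As a rough illustration of the updated type, a connector whose blueprint defines a prompt parameter would now carry it in its typed parameters. The values below are made up, not taken from this commit:

// Illustrative only: a ConnectorParameters value for a connector that
// defines a "prompt" parameter. Model name and prompt are fabricated.
const exampleParameters: ConnectorParameters = {
  model: 'anthropic.claude-v2',
  [PROMPT_FIELD]: 'Summarize the following text: ${parameters.text}',
};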
4 changes: 2 additions & 2 deletions public/pages/workflow_detail/tools/tools.tsx
@@ -37,12 +37,12 @@ enum TAB_ID {
const inputTabs = [
{
id: TAB_ID.INGEST,
name: 'Run ingestion',
name: 'Ingest response',
disabled: false,
},
{
id: TAB_ID.QUERY,
name: 'Run query',
name: 'Search response',
disabled: false,
},
{
4 changes: 2 additions & 2 deletions public/pages/workflow_detail/workflow_detail.test.tsx
@@ -94,8 +94,8 @@ describe('WorkflowDetail Page with create ingestion option', () => {
expect(getByText('Export')).toBeInTheDocument();
expect(getByText('Visual')).toBeInTheDocument();
expect(getByText('JSON')).toBeInTheDocument();
expect(getByRole('tab', { name: 'Run ingestion' })).toBeInTheDocument();
expect(getByRole('tab', { name: 'Run query' })).toBeInTheDocument();
expect(getByRole('tab', { name: 'Ingest response' })).toBeInTheDocument();
expect(getByRole('tab', { name: 'Search response' })).toBeInTheDocument();
expect(getByRole('tab', { name: 'Errors' })).toBeInTheDocument();
expect(getByRole('tab', { name: 'Resources' })).toBeInTheDocument();

4 changes: 3 additions & 1 deletion public/pages/workflow_detail/workflow_detail.tsx
@@ -23,6 +23,7 @@ import {
AppState,
catIndices,
getWorkflow,
searchConnectors,
searchModels,
useAppDispatch,
} from '../../store';
@@ -102,11 +103,12 @@ export function WorkflowDetail(props: WorkflowDetailProps) {

// On initial load:
// - fetch workflow
// - fetch available models as their IDs may be used when building flows
// - fetch available models & connectors as their IDs may be used when building flows
// - fetch all indices
useEffect(() => {
dispatch(getWorkflow({ workflowId, dataSourceId }));
dispatch(searchModels({ apiBody: FETCH_ALL_QUERY, dataSourceId }));
dispatch(searchConnectors({ apiBody: FETCH_ALL_QUERY, dataSourceId }));
dispatch(catIndices({ pattern: OMIT_SYSTEM_INDEX_PATTERN, dataSourceId }));
}, []);

@@ -29,6 +29,7 @@ import {
WorkflowFormValues,
ModelInterface,
IndexMappings,
PROMPT_FIELD,
} from '../../../../../common';
import { MapArrayField, ModelField } from '../input_fields';
import {
@@ -61,18 +62,18 @@ interface MLProcessorInputsProps {
export function MLProcessorInputs(props: MLProcessorInputsProps) {
const dispatch = useAppDispatch();
const dataSourceId = getDataSourceId();
const models = useSelector((state: AppState) => state.ml.models);
const { models, connectors } = useSelector((state: AppState) => state.ml);
const indices = useSelector((state: AppState) => state.opensearch.indices);
const { values, setFieldValue, setFieldTouched } = useFormikContext<
WorkflowFormValues
>();

// extracting field info from the ML processor config
// TODO: have a better mechanism for guaranteeing the expected fields/config instead of hardcoding them here
// get some current form & config values
const modelField = props.config.fields.find(
(field) => field.type === 'model'
) as IConfigField;
const modelFieldPath = `${props.baseConfigPath}.${props.config.id}.${modelField.id}`;
const modelIdFieldPath = `${modelFieldPath}.id`;
const inputMapField = props.config.fields.find(
(field) => field.id === 'input_map'
) as IConfigField;
@@ -88,6 +89,12 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
`${props.baseConfigPath}.${props.config.id}.full_response_path`
);

// whether the selected model's connector exposes a configurable prompt field;
// if so, expose some extra dedicated UI
const [containsPromptField, setContainsPromptField] = useState<boolean>(
false
);

// preview availability states
// if there are preceding search request processors, we cannot fetch and display the interim transformed query.
// additionally, cannot preview output transforms for search request processors because output_maps need to be defined
@@ -140,7 +147,7 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
// on initial load of the models, update model interface states
useEffect(() => {
if (!isEmpty(models)) {
const modelId = getIn(values, `${modelFieldPath}.id`);
const modelId = getIn(values, modelIdFieldPath);
if (modelId) {
setModelInterface(models[modelId]?.interface);
}
@@ -212,6 +219,27 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
}
}, [values?.search?.index?.name]);

// Check if there is an exposed prompt field users can override. Need to navigate
// to the associated connector details to view the connector parameters list.
useEffect(() => {
const selectedModel = Object.values(models).find(
(model) => model.id === getIn(values, modelIdFieldPath)
);
if (selectedModel?.connectorId !== undefined) {
const connectorParameters =
connectors[selectedModel.connectorId]?.parameters;
if (connectorParameters !== undefined) {
if (connectorParameters[PROMPT_FIELD] !== undefined) {
setContainsPromptField(true);
} else {
setContainsPromptField(false);
}
} else {
setContainsPromptField(false);
}
}
}, [models, connectors, getIn(values, modelIdFieldPath)]);

return (
<>
{isInputTransformModalOpen && (
@@ -262,7 +290,7 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
{!isEmpty(getIn(values, modelFieldPath)?.id) && (
<>
<EuiSpacer size="s" />
{props.context === PROCESSOR_CONTEXT.SEARCH_RESPONSE && (
{containsPromptField && (
<>
<EuiText
size="m"
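For readers unfamiliar with the store, here is a rough sketch of the state.ml shape the prompt check above assumes; the field names are inferred from the lookups in this hunk, not copied from the store definitions:

// Inferred sketch of the ml slice consumed above; not the actual store types.
interface MLStateSketch {
  // keyed by model ID; a model may reference the connector it was created from
  models: {
    [modelId: string]: { id: string; connectorId?: string; interface?: object };
  };
  // keyed by connector ID; parameters may include a "prompt" entry
  connectors: {
    [connectorId: string]: { id: string; parameters?: { [key: string]: any } };
  };
}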
@@ -33,6 +33,7 @@ import {
IProcessorConfig,
ModelInputFormField,
ModelInterface,
PROMPT_FIELD,
PROMPT_PRESETS,
PromptPreset,
WorkflowFormValues,
@@ -74,21 +75,16 @@ export function ConfigurePromptModal(props: ConfigurePromptModalProps) {

// hook to set the prompt if found in the model config
useEffect(() => {
const modelConfigString = getIn(
values,
`${props.baseConfigPath}.${props.config.id}.model_config`
) as string;
try {
const prompt = JSON.parse(modelConfigString)?.prompt;
const modelConfigObj = JSON.parse(getIn(values, modelConfigPath));
const prompt = getIn(modelConfigObj, PROMPT_FIELD);
if (!isEmpty(prompt)) {
setPromptStr(prompt);
} else {
setPromptStr('');
}
} catch {}
}, [
getIn(values, `${props.baseConfigPath}.${props.config.id}.model_config`),
]);
}, [getIn(values, modelConfigPath)]);

return (
<EuiModal onClose={props.onClose} style={{ width: '70vw' }}>
@@ -127,7 +123,7 @@ export function ConfigurePromptModal(props: ConfigurePromptModalProps) {
modelConfigPath,
customStringify({
...JSON.parse(modelConfig),
prompt: preset.prompt,
[PROMPT_FIELD]: preset.prompt,
})
);
} catch {}
@@ -168,9 +164,9 @@
// if the input is blank, it is assumed the user
// does not want any prompt. hence, remove the "prompt" field
// from the config altogether.
delete updatedModelConfig.prompt;
delete updatedModelConfig[PROMPT_FIELD];
} else {
updatedModelConfig.prompt = promptStr;
updatedModelConfig[PROMPT_FIELD] = promptStr;
}
setFieldValue(
modelConfigPath,
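The modal treats model_config as a JSON string, so reading or writing the prompt is a parse/modify/stringify round trip. A minimal standalone sketch of that pattern, assuming customStringify is roughly a pretty-printing JSON.stringify wrapper:

// Minimal sketch of the round trip used above; not the component code itself.
function setPrompt(modelConfig: string, promptStr: string): string {
  const updated = JSON.parse(modelConfig);
  if (promptStr.trim().length === 0) {
    // a blank input means the user wants no prompt at all
    delete updated[PROMPT_FIELD];
  } else {
    updated[PROMPT_FIELD] = promptStr;
  }
  return JSON.stringify(updated, null, 2);
}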
2 changes: 1 addition & 1 deletion public/utils/utils.ts
@@ -232,7 +232,7 @@ function getTransformedResult(
? input
: mapEntry.value.startsWith(JSONPATH_ROOT_SELECTOR)
? // JSONPath transform
jsonpath.query(input, path)
jsonpath.value(input, path)
: // Standard dot notation
get(input, path);
}
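The one-line bug fix above swaps jsonpath.query for jsonpath.value. With the jsonpath npm package, query returns an array of every match while value returns the first matched value directly, so a single-field ingest transform no longer comes back wrapped in an extra array. A quick illustration with made-up data:

import jsonpath from 'jsonpath';

// Made-up sample document; the shape is only for illustration.
const input = { response: { embedding: [0.1, 0.2, 0.3] } };
const path = '$.response.embedding';

jsonpath.query(input, path); // [[0.1, 0.2, 0.3]] -- all matches, wrapped in an array
jsonpath.value(input, path); // [0.1, 0.2, 0.3]   -- the first matched value itself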
2 changes: 2 additions & 0 deletions server/routes/helpers.ts
@@ -16,6 +16,7 @@ import {
ModelInterface,
ModelOutput,
NO_MODIFICATIONS_FOUND_TEXT,
PROMPT_FIELD,
SearchHit,
WORKFLOW_RESOURCE_TYPE,
WORKFLOW_STATE,
@@ -160,6 +161,7 @@ export function getConnectorsFromResponses(
parameters: {
model: connectorHit._source?.parameters?.model,
dimensions: connectorHit._source?.parameters.dimensions,
[PROMPT_FIELD]: connectorHit?._source?.parameters[PROMPT_FIELD],
},
} as Connector;
});
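Roughly, the helper now carries the prompt parameter through when reducing a connector search hit to the client-side Connector shape. The hit below is fabricated for illustration; note that parameters other than model, dimensions, and prompt are still dropped:

// Fabricated example of a connector search hit _source and the reduced
// parameters the mapping above would produce from it.
const exampleSource = {
  name: 'My Claude connector',
  parameters: {
    model: 'anthropic.claude-v2',
    prompt: 'Answer the question: ${parameters.question}',
    region: 'us-west-2', // extra parameters like this one are not copied
  },
};
// resulting Connector.parameters:
// { model: 'anthropic.claude-v2', dimensions: undefined,
//   prompt: 'Answer the question: ${parameters.question}' }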
