Skip to content

Commit

Permalink
Add dedicated prompt configuration for ML search resp processors (#389)
Browse files Browse the repository at this point in the history
Signed-off-by: Tyler Ohlsen <[email protected]>
  • Loading branch information
ohltyler committed Sep 18, 2024
1 parent 4e2445d commit c1f517e
Show file tree
Hide file tree
Showing 9 changed files with 401 additions and 19 deletions.
40 changes: 39 additions & 1 deletion common/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,12 @@
* SPDX-License-Identifier: Apache-2.0
*/

import { MapEntry, QueryPreset, WORKFLOW_STATE } from './interfaces';
import {
MapEntry,
PromptPreset,
QueryPreset,
WORKFLOW_STATE,
} from './interfaces';
import { customStringify } from './utils';

export const PLUGIN_ID = 'search-studio';
Expand Down Expand Up @@ -409,6 +414,39 @@ export const QUERY_PRESETS = [
},
] as QueryPreset[];

/**
* PROMPT PRESETS
*/
/**
 * Preset prompt for an ML search response processor: asks the model to
 * produce a human-readable summary of a list of document results.
 * Fix: "analyist" → "analyst" (typo was sent verbatim to the model).
 */
export const SUMMARIZE_DOCS_PROMPT =
  "Human: You are a professional data analyst. \
You are given a list of document results. You will \
analyze the data and generate a human-readable summary of the results. If you don't \
know the answer, just say I don't know.\
\n\n Results: <provide some results> \
\n\n Human: Please summarize the results.\
\n\n Assistant:";

/**
 * Preset prompt for an ML search response processor: asks the model to
 * answer a user question grounded in a list of document results.
 * Fix: "analyist" → "analyst" (typo was sent verbatim to the model).
 */
export const QA_WITH_DOCUMENTS_PROMPT =
  "Human: You are a professional data analyst. \
You are given a list of document results, along with a question. You will \
analyze the results and generate a human-readable response to the question, \
based on the results. If you don't know the answer, just say I don't know.\
\n\n Results: <provide some results> \
\n\n Question: <provide some question> \
\n\n Human: Please answer the question using the provided results.\
\n\n Assistant:";

/**
 * Named prompt presets surfaced in the ML processor configuration UI.
 * Declared with an explicit type annotation rather than an `as PromptPreset[]`
 * assertion, so the compiler validates every entry (missing/extra fields are
 * reported instead of silently bypassed).
 */
export const PROMPT_PRESETS: PromptPreset[] = [
  {
    name: 'Summarize documents',
    prompt: SUMMARIZE_DOCS_PROMPT,
  },
  {
    name: 'QA with documents',
    prompt: QA_WITH_DOCUMENTS_PROMPT,
  },
];

/**
* MISCELLANEOUS
*/
Expand Down
5 changes: 5 additions & 0 deletions common/interfaces.ts
Original file line number Diff line number Diff line change
Expand Up @@ -482,6 +482,11 @@ export type QueryPreset = {
query: string;
};

/**
 * A named, reusable LLM prompt template offered as a selectable preset
 * (see PROMPT_PRESETS in common/constants.ts).
 */
export type PromptPreset = {
  // Human-readable label shown in the preset picker.
  name: string;
  // Full prompt template text passed to the model.
  prompt: string;
};

export type QuickConfigureFields = {
modelId?: string;
vectorField?: string;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import {
EuiSpacer,
EuiText,
EuiToolTip,
EuiSmallButton,
} from '@elastic/eui';
import {
IProcessorConfig,
Expand All @@ -30,8 +31,11 @@ import {
IndexMappings,
} from '../../../../../common';
import { MapArrayField, ModelField } from '../input_fields';
import { InputTransformModal } from './input_transform_modal';
import { OutputTransformModal } from './output_transform_modal';
import {
ConfigurePromptModal,
InputTransformModal,
OutputTransformModal,
} from './modals';
import { AppState, getMappings, useAppDispatch } from '../../../../store';
import {
formikToPartialPipeline,
Expand Down Expand Up @@ -108,13 +112,14 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
}
}, [props.uiConfig.search.enrichRequest.processors]);

// advanced transformations modal state
// various modal states
const [isInputTransformModalOpen, setIsInputTransformModalOpen] = useState<
boolean
>(false);
const [isOutputTransformModalOpen, setIsOutputTransformModalOpen] = useState<
boolean
>(false);
const [isPromptModalOpen, setIsPromptModalOpen] = useState<boolean>(false);

// model interface state
const [modelInterface, setModelInterface] = useState<
Expand Down Expand Up @@ -240,6 +245,14 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
onClose={() => setIsOutputTransformModalOpen(false)}
/>
)}
{isPromptModalOpen && (
<ConfigurePromptModal
config={props.config}
baseConfigPath={props.baseConfigPath}
modelInterface={modelInterface}
onClose={() => setIsPromptModalOpen(false)}
/>
)}
<ModelField
field={modelField}
fieldPath={modelFieldPath}
Expand All @@ -249,6 +262,23 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
{!isEmpty(getIn(values, modelFieldPath)?.id) && (
<>
<EuiSpacer size="s" />
{props.context === PROCESSOR_CONTEXT.SEARCH_RESPONSE && (
<>
<EuiText
size="m"
style={{ marginTop: '4px' }}
>{`Configure prompt (Optional)`}</EuiText>
<EuiSpacer size="s" />
<EuiSmallButton
style={{ width: '100px' }}
fill={false}
onClick={() => setIsPromptModalOpen(true)}
>
Configure
</EuiSmallButton>
<EuiSpacer size="l" />
</>
)}
<EuiFlexGroup direction="row">
<EuiFlexItem grow={false}>
<EuiText
Expand Down Expand Up @@ -281,7 +311,6 @@ export function MLProcessorInputs(props: MLProcessorInputsProps) {
</EuiFlexItem>
</EuiFlexGroup>
<EuiSpacer size="s" />
<EuiSpacer size="s" />
<MapArrayField
field={inputMapField}
fieldPath={inputMapFieldPath}
Expand Down
Loading

0 comments on commit c1f517e

Please sign in to comment.