diff --git a/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx b/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx index 82a14eda2f5..e94ff11b897 100644 --- a/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx +++ b/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx @@ -22,8 +22,13 @@ import { import type { ChatData } from '../../resources/types' import { useAtom } from 'jotai' -import { feedbackModalAtom } from '../../resources/atoms' -import { delay } from 'lodash' +import { + chatDataAtom, + feedbackModalAtom, + scrollToBottomAtom, +} from '../../resources/atoms' +import { delay, set } from 'lodash' +import { useFormContext } from 'react-hook-form' interface ChatDisplayProps { chat: ChatData @@ -51,9 +56,20 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element { const { t } = useTranslation('protocol_generator') const [isCopied, setIsCopied] = useState(false) const [, setShowFeedbackModal] = useAtom(feedbackModalAtom) - const { role, reply } = chat + const { setValue } = useFormContext() + const [chatdata] = useAtom(chatDataAtom) + const [scrollToBottom, setScrollToBottom] = useAtom(scrollToBottomAtom) + const { role, reply, requestId } = chat const isUser = role === 'user' + const setInputFieldToCorrespondingRequest = (): void => { + const prompt = chatdata.find( + chat => chat.role === 'user' && chat.requestId === requestId + )?.reply + setScrollToBottom(!scrollToBottom) + setValue('userPrompt', prompt) + } + const handleFileDownload = (): void => { const lastCodeBlock = document.querySelector(`#${chatId}`) const code = lastCodeBlock?.textContent ?? 
'' @@ -97,7 +113,9 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element { {/* text should be markdown so this component will have a package or function to parse markdown */} { - setShowFeedbackModal(true) + setInputFieldToCorrespondingRequest() }} > diff --git a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx index 70ee01560f4..14c254cd01b 100644 --- a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx +++ b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx @@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next' import styled, { css } from 'styled-components' import { useFormContext } from 'react-hook-form' import { useAtom } from 'jotai' +import { v4 as uuidv4 } from 'uuid' import { ALIGN_CENTER, @@ -42,13 +43,17 @@ export function InputPrompt(): JSX.Element { const [submitted, setSubmitted] = useState(false) const userPrompt = watch('userPrompt') ?? '' const { data, isLoading, callApi } = useApiCall() + const [requestId, setRequestId] = useState(uuidv4()) + // This is to autofill the input field for when we navigate to the chat page from the existing/new protocol generator pages useEffect(() => { setValue('userPrompt', chatPromptAtomValue) }, [chatPromptAtomValue, setValue]) const handleClick = async (): Promise => { + setRequestId(uuidv4()) const userInput: ChatData = { + requestId, role: 'user', reply: userPrompt, } @@ -100,6 +105,7 @@ export function InputPrompt(): JSX.Element { if (submitted && data != null && !isLoading) { const { role, reply } = data as ChatData const assistantResponse: ChatData = { + requestId, role, reply, } @@ -166,6 +172,7 @@ const LegacyStyledTextarea = styled.textarea` font-size: ${TYPOGRAPHY.fontSize20}; line-height: ${TYPOGRAPHY.lineHeight24}; padding: 1.2rem 0; + font-size: 1rem; ::placeholder { position: absolute; diff --git a/opentrons-ai-client/src/pages/Chat/index.tsx b/opentrons-ai-client/src/pages/Chat/index.tsx 
index 448170e4030..7bedeb8dffe 100644 --- a/opentrons-ai-client/src/pages/Chat/index.tsx +++ b/opentrons-ai-client/src/pages/Chat/index.tsx @@ -3,12 +3,14 @@ import { DIRECTION_COLUMN, Flex, SPACING } from '@opentrons/components' import { useAtom } from 'jotai' import { useRef, useEffect } from 'react' -import { chatDataAtom, feedbackModalAtom } from '../../resources/atoms' +import { + chatDataAtom, + feedbackModalAtom, + scrollToBottomAtom, +} from '../../resources/atoms' import { ChatDisplay } from '../../molecules/ChatDisplay' import { ChatFooter } from '../../molecules/ChatFooter' -import { PromptGuide } from '../../molecules/PromptGuide' import styled from 'styled-components' -import { Footer } from '../../molecules/Footer' import { FeedbackModal } from '../../molecules/FeedbackModal' export interface InputType { @@ -25,6 +27,7 @@ export function Chat(): JSX.Element | null { const [chatData] = useAtom(chatDataAtom) const scrollRef = useRef(null) const [showFeedbackModal] = useAtom(feedbackModalAtom) + const [scrollToBottom] = useAtom(scrollToBottomAtom) useEffect(() => { if (scrollRef.current != null) @@ -33,7 +36,7 @@ export function Chat(): JSX.Element | null { block: 'nearest', inline: 'nearest', }) - }, [chatData.length]) + }, [chatData.length, scrollToBottom]) return ( @@ -41,7 +44,6 @@ export function Chat(): JSX.Element | null { padding={`${SPACING.spacing40} ${SPACING.spacing40} ${SPACING.spacing20}`} flexDirection={DIRECTION_COLUMN} gridGap={SPACING.spacing12} - height="50vh" width="100%" > - - {chatData.length > 0 ? chatData.map((chat, index) => ( @@ -62,10 +62,9 @@ export function Chat(): JSX.Element | null { )) : null} - -
+ {showFeedbackModal ? : null}
diff --git a/opentrons-ai-client/src/resources/atoms.ts b/opentrons-ai-client/src/resources/atoms.ts index 97eee9b393c..b6f50b89d09 100644 --- a/opentrons-ai-client/src/resources/atoms.ts +++ b/opentrons-ai-client/src/resources/atoms.ts @@ -11,9 +11,12 @@ import type { /** ChatDataAtom is for chat data (user prompt and response from OpenAI API) */ export const chatDataAtom = atom([]) -/** ChatPromptAtom is for the prefilled userprompt when landing on the chat page */ +/** ChatPromptAtom is for the prefilled user prompt when navigating to the chat page from the existing/new protocol pages */ export const chatPromptAtom = atom('') +/** ScrollToBottomAtom is a boolean toggled to trigger the chat page to scroll to the bottom */ +export const scrollToBottomAtom = atom(false) + export const chatHistoryAtom = atom([]) export const feedbackModalAtom = atom(false) diff --git a/opentrons-ai-client/src/resources/types.ts b/opentrons-ai-client/src/resources/types.ts index 410bdfd98a6..cd6be5dc1b7 100644 --- a/opentrons-ai-client/src/resources/types.ts +++ b/opentrons-ai-client/src/resources/types.ts @@ -8,6 +8,8 @@ export interface ChatData { reply: string /** for testing purpose will be removed and this is not used in the app */ fake?: boolean + /** UUID mapping the user's chat prompt request to its corresponding LLM response */ + requestId: string } export interface Chat {