From 69c8cc00bd2c9b38a93c24dfa5866d31edae20ad Mon Sep 17 00:00:00 2001
From: connected-znaim <60662281+connected-znaim@users.noreply.github.com>
Date: Wed, 6 Nov 2024 16:22:54 -0500
Subject: [PATCH] feat(opentrons-ai-client): Chat page to interact with LLM
(#16703)
# Overview
Added a chat page with four new buttons to regenerate the response, send
feedback (not fully functional), copy the code, and download the Python code.
Regenerate: Finds the request corresponding to that LLM response and
fills in the input field so that it can be resubmitted. It also scrolls
down to the input field to show the filled-out prompt.
Send Feedback: Shows a send feedback modal that can in the future send
feedback to the server. This API will be implemented in a subsequent PR.
Copy: Copies the code to the clipboard
Download: Converts the python code in the LLM response into a python
file and downloads it.
## Test Plan and Hands on Testing
Manually tested that regenerate picks the right prompt by adding a few
in the chat.
Manually tested that clicking feedback shows the feedback modal
Manually tested that copy works
Manually tested that download creates the file and downloads it, and
verified the contents to make sure the file was created properly.
## Changelog
## Review requests
## Risk assessment
---------
Co-authored-by: FELIPE BELGINE
---
components/src/icons/icon-data.ts | 10 ++
.../localization/en/protocol_generator.json | 4 +
.../__tests__/ChatDisplay.test.tsx | 19 ++-
.../src/molecules/ChatDisplay/index.tsx | 134 ++++++++++++++----
.../src/molecules/ChatFooter/index.tsx | 3 +-
.../__tests__/FeedbackModal.test.tsx | 36 +++++
.../src/molecules/FeedbackModal/index.tsx | 65 +++++++++
.../src/molecules/InputPrompt/index.tsx | 11 ++
.../organisms/MainContentContainer/index.tsx | 69 ---------
.../Chat/Chat.stories.tsx} | 8 +-
.../Chat/__tests__/Chat.test.tsx} | 17 ++-
opentrons-ai-client/src/pages/Chat/index.tsx | 79 ++++++++---
opentrons-ai-client/src/resources/atoms.ts | 7 +-
opentrons-ai-client/src/resources/types.ts | 2 +
14 files changed, 335 insertions(+), 129 deletions(-)
create mode 100644 opentrons-ai-client/src/molecules/FeedbackModal/__tests__/FeedbackModal.test.tsx
create mode 100644 opentrons-ai-client/src/molecules/FeedbackModal/index.tsx
delete mode 100644 opentrons-ai-client/src/organisms/MainContentContainer/index.tsx
rename opentrons-ai-client/src/{organisms/MainContentContainer/MainContainer.stories.tsx => pages/Chat/Chat.stories.tsx} (59%)
rename opentrons-ai-client/src/{organisms/MainContentContainer/__tests__/MainContentContainer.test.tsx => pages/Chat/__tests__/Chat.test.tsx} (68%)
diff --git a/components/src/icons/icon-data.ts b/components/src/icons/icon-data.ts
index 9f8338c7e87..9c97c5bee91 100644
--- a/components/src/icons/icon-data.ts
+++ b/components/src/icons/icon-data.ts
@@ -685,6 +685,11 @@ export const ICON_DATA_BY_NAME: Record<
'M18.793 34.9163C15.3763 34.6386 12.5013 33.2358 10.168 30.708C7.83464 28.1802 6.66797 25.1802 6.66797 21.708C6.66797 19.5691 7.16102 17.5552 8.14714 15.6663C9.13325 13.7775 10.5152 12.2358 12.293 11.0413L14.0846 12.833C12.5291 13.7497 11.3207 15.0066 10.4596 16.6038C9.59852 18.2011 9.16797 19.9025 9.16797 21.708C9.16797 24.4858 10.0846 26.8886 11.918 28.9163C13.7513 30.9441 16.043 32.1108 18.793 32.4163V34.9163ZM21.293 34.9163V32.4163C24.0707 32.083 26.3624 30.9094 28.168 28.8955C29.9735 26.8816 30.8763 24.4858 30.8763 21.708C30.8763 18.6802 29.8277 16.1177 27.7305 14.0205C25.6332 11.9233 23.0707 10.8747 20.043 10.8747H19.2096L21.7096 13.3747L19.918 15.1663L14.3763 9.62467L19.918 4.08301L21.7096 5.87467L19.2096 8.37467H20.043C23.7652 8.37467 26.918 9.67329 29.5013 12.2705C32.0846 14.8677 33.3763 18.0136 33.3763 21.708C33.3763 25.1802 32.2166 28.1802 29.8971 30.708C27.5777 33.2358 24.7096 34.6386 21.293 34.9163Z',
viewBox: '0 0 40 40',
},
+ reload: {
+ path:
+ 'M15.1406 23.6501C11.9581 23.6501 9.25062 22.5457 7.01813 20.337C4.78562 18.1282 3.66937 15.4326 3.66937 12.2501V12.0007L2.38687 13.2832C2.12562 13.5445 1.79312 13.6751 1.38937 13.6751C0.985625 13.6751 0.653125 13.5445 0.391875 13.2832C0.130625 13.022 0 12.6895 0 12.2857C0 11.882 0.130625 11.5495 0.391875 11.2882L4.09687 7.58322C4.38188 7.29822 4.71438 7.15572 5.09438 7.15572C5.47437 7.15572 5.80687 7.29822 6.09188 7.58322L9.79688 11.2882C10.0581 11.5495 10.1888 11.882 10.1888 12.2857C10.1888 12.6895 10.0581 13.022 9.79688 13.2832C9.53562 13.5445 9.20312 13.6751 8.79937 13.6751C8.39563 13.6751 8.06312 13.5445 7.80188 13.2832L6.51937 12.0007V12.2501C6.51937 14.6251 7.35656 16.6438 9.03094 18.3063C10.7053 19.9688 12.7419 20.8001 15.1406 20.8001C15.5206 20.8001 15.8947 20.7764 16.2628 20.7288C16.6309 20.6813 16.9931 20.5982 17.3494 20.4795C17.7531 20.3607 18.1331 20.3726 18.4894 20.5151C18.8456 20.6576 19.1187 20.907 19.3088 21.2632C19.4988 21.6432 19.5166 22.0173 19.3622 22.3854C19.2078 22.7535 18.9287 22.997 18.525 23.1157C17.9788 23.3057 17.4206 23.4423 16.8506 23.5254C16.2806 23.6085 15.7106 23.6501 15.1406 23.6501ZM14.9981 3.7001C14.6181 3.7001 14.2441 3.72385 13.8759 3.77135C13.5078 3.81885 13.1456 3.90197 12.7894 4.02072C12.3856 4.13947 11.9997 4.1276 11.6316 3.9851C11.2634 3.8426 10.9844 3.59322 10.7944 3.23697C10.6044 2.88072 10.5866 2.51854 10.7409 2.15041C10.8953 1.78229 11.1625 1.53885 11.5425 1.4201C12.1125 1.2301 12.6825 1.0876 13.2525 0.992598C13.8225 0.897598 14.4044 0.850098 14.9981 0.850098C18.1806 0.850098 20.8881 1.95447 23.1206 4.16322C25.3531 6.37197 26.4694 9.0676 26.4694 12.2501V12.4995L27.7519 11.217C28.0131 10.9557 28.3456 10.8251 28.7494 10.8251C29.1531 10.8251 29.4856 10.9557 29.7469 11.217C30.0081 11.4782 30.1388 11.8107 30.1388 12.2145C30.1388 12.6182 30.0081 12.9507 29.7469 13.212L26.0419 16.917C25.7569 17.202 25.4244 17.3445 25.0444 17.3445C24.6644 17.3445 24.3319 17.202 24.0469 16.917L20.3419 13.212C20.0806 12.9507 19.95 12.6182 
19.95 12.2145C19.95 11.8107 20.0806 11.4782 20.3419 11.217C20.6031 10.9557 20.9356 10.8251 21.3394 10.8251C21.7431 10.8251 22.0756 10.9557 22.3369 11.217L23.6194 12.4995V12.2501C23.6194 9.8751 22.7822 7.85635 21.1078 6.19385C19.4334 4.53135 17.3969 3.7001 14.9981 3.7001Z',
+ viewBox: '0 0 31 24',
+ },
reticle: {
path:
'M8.01487 8.84912C8.47511 8.84912 8.84821 8.47603 8.84821 8.01579C8.84821 7.55555 8.47511 7.18245 8.01487 7.18245C7.55464 7.18245 7.18154 7.55555 7.18154 8.01579C7.18154 8.47603 7.55464 8.84912 8.01487 8.84912Z M8.66654 0.928711V2.36089C11.27 2.66533 13.3354 4.73075 13.6398 7.33418H15.072V8.66751H13.6398C13.3354 11.2709 11.27 13.3363 8.66654 13.6408V15.073H7.3332V13.6408C4.72979 13.3363 2.66437 11.2709 2.35992 8.66751H0.927734V7.33418H2.35992C2.66436 4.73075 4.72978 2.66533 7.3332 2.36089V0.928711H8.66654ZM12.2944 7.33418H11.6184C11.2502 7.33418 10.9518 7.63266 10.9518 8.00085C10.9518 8.36904 11.2502 8.66751 11.6184 8.66751H12.2944C12.0071 10.5336 10.5326 12.008 8.66654 12.2953V11.6194C8.66654 11.2512 8.36806 10.9527 7.99987 10.9527C7.63168 10.9527 7.3332 11.2512 7.3332 11.6194V12.2953C5.46716 12.008 3.99268 10.5336 3.70536 8.66751H4.38132C4.74951 8.66751 5.04798 8.36904 5.04798 8.00085C5.04798 7.63266 4.74951 7.33418 4.38132 7.33418H3.70536C3.99267 5.46812 5.46715 3.99364 7.3332 3.70632V4.38229C7.3332 4.75048 7.63168 5.04896 7.99987 5.04896C8.36806 5.04896 8.66654 4.75048 8.66654 4.38229V3.70632C10.5326 3.99364 12.0071 5.46812 12.2944 7.33418Z',
@@ -729,6 +734,11 @@ export const ICON_DATA_BY_NAME: Record<
'M10.8307 8.3335L1.66406 31.6668H4.78906L7.16406 25.4168H17.8307L20.2057 31.6668H23.3307L14.1641 8.3335H10.8307ZM16.8307 22.7502H8.16406L12.4141 11.4585H12.5807L16.8307 22.7502ZM30.1577 16.6668L24.1641 31.6668H26.2073L27.7602 27.649H34.7346L36.2875 31.6668H38.3307L32.3371 16.6668H30.1577ZM34.0807 25.9347H28.4141L31.1929 18.6758H31.3019L34.0807 25.9347Z',
viewBox: '0 0 40 40',
},
+ 'thumbs-down': {
+ path:
+ 'M2.99062 18.9525C2.23062 18.9525 1.56562 18.6675 0.995625 18.0975C0.425625 17.5275 0.140625 16.8625 0.140625 16.1025V13.2525C0.140625 13.0862 0.164375 12.9081 0.211875 12.7181C0.259375 12.5281 0.306875 12.35 0.354375 12.1837L4.62937 2.13749C4.84312 1.66249 5.19938 1.25874 5.69812 0.92624C6.19688 0.59374 6.71938 0.42749 7.26562 0.42749H22.9406V18.9525L14.3906 27.4312C14.0344 27.7875 13.6128 27.9953 13.1259 28.0547C12.6391 28.1141 12.17 28.025 11.7188 27.7875C11.2675 27.55 10.935 27.2175 10.7212 26.79C10.5075 26.3625 10.46 25.9231 10.5787 25.4719L12.1819 18.9525H2.99062ZM20.0906 17.7412V3.27749H7.26562L2.99062 13.2525V16.1025H15.8156L13.8919 23.94L20.0906 17.7412ZM27.2156 0.42749C27.9994 0.42749 28.6703 0.706553 29.2284 1.26468C29.7866 1.8228 30.0656 2.49374 30.0656 3.27749V16.1025C30.0656 16.8862 29.7866 17.5572 29.2284 18.1153C28.6703 18.6734 27.9994 18.9525 27.2156 18.9525H22.9406V16.1025H27.2156V3.27749H22.9406V0.42749H27.2156Z',
+ viewBox: '0 0 31 29',
+ },
'tip-position': {
path:
'M10.75 2H9.25V4.75H10.75V2ZM10.75 9.25V7.25H9.25V9.25H7.25V10.75H9.25V12.75H10.75V10.75H12.75V9.25H10.75ZM10.75 18V15.25H9.25V18H10.75ZM2 9.25V10.75H4.75V9.25H2ZM18 9.25H15.25V10.75H18V9.25Z',
diff --git a/opentrons-ai-client/src/assets/localization/en/protocol_generator.json b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json
index e3e23b4f0d3..e321b939ade 100644
--- a/opentrons-ai-client/src/assets/localization/en/protocol_generator.json
+++ b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json
@@ -3,6 +3,7 @@
"api": "API: An API level is 2.15",
"application": "Application: Your protocol's name, describing what it does.",
"commands": "Commands: List the protocol's steps, specifying quantities in microliters (uL) and giving exact source and destination locations.",
+ "cancel": "Cancel",
"copyright": "Copyright © 2024 Opentrons",
"copy_code": "Copy code",
"choose_file": "Choose file",
@@ -51,6 +52,9 @@
"robot_type": "Robot type: Choose the OT-2 or Opentrons Flex.",
"robot": "Robot: OT-2.",
"share_your_thoughts": "Share your thoughts here",
+ "send_feedback": "Send feedback",
+ "send_feedback_input_title": "Share why the response was not helpful",
+ "send_feedback_to_opentrons": "Send feedback to Opentrons",
"side_panel_body": "Write a prompt in natural language to generate a Reagent Transfer or a PCR protocol for the OT-2 or Opentrons Flex using the Opentrons Python Protocol API.",
"side_panel_header": "Use natural language to generate protocols with OpentronsAI powered by OpenAI",
"simulate_description": "Once OpentronsAI has written your protocol, type `simulate` in the prompt box to try it out.",
diff --git a/opentrons-ai-client/src/molecules/ChatDisplay/__tests__/ChatDisplay.test.tsx b/opentrons-ai-client/src/molecules/ChatDisplay/__tests__/ChatDisplay.test.tsx
index afec6d800cc..7836d18f90f 100644
--- a/opentrons-ai-client/src/molecules/ChatDisplay/__tests__/ChatDisplay.test.tsx
+++ b/opentrons-ai-client/src/molecules/ChatDisplay/__tests__/ChatDisplay.test.tsx
@@ -5,9 +5,24 @@ import { renderWithProviders } from '../../../__testing-utils__'
import { i18n } from '../../../i18n'
import { ChatDisplay } from '../index'
+import { useForm, FormProvider } from 'react-hook-form'
+
+const RenderChatDisplay = (props: React.ComponentProps) => {
+ const methods = useForm({
+ defaultValues: {},
+ })
+
+ return (
+
+
+
+ )
+}
const render = (props: React.ComponentProps) => {
- return renderWithProviders(, { i18nInstance: i18n })
+ return renderWithProviders(, {
+ i18nInstance: i18n,
+ })
}
describe('ChatDisplay', () => {
@@ -18,6 +33,7 @@ describe('ChatDisplay', () => {
chat: {
role: 'assistant',
reply: 'mock text from the backend',
+ requestId: '12351234',
},
chatId: 'mockId',
}
@@ -35,6 +51,7 @@ describe('ChatDisplay', () => {
chat: {
role: 'user',
reply: 'mock text from user input',
+ requestId: '12351234',
},
chatId: 'mockId',
}
diff --git a/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx b/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx
index dd39d415acc..22dbee37f1a 100644
--- a/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx
+++ b/opentrons-ai-client/src/molecules/ChatDisplay/index.tsx
@@ -1,4 +1,4 @@
-import { useState } from 'react'
+import { useEffect, useState } from 'react'
import styled from 'styled-components'
import { useTranslation } from 'react-i18next'
import Markdown from 'react-markdown'
@@ -12,28 +12,81 @@ import {
JUSTIFY_CENTER,
JUSTIFY_FLEX_END,
JUSTIFY_FLEX_START,
- POSITION_ABSOLUTE,
POSITION_RELATIVE,
- PrimaryButton,
SPACING,
LegacyStyledText,
TYPOGRAPHY,
+ StyledText,
+ DIRECTION_ROW,
OVERFLOW_AUTO,
} from '@opentrons/components'
import type { ChatData } from '../../resources/types'
+import { useAtom } from 'jotai'
+import {
+ chatDataAtom,
+ feedbackModalAtom,
+ scrollToBottomAtom,
+} from '../../resources/atoms'
+import { delay } from 'lodash'
+import { useFormContext } from 'react-hook-form'
interface ChatDisplayProps {
chat: ChatData
chatId: string
}
+const HoverShadow = styled(Flex)`
+ alignitems: ${ALIGN_CENTER};
+ justifycontent: ${JUSTIFY_CENTER};
+ padding: ${SPACING.spacing8};
+ transition: box-shadow 0.3s ease;
+ border-radius: ${BORDERS.borderRadius8};
+
+ &:hover {
+ box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
+ border-radius: ${BORDERS.borderRadius8};
+ }
+`
+
+const StyledIcon = styled(Icon)`
+ color: ${COLORS.blue50};
+`
+
export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element {
const { t } = useTranslation('protocol_generator')
const [isCopied, setIsCopied] = useState(false)
- const { role, reply } = chat
+ const [, setShowFeedbackModal] = useAtom(feedbackModalAtom)
+ const { setValue } = useFormContext()
+ const [chatdata] = useAtom(chatDataAtom)
+ const [scrollToBottom, setScrollToBottom] = useAtom(scrollToBottomAtom)
+ const { role, reply, requestId } = chat
const isUser = role === 'user'
+ const setInputFieldToCorrespondingRequest = (): void => {
+ const prompt = chatdata.find(
+ chat => chat.role === 'user' && chat.requestId === requestId
+ )?.reply
+ setScrollToBottom(!scrollToBottom)
+ setValue('userPrompt', prompt)
+ }
+
+ const handleFileDownload = (): void => {
+ const lastCodeBlock = document.querySelector(`#${chatId}`)
+ const code = lastCodeBlock?.textContent ?? ''
+ const blobParts: BlobPart[] = [code]
+
+ const file = new File(blobParts, 'OpentronsAI.py', { type: 'text/python' })
+ const url = URL.createObjectURL(file)
+ const a = document.createElement('a')
+
+ document.body.appendChild(a)
+ a.href = url
+ a.download = 'OpentronsAI.py'
+ a.click()
+ window.URL.revokeObjectURL(url)
+ }
+
const handleClickCopy = async (): Promise => {
const lastCodeBlock = document.querySelector(`#${chatId}`)
const code = lastCodeBlock?.textContent ?? ''
@@ -41,28 +94,32 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element {
setIsCopied(true)
}
+ useEffect(() => {
+ if (isCopied)
+ delay(() => {
+ setIsCopied(false)
+ }, 2000)
+ }, [isCopied])
+
function CodeText(props: JSX.IntrinsicAttributes): JSX.Element {
return
}
return (
-
+
-
+
{isUser ? t('you') : t('opentronsai')}
-
+
{/* text should be markdown so this component will have a package or function to parse markdown */}
{!isUser ? (
-
-
- {
+ setInputFieldToCorrespondingRequest()
+ }}
+ >
+
+
+ {
+ setShowFeedbackModal(true)
+ }}
+ >
+
+
+ {
+ await handleClickCopy()
+ }}
+ >
+
-
-
+
+ {
+ handleFileDownload()
+ }}
+ >
+
+
+
) : null}
diff --git a/opentrons-ai-client/src/molecules/ChatFooter/index.tsx b/opentrons-ai-client/src/molecules/ChatFooter/index.tsx
index b477da1dacd..fef7596f6f4 100644
--- a/opentrons-ai-client/src/molecules/ChatFooter/index.tsx
+++ b/opentrons-ai-client/src/molecules/ChatFooter/index.tsx
@@ -15,9 +15,9 @@ export function ChatFooter(): JSX.Element {
return (
@@ -32,5 +32,4 @@ const DISCLAIMER_TEXT_STYLE = css`
font-size: ${TYPOGRAPHY.fontSize20};
line-height: ${TYPOGRAPHY.lineHeight24};
text-align: ${TYPOGRAPHY.textAlignCenter};
- padding-bottom: ${SPACING.spacing24};
`
diff --git a/opentrons-ai-client/src/molecules/FeedbackModal/__tests__/FeedbackModal.test.tsx b/opentrons-ai-client/src/molecules/FeedbackModal/__tests__/FeedbackModal.test.tsx
new file mode 100644
index 00000000000..15d17938e93
--- /dev/null
+++ b/opentrons-ai-client/src/molecules/FeedbackModal/__tests__/FeedbackModal.test.tsx
@@ -0,0 +1,36 @@
+import { FeedbackModal } from '..'
+import { renderWithProviders } from '../../../__testing-utils__'
+import { screen } from '@testing-library/react'
+import { describe, it, expect } from 'vitest'
+import { i18n } from '../../../i18n'
+import { feedbackModalAtom } from '../../../resources/atoms'
+
+const initialValues: Array<[any, any]> = [[feedbackModalAtom, true]]
+
+const render = (): ReturnType => {
+ return renderWithProviders(, {
+ i18nInstance: i18n,
+ initialValues,
+ })
+}
+
+describe('FeedbackModal', () => {
+ it('should render Feedback modal', () => {
+ render()
+ screen.getByText('Send feedback to Opentrons')
+ screen.getByText('Share why the response was not helpful')
+ screen.getByText('Cancel')
+ screen.getByText('Send feedback')
+ })
+
+ // should move this test to the chat page
+ it.skip('should set the showFeedbackModel atom to be false when cancel button is clicked', () => {
+ render()
+ expect(feedbackModalAtom.init).toBe(true)
+
+ const cancelButton = screen.getByText('Cancel')
+ cancelButton.click()
+ // check if the feedbackModalAtom is set to false
+ expect(feedbackModalAtom.read).toBe(false)
+ })
+})
diff --git a/opentrons-ai-client/src/molecules/FeedbackModal/index.tsx b/opentrons-ai-client/src/molecules/FeedbackModal/index.tsx
new file mode 100644
index 00000000000..e65aa7a504c
--- /dev/null
+++ b/opentrons-ai-client/src/molecules/FeedbackModal/index.tsx
@@ -0,0 +1,65 @@
+import {
+ Modal,
+ Flex,
+ SPACING,
+ ALIGN_FLEX_END,
+ SecondaryButton,
+ StyledText,
+ PrimaryButton,
+ InputField,
+} from '@opentrons/components'
+import { useAtom } from 'jotai'
+import { useTranslation } from 'react-i18next'
+import { feedbackModalAtom } from '../../resources/atoms'
+import { useState } from 'react'
+
+export function FeedbackModal(): JSX.Element {
+ const { t } = useTranslation('protocol_generator')
+
+ const [feedbackValue, setFeedbackValue] = useState('')
+ const [, setShowFeedbackModal] = useAtom(feedbackModalAtom)
+
+ return (
+ {
+ setShowFeedbackModal(false)
+ }}
+ footer={
+
+ {
+ setShowFeedbackModal(false)
+ }}
+ >
+
+ {t(`cancel`)}
+
+
+ {
+ setShowFeedbackModal(false)
+ }}
+ >
+
+ {t(`send_feedback`)}
+
+
+
+ }
+ >
+ {
+ setFeedbackValue(event.target.value as string)
+ }}
+ >
+
+ )
+}
diff --git a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
index 70ee01560f4..c87d24f1975 100644
--- a/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
+++ b/opentrons-ai-client/src/molecules/InputPrompt/index.tsx
@@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
import styled, { css } from 'styled-components'
import { useFormContext } from 'react-hook-form'
import { useAtom } from 'jotai'
+import { v4 as uuidv4 } from 'uuid'
import {
ALIGN_CENTER,
@@ -42,13 +43,21 @@ export function InputPrompt(): JSX.Element {
const [submitted, setSubmitted] = useState(false)
const userPrompt = watch('userPrompt') ?? ''
const { data, isLoading, callApi } = useApiCall()
+ const [requestId, setRequestId] = useState(uuidv4())
+
+ // This is to autofill the input field for when we navigate to the chat page from the existing/new protocol generator pages
+ useEffect(() => {
+ setValue('userPrompt', chatPromptAtomValue)
+ }, [chatPromptAtomValue, setValue])
useEffect(() => {
setValue('userPrompt', chatPromptAtomValue)
}, [chatPromptAtomValue, setValue])
const handleClick = async (): Promise => {
+ setRequestId(uuidv4())
const userInput: ChatData = {
+ requestId,
role: 'user',
reply: userPrompt,
}
@@ -100,6 +109,7 @@ export function InputPrompt(): JSX.Element {
if (submitted && data != null && !isLoading) {
const { role, reply } = data as ChatData
const assistantResponse: ChatData = {
+ requestId,
role,
reply,
}
@@ -166,6 +176,7 @@ const LegacyStyledTextarea = styled.textarea`
font-size: ${TYPOGRAPHY.fontSize20};
line-height: ${TYPOGRAPHY.lineHeight24};
padding: 1.2rem 0;
+ font-size: 1rem;
::placeholder {
position: absolute;
diff --git a/opentrons-ai-client/src/organisms/MainContentContainer/index.tsx b/opentrons-ai-client/src/organisms/MainContentContainer/index.tsx
deleted file mode 100644
index cc2ad54bc39..00000000000
--- a/opentrons-ai-client/src/organisms/MainContentContainer/index.tsx
+++ /dev/null
@@ -1,69 +0,0 @@
-import { useRef, useEffect } from 'react'
-import styled from 'styled-components'
-import { useAtom } from 'jotai'
-
-import {
- DIRECTION_COLUMN,
- Flex,
- OVERFLOW_AUTO,
- SPACING,
-} from '@opentrons/components'
-import { PromptGuide } from '../../molecules/PromptGuide'
-import { ChatDisplay } from '../../molecules/ChatDisplay'
-import { ChatFooter } from '../../molecules/ChatFooter'
-import { chatDataAtom } from '../../resources/atoms'
-
-export function MainContentContainer(): JSX.Element {
- const [chatData] = useAtom(chatDataAtom)
- const scrollRef = useRef(null)
-
- useEffect(() => {
- if (scrollRef.current != null)
- scrollRef.current.scrollIntoView({
- behavior: 'smooth',
- block: 'nearest',
- inline: 'nearest',
- })
- }, [chatData.length])
-
- return (
-
-
-
-
-
-
- {chatData.length > 0
- ? chatData.map((chat, index) => (
-
- ))
- : null}
-
-
-
-
-
-
-
- )
-}
-
-const ChatDataContainer = styled(Flex)`
- flex-direction: ${DIRECTION_COLUMN};
- width: 100%;
-`
diff --git a/opentrons-ai-client/src/organisms/MainContentContainer/MainContainer.stories.tsx b/opentrons-ai-client/src/pages/Chat/Chat.stories.tsx
similarity index 59%
rename from opentrons-ai-client/src/organisms/MainContentContainer/MainContainer.stories.tsx
rename to opentrons-ai-client/src/pages/Chat/Chat.stories.tsx
index 4f8fe5739fd..3a4d0d674d4 100644
--- a/opentrons-ai-client/src/organisms/MainContentContainer/MainContainer.stories.tsx
+++ b/opentrons-ai-client/src/pages/Chat/Chat.stories.tsx
@@ -1,12 +1,12 @@
import { I18nextProvider } from 'react-i18next'
import { i18n } from '../../i18n'
-import { MainContentContainer as MainContentContainerComponent } from './index'
+import { Chat as ChatComponent } from './index'
import type { Meta, StoryObj } from '@storybook/react'
-const meta: Meta = {
+const meta: Meta = {
title: 'AI/organisms/ChatContainer',
- component: MainContentContainerComponent,
+ component: ChatComponent,
decorators: [
Story => (
@@ -16,5 +16,5 @@ const meta: Meta = {
],
}
export default meta
-type Story = StoryObj
+type Story = StoryObj
export const ChatContainer: Story = {}
diff --git a/opentrons-ai-client/src/organisms/MainContentContainer/__tests__/MainContentContainer.test.tsx b/opentrons-ai-client/src/pages/Chat/__tests__/Chat.test.tsx
similarity index 68%
rename from opentrons-ai-client/src/organisms/MainContentContainer/__tests__/MainContentContainer.test.tsx
rename to opentrons-ai-client/src/pages/Chat/__tests__/Chat.test.tsx
index d3014a5895b..77874086534 100644
--- a/opentrons-ai-client/src/organisms/MainContentContainer/__tests__/MainContentContainer.test.tsx
+++ b/opentrons-ai-client/src/pages/Chat/__tests__/Chat.test.tsx
@@ -4,7 +4,7 @@ import { renderWithProviders } from '../../../__testing-utils__'
import { i18n } from '../../../i18n'
import { PromptGuide } from '../../../molecules/PromptGuide'
import { ChatFooter } from '../../../molecules/ChatFooter'
-import { MainContentContainer } from '../index'
+import { Chat } from '../index'
vi.mock('../../../molecules/PromptGuide')
vi.mock('../../../molecules/ChatFooter')
@@ -12,20 +12,27 @@ vi.mock('../../../molecules/ChatFooter')
window.HTMLElement.prototype.scrollIntoView = vi.fn()
const render = (): ReturnType => {
- return renderWithProviders(, {
+ return renderWithProviders(, {
i18nInstance: i18n,
})
}
-describe('MainContentContainer', () => {
+describe('Chat', () => {
beforeEach(() => {
vi.mocked(PromptGuide).mockReturnValue(mock PromptGuide
)
vi.mocked(ChatFooter).mockReturnValue(mock ChatFooter
)
})
- it('should render prompt guide and text', () => {
+ it('should render footer', () => {
render()
- screen.getByText('mock PromptGuide')
screen.getByText('mock ChatFooter')
})
+
+ it.skip('should not show the feedback modal when loading the page', () => {
+ render()
+ screen.getByText('Send feedback to Opentrons')
+ screen.getByText('Share why the response was not helpful')
+ screen.getByText('Cancel')
+ screen.getByText('Send feedback')
+ })
})
diff --git a/opentrons-ai-client/src/pages/Chat/index.tsx b/opentrons-ai-client/src/pages/Chat/index.tsx
index 6d9492038b8..7bedeb8dffe 100644
--- a/opentrons-ai-client/src/pages/Chat/index.tsx
+++ b/opentrons-ai-client/src/pages/Chat/index.tsx
@@ -1,13 +1,17 @@
import { useForm, FormProvider } from 'react-hook-form'
-import {
- DIRECTION_COLUMN,
- DIRECTION_ROW,
- Flex,
- JUSTIFY_CENTER,
- POSITION_RELATIVE,
-} from '@opentrons/components'
+import { DIRECTION_COLUMN, Flex, SPACING } from '@opentrons/components'
-import { MainContentContainer } from '../../organisms/MainContentContainer'
+import { useAtom } from 'jotai'
+import { useRef, useEffect } from 'react'
+import {
+ chatDataAtom,
+ feedbackModalAtom,
+ scrollToBottomAtom,
+} from '../../resources/atoms'
+import { ChatDisplay } from '../../molecules/ChatDisplay'
+import { ChatFooter } from '../../molecules/ChatFooter'
+import styled from 'styled-components'
+import { FeedbackModal } from '../../molecules/FeedbackModal'
export interface InputType {
userPrompt: string
@@ -20,19 +24,54 @@ export function Chat(): JSX.Element | null {
},
})
+ const [chatData] = useAtom(chatDataAtom)
+ const scrollRef = useRef(null)
+ const [showFeedbackModal] = useAtom(feedbackModalAtom)
+ const [scrollToBottom] = useAtom(scrollToBottomAtom)
+
+ useEffect(() => {
+ if (scrollRef.current != null)
+ scrollRef.current.scrollIntoView({
+ behavior: 'smooth',
+ block: 'nearest',
+ inline: 'nearest',
+ })
+ }, [chatData.length, scrollToBottom])
+
return (
-
-
-
- {/* */}
-
+
+
+
+
+ {chatData.length > 0
+ ? chatData.map((chat, index) => (
+
+ ))
+ : null}
+
-
-
+
+
+ {showFeedbackModal ? : null}
+
+
)
}
+
+const ChatDataContainer = styled(Flex)`
+ flex-direction: ${DIRECTION_COLUMN};
+ width: 100%;
+`
diff --git a/opentrons-ai-client/src/resources/atoms.ts b/opentrons-ai-client/src/resources/atoms.ts
index b92f3e848f5..ffacfe7afd8 100644
--- a/opentrons-ai-client/src/resources/atoms.ts
+++ b/opentrons-ai-client/src/resources/atoms.ts
@@ -11,11 +11,16 @@ import type {
/** ChatDataAtom is for chat data (user prompt and response from OpenAI API) */
export const chatDataAtom = atom([])
-/** ChatPromptAtom is for the prefilled userprompt when landing on the chat page */
+/** ChatPromptAtom is for the prefilled userprompt when navigating to the chat page from existing/new protocol pages */
export const chatPromptAtom = atom('')
+/** Scroll to bottom of chat atom */
+export const scrollToBottomAtom = atom(false)
+
export const chatHistoryAtom = atom([])
+export const feedbackModalAtom = atom(false)
+
export const tokenAtom = atom(null)
export const mixpanelAtom = atom({
diff --git a/opentrons-ai-client/src/resources/types.ts b/opentrons-ai-client/src/resources/types.ts
index 410bdfd98a6..cd6be5dc1b7 100644
--- a/opentrons-ai-client/src/resources/types.ts
+++ b/opentrons-ai-client/src/resources/types.ts
@@ -8,6 +8,8 @@ export interface ChatData {
reply: string
/** for testing purpose will be removed and this is not used in the app */
fake?: boolean
+ /** uuid to map the chat prompt request to the response from the LLM */
+ requestId: string
}
export interface Chat {