Skip to content

Commit

Permalink
added ability to regenerate response by clicking the button and scrol…
Browse files Browse the repository at this point in the history
…l down automatically when clicked
  • Loading branch information
connected-znaim committed Nov 4, 2024
1 parent 94e5493 commit 41556f4
Show file tree
Hide file tree
Showing 5 changed files with 44 additions and 15 deletions.
28 changes: 23 additions & 5 deletions opentrons-ai-client/src/molecules/ChatDisplay/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,13 @@ import {

import type { ChatData } from '../../resources/types'
import { useAtom } from 'jotai'
import { feedbackModalAtom } from '../../resources/atoms'
import { delay } from 'lodash'
import {
chatDataAtom,
feedbackModalAtom,
scrollToBottomAtom,
} from '../../resources/atoms'
import { delay, set } from 'lodash'

Check failure on line 30 in opentrons-ai-client/src/molecules/ChatDisplay/index.tsx

View workflow job for this annotation

GitHub Actions / js checks

'set' is defined but never used
import { useFormContext } from 'react-hook-form'

interface ChatDisplayProps {
chat: ChatData
Expand Down Expand Up @@ -51,9 +56,20 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element {
const { t } = useTranslation('protocol_generator')
const [isCopied, setIsCopied] = useState<boolean>(false)
const [, setShowFeedbackModal] = useAtom(feedbackModalAtom)
const { role, reply } = chat
const { setValue } = useFormContext()
const [chatdata] = useAtom(chatDataAtom)
const [scrollToBottom, setScrollToBottom] = useAtom(scrollToBottomAtom)
const { role, reply, requestId } = chat
const isUser = role === 'user'

const setInputFieldToCorrespondingRequest = (): void => {
const prompt = chatdata.find(
chat => chat.role === 'user' && chat.requestId === requestId
)?.reply
setScrollToBottom(!scrollToBottom)
setValue('userPrompt', prompt)
}

const handleFileDownload = (): void => {
const lastCodeBlock = document.querySelector(`#${chatId}`)
const code = lastCodeBlock?.textContent ?? ''
Expand Down Expand Up @@ -97,7 +113,9 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element {
</Flex>
{/* text should be markdown so this component will have a package or function to parse markdown */}
<Flex
padding={SPACING.spacing20}
padding={`${SPACING.spacing40} ${SPACING.spacing40} ${
isUser ? SPACING.spacing40 : SPACING.spacing12
} ${SPACING.spacing40}`}
backgroundColor={isUser ? COLORS.blue30 : COLORS.grey30}
data-testid={`ChatDisplay_from_${isUser ? 'user' : 'backend'}`}
borderRadius={SPACING.spacing12}
Expand Down Expand Up @@ -129,7 +147,7 @@ export function ChatDisplay({ chat, chatId }: ChatDisplayProps): JSX.Element {
>
<HoverShadow
onClick={() => {
setShowFeedbackModal(true)
setInputFieldToCorrespondingRequest()
}}
>
<StyledIcon size={SPACING.spacing20} name={'reload'} />
Expand Down
7 changes: 7 additions & 0 deletions opentrons-ai-client/src/molecules/InputPrompt/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
import styled, { css } from 'styled-components'
import { useFormContext } from 'react-hook-form'
import { useAtom } from 'jotai'
import { v4 as uuidv4 } from 'uuid'

import {
ALIGN_CENTER,
Expand Down Expand Up @@ -42,13 +43,17 @@ export function InputPrompt(): JSX.Element {
const [submitted, setSubmitted] = useState<boolean>(false)
const userPrompt = watch('userPrompt') ?? ''
const { data, isLoading, callApi } = useApiCall()
const [requestId, setRequestId] = useState<string>(uuidv4())

// This is to autofill the input field for when we navigate to the chat page from the existing/new protocol generator pages
useEffect(() => {
setValue('userPrompt', chatPromptAtomValue)
}, [chatPromptAtomValue, setValue])

const handleClick = async (): Promise<void> => {
setRequestId(uuidv4())
const userInput: ChatData = {
requestId,
role: 'user',
reply: userPrompt,
}
Expand Down Expand Up @@ -100,6 +105,7 @@ export function InputPrompt(): JSX.Element {
if (submitted && data != null && !isLoading) {
const { role, reply } = data as ChatData
const assistantResponse: ChatData = {
requestId,
role,
reply,
}
Expand Down Expand Up @@ -166,6 +172,7 @@ const LegacyStyledTextarea = styled.textarea`
font-size: ${TYPOGRAPHY.fontSize20};
line-height: ${TYPOGRAPHY.lineHeight24};
padding: 1.2rem 0;
font-size: 1rem;
::placeholder {
position: absolute;
Expand Down
17 changes: 8 additions & 9 deletions opentrons-ai-client/src/pages/Chat/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@ import { DIRECTION_COLUMN, Flex, SPACING } from '@opentrons/components'

import { useAtom } from 'jotai'
import { useRef, useEffect } from 'react'
import { chatDataAtom, feedbackModalAtom } from '../../resources/atoms'
import {
chatDataAtom,
feedbackModalAtom,
scrollToBottomAtom,
} from '../../resources/atoms'
import { ChatDisplay } from '../../molecules/ChatDisplay'
import { ChatFooter } from '../../molecules/ChatFooter'
import { PromptGuide } from '../../molecules/PromptGuide'
import styled from 'styled-components'
import { Footer } from '../../molecules/Footer'
import { FeedbackModal } from '../../molecules/FeedbackModal'

export interface InputType {
Expand All @@ -25,6 +27,7 @@ export function Chat(): JSX.Element | null {
const [chatData] = useAtom(chatDataAtom)
const scrollRef = useRef<HTMLSpanElement | null>(null)
const [showFeedbackModal] = useAtom(feedbackModalAtom)
const [scrollToBottom] = useAtom(scrollToBottomAtom)

useEffect(() => {
if (scrollRef.current != null)
Expand All @@ -33,24 +36,21 @@ export function Chat(): JSX.Element | null {
block: 'nearest',
inline: 'nearest',
})
}, [chatData.length])
}, [chatData.length, scrollToBottom])

return (
<FormProvider {...methods}>
<Flex
padding={`${SPACING.spacing40} ${SPACING.spacing40} ${SPACING.spacing20}`}
flexDirection={DIRECTION_COLUMN}
gridGap={SPACING.spacing12}
height="50vh"
width="100%"
>
<Flex
width="100%"
flexDirection={DIRECTION_COLUMN}
gridGap={SPACING.spacing24}
>
<PromptGuide />

<ChatDataContainer>
{chatData.length > 0
? chatData.map((chat, index) => (
Expand All @@ -62,10 +62,9 @@ export function Chat(): JSX.Element | null {
))
: null}
</ChatDataContainer>
<span ref={scrollRef} />
</Flex>
<ChatFooter />
<Footer></Footer>
<span ref={scrollRef} />
{showFeedbackModal ? <FeedbackModal /> : null}
</Flex>
</FormProvider>
Expand Down
5 changes: 4 additions & 1 deletion opentrons-ai-client/src/resources/atoms.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,12 @@ import type {
/** ChatDataAtom is for chat data (user prompt and response from OpenAI API) */
export const chatDataAtom = atom<ChatData[]>([])

/** ChatPromptAtom is for the prefilled userprompt when landing on the chat page */
/** ChatPromptAtom is for the prefilled user prompt when navigating to the chat page from the existing/new protocol pages */
export const chatPromptAtom = atom<string>('')

/** Atom that triggers an auto-scroll to the bottom of the chat when its value is toggled */
export const scrollToBottomAtom = atom<boolean>(false)

export const chatHistoryAtom = atom<Chat[]>([])

export const feedbackModalAtom = atom<boolean>(false)
Expand Down
2 changes: 2 additions & 0 deletions opentrons-ai-client/src/resources/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ export interface ChatData {
reply: string
/** for testing purposes only; will be removed — not used in the app */
fake?: boolean
/** uuid to map the chat prompt request to the response from the LLM */
requestId: string
}

export interface Chat {
Expand Down

0 comments on commit 41556f4

Please sign in to comment.