Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support for JSON mode #498

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions public/locales/en/model.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@
"label": "Frequency Penalty",
"description": "Number between -2.0 and 2.0. Positive values penalise new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. (Default: 0)"
},
"response_format": {
"label": "Response Format",
"description": "An object specifying the format that the model must output. Setting to { \"type\": \"json_object\" } enables JSON mode, which guarantees the message the model generates is valid JSON. Learn more at https://platform.openai.com/docs/api-reference/chat/create#chat-create-response_format"
},
"defaultChatConfig": "Default Chat Config",
"defaultSystemMessage": "Default System Message",
"resetToDefault": "Reset To Default"
Expand Down
67 changes: 65 additions & 2 deletions src/components/ConfigMenu/ConfigMenu.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@ import React, { useEffect, useRef, useState } from 'react';
import useStore from '@store/store';
import { useTranslation } from 'react-i18next';
import PopupModal from '@components/PopupModal';
import { ConfigInterface, ModelOptions } from '@type/chat';
import { ConfigInterface, ModelOptions, ResponseFormatOptions } from '@type/chat';
import DownChevronArrow from '@icon/DownChevronArrow';
import { modelMaxToken, modelOptions } from '@constants/chat';
import { modelMaxToken, modelOptions, responseFormatOptions } from '@constants/chat';

const ConfigMenu = ({
setIsModalOpen,
Expand All @@ -25,6 +25,9 @@ const ConfigMenu = ({
const [_frequencyPenalty, _setFrequencyPenalty] = useState<number>(
config.frequency_penalty
);
const [_responseFormat, _setResponseFormat] = useState<ResponseFormatOptions>(
config.response_format
)
const { t } = useTranslation('model');

const handleConfirm = () => {
Expand All @@ -35,6 +38,7 @@ const ConfigMenu = ({
presence_penalty: _presencePenalty,
top_p: _topP,
frequency_penalty: _frequencyPenalty,
response_format: _responseFormat
});
setIsModalOpen(false);
};
Expand Down Expand Up @@ -66,6 +70,10 @@ const ConfigMenu = ({
_frequencyPenalty={_frequencyPenalty}
_setFrequencyPenalty={_setFrequencyPenalty}
/>
<ResponseFormatSelector
_responseFormat={_responseFormat}
_setResponseFormat={_setResponseFormat}
/>
</div>
</PopupModal>
);
Expand Down Expand Up @@ -293,4 +301,59 @@ export const FrequencyPenaltySlider = ({
);
};

/**
 * Dropdown selector for the OpenAI `response_format` request option
 * (`text` vs `json_object`). Mirrors the structure of the other config
 * controls in this file (e.g. FrequencyPenaltySlider): local draft state
 * is lifted from ConfigMenu and committed on confirm.
 */
export const ResponseFormatSelector = ({
  _responseFormat,
  _setResponseFormat,
}: {
  _responseFormat: ResponseFormatOptions;
  _setResponseFormat: React.Dispatch<React.SetStateAction<ResponseFormatOptions>>;
}) => {
  // Whether the options dropdown is currently expanded.
  const [dropDown, setDropDown] = useState<boolean>(false);
  const { t } = useTranslation('model');

  return (
    <div className='mt-5 pt-5 border-t border-gray-500'>
      <label className='block text-sm font-medium text-gray-900 dark:text-white'>
        {t('response_format.label')}: {_responseFormat.type}
      </label>
      <button
        className='btn btn-neutral btn-small flex gap-1'
        type='button'
        onClick={() => setDropDown((prev) => !prev)}
        aria-label='response format'
      >
        {_responseFormat.type}
        <DownChevronArrow />
      </button>
      {/* NOTE(review): `top-100`/`bottom-100` are not default Tailwind
          utilities — confirm they are defined in the project's Tailwind
          config, or this dropdown may not be positioned as intended. */}
      <div
        id='dropdown'
        className={`${
          dropDown ? '' : 'hidden'
        } absolute top-100 bottom-100 z-10 bg-white rounded-lg shadow-xl border-b border-black/10 dark:border-gray-900/50 text-gray-800 dark:text-gray-100 group dark:bg-gray-800 opacity-90`}
      >
        <ul
          className='text-sm text-gray-700 dark:text-gray-200 p-0 m-0'
          aria-labelledby='dropdownDefaultButton'
        >
          {responseFormatOptions.map((f) => (
            <li
              className='px-4 py-2 hover:bg-gray-100 dark:hover:bg-gray-600 dark:hover:text-white cursor-pointer'
              onClick={() => {
                // Commit the chosen format to the draft config and collapse.
                _setResponseFormat(f);
                setDropDown(false);
              }}
              key={f.type}
            >
              {f.type}
            </li>
          ))}
        </ul>
      </div>
      <div className='min-w-fit text-gray-500 dark:text-gray-300 text-sm mt-2'>
        {t('response_format.description')}
      </div>
    </div>
  );
};

export default ConfigMenu;
8 changes: 7 additions & 1 deletion src/constants/chat.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { v4 as uuidv4 } from 'uuid';
import { ChatInterface, ConfigInterface, ModelOptions } from '@type/chat';
import { ChatInterface, ConfigInterface, ModelOptions, ResponseFormatOptions } from '@type/chat';
import useStore from '@store/store';

const date = new Date();
Expand Down Expand Up @@ -29,7 +29,12 @@ export const modelOptions: ModelOptions[] = [
// 'gpt-4-32k-0314',
];

// Response formats accepted by the OpenAI chat completions API.
export const responseFormatOptions: ResponseFormatOptions[] = [
  { type: 'text' },
  { type: 'json_object' },
];

export const defaultModel = 'gpt-3.5-turbo';

// Typed declaration instead of an `as` assertion, so the compiler actually
// verifies the literal conforms to ResponseFormatOptions.
export const defaultResponseFormat: ResponseFormatOptions = { type: 'text' };

export const modelMaxToken = {
'gpt-3.5-turbo': 4096,
Expand Down Expand Up @@ -111,6 +116,7 @@ export const _defaultChatConfig: ConfigInterface = {
presence_penalty: 0,
top_p: 1,
frequency_penalty: 0,
response_format: defaultResponseFormat
};

export const generateDefaultChat = (
Expand Down
6 changes: 6 additions & 0 deletions src/store/migrate.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,12 @@ import {
LocalStorageInterfaceV5ToV6,
LocalStorageInterfaceV6ToV7,
LocalStorageInterfaceV7oV8,
LocalStorageInterfaceV8toV9,
} from '@type/chat';
import {
_defaultChatConfig,
defaultModel,
defaultResponseFormat,
defaultUserMaxToken,
} from '@constants/chat';
import { officialAPIEndpoint } from '@constants/auth';
Expand Down Expand Up @@ -104,3 +106,7 @@ export const migrateV7 = (persistedState: LocalStorageInterfaceV7oV8) => {
chat.id = uuidv4();
});
};

/**
 * Migrate persisted state v8 -> v9: add the new `response_format` field to
 * every chat's config, defaulting to `{ type: 'text' }`.
 *
 * Writes to `chat.config` (where ConfigInterface declares
 * `response_format`) rather than to a top-level `responseFormat` field,
 * matching how earlier migrations added per-chat config options.
 */
export const migrateV8 = (persistedState: LocalStorageInterfaceV8toV9) => {
  persistedState.chats.forEach((chat) => {
    chat.config = {
      ...chat.config,
      response_format: defaultResponseFormat,
    };
  });
};

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Provided you have this saving the same way as the other config props (e.g. max_tokens), this should work:

export const migrateV8 = (persistedState: LocalStorageInterfaceV8toV9) => {
 persistedState.chats.forEach((chat) => {
    chat.config = {
      ...chat.config,
      responseFormat : defaultResponseFormat,
    };
  });
}

I would rename responseFormat to response_format for consistency with the other config fields.

6 changes: 5 additions & 1 deletion src/store/store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
LocalStorageInterfaceV5ToV6,
LocalStorageInterfaceV6ToV7,
LocalStorageInterfaceV7oV8,
LocalStorageInterfaceV8toV9,
} from '@type/chat';
import {
migrateV0,
Expand All @@ -25,6 +26,7 @@ import {
migrateV5,
migrateV6,
migrateV7,
migrateV8,
} from './migrate';

export type StoreState = ChatSlice &
Expand Down Expand Up @@ -74,7 +76,7 @@ const useStore = create<StoreState>()(
{
name: 'free-chat-gpt',
partialize: (state) => createPartializedState(state),
version: 8,
version: 9,
migrate: (persistedState, version) => {
switch (version) {
case 0:
Expand All @@ -93,6 +95,8 @@ const useStore = create<StoreState>()(
migrateV6(persistedState as LocalStorageInterfaceV6ToV7);
case 7:
migrateV7(persistedState as LocalStorageInterfaceV7oV8);
case 8:
migrateV8(persistedState as LocalStorageInterfaceV8toV9);
break;
}
return persistedState as StoreState;
Expand Down
10 changes: 9 additions & 1 deletion src/types/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ export interface ConfigInterface {
presence_penalty: number;
top_p: number;
frequency_penalty: number;
response_format: ResponseFormatOptions;
}

export interface ChatHistoryInterface {
Expand All @@ -49,11 +50,13 @@ export interface Folder {
color?: string;
}

export type ModelOptions = 'gpt-4' | 'gpt-4-32k' | 'gpt-4-1106-preview' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-1106' ;
export type ModelOptions = 'gpt-4' | 'gpt-4-32k' | 'gpt-4-1106-preview' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-1106';
// | 'gpt-3.5-turbo-0301';
// | 'gpt-4-0314'
// | 'gpt-4-32k-0314'

// Discriminated union mirroring the OpenAI `response_format` request field.
// Plain identifier keys (not quoted) are the idiomatic TS form.
export type ResponseFormatOptions = { type: 'text' } | { type: 'json_object' };

export type TotalTokenUsed = {
[model in ModelOptions]?: {
promptTokens: number;
Expand Down Expand Up @@ -147,3 +150,8 @@ export interface LocalStorageInterfaceV7oV8
foldersExpanded: boolean[];
folders: FolderCollection;
}

// Persisted-state shape after the v8 -> v9 migration.
// NOTE(review): this adds `responseFormat` at the top level of the persisted
// state, while ConfigInterface stores the setting per-chat as
// `response_format` — confirm which location the migration is meant to
// populate, and consider renaming to `response_format` for consistency.
export interface LocalStorageInterfaceV8toV9
  extends LocalStorageInterfaceV7oV8 {
  responseFormat: ResponseFormatOptions;
}
4 changes: 2 additions & 2 deletions src/types/persist.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { LocalStorageInterfaceV7oV8 } from './chat';
import { LocalStorageInterfaceV8toV9 } from './chat';

interface PersistStorageState extends LocalStorageInterfaceV7oV8 {}
// Alias for the newest persisted-state shape; persistence code depends on
// this single name so a storage-version bump only touches the `extends` clause.
interface PersistStorageState extends LocalStorageInterfaceV8toV9 { }

export default PersistStorageState;
6 changes: 6 additions & 0 deletions src/utils/import.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ import {
defaultModel,
modelOptions,
_defaultChatConfig,
defaultResponseFormat,
responseFormatOptions,
} from '@constants/chat';
import { ExportV1, OpenAIChat } from '@type/export';

Expand Down Expand Up @@ -62,6 +64,10 @@ const validateAndFixChatConfig = (config: ConfigInterface) => {
if (!config.model) config.model = defaultModel;
if (!modelOptions.includes(config.model)) return false;

if (!config.response_format) config.response_format = defaultResponseFormat;
// `Array.prototype.includes` compares objects by reference, which always
// fails for a response_format deserialized from an imported JSON file —
// compare the `type` discriminant by value instead.
if (!responseFormatOptions.some((f) => f.type === config.response_format.type))
  return false;

return true;
};

Expand Down