Skip to content

Commit

Permalink
Merge pull request #64 from animalnots/dev
Browse files Browse the repository at this point in the history
1.9.0 Added support for non-stream models (o1-)
  • Loading branch information
animalnots committed Sep 13, 2024
2 parents f162ab0 + ad9f4e9 commit 931601e
Show file tree
Hide file tree
Showing 7 changed files with 187 additions and 81 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "better-chatgpt",
"private": true,
"version": "1.8.4",
"version": "1.9.0",
"type": "module",
"homepage": "./",
"main": "electron/index.cjs",
Expand Down
13 changes: 11 additions & 2 deletions public/locales/en/api.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,14 @@
},
"customEndpoint": "Use custom API endpoint",
"advancedConfig": "View advanced API configuration <0>here</0>",
"noApiKeyWarning": "No API key supplied! Please check your API settings."
}
"noApiKeyWarning": "No API key supplied! Please check your API settings.",
"errors": {
"errorGeneratingTitle": "Error generating title!",
"noMessagesSubmitted": "No messages submitted!",
"messageExceedMaxToken": "Message exceeds the maximum token limit!",
"failedToRetrieveData": "Failed to retrieve valid chat completion data.",
"streamLocked": "Oops, the stream is locked right now. Please try again.",
"cancelledByUser": "Cancelled by user",
"generationCompleted": "Generation completed"
}
}
1 change: 1 addition & 0 deletions public/locales/en/main.json
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
"cloned": "Cloned",
"enterToSubmit": "Enter to submit",
"submitPlaceholder": "Type a message or click [/] for prompts...",
"stopNonStreamGenerationWarning": "Stopping generation is not recommended for non-stream models. Only use it if the UI is stuck. Do you want to proceed?",
"reduceMessagesWarning": "Reducing messages may result in data loss. It is recommended to download the chat in JSON format if you care about the data. Do you want to proceed?",
"reduceMessagesFailedImportWarning": "Full import failed as the data hit the maximum storage limit. Import as much as possible?",
"partialImportWarning": "Full import failed as not all of the expected messages were imported: {{message}}. Would you like to import anyway?",
Expand Down
21 changes: 19 additions & 2 deletions src/components/StopGeneratingButton/StopGeneratingButton.tsx
Original file line number Diff line number Diff line change
@@ -1,14 +1,31 @@
import React from 'react';
import React, { useEffect } from 'react';
import useStore from '@store/store';
import { useTranslation } from 'react-i18next';
import { modelStreamSupport } from '@constants/modelLoader';

const StopGeneratingButton = () => {
const { t } = useTranslation();
const setGenerating = useStore((state) => state.setGenerating);
const generating = useStore((state) => state.generating);

const currentModel = useStore((state) =>
state.chats ? state.chats[state.currentChatIndex].config.model : ''
);
// Stop the in-flight generation. Stream-capable models can be interrupted
// safely; for non-stream models the request cannot actually be aborted, so
// ask the user to confirm before flipping the generating flag.
const handleGeneratingStop = () => {
const supportsStreaming = modelStreamSupport[currentModel];
if (supportsStreaming) {
setGenerating(false);
return;
}
// Non-stream model: stopping only unblocks the UI — confirm first.
if (window.confirm(t('stopNonStreamGenerationWarning'))) {
setGenerating(false);
}
};

return generating ? (
<div
className='absolute bottom-6 left-0 right-0 m-auto flex md:w-full md:m-auto gap-0 md:gap-2 justify-center'
onClick={() => setGenerating(false)}
onClick={() => handleGeneratingStop()}
>
<button
className='btn relative btn-neutral border-0 md:border'
Expand Down
4 changes: 3 additions & 1 deletion src/constants/modelLoader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,17 @@ let modelOptions: string[] = [];
let modelMaxToken: { [key: string]: number } = {};
let modelCost: ModelCost = {};
let modelTypes: { [key: string]: string } = {};
let modelStreamSupport: { [key: string]: boolean } = {};

const initializeModels = async () => {
const models = await loadModels();
modelOptions = models.modelOptions;
modelMaxToken = models.modelMaxToken;
modelCost = models.modelCost;
modelTypes = models.modelTypes;
modelStreamSupport = models.modelStreamSupport;
};

initializeModels();

export { modelOptions, modelMaxToken, modelCost, modelTypes };
export { modelOptions, modelMaxToken, modelCost, modelTypes, modelStreamSupport };
195 changes: 123 additions & 72 deletions src/hooks/useSubmit.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { parseEventSource } from '@api/helper';
import { limitMessageTokens, updateTotalTokenUsed } from '@utils/messageUtils';
import { _defaultChatConfig } from '@constants/chat';
import { officialAPIEndpoint } from '@constants/auth';
import { modelStreamSupport } from '@constants/modelLoader';

const useSubmit = () => {
const { t, i18n } = useTranslation('api');
Expand All @@ -22,7 +23,7 @@ const useSubmit = () => {
const generating = useStore((state) => state.generating);
const currentChatIndex = useStore((state) => state.currentChatIndex);
const setChats = useStore((state) => state.setChats);

const generateTitle = async (
message: MessageInterface[],
modelConfig: ConfigInterface
Expand All @@ -35,8 +36,8 @@ const useSubmit = () => {
throw new Error(t('noApiKeyWarning') as string);
}
const titleChatConfig = {
..._defaultChatConfig, // Spread the original config
model: useStore.getState().titleModel ?? _defaultChatConfig.model, // Override the model property
..._defaultChatConfig, // Spread the original config
model: useStore.getState().titleModel ?? _defaultChatConfig.model, // Override the model property
};
// other endpoints
data = await getChatCompletion(
Expand All @@ -49,8 +50,8 @@ const useSubmit = () => {
);
} else if (apiKey) {
const titleChatConfig = {
...modelConfig, // Spread the original config
model: useStore.getState().titleModel ?? modelConfig.model, // Override the model property
...modelConfig, // Spread the original config
model: useStore.getState().titleModel ?? modelConfig.model, // Override the model property
};
// own apikey
data = await getChatCompletion(
Expand All @@ -63,7 +64,9 @@ const useSubmit = () => {
);
}
} catch (error: unknown) {
throw new Error(`Error generating title!\n${(error as Error).message}`);
throw new Error(
`${t('errors.errorGeneratingTitle')}\n${(error as Error).message}`
);
}
return data.choices[0].message.content;
};
Expand All @@ -88,91 +91,139 @@ const useSubmit = () => {
setGenerating(true);

try {
const isStreamSupported =
modelStreamSupport[chats[currentChatIndex].config.model];
let data;
let stream;
if (chats[currentChatIndex].messages.length === 0)
throw new Error('No messages submitted!');
throw new Error(t('errors.noMessagesSubmitted') as string);

const messages = limitMessageTokens(
chats[currentChatIndex].messages,
chats[currentChatIndex].config.max_tokens,
chats[currentChatIndex].config.model
);
if (messages.length === 0) throw new Error('Message exceed max token!');
if (messages.length === 0)
throw new Error(t('errors.messageExceedMaxToken') as string);
if (!isStreamSupported) {
if (!apiKey || apiKey.length === 0) {
// official endpoint
if (apiEndpoint === officialAPIEndpoint) {
throw new Error(t('noApiKeyWarning') as string);
}
// other endpoints
data = await getChatCompletion(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
undefined,
undefined,
useStore.getState().apiVersion
);
} else if (apiKey) {
data = await getChatCompletion(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
apiKey,
undefined,
useStore.getState().apiVersion
);
}

// no api key (free)
if (!apiKey || apiKey.length === 0) {
// official endpoint
if (apiEndpoint === officialAPIEndpoint) {
throw new Error(t('noApiKeyWarning') as string);
if (
!data ||
!data.choices ||
!data.choices[0] ||
!data.choices[0].message ||
!data.choices[0].message.content
) {
throw new Error(t('errors.failedToRetrieveData') as string);
}

// other endpoints
stream = await getChatCompletionStream(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
undefined,
undefined,
useStore.getState().apiVersion
);
} else if (apiKey) {
// own apikey
stream = await getChatCompletionStream(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
apiKey,
undefined,
useStore.getState().apiVersion
const updatedChats: ChatInterface[] = JSON.parse(
JSON.stringify(useStore.getState().chats)
);
}
const updatedMessages = updatedChats[currentChatIndex].messages;
(
updatedMessages[updatedMessages.length - 1]
.content[0] as TextContentInterface
).text += data.choices[0].message.content;
setChats(updatedChats);
} else {
// no api key (free)
if (!apiKey || apiKey.length === 0) {
// official endpoint
if (apiEndpoint === officialAPIEndpoint) {
throw new Error(t('noApiKeyWarning') as string);
}

if (stream) {
if (stream.locked)
throw new Error(
'Oops, the stream is locked right now. Please try again'
// other endpoints
stream = await getChatCompletionStream(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
undefined,
undefined,
useStore.getState().apiVersion
);
const reader = stream.getReader();
let reading = true;
let partial = '';
while (reading && useStore.getState().generating) {
const { done, value } = await reader.read();
const result = parseEventSource(
partial + new TextDecoder().decode(value)
} else if (apiKey) {
// own apikey
stream = await getChatCompletionStream(
useStore.getState().apiEndpoint,
messages,
chats[currentChatIndex].config,
apiKey,
undefined,
useStore.getState().apiVersion
);
partial = '';
}

if (result === '[DONE]' || done) {
reading = false;
} else {
const resultString = result.reduce((output: string, curr) => {
if (typeof curr === 'string') {
partial += curr;
} else {
const content = curr.choices[0]?.delta?.content ?? null;
if (content) output += content;
}
return output;
}, '');

const updatedChats: ChatInterface[] = JSON.parse(
JSON.stringify(useStore.getState().chats)
if (stream) {
if (stream.locked)
throw new Error(t('errors.streamLocked') as string);
const reader = stream.getReader();
let reading = true;
let partial = '';
while (reading && useStore.getState().generating) {
const { done, value } = await reader.read();
const result = parseEventSource(
partial + new TextDecoder().decode(value)
);
const updatedMessages = updatedChats[currentChatIndex].messages;
(
updatedMessages[updatedMessages.length - 1]
.content[0] as TextContentInterface
).text += resultString;
setChats(updatedChats);
partial = '';

if (result === '[DONE]' || done) {
reading = false;
} else {
const resultString = result.reduce((output: string, curr) => {
if (typeof curr === 'string') {
partial += curr;
} else {
const content = curr.choices[0]?.delta?.content ?? null;
if (content) output += content;
}
return output;
}, '');

const updatedChats: ChatInterface[] = JSON.parse(
JSON.stringify(useStore.getState().chats)
);
const updatedMessages = updatedChats[currentChatIndex].messages;
(
updatedMessages[updatedMessages.length - 1]
.content[0] as TextContentInterface
).text += resultString;
setChats(updatedChats);
}
}
if (useStore.getState().generating) {
reader.cancel(t('errors.cancelledByUser') as string);
} else {
reader.cancel(t('errors.generationCompleted') as string);
}
reader.releaseLock();
stream.cancel();
}
if (useStore.getState().generating) {
reader.cancel('Cancelled by user');
} else {
reader.cancel('Generation completed');
}
reader.releaseLock();
stream.cancel();
}

// update tokens used in chatting
Expand Down
Loading

0 comments on commit 931601e

Please sign in to comment.