Skip to content

Commit

Permalink
begin settings
Browse files Browse the repository at this point in the history
  • Loading branch information
BitHighlander committed Aug 1, 2024
1 parent fed6930 commit afae8c3
Show file tree
Hide file tree
Showing 3 changed files with 67 additions and 29 deletions.
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
import { Divider, HStack, Stack } from '@chakra-ui/layout';
import { Avatar, Button, Icon, IconButton, Switch, Text } from '@chakra-ui/react';
import { Avatar, Button, Icon, Text } from '@chakra-ui/react';
import { useModal } from 'hooks/useModal/useModal';
import { useEffect, useState } from 'react';
import { FaTrash } from 'react-icons/fa';
import { HiRefresh } from 'react-icons/hi';
import { IoFileTray } from 'react-icons/io5';
import { TbRefreshAlert } from 'react-icons/tb';
import { MdPlayArrow, MdStop, MdViewList } from 'react-icons/md';
import { FC } from 'react';
import axios from 'axios';

import { SettingsListItem } from './SettingsListItem';

Expand All @@ -27,6 +29,7 @@ export const OllamaSettings: FC = () => {
});

const [prevAppSettings, setPrevAppSettings] = useState<OllamaSettingsProps>(appSettings);
const [isOllamaRunning, setIsOllamaRunning] = useState(false);

const handleViewModels = () => {
console.log('Viewing models...');
Expand All @@ -43,6 +46,21 @@ export const OllamaSettings: FC = () => {
// Mock action: Display a message or simulate stopping Ollama
};

// Probe the local Ollama daemon's root endpoint and reflect its
// reachability in component state (isOllamaRunning).
const handleTestConnection = async () => {
  try {
    // Default Ollama host/port; a 200 response means the daemon is up.
    const { data } = await axios.get('http://127.0.0.1:11434/');
    console.log('Ollama is healthy:', data);
    setIsOllamaRunning(true);
  } catch (error) {
    // Network refusal or timeout — treat any failure as "offline".
    console.error('Ollama is not responding:', error);
    setIsOllamaRunning(false);
  }
};

// Run a single connectivity check when the component mounts
// (empty dependency array => effect fires once).
useEffect(() => {
  handleTestConnection();
}, []);

useEffect(() => {
(async () => {
if (
Expand Down Expand Up @@ -91,6 +109,21 @@ export const OllamaSettings: FC = () => {
Stop
</Button>
</SettingsListItem>
<Divider my={1} />
<HStack>
<SettingsListItem
label={'Test Connection'}
onClick={handleTestConnection}
icon={<Icon as={HiRefresh} color='gray.500' />}
>
<Button variant={'ghost'} onClick={handleTestConnection}>
Test
</Button>
</SettingsListItem>
<Text color={isOllamaRunning ? 'green.500' : 'red.500'}>
{isOllamaRunning ? 'Online' : 'Offline'}
</Text>
</HStack>
</Stack>
);
};
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ export const SettingsList = ({ appHistory, ...routeProps }: SettingsListProps) =
</TabPanel>
<TabPanel>
<Menu>
<MorpheosSettings></MorpheosSettings>
<OllamaSettings></OllamaSettings>
</Menu>
</TabPanel>
</TabPanels>
Expand Down
59 changes: 32 additions & 27 deletions packages/keepkey-desktop/src/ai/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,40 +31,45 @@ let ollama: Ollama;
let ollamaProcess: ChildProcess | null;

export const loadOllama = async () => {
let runningInstance = await isOllamaInstanceRunning();

if (runningInstance) {
// connect to local instance
ollama = new Ollama({
host: DEFAULT_OLLAMA_URL,
});
let tag = TAG + " | loadOllama | "
try{
let runningInstance = await isOllamaInstanceRunning();
console.log(tag,'runningInstance:',runningInstance)
if (runningInstance) {
// connect to local instance
ollama = new Ollama({
host: DEFAULT_OLLAMA_URL,
});

await sendOllamaStatusToRenderer(
`local instance of ollama is running and connected at ${DEFAULT_OLLAMA_URL}`,
);
console.log(tag,'local instance of ollama is running and connected at',DEFAULT_OLLAMA_URL)
return true;
}

await sendOllamaStatusToRenderer(
`local instance of ollama is running and connected at ${DEFAULT_OLLAMA_URL}`,
);
const customAppData = getModelPathFromStorage();
runningInstance = await packedExecutableOllamaSpawn(customAppData);

return true;
}
if (runningInstance) {
// connect to local instance
ollama = new Ollama({
host: DEFAULT_OLLAMA_URL,
});

const customAppData = getModelPathFromStorage();
runningInstance = await packedExecutableOllamaSpawn(customAppData);
await sendOllamaStatusToRenderer(
`local instance of ollama is running and connected at ${DEFAULT_OLLAMA_URL}`,
);

if (runningInstance) {
// connect to local instance
ollama = new Ollama({
host: DEFAULT_OLLAMA_URL,
});
return true;
}

await sendOllamaStatusToRenderer(
`local instance of ollama is running and connected at ${DEFAULT_OLLAMA_URL}`,
);
ipcMain.emit(IpcMainChannel.Error, `Couldn't start Ollama locally.`);

return true;
return false;
}catch(e){
console.error(tag,e)
}

ipcMain.emit(IpcMainChannel.Error, `Couldn't start Ollama locally.`);

return false;
};

export const isOllamaInstanceRunning = async (url?: string): Promise<boolean> => {
Expand Down

0 comments on commit afae8c3

Please sign in to comment.