diff --git a/templates/components/engines/typescript/agent/chat.ts b/templates/components/engines/typescript/agent/chat.ts
new file mode 100644
index 00000000..2d727e07
--- /dev/null
+++ b/templates/components/engines/typescript/agent/chat.ts
@@ -0,0 +1,27 @@
+import {
+  OpenAI,
+  QueryEngineTool,
+  OpenAIAgent,
+} from "llamaindex";
+import { getDataSource } from "./index";
+import { STORAGE_CACHE_DIR } from "./constants.mjs";
+
+export async function createChatEngine(llm: OpenAI) {
+  const index = await getDataSource(llm);
+  const queryEngine = index.asQueryEngine();
+  const queryEngineTool = new QueryEngineTool({
+    queryEngine: queryEngine,
+    metadata: {
+      name: "data_query_engine",
+      description: `A query engine for documents in storage folder: ${STORAGE_CACHE_DIR}`,
+    },
+  });
+
+  const agent = new OpenAIAgent({
+    tools: [queryEngineTool],
+    verbose: true,
+    llm,
+  });
+
+  return agent;
+}
diff --git a/templates/components/engines/typescript/agent/tools.ts b/templates/components/engines/typescript/agent/tools.ts
new file mode 100644
index 00000000..bf001e7e
--- /dev/null
+++ b/templates/components/engines/typescript/agent/tools.ts
@@ -0,0 +1 @@
+// Load tools here
\ No newline at end of file
diff --git a/templates/components/engines/typescript/chat/chat.ts b/templates/components/engines/typescript/chat/chat.ts
new file mode 100644
index 00000000..32a16dce
--- /dev/null
+++ b/templates/components/engines/typescript/chat/chat.ts
@@ -0,0 +1,16 @@
+import {
+  LLM,
+  ContextChatEngine,
+} from "llamaindex";
+import { getDataSource } from "./index";
+
+export async function createChatEngine(llm: LLM) {
+  const index = await getDataSource(llm);
+  const retriever = index.asRetriever();
+  retriever.similarityTopK = 3;
+
+  return new ContextChatEngine({
+    chatModel: llm,
+    retriever,
+  });
+}
diff --git a/templates/components/vectordbs/typescript/none/index.ts b/templates/components/vectordbs/typescript/none/index.ts
index e335446c..528d6057 100644
--- a/templates/components/vectordbs/typescript/none/index.ts
+++ b/templates/components/vectordbs/typescript/none/index.ts
@@ -1,5 +1,4 @@
 import {
-  ContextChatEngine,
   LLM,
   serviceContextFromDefaults,
   SimpleDocumentStore,
@@ -8,7 +7,7 @@ import {
 } from "llamaindex";
 import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";
 
-async function getDataSource(llm: LLM) {
+export async function getDataSource(llm: LLM) {
   const serviceContext = serviceContextFromDefaults({
     llm,
     chunkSize: CHUNK_SIZE,
@@ -31,14 +30,3 @@
     serviceContext,
   });
 }
-
-export async function createChatEngine(llm: LLM) {
-  const index = await getDataSource(llm);
-  const retriever = index.asRetriever();
-  retriever.similarityTopK = 3;
-
-  return new ContextChatEngine({
-    chatModel: llm,
-    retriever,
-  });
-}
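
Usage sketch (not part of the patch): with createChatEngine now living in each engine template and getDataSource exported from the vectordb template, a caller can construct either engine the same way. The model name, the handler name, and the .chat() call shape below are assumptions about the surrounding template, not code taken from this diff.

// Usage sketch -- assumed caller, not part of this diff.
import { OpenAI } from "llamaindex";
import { createChatEngine } from "./chat";

export async function handleChatRequest(message: string): Promise<string> {
  // Hypothetical model choice; the real template reads this from its config.
  const llm = new OpenAI({ model: "gpt-3.5-turbo" });
  const chatEngine = await createChatEngine(llm);
  // Both OpenAIAgent and ContextChatEngine expose chat(), so the route code
  // stays the same whichever engine template was selected; the exact call
  // signature depends on the llamaindex version in use.
  const response = await chatEngine.chat({ message });
  return response.response;
}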