refactor: move chat engine from vectordbs to engine folder
thucpn committed Mar 14, 2024
1 parent 48912cc commit 2f07cc9
Showing 4 changed files with 45 additions and 13 deletions.
27 changes: 27 additions & 0 deletions templates/components/engines/typescript/agent/chat.ts
@@ -0,0 +1,27 @@
import {
  OpenAI,
  QueryEngineTool,
  OpenAIAgent,
} from "llamaindex";
import { getDataSource } from "./index";
import { STORAGE_CACHE_DIR } from "./constants.mjs";

export async function createChatEngine(llm: OpenAI) {
  const index = await getDataSource(llm);
  const queryEngine = index.asQueryEngine();
  const queryEngineTool = new QueryEngineTool({
    queryEngine: queryEngine,
    metadata: {
      name: "data_query_engine",
      description: `A query engine for documents in storage folder: ${STORAGE_CACHE_DIR}`,
    },
  });

  const agent = new OpenAIAgent({
    tools: [queryEngineTool],
    verbose: true,
    llm,
  });

  return agent;
}
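For orientation, a minimal usage sketch (not part of the commit) of how a caller such as an API route might use this factory. The params-object chat API and the response shape are assumptions about the llamaindex version in use.

import { OpenAI } from "llamaindex";
import { createChatEngine } from "./chat";

// Hypothetical caller (not in this commit): build the agent-backed engine
// and run a single chat turn.
async function answer(question: string) {
  const llm = new OpenAI({ model: "gpt-3.5-turbo" });
  const chatEngine = await createChatEngine(llm);
  // chat({ message }) returning an object with a .response string is an
  // assumption; the exact signature depends on the installed llamaindex.
  const result = await chatEngine.chat({ message: question });
  return result.response;
}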
1 change: 1 addition & 0 deletions templates/components/engines/typescript/agent/tools.ts
@@ -0,0 +1 @@
// Load tools here
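The tools module is only a placeholder in this commit. As an illustration of what could live here, a hedged sketch using llamaindex's FunctionTool wrapper; the exact constructor and metadata shape vary by version, so treat it as an assumption.

import { FunctionTool } from "llamaindex";

// Hypothetical example tool (not in this commit). FunctionTool's exact
// signature is version-dependent; this mirrors the common (fn, metadata) form.
export const currentDateTool = new FunctionTool(
  () => new Date().toISOString(),
  {
    name: "get_current_date",
    description: "Returns the current date and time as an ISO 8601 string",
  },
);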
16 changes: 16 additions & 0 deletions templates/components/engines/typescript/chat/chat.ts
@@ -0,0 +1,16 @@
import {
  LLM,
  ContextChatEngine,
} from "llamaindex";
import { getDataSource } from "./index";

export async function createChatEngine(llm: LLM) {
  const index = await getDataSource(llm);
  const retriever = index.asRetriever();
  retriever.similarityTopK = 3;

  return new ContextChatEngine({
    chatModel: llm,
    retriever,
  });
}
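Setting retriever.similarityTopK = 3 caps the context at the three most similar chunks per turn, trading recall for a smaller prompt. As with the agent variant, a hedged usage sketch (not part of the commit): a follow-up turn that passes prior messages as chatHistory. The field names follow llamaindex's ChatMessage shape and are assumptions about this version.

import { OpenAI } from "llamaindex";
import { createChatEngine } from "./chat";

// Hypothetical multi-turn call (not in this commit).
async function followUp() {
  const chatEngine = await createChatEngine(new OpenAI());
  const result = await chatEngine.chat({
    message: "And how do I change the number of retrieved chunks?",
    chatHistory: [
      { role: "user", content: "What does this template do?" },
      { role: "assistant", content: "It answers questions over your documents." },
    ],
  });
  console.log(result.response);
}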
14 changes: 1 addition & 13 deletions templates/components/vectordbs/typescript/none/index.ts
@@ -1,5 +1,4 @@
 import {
-  ContextChatEngine,
   LLM,
   serviceContextFromDefaults,
   SimpleDocumentStore,
@@ -8,7 +7,7 @@ import {
 } from "llamaindex";
 import { CHUNK_OVERLAP, CHUNK_SIZE, STORAGE_CACHE_DIR } from "./constants.mjs";
 
-async function getDataSource(llm: LLM) {
+export async function getDataSource(llm: LLM) {
   const serviceContext = serviceContextFromDefaults({
     llm,
     chunkSize: CHUNK_SIZE,
@@ -31,14 +30,3 @@ async function getDataSource(llm: LLM) {
     serviceContext,
   });
 }
-
-export async function createChatEngine(llm: LLM) {
-  const index = await getDataSource(llm);
-  const retriever = index.asRetriever();
-  retriever.similarityTopK = 3;
-
-  return new ContextChatEngine({
-    chatModel: llm,
-    retriever,
-  });
-}
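The collapsed hunk hides the body of getDataSource. For readers without the full file, a rough sketch of the typical llamaindex persisted-storage pattern follows; storageContextFromDefaults and VectorStoreIndex.init are assumptions inferred from the visible imports, not the commit's exact code.

import {
  LLM,
  VectorStoreIndex,
  serviceContextFromDefaults,
  storageContextFromDefaults,
} from "llamaindex";

// Sketch only (not the commit's exact code): load an index persisted to disk.
async function loadPersistedIndex(llm: LLM, persistDir: string) {
  const serviceContext = serviceContextFromDefaults({ llm });
  const storageContext = await storageContextFromDefaults({ persistDir });
  return await VectorStoreIndex.init({ storageContext, serviceContext });
}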
