-
Notifications
You must be signed in to change notification settings - Fork 113
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: add multi-agents template based on workflows (#271)
--------- Co-authored-by: Thuc Pham <[email protected]>
- Loading branch information
1 parent
b1f3d52
commit 435109f
Showing
37 changed files
with
1,960 additions
and
272 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
--- | ||
"create-llama": patch | ||
--- | ||
|
||
Add chat agent events UI |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,85 @@ | ||
/* eslint-disable turbo/no-undeclared-env-vars */ | ||
import { expect, test } from "@playwright/test"; | ||
import { ChildProcess } from "child_process"; | ||
import fs from "fs"; | ||
import path from "path"; | ||
import type { | ||
TemplateFramework, | ||
TemplatePostInstallAction, | ||
TemplateUI, | ||
} from "../helpers"; | ||
import { createTestDir, runCreateLlama, type AppType } from "./utils"; | ||
|
||
// Fixed template configuration exercised by this e2e suite: the multiagent
// template generated for the FastAPI backend with the shadcn UI.
const templateFramework: TemplateFramework = "fastapi";
// Ingest the bundled example file as the data source.
const dataSource: string = "--example-file";
const templateUI: TemplateUI = "shadcn";
// Start the generated app after install so the tests below can reach it.
const templatePostInstallAction: TemplatePostInstallAction = "runApp";
// Generate the standalone frontend variant of the app.
const appType: AppType = "--frontend";
// Prompt submitted in the streaming test; the content itself is arbitrary.
const userMessage = "Write a blog post about physical standards for letters";
|
||
test.describe(`Test multiagent template ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => { | ||
test.skip( | ||
process.platform !== "linux" || | ||
process.env.FRAMEWORK !== "fastapi" || | ||
process.env.DATASOURCE === "--no-files", | ||
"The multiagent template currently only works with FastAPI and files. We also only run on Linux to speed up tests.", | ||
); | ||
let port: number; | ||
let externalPort: number; | ||
let cwd: string; | ||
let name: string; | ||
let appProcess: ChildProcess; | ||
// Only test without using vector db for now | ||
const vectorDb = "none"; | ||
|
||
test.beforeAll(async () => { | ||
port = Math.floor(Math.random() * 10000) + 10000; | ||
externalPort = port + 1; | ||
cwd = await createTestDir(); | ||
const result = await runCreateLlama( | ||
cwd, | ||
"multiagent", | ||
templateFramework, | ||
dataSource, | ||
vectorDb, | ||
port, | ||
externalPort, | ||
templatePostInstallAction, | ||
templateUI, | ||
appType, | ||
); | ||
name = result.projectName; | ||
appProcess = result.appProcess; | ||
}); | ||
|
||
test("App folder should exist", async () => { | ||
const dirExists = fs.existsSync(path.join(cwd, name)); | ||
expect(dirExists).toBeTruthy(); | ||
}); | ||
|
||
test("Frontend should have a title", async ({ page }) => { | ||
await page.goto(`http://localhost:${port}`); | ||
await expect(page.getByText("Built by LlamaIndex")).toBeVisible(); | ||
}); | ||
|
||
test("Frontend should be able to submit a message and receive the start of a streamed response", async ({ | ||
page, | ||
}) => { | ||
await page.goto(`http://localhost:${port}`); | ||
await page.fill("form input", userMessage); | ||
|
||
const responsePromise = page.waitForResponse((res) => | ||
res.url().includes("/api/chat"), | ||
); | ||
|
||
await page.click("form button[type=submit]"); | ||
|
||
const response = await responsePromise; | ||
expect(response.ok()).toBeTruthy(); | ||
}); | ||
|
||
// clean processes | ||
test.afterAll(async () => { | ||
appProcess?.kill(); | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
33 changes: 0 additions & 33 deletions
33
templates/types/multiagent/fastapi/app/agents/dummy/agent.py
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,83 @@ | ||
import asyncio | ||
from typing import Any, List | ||
|
||
from llama_index.core.tools.types import ToolMetadata, ToolOutput | ||
from llama_index.core.tools.utils import create_schema_from_function | ||
from llama_index.core.workflow import Context, Workflow | ||
|
||
from app.agents.single import ( | ||
AgentRunResult, | ||
ContextAwareTool, | ||
FunctionCallingAgent, | ||
) | ||
from app.agents.planner import StructuredPlannerAgent | ||
|
||
|
||
class AgentCallTool(ContextAwareTool):
    # Tool that lets a calling agent delegate a sub-task to another agent
    # (a Workflow). The tool is named "call_<agent.name>" so an LLM can select
    # the delegate by name.
    # NOTE(review): assumes the wrapped Workflow exposes `name` and `role`
    # attributes — confirm against the agent classes used with this tool.
    def __init__(self, agent: Workflow) -> None:
        self.agent = agent
        name = f"call_{agent.name}"

        # Dummy signature used only to derive the tool's input schema; the
        # actual entry point is `acall` below (which also takes a Context).
        async def schema_call(input: str) -> str:
            pass

        # create the schema without the Context
        fn_schema = create_schema_from_function(name, schema_call)
        self._metadata = ToolMetadata(
            name=name,
            description=(
                f"Use this tool to delegate a sub task to the {agent.name} agent."
                + (f" The agent is an {agent.role}." if agent.role else "")
            ),
            fn_schema=fn_schema,
        )

    # overload the acall function with the ctx argument as it's needed for bubbling the events
    async def acall(self, ctx: Context, input: str) -> ToolOutput:
        # Run the delegate concurrently so its events can be forwarded to the
        # caller's stream while it executes; await the task only after the
        # event stream is exhausted.
        task = asyncio.create_task(self.agent.run(input=input))
        # bubble all events while running the agent to the calling agent
        async for ev in self.agent.stream_events():
            ctx.write_event_to_stream(ev)
        ret: AgentRunResult = await task
        response = ret.response.message.content
        return ToolOutput(
            content=str(response),
            tool_name=self.metadata.name,
            raw_input={"args": input, "kwargs": {}},
            raw_output=response,
        )
|
||
|
||
class AgentCallingAgent(FunctionCallingAgent):
    """Function-calling agent that can delegate work to other agents.

    Every agent in ``agents`` is wrapped in an :class:`AgentCallTool` and
    exposed as a tool, and is also registered as a nested workflow.
    """

    def __init__(
        self,
        *args: Any,
        name: str,
        agents: List[FunctionCallingAgent] | None = None,
        **kwargs: Any,
    ) -> None:
        if agents is None:
            agents = []
        delegate_tools = []
        for delegate in agents:
            delegate_tools.append(AgentCallTool(agent=delegate))
        super().__init__(*args, name=name, tools=delegate_tools, **kwargs)
        # call add_workflows so agents will get detected by llama agents automatically
        self.add_workflows(**{delegate.name: delegate for delegate in agents})
|
||
|
||
class AgentOrchestrator(StructuredPlannerAgent):
    """Planner-based orchestrator that delegates plan steps to sub-agents.

    Each sub-agent is exposed both as an :class:`AgentCallTool` and as a
    nested workflow on this agent.
    """

    def __init__(
        self,
        *args: Any,
        name: str = "orchestrator",
        agents: List[FunctionCallingAgent] | None = None,
        **kwargs: Any,
    ) -> None:
        sub_agents = list(agents) if agents else []
        wrapped_tools = [AgentCallTool(agent=sub) for sub in sub_agents]
        super().__init__(*args, name=name, tools=wrapped_tools, **kwargs)
        # call add_workflows so agents will get detected by llama agents automatically
        self.add_workflows(**{sub.name: sub for sub in sub_agents})
Oops, something went wrong.