Skip to content

Commit

Permalink
add missing return
Browse files — browse the repository at this point in the history
  • Loading branch information
ahuang11 committed Oct 30, 2024
1 parent 18096a0 commit fa4bd6a
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion lumen/ai/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -871,7 +871,7 @@ async def _system_prompt_with_context(
system_prompt += f"\n### CONTEXT: {context}".strip()
return system_prompt

async def respond(self, messages: list, title: str = "", render_output: bool = True, agents: list[Agent] | None = None):
async def respond(self, messages: list, title: str = "", render_output: bool = True, agents: list[Agent] | None = None) -> None:
pipeline = memory['current_pipeline']
analyses = {a.name: a for a in self.analyses if await a.applies(pipeline)}
if not analyses:
Expand Down

0 comments on commit fa4bd6a

Please sign in to comment.