-
Notifications
You must be signed in to change notification settings - Fork 113
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Make suggest next questions configurable (#275)
--------- Co-authored-by: Marcus Schiesser <[email protected]>
- Loading branch information
Showing
12 changed files
with
182 additions
and
320 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
--- | ||
"create-llama": patch | ||
--- | ||
|
||
Add env config for next questions feature |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
28 changes: 8 additions & 20 deletions
28
templates/components/llamaindex/typescript/streaming/suggestion.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,78 @@ | ||
import logging | ||
import os | ||
import re | ||
from typing import List, Optional | ||
|
||
from app.api.routers.models import Message | ||
from llama_index.core.prompts import PromptTemplate | ||
from llama_index.core.settings import Settings | ||
|
||
# Reuse uvicorn's logger so messages from this module show up in the
# server's standard log stream instead of a separate, unconfigured logger.
logger = logging.getLogger("uvicorn")
|
||
|
||
class NextQuestionSuggestion:
    """
    Suggest follow-up questions the user might ask, based on the
    conversation history.

    The feature is enabled by setting the NEXT_QUESTION_PROMPT environment
    variable to a prompt template containing a `{conversation}` placeholder;
    remove (or empty) the variable to disable suggestions.
    """

    @classmethod
    def get_configured_prompt(cls) -> Optional[PromptTemplate]:
        """
        Return the configured prompt template, or None when the feature is
        disabled (NEXT_QUESTION_PROMPT unset or empty).
        """
        # NOTE: annotation fixed — the original declared Optional[str] but
        # actually returns a PromptTemplate instance.
        prompt = os.getenv("NEXT_QUESTION_PROMPT")
        if not prompt:
            return None
        return PromptTemplate(prompt)

    @classmethod
    async def suggest_next_questions_all_messages(
        cls,
        messages: List[Message],
    ) -> Optional[List[str]]:
        """
        Suggest the next questions that the user might ask based on the
        conversation history.

        Returns None if suggestion is disabled or there is an error.
        """
        prompt_template = cls.get_configured_prompt()
        if not prompt_template:
            return None

        try:
            # Reduce cost by only sending the last user/assistant exchange
            # to the LLM instead of the whole history.
            last_user_message = None
            last_assistant_message = None
            for message in reversed(messages):
                if message.role == "user":
                    last_user_message = f"User: {message.content}"
                elif message.role == "assistant":
                    last_assistant_message = f"Assistant: {message.content}"
                if last_user_message and last_assistant_message:
                    break
            conversation: str = f"{last_user_message}\n{last_assistant_message}"

            # Call the LLM and parse questions from the output
            prompt = prompt_template.format(conversation=conversation)
            output = await Settings.llm.acomplete(prompt)
            return cls._extract_questions(output.text)
        except Exception as e:
            # Best-effort feature: never let suggestion failures break the
            # chat response itself.
            logger.error(f"Error when generating next question: {e}")
            return None

    @classmethod
    def _extract_questions(cls, text: str) -> List[str]:
        """
        Extract one question per non-blank line from the first ``` fenced
        block in *text*.

        Returns [] when there is no fenced block or it is empty.  (The
        original returned [""] in that case because "".split("\\n") yields
        a single empty string.)
        """
        content_match = re.search(r"```(.*?)```", text, re.DOTALL)
        if not content_match:
            return []
        # Drop blank lines so an empty or whitespace-padded block yields
        # no questions rather than empty-string entries.
        return [
            line.strip()
            for line in content_match.group(1).strip().splitlines()
            if line.strip()
        ]

    @classmethod
    async def suggest_next_questions(
        cls,
        chat_history: List[Message],
        response: str,
    ) -> Optional[List[str]]:
        """
        Suggest the next questions based on the chat history plus the
        latest assistant response.

        Returns None if suggestion is disabled or there is an error
        (annotation fixed — the original claimed List[str] but delegates to
        a method that may return None).
        """
        messages = chat_history + [Message(role="assistant", content=response)]
        return await cls.suggest_next_questions_all_messages(messages)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
60 changes: 0 additions & 60 deletions
60
templates/types/multiagent/fastapi/app/api/services/suggestion.py
This file was deleted.
Oops, something went wrong.
Oops, something went wrong.