From 7a05e0f5e769a36a2a4a0c381233310204db27cd Mon Sep 17 00:00:00 2001
From: Willy Douhard
Date: Mon, 8 Apr 2024 11:51:07 +0200
Subject: [PATCH] Wd/update literalsdk (#885)

* update to async literal sdk

* changelog
---
 CHANGELOG.md                              | 14 ++++++++++++++
 backend/chainlit/data/__init__.py         | 12 +++++-------
 backend/chainlit/llama_index/callbacks.py |  3 ---
 backend/pyproject.toml                    |  4 ++--
 4 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index fc1b9fe5cd..3ffe945962 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,10 +6,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
 ## [Unreleased]
 
+Nothing unreleased!
+
+## [1.0.501] - 2024-04-08
+
+### Added
+
+- Messages and steps now accept tags and metadata (useful for the data layer)
+
 ### Changed
 
+- The Llama Index callback handler should now show retrieved chunks in the intermediary steps
 - Renamed the Literal environment variable to `LITERAL_API_URL` (it used to be `LITERAL_SERVER`)
 
+### Fixed
+
+- Starting a new conversation should close the element side bar
+- Resolved security issues by upgrading the starlette dependency
+
 ## [1.0.500] - 2024-04-02
 
 ### Added
diff --git a/backend/chainlit/data/__init__.py b/backend/chainlit/data/__init__.py
index 729af04cd6..1a50243de6 100644
--- a/backend/chainlit/data/__init__.py
+++ b/backend/chainlit/data/__init__.py
@@ -10,10 +10,8 @@
 from chainlit.logger import logger
 from chainlit.session import WebsocketSession
 from chainlit.types import Feedback, Pagination, ThreadDict, ThreadFilter
-from chainlit.user import PersistedUser, User, UserDict
-from literalai import Attachment, PageInfo, PaginatedResponse
-from literalai import Score as LiteralScore
-from literalai import Step as LiteralStep
+from chainlit.user import PersistedUser, User
+from literalai import Attachment, PageInfo, PaginatedResponse, Score as LiteralScore, Step as LiteralStep
 from literalai.filter import threads_filters as LiteralThreadsFilters
 from literalai.step import StepDict as LiteralStepDict
 
@@ -128,9 +126,9 @@ async def delete_user_session(self, id: str) -> bool:
 
 class ChainlitDataLayer(BaseDataLayer):
     def __init__(self, api_key: str, server: Optional[str]):
-        from literalai import LiteralClient
+        from literalai import AsyncLiteralClient
 
-        self.client = LiteralClient(api_key=api_key, url=server)
+        self.client = AsyncLiteralClient(api_key=api_key, url=server)
         logger.info("Chainlit data layer initialized")
 
     def attachment_to_element_dict(self, attachment: Attachment) -> "ElementDict":
@@ -457,7 +455,7 @@ async def update_thread(
         tags: Optional[List[str]] = None,
     ):
         await self.client.api.upsert_thread(
-            thread_id=thread_id,
+            id=thread_id,
             name=name,
             participant_id=user_id,
             metadata=metadata,
diff --git a/backend/chainlit/llama_index/callbacks.py b/backend/chainlit/llama_index/callbacks.py
index 0997b28c56..a1e57638e5 100644
--- a/backend/chainlit/llama_index/callbacks.py
+++ b/backend/chainlit/llama_index/callbacks.py
@@ -173,12 +173,10 @@ def on_event_end(
             token_count = self.total_llm_token_count or None
             raw_response = response.raw if response else None
             model = raw_response.get("model", None) if raw_response else None
-            provider = "openai"
 
             if messages and isinstance(response, ChatResponse):
                 msg: ChatMessage = response.message
                 step.generation = ChatGeneration(
-                    provider=provider,
                     model=model,
                     messages=messages,
                     message_completion=GenerationMessage(
                     ),
                 )
             elif formatted_prompt:
                 step.generation = CompletionGeneration(
-                    provider=provider,
                     model=model,
                     prompt=formatted_prompt,
                     completion=content,
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index e153126993..b6616b6998 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "chainlit"
-version = "1.0.500"
+version = "1.0.501"
 keywords = ['LLM', 'Agents', 'gen ai', 'chat ui', 'chatbot ui', 'openai', 'copilot', 'langchain', 'conversational ai']
 description = "Build Conversational AI."
 authors = ["Chainlit"]
@@ -23,7 +23,7 @@ chainlit = 'chainlit.cli:cli'
 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0.0"
 httpx = ">=0.23.0"
-literalai = "0.0.401"
+literalai = "0.0.500"
 dataclasses_json = "^0.5.7"
 fastapi = "^0.110.1"
 starlette = "^0.37.2"
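
A note on what the patch above changes in practice: the data layer now constructs literalai's AsyncLiteralClient, whose client.api methods are awaited coroutines, and upsert_thread receives the thread identifier through the id keyword instead of thread_id. The snippet below is a minimal, hypothetical sketch of that usage, built only from the calls visible in this diff; the API key, server URL, and thread values are made-up placeholders, not values from the patch.

import asyncio

from literalai import AsyncLiteralClient


async def main() -> None:
    # Placeholder credentials: the real data layer receives api_key and
    # server from Chainlit's configuration (see the LITERAL_API_URL note
    # in the CHANGELOG hunk above).
    client = AsyncLiteralClient(api_key="my-api-key", url="https://my-literal-server")

    # With the async client the API call is awaited, and the thread
    # identifier is passed as `id` (this patch renames the old
    # `thread_id` keyword in the data layer's update_thread).
    await client.api.upsert_thread(
        id="example-thread-id",
        name="Example thread",
        participant_id="example-user-id",
        metadata={"environment": "demo"},
    )


if __name__ == "__main__":
    asyncio.run(main())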