Revert "not yet fully working"
This reverts commit c1cca76.
thomashacker committed Apr 17, 2024
1 parent c1cca76 commit 84dec39
Showing 5 changed files with 14 additions and 208 deletions.
166 changes: 0 additions & 166 deletions goldenverba/components/generation/GeminiGenerator.py

This file was deleted.
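The deleted file is not reproduced here, but the rest of this diff shows the contract a Verba generator has to satisfy: it is registered by name in GeneratorManager and exposes an async generate_stream(queries, contexts, conversation) that yields chunks carrying "message" and "finish_reason" keys. The following is a minimal, self-contained stand-in illustrating that contract; it is not the removed Gemini code, and the class name and echo behaviour are invented for illustration.

# Hypothetical stand-in for a Verba generator; NOT the deleted GeminiGenerator.
# It only illustrates the streaming contract visible at the call sites in
# api.py and verba_manager.py: yield dicts with "message" and "finish_reason".
import asyncio


class EchoGenerator:
    """Streams the first query back word by word, then signals completion."""

    async def generate_stream(self, queries: list, contexts: list, conversation: list):
        words = queries[0].split()
        for i, word in enumerate(words):
            yield {
                "message": word + " ",
                # Only the final chunk reports "stop", mirroring the check in api.py.
                "finish_reason": "stop" if i == len(words) - 1 else "",
            }


async def _demo() -> None:
    full_text = ""
    async for chunk in EchoGenerator().generate_stream(["hello streaming world"], [], []):
        full_text += chunk["message"]
    print(full_text.strip())


if __name__ == "__main__":
    asyncio.run(_demo())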

2 changes: 0 additions & 2 deletions goldenverba/components/generation/manager.py
@@ -8,7 +8,6 @@
from goldenverba.components.generation.GPT4Generator import GPT4Generator
from goldenverba.components.generation.interface import Generator
from goldenverba.components.generation.Llama2Generator import Llama2Generator
from goldenverba.components.generation.GeminiGenerator import GeminiGenerator


class GeneratorManager:
@@ -18,7 +17,6 @@ def __init__(self):
"GPT3Generator": GPT3Generator(),
"CohereGenerator": CohereGenerator(),
"Llama2Generator": Llama2Generator(),
"GeminiGenerator": GeminiGenerator(),
}
self.selected_generator: Generator = self.generators["GPT3Generator"]

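With the revert applied, the import and the dictionary entry above were the only two places where the Gemini backend was wired into GeneratorManager. A rough, hypothetical sketch of that name-to-instance registry pattern follows; the class names and the select helper are placeholders, not the project's actual API.

# Illustrative sketch of a generator registry; all names are assumptions.
class DummyGenerator:
    async def generate_stream(self, queries, contexts, conversation):
        yield {"message": "example answer", "finish_reason": "stop"}


class SimpleGeneratorManager:
    def __init__(self):
        # One entry per available backend; adding or removing a backend means
        # adding or removing an import plus one entry here, which is exactly
        # the shape of the change this commit reverts.
        self.generators = {"DummyGenerator": DummyGenerator()}
        self.selected_generator = self.generators["DummyGenerator"]

    def select(self, name: str) -> None:
        # Hypothetical selection helper; the real class may expose a different method.
        self.selected_generator = self.generators[name]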
4 changes: 2 additions & 2 deletions goldenverba/server/api.py
@@ -600,7 +600,7 @@ async def websocket_generate_stream(websocket: WebSocket):
[payload.query], [payload.context], payload.conversation
):
full_text += chunk["message"]
if chunk["finish_reason"] == "stop" or chunk["finish_reason"] == "STOP":
if chunk["finish_reason"] == "stop":
chunk["full_text"] = full_text
await websocket.send_json(chunk)
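This hunk restores the original termination check: chunks are accumulated into full_text, and the chunk whose finish_reason equals lowercase "stop" gets the accumulated text attached before being sent over the websocket; the uppercase "STOP" comparison that the reverted commit had introduced is dropped. Below is a simplified, dependency-free sketch of that loop, with an invented chunk source standing in for a generator's generate_stream.

# Simplified sketch of the accumulation logic in websocket_generate_stream.
# fake_chunk_stream and _print_json are invented stand-ins for illustration.
import asyncio


async def fake_chunk_stream():
    yield {"message": "Hello ", "finish_reason": ""}
    yield {"message": "world", "finish_reason": "stop"}


async def _print_json(payload: dict) -> None:
    print(payload)


async def stream_answer(send_json) -> None:
    full_text = ""
    async for chunk in fake_chunk_stream():
        full_text += chunk["message"]
        if chunk["finish_reason"] == "stop":  # uppercase "STOP" no longer matches
            chunk["full_text"] = full_text
        await send_json(chunk)


if __name__ == "__main__":
    asyncio.run(stream_answer(_print_json))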

@@ -613,7 +613,7 @@ async def websocket_generate_stream(websocket: WebSocket):
await websocket.send_json(
{"message": e, "finish_reason": "stop", "full_text": e}
)
msg.good("Successfully streamed answer")
msg.good("Succesfully streamed answer")


# Retrieve auto complete suggestions based on user input
49 changes: 12 additions & 37 deletions goldenverba/verba_manager.py
@@ -177,7 +177,7 @@ def setup_client(self):
openai.api_version = os.getenv("OPENAI_API_VERSION")

if os.getenv("OPENAI_API_TYPE") == "azure":
openai_header_key_name = "X-Azure-Api-Key"
openai_header_key_name = "X-Azure-Api-Key"

if openai_key != "":
additional_header[openai_header_key_name] = openai_key
@@ -273,12 +273,6 @@ def verify_installed_libraries(self) -> None:
self.installed_libraries["openai"] = True
except Exception:
self.installed_libraries["openai"] = False
try:
import vertexai

self.installed_libraries["google-cloud-aiplatform"] = True
except Exception:
self.installed_libraries["google-cloud-aiplatform"] = False

try:
import cohere
@@ -362,7 +356,7 @@ def verify_variables(self) -> None:
self.environment_variables["LLAMA2-7B-CHAT-HF"] = True
else:
self.environment_variables["LLAMA2-7B-CHAT-HF"] = False

# OpenAI API Type, should be set to "azure" if using Azure OpenAI
if os.environ.get("OPENAI_API_TYPE", "") != "":
self.environment_variables["OPENAI_API_TYPE"] = True
@@ -374,50 +368,33 @@
self.environment_variables["OPENAI_API_VERSION"] = True
else:
self.environment_variables["OPENAI_API_VERSION"] = False
# OpenAI API Version
if os.environ.get("OPENAI_API_VERSION", "") != "":
self.environment_variables["OPENAI_API_VERSION"] = True
else:
self.environment_variables["OPENAI_API_VERSION"] = False
# OpenAI API Version
if os.environ.get("GOOGLE_CLOUD_PROJECT", "") != "":
self.environment_variables["GOOGLE_CLOUD_PROJECT"] = True
else:
self.environment_variables["GOOGLE_CLOUD_PROJECT"] = False
# OpenAI API Version
if os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "") != "":
self.environment_variables["GOOGLE_APPLICATION_CREDENTIALS"] = True
else:
self.environment_variables["GOOGLE_APPLICATION_CREDENTIALS"] = False

# Azure openai ressource name, mandatory when using Azure, should be XXX when endpoint is https://XXX.openai.azure.com
if os.environ.get("AZURE_OPENAI_RESOURCE_NAME", "") != "":
self.environment_variables["AZURE_OPENAI_RESOURCE_NAME"] = True
else:
self.environment_variables["AZURE_OPENAI_RESOURCE_NAME"] = False

# Model used for embeddings. mandatory when using Azure. Typically "text-embedding-ada-002"
#Model used for embeddings. mandatory when using Azure. Typically "text-embedding-ada-002"
if os.environ.get("AZURE_OPENAI_EMBEDDING_MODEL", "") != "":
self.environment_variables["AZURE_OPENAI_EMBEDDING_MODEL"] = True
else:
self.environment_variables["AZURE_OPENAI_EMBEDDING_MODEL"] = False

# Model used for queries. mandatory when using Azure, but can also be used to change the model used for queries when using OpenAI.
#Model used for queries. mandatory when using Azure, but can also be used to change the model used for queries when using OpenAI.
if os.environ.get("OPENAI_MODEL", "") != "":
self.environment_variables["OPENAI_MODEL"] = True
else:
self.environment_variables["OPENAI_MODEL"] = False

if os.environ.get("OPENAI_API_TYPE", "") == "azure":
if not (
self.environment_variables["OPENAI_BASE_URL"]
and self.environment_variables["AZURE_OPENAI_RESOURCE_NAME"]
and self.environment_variables["AZURE_OPENAI_EMBEDDING_MODEL"]
and self.environment_variables["OPENAI_MODEL"]
if os.environ.get("OPENAI_API_TYPE", "")=="azure":
if not(
self.environment_variables["OPENAI_BASE_URL"] and
self.environment_variables["AZURE_OPENAI_RESOURCE_NAME"] and
self.environment_variables["AZURE_OPENAI_EMBEDDING_MODEL"] and
self.environment_variables["OPENAI_MODEL"]
):
raise EnvironmentError(
"Missing environment variables. When using Azure OpenAI, you need to set OPENAI_BASE_URL, AZURE_OPENAI_RESOURCE_NAME, AZURE_OPENAI_EMBEDDING_MODEL and OPENAI_MODEL. Please check documentation."
)
raise EnvironmentError("Missing environment variables. When using Azure OpenAI, you need to set OPENAI_BASE_URL, AZURE_OPENAI_RESOURCE_NAME, AZURE_OPENAI_EMBEDDING_MODEL and OPENAI_MODEL. Please check documentation.")

def get_schemas(self) -> dict:
"""
@@ -622,9 +599,7 @@ async def generate_stream_answer(

else:
full_text = ""
async for (
result
) in self.generator_manager.selected_generator.generate_stream(
async for result in self.generator_manager.selected_generator.generate_stream(
queries, contexts, conversation
):
full_text += result["message"]
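In the verify_variables hunk above, the restored Azure branch requires OPENAI_BASE_URL, AZURE_OPENAI_RESOURCE_NAME, AZURE_OPENAI_EMBEDDING_MODEL and OPENAI_MODEL whenever OPENAI_API_TYPE is set to "azure", while the Google Cloud checks (GOOGLE_CLOUD_PROJECT, GOOGLE_APPLICATION_CREDENTIALS) are removed along with the Gemini generator. The sketch below shows an Azure-flavoured environment that would pass that check; every value is a placeholder.

# Placeholder environment for the Azure OpenAI path; substitute your own
# resource name, deployment names and API version before use.
import os

os.environ["OPENAI_API_TYPE"] = "azure"
os.environ["OPENAI_API_VERSION"] = "2023-05-15"  # assumed version string
os.environ["OPENAI_BASE_URL"] = "https://my-resource.openai.azure.com"  # placeholder endpoint
os.environ["AZURE_OPENAI_RESOURCE_NAME"] = "my-resource"  # the XXX in https://XXX.openai.azure.com
os.environ["AZURE_OPENAI_EMBEDDING_MODEL"] = "text-embedding-ada-002"
os.environ["OPENAI_MODEL"] = "gpt-4"  # model/deployment used for queries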
1 change: 0 additions & 1 deletion setup.py
@@ -34,7 +34,6 @@
"click==8.1.7",
"asyncio",
"tiktoken==0.5.1",
"google-cloud-aiplatform==1.47.0",
"cohere==4.33",
"requests",
"pypdf2",
