Skip to content

Commit

Permalink
Improve pull-failure verbosity and chat-completion error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
artitw committed Sep 23, 2024
1 parent 5becfa0 commit 30bc90e
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 13 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

setuptools.setup(
name="text2text",
version="1.5.4",
version="1.5.6",
author="artitw",
author_email="[email protected]",
description="Text2Text: Crosslingual NLP/G toolkit",
Expand Down
26 changes: 14 additions & 12 deletions text2text/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def __init__(self, **kwargs):
self.model_name = kwargs.get("model_name", "llama3.1")
self.load_model()
self.client = ollama.Client(host=self.model_url)
self.llama_index_client = Ollama(model=self.model_name, request_timeout=120.0)
self.structured_client = Ollama(model=self.model_name, request_timeout=120.0)

def __del__(self):
  """Best-effort cleanup: delete the pulled model from the Ollama server.

  A finalizer must never raise: __del__ can run during interpreter
  shutdown, when the `ollama` module or its network stack may already be
  partially torn down, and any exception here is unraisable noise.
  """
  try:
    ollama.delete(self.model_name)
  except Exception:
    pass  # cleanup is best-effort; never propagate from a destructor
Expand All @@ -40,20 +40,22 @@ def load_model(self):

result = ollama.pull(self.model_name)
if result["status"] != "success":
print(f"Cannot pull {self.model_name}.")
print(f"Did not pull {self.model_name}.")

time.sleep(10)

def chat_completion(self, messages=None, stream=False, schema=None, **kwargs):
  """Run a chat completion against the local Ollama server.

  Args:
    messages: Chat history as a list of {"role": ..., "content": ...}
      dicts. Defaults to a single "hello" user message. (Default is built
      per call to avoid the mutable-default-argument pitfall.)
    stream: If True, ask the client for a streaming response.
    schema: Optional structured-output schema; when given, the structured
      client is used and the raw structured result is returned.
    **kwargs: Accepted for forward compatibility; forwarded on retry.

  Returns:
    The client chat response, or the `.raw` structured result when a
    schema is supplied.

  Raises:
    RuntimeError: if no model is running and pulling it does not succeed.
      (Subclass of Exception, so existing `except Exception` callers
      still work.)
  """
  if messages is None:
    messages = [{"role": "user", "content": "hello"}]

  # No model currently running: pull it once, wait for the server to
  # register it, then retry this call.
  if not ollama.ps():
    result = ollama.pull(self.model_name)
    if result["status"] != "success":
      raise RuntimeError(f"Cannot pull {self.model_name}. Try restarting.")
    time.sleep(10)  # give the Ollama server time to load the model
    # NOTE: forward `schema` explicitly — it is a named parameter, so it
    # is not captured by **kwargs and would otherwise be lost on retry.
    return self.chat_completion(
      messages=messages, stream=stream, schema=schema, **kwargs
    )

  if schema:
    msgs = [ChatMessage(**m) for m in messages]
    return self.structured_client.as_structured_llm(schema).chat(messages=msgs).raw
  return self.client.chat(model=self.model_name, messages=messages, stream=stream)

def embed(self, texts):
  """Return embeddings for *texts* from the configured Ollama model."""
  response = ollama.embed(model=self.model_name, input=texts)
  return response
Expand Down

0 comments on commit 30bc90e

Please sign in to comment.