Merge pull request #74 from cubenlp/dev
update async response
RexWzh authored Mar 24, 2024
2 parents 755040c + 49acc50 commit 8a2aabd
Showing 5 changed files with 48 additions and 37 deletions.
2 changes: 1 addition & 1 deletion chattool/__init__.py
@@ -2,7 +2,7 @@
 
 __author__ = """Rex Wang"""
 __email__ = '[email protected]'
-__version__ = '3.1.2'
+__version__ = '3.1.3'
 
 import os, sys, requests
 from .chattype import Chat, Resp
75 changes: 43 additions & 32 deletions chattool/chattype.py
@@ -228,47 +228,23 @@ def getresponse( self
         self._resp = resp
         return resp
 
-    async def async_stream_responses(self, timeout:int=0, textonly:bool=False):
+    async def async_stream_responses( self
+                                    , timeout:int=0
+                                    , textonly:bool=False
+                                    , **options):
         """Post request asynchronously and stream the responses
 
         Args:
             timeout (int, optional): timeout for the API call. Defaults to 0(no timeout).
             textonly (bool, optional): whether to only return the text. Defaults to False.
+            options (dict, optional): other options like `temperature`, `top_p`, etc.
 
         Returns:
             str: response text
         """
-        # TODO: Support other options
-        data = json.dumps({
-            "model" : self.model, "messages" : self.chat_log, "stream":True})
-        headers = {
-            'Content-Type': 'application/json',
-            'Authorization': 'Bearer ' + self.api_key}
-        async with aiohttp.ClientSession() as session:
-            async with session.post(self.chat_url, headers=headers, data=data, timeout=timeout) as response:
-                while True:
-                    line = await response.content.readline()
-                    if not line: break
-                    # strip the prefix of `data: {...}`
-                    strline = line.decode().lstrip('data:').strip()
-                    if strline == '[DONE]': break
-                    # skip empty line
-                    if not strline: continue
-                    # read the json string
-                    try:
-                        # wrap the response
-                        resp = Resp(json.loads(strline))
-                        # stop if the response is finished
-                        if resp.finish_reason == 'stop': break
-                        # deal with the message
-                        if 'content' not in resp.delta: continue
-                        if textonly:
-                            yield resp.delta_content
-                        else:
-                            yield resp
-                    except Exception as e:
-                        print(f"Error: {e}, line: {strline}")
-                        break
+        async for resp in _async_stream_responses(
+                self.api_key, self.chat_url, self.chat_log, self.model, timeout=timeout, **options):
+            yield resp.delta_content if textonly else resp
 
     # Part3: function call
     def iswaiting(self):
@@ -490,3 +466,38 @@ def __eq__(self, chat: object) -> bool:
     def __getitem__(self, index):
         """Get the message at index"""
         return self._chat_log[index]
+
+async def _async_stream_responses( api_key:str
+                                  , chat_url:str
+                                  , chat_log:str
+                                  , model:str
+                                  , timeout:int=0
+                                  , **options):
+    """Post request asynchronously and stream the responses"""
+    options.update({'model':model, 'messages':chat_log, 'stream':True})
+    data = json.dumps(options)
+    headers = {
+        'Content-Type': 'application/json',
+        'Authorization': 'Bearer ' + api_key}
+    async with aiohttp.ClientSession() as session:
+        async with session.post(chat_url, headers=headers, data=data, timeout=timeout) as response:
+            while True:
+                line = await response.content.readline()
+                if not line: break
+                # strip the prefix of `data: {...}`
+                strline = line.decode().lstrip('data:').strip()
+                if strline == '[DONE]': break
+                # skip empty line
+                if not strline: continue
+                # read the json string
+                try:
+                    # wrap the response
+                    resp = Resp(json.loads(strline))
+                    # stop if the response is finished
+                    if resp.finish_reason == 'stop': break
+                    # deal with the message
+                    if 'content' not in resp.delta: continue
+                    yield resp
+                except Exception as e:
+                    print(f"Error: {e}, line: {strline}")
+                    break
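
For illustration only (not part of this commit): a minimal sketch of how the refactored streaming generator might be consumed from caller code, with extra sampling options forwarded through **options. The prompt text and the temperature value are hypothetical, and an API key is assumed to be configured in the environment.

import asyncio
from chattool import Chat

async def main():
    chat = Chat("Explain asyncio in one sentence")  # hypothetical prompt
    # extra keyword arguments such as temperature are forwarded into the request body
    async for delta in chat.async_stream_responses(textonly=True, temperature=0.5):
        print(delta, end='', flush=True)  # with textonly=True each item is a text fragment

asyncio.run(main())

With textonly=False the generator yields Resp objects instead of plain text fragments, matching the delegation in async_stream_responses above.
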
4 changes: 2 additions & 2 deletions chattool/response.py
@@ -67,7 +67,7 @@ def content(self):
     @property
     def function_call(self):
         """Function call"""
-        return self.message['function_call']
+        return self.message.get('function_call')
 
     @property
     def delta(self):
@@ -111,4 +111,4 @@ def error_code(self):
     @property
     def finish_reason(self):
         """Finish reason"""
-        return self.response['choices'][0].get('finish_reason')
+        return self.response['choices'][0].get('finish_reason')
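
For illustration only (not part of this commit): the switch from bracket lookups to dict.get means a payload that lacks these keys now yields None instead of raising KeyError, which is presumably what streamed or function-call-free responses need. A tiny sketch with an invented payload:

choice = {'delta': {'content': 'Hi'}}   # hypothetical streamed choice without 'finish_reason'
# choice['finish_reason']               # a plain lookup would raise KeyError here
print(choice.get('finish_reason'))      # prints None, so callers can test for it safely
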
2 changes: 1 addition & 1 deletion setup.py
@@ -7,7 +7,7 @@
 with open('README.md') as readme_file:
     readme = readme_file.read()
 
-VERSION = '3.1.2'
+VERSION = '3.1.3'
 
 requirements = [
     'Click>=7.0', 'requests>=2.20', "responses>=0.23", 'aiohttp>=3.8',
2 changes: 1 addition & 1 deletion tests/test_async.py
@@ -45,7 +45,7 @@ async def show_resp(chat):
 
 def test_async_typewriter2():
     async def show_resp(chat):
-        async for txt in chat.async_stream_responses(textonly=True):
+        async for txt in chat.async_stream_responses(textonly=True, top_p=0):
            print(txt, end='')
     chat = Chat("Print hello using Python")
     asyncio.run(show_resp(chat))
