[Perf] Optimize the Function Calling workflow #532

Closed
wants to merge 8 commits into from
5 changes: 5 additions & 0 deletions config-template.py
@@ -239,6 +239,11 @@
"size": "256x256", # 图片尺寸,支持256x256, 512x512, 1024x1024
}

# Trace function calls
# When True, a reply is sent to the user each time GPT performs a Function Call
# Additionally, every Function Call and plain reply within a single query is sent to the user as a separate message
trace_function_calls = True

# Whether to quote the original message when replying in a group
quote_origin = True

1 change: 1 addition & 0 deletions override-all.json
@@ -65,6 +65,7 @@
"image_api_params": {
"size": "256x256"
},
"trace_function_calls": true,
"quote_origin": true,
"at_sender": false,
"include_image_description": true,
18 changes: 11 additions & 7 deletions pkg/openai/api/chat_completion.py
@@ -30,7 +30,7 @@ def flush_pending_msg(self):
)
self.pending_msg = ""

def append_message(self, role: str, content: str, name: str=None):
def append_message(self, role: str, content: str, name: str=None, function_call: dict=None):
msg = {
"role": role,
"content": content
@@ -39,6 +39,9 @@ def append_message(self, role: str, content: str, name: str=None):
if name is not None:
msg['name'] = name

if function_call is not None:
msg['function_call'] = function_call

self.messages.append(msg)

def __init__(
@@ -87,16 +90,17 @@ def __next__(self) -> dict:
choice0 = resp["choices"][0]

# If this is not a function call and finish_reason is 'stop', stop iterating
if 'function_call' not in choice0['message']: # and choice0["finish_reason"] == "stop"
if choice0['finish_reason'] == 'stop': # and choice0["finish_reason"] == "stop"
self.stopped = True

if 'function_call' in choice0['message']:
self.pending_func_call = choice0['message']['function_call']

# self.append_message(
# role="assistant",
# content="function call: "+json.dumps(self.pending_func_call, ensure_ascii=False)
# )
self.append_message(
role="assistant",
content=choice0['message']['content'],
function_call=choice0['message']['function_call']
)

return {
"id": resp["id"],
Expand All @@ -106,7 +110,7 @@ def __next__(self) -> dict:
"message": {
"role": "assistant",
"type": "function_call",
"content": None,
"content": choice0['message']['content'],
"function_call": choice0['message']['function_call']
},
"finish_reason": "function_call"
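For orientation, the sketch below (illustrative only, not part of the diff) shows the shape of the item `__next__` now yields when the model requests a function call: the assistant's partial text travels in `content` instead of being forced to `None`, and the same message, including `function_call`, is also appended back into the conversation via `append_message`. The function name and arguments are hypothetical.

```python
# Hypothetical example of the dict yielded by __next__ for a function-call turn
# after this change; values such as get_weather are made up for illustration.
function_call_item = {
    "id": "chatcmpl-xxxx",
    "choices": [
        {
            "index": 0,
            "message": {
                "role": "assistant",
                "type": "function_call",
                "content": "Let me look that up.",    # previously hard-coded to None
                "function_call": {
                    "name": "get_weather",            # hypothetical function name
                    "arguments": "{\"city\": \"Beijing\"}",
                },
            },
            "finish_reason": "function_call",
        }
    ],
}

# Downstream, session.query() branches on message["type"] and message["content"].
print(function_call_item["choices"][0]["message"]["function_call"]["name"])
```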
1 change: 1 addition & 0 deletions pkg/openai/modelmgr.py
@@ -112,6 +112,7 @@ def count_completion_tokens(messages: list, model: str) -> int:


def count_tokens(messages: list, model: str):

if model in CHAT_COMPLETION_MODELS:
return count_chat_completion_tokens(messages, model)
elif model in COMPLETION_MODELS:
50 changes: 40 additions & 10 deletions pkg/openai/session.py
@@ -194,7 +194,7 @@ def expire_check_timer_loop(self, create_timestamp: int):

# Request a reply
# This function is blocking
def append(self, text: str=None) -> tuple[str, str, list[str]]:
def query(self, text: str=None) -> tuple[str, str, list[str]]:
"""向session中添加一条消息,返回接口回复

Args:
@@ -255,29 +255,59 @@ def append(self, text: str=None) -> tuple[str, str, list[str]]:

funcs = []

trace_func_calls = config.trace_function_calls
botmgr = pkg.utils.context.get_qqbot_manager()

session_name_spt: list[str] = self.name.split("_")

pending_res_text = ""

# TODO Sorry, I know this is a huge mess, but I will refactor it later
for resp in pkg.utils.context.get_openai_manager().request_completion(prompts):

if pending_res_text != "":
botmgr.adapter.send_message(
session_name_spt[0],
session_name_spt[1],
pending_res_text
)
pending_res_text = ""

finish_reason = resp['choices'][0]['finish_reason']

if resp['choices'][0]['message']['type'] == 'text': # plain reply
res_text += resp['choices'][0]['message']['content']
if resp['choices'][0]['message']['role'] == "assistant" and resp['choices'][0]['message']['content'] != None: # contains a plain-text response

if not trace_func_calls:
res_text += resp['choices'][0]['message']['content'] + "\n"
else:
res_text = resp['choices'][0]['message']['content']
pending_res_text = resp['choices'][0]['message']['content']

total_tokens += resp['usage']['total_tokens']

pending_msgs.append(
{
"role": "assistant",
"content": resp['choices'][0]['message']['content']
}
)
msg = {
"role": "assistant",
"content": resp['choices'][0]['message']['content']
}

if 'function_call' in resp['choices'][0]['message']:
msg['function_call'] = json.dumps(resp['choices'][0]['message']['function_call'])

pending_msgs.append(msg)

elif resp['choices'][0]['message']['type'] == 'function_call':
if resp['choices'][0]['message']['type'] == 'function_call':
# self.prompt.append(
# {
# "role": "assistant",
# "content": "function call: "+json.dumps(resp['choices'][0]['message']['function_call'])
# }
# )
if trace_func_calls:
botmgr.adapter.send_message(
session_name_spt[0],
session_name_spt[1],
"调用函数 "+resp['choices'][0]['message']['function_call']['name'] + "..."
)

total_tokens += resp['usage']['total_tokens']
elif resp['choices'][0]['message']['type'] == 'function_return':
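In outline, the reworked loop in `query()` behaves as sketched below (a simplified illustration, not the actual implementation; `send` stands in for `botmgr.adapter.send_message(session_name_spt[0], session_name_spt[1], ...)` and `completions` for `request_completion(prompts)`). With `trace_function_calls` enabled, each intermediate text chunk is flushed to the user before the next iteration and every function call produces its own notification; with it disabled, all text chunks are concatenated into a single reply.

```python
# Simplified sketch of the new dispatch flow in Session.query().
def dispatch(completions, trace_func_calls: bool, send) -> str:
    res_text = ""          # accumulated reply text when tracing is off
    pending_res_text = ""  # last text chunk, flushed eagerly when tracing is on

    for resp in completions:
        # flush the previous text chunk before handling the next response
        if pending_res_text:
            send(pending_res_text)
            pending_res_text = ""

        message = resp['choices'][0]['message']

        # plain-text part of the assistant reply (may coexist with a function call)
        if message['role'] == 'assistant' and message['content'] is not None:
            if trace_func_calls:
                pending_res_text = message['content']
            else:
                res_text += message['content'] + "\n"

        # announce the function being invoked when tracing is enabled
        if message['type'] == 'function_call' and trace_func_calls:
            send("调用函数 " + message['function_call']['name'] + "...")

    # the real query() also records pending_msgs and token usage (omitted here)
    return res_text
```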
27 changes: 0 additions & 27 deletions pkg/qqbot/cmds/session/continue.py

This file was deleted.

4 changes: 2 additions & 2 deletions pkg/qqbot/message.py
@@ -40,7 +40,7 @@ def process_normal_message(text_message: str, mgr, config, launcher_type: str,
try:
prefix = "[GPT]" if config.show_prefix else ""

text, finish_reason, funcs = session.append(text_message)
text, finish_reason, funcs = session.query(text_message)

# 触发插件事件
args = {
@@ -68,7 +68,7 @@ def process_normal_message(text_message: str, mgr, config, launcher_type: str,
except openai.error.APIConnectionError as e:
err_msg = str(e)
if err_msg.__contains__('Error communicating with OpenAI'):
reply = handle_exception("{}会话调用API失败:{}\n请尝试关闭网络代理来解决此问题。".format(session_name, e),
reply = handle_exception("{}会话调用API失败:{}\n您的网络无法访问OpenAI接口或网络代理不正常".format(session_name, e),
"[bot]err:调用API失败,请重试或联系管理员,或等待修复")
else:
reply = handle_exception("{}会话调用API失败:{}".format(session_name, e), "[bot]err:调用API失败,请重试或联系管理员,或等待修复")
1 change: 0 additions & 1 deletion res/templates/cmdpriv-template.json
@@ -8,7 +8,6 @@
"plugin.del": 2,
"plugin.off": 2,
"plugin.on": 2,
"continue": 1,
"default": 1,
"default.set": 2,
"del": 1,
1 change: 0 additions & 1 deletion res/wiki/功能使用.md
@@ -180,7 +180,6 @@
!draw <prompt> generate an image
!version show the current version and check for updates
!resend regenerate the reply to the previous question
!continue continue an unfinished turn (usually used to let content functions keep being called)
!plugin see the `管理` section of the plugin usage page for usage
!default list the available scenario presets
```