[Feature]: Allow basic functions to specify a model #1708

Merged
2 changes: 2 additions & 0 deletions core_functional.py
@@ -33,6 +33,8 @@ def get_core_functions():
"AutoClearHistory": False,
# [6] 文本预处理 (可选参数,默认 None,举例:写个函数移除所有的换行符)
"PreProcess": None,
# [7] 模型选择 (可选参数。如不设置,则使用当前全局模型;如设置,则用指定模型覆盖全局模型。)
# "ModelOverride": "gpt-3.5-turbo", # 主要用途:强制点击此基础功能按钮时,使用指定的模型。
},


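For context, a complete entry in `get_core_functions()` that uses the new key might look like the sketch below. The entry name and prompt text are invented for illustration; only the `ModelOverride` key itself comes from this diff:

```python
# Hypothetical basic-function entry; the name and prompts are examples only.
"PolishAcademicEnglish": {
    "Prefix": "Please polish the following academic paragraph:\n\n",
    "Suffix": "",
    "AutoClearHistory": False,
    "PreProcess": None,
    # Clicking this button always routes to gpt-4, ignoring the global model
    "ModelOverride": "gpt-4",
},
```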
42 changes: 35 additions & 7 deletions request_llms/bridge_all.py
@@ -852,6 +852,13 @@ def decode(self, *args, **kwargs):
AVAIL_LLM_MODELS += [azure_model_name]


# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=
# -=-=-=-=-=-=-=-=-=- ☝️ Model routing ends above -=-=-=-=-=-=-=-=-=
# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=

# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=
# -=-=-=-=-=-= 👇 Multi-model routing/dispatch functions below -=-=-=-=-=-=
# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=


def LLM_CATCH_EXCEPTION(f):
@@ -888,13 +895,11 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list, sys
    model = llm_kwargs['llm_model']
    n_model = 1
    if '&' not in model:
        # Querying just "one" LLM (the common case):
        method = model_info[model]["fn_without_ui"]
        return method(inputs, llm_kwargs, history, sys_prompt, observe_window, console_slience)
    else:
        # Querying "multiple" LLMs at once: slightly more verbose, but the idea is the same; you need not read this else branch
        executor = ThreadPoolExecutor(max_workers=4)
        models = model.split('&')
        n_model = len(models)
@@ -947,8 +952,26 @@ def mutex_manager(window_mutex, observe_window):
        res = '<br/><br/>\n\n---\n\n'.join(return_string_collect)
        return res
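As the `model.split('&')` above implies, a caller can query several models in one request by joining model names with `&`; the joined reply separates each model's answer with a horizontal rule. A hedged usage sketch (the model names are examples only):

```python
# Ask gpt-3.5-turbo and chatglm the same question in parallel.
llm_kwargs['llm_model'] = "gpt-3.5-turbo&chatglm"
reply = predict_no_ui_long_connection(
    inputs="Explain the difference between a list and a tuple.",
    llm_kwargs=llm_kwargs,
    history=[],
    sys_prompt="You are a helpful assistant.",
)
```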


# Adjust the model type according to the ModelOverride parameter set in the basic-function area; used by `predict`
import importlib
import core_functional
def execute_model_override(llm_kwargs, additional_fn, method):
    functional = core_functional.get_core_functions()
    if 'ModelOverride' in functional[additional_fn]:
        # Hot-reload the prompt & ModelOverride
        importlib.reload(core_functional)
        functional = core_functional.get_core_functions()
        model_override = functional[additional_fn]['ModelOverride']
        if model_override not in model_info:
            raise ValueError(f"The ModelOverride parameter '{model_override}' points to a model that is not yet supported; please check the configuration file.")
        method = model_info[model_override]["fn_with_ui"]
        llm_kwargs['llm_model'] = model_override
        return llm_kwargs, additional_fn, method
    # Otherwise, return the original arguments unchanged
    return llm_kwargs, additional_fn, method
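In effect, when the clicked button's entry carries `ModelOverride`, the helper swaps both the UI handler and `llm_kwargs['llm_model']` for this call. A minimal before/after sketch (the "Translate" button name and its override value are assumptions, not from this diff):

```python
# Suppose core_functional.py defines a "Translate" button with "ModelOverride": "gpt-4".
llm_kwargs = {'llm_model': 'gpt-3.5-turbo'}                 # global model selection
method = model_info['gpt-3.5-turbo']["fn_with_ui"]          # handler for the global model

llm_kwargs, additional_fn, method = execute_model_override(
    llm_kwargs, additional_fn="Translate", method=method)

assert llm_kwargs['llm_model'] == 'gpt-4'                   # overridden for this call only
assert method is model_info['gpt-4']["fn_with_ui"]          # handler re-resolved to match
```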

def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot,
            history:list=[], system_prompt:str='', stream:bool=True, additional_fn:str=None):
"""
发送至LLM,流式获取输出。
用于基础的对话功能。
Expand All @@ -967,6 +990,11 @@ def predict(inputs:str, llm_kwargs:dict, *args, **kwargs):
"""

    inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm")

    method = model_info[llm_kwargs['llm_model']]["fn_with_ui"]  # If this line raises an error, check the AVAIL_LLM_MODELS option in your config

    if additional_fn:  # Adjust the model type according to the ModelOverride parameter from the basic-function area
        llm_kwargs, additional_fn, method = execute_model_override(llm_kwargs, additional_fn, method)

    yield from method(inputs, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, stream, additional_fn)
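A caller-side sketch of the new flow, assuming a Gradio `chatbot` object and a basic-function button named "Summarize" (both names are illustrative):

```python
# Streaming chat with a basic-function button pressed; if that button's entry
# defines ModelOverride, predict transparently switches the model first.
for _ in predict(
        inputs="Long article text ...",
        llm_kwargs={'llm_model': 'gpt-3.5-turbo'},
        plugin_kwargs={},
        chatbot=chatbot,
        history=[],
        system_prompt="You are a helpful assistant.",
        stream=True,
        additional_fn="Summarize"):
    pass  # each yield refreshes the chatbot UI incrementally
```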