[Feature]: allow model mutex override in core_functional.py (#1708)
* allow_core_func_specify_model

* change arg name

* model override supports hot reload & raise an error when the override points to a nonexistent model

* allow model mutex override

---------

Co-authored-by: binary-husky <qingxu.fu@outlook.com>
awwaawwa and binary-husky committed May 17, 2024
1 parent 881a596 commit b7eb9ab
Showing 2 changed files with 37 additions and 7 deletions.
2 changes: 2 additions & 0 deletions core_functional.py
@@ -33,6 +33,8 @@ def get_core_functions():
"AutoClearHistory": False,
# [6] Text preprocessing (optional; default None; e.g. a function that strips all newline characters)
"PreProcess": None,
# [7] Model selection (optional. If unset, the current global model is used; if set, the specified model overrides the global one.)
# "ModelOverride": "gpt-3.5-turbo", # Main use: force the specified model whenever this core-function button is clicked.
},
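For illustration, a complete entry using the new field might look like the sketch below. The "SummarizeDoc" button name and its prompt text are hypothetical (only the "ModelOverride" key is introduced by this commit), and the "Prefix"/"Suffix" keys follow the existing convention in this file:

"SummarizeDoc": {  # hypothetical button name, for illustration only
    "Prefix": "Summarize the following text in three sentences:\n\n",
    "Suffix": "",
    # clicks on this button always run on gpt-3.5-turbo,
    # regardless of the globally selected model
    "ModelOverride": "gpt-3.5-turbo",
},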


42 changes: 35 additions & 7 deletions request_llms/bridge_all.py
@@ -906,6 +906,13 @@ def decode(self, *args, **kwargs):
AVAIL_LLM_MODELS += [azure_model_name]


# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=
# -=-=-=-=-=-=-=-=-=- ☝️ Above: model routing -=-=-=-=-=-=-=-=-=
# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=

# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=
# -=-=-=-=-=-=-= 👇 Below: multi-model routing & dispatch functions -=-=-=-=-=-=-=
# -=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=--=-=-=-=-=-=-=-=


def LLM_CATCH_EXCEPTION(f):
@@ -942,13 +949,11 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list, sys
model = llm_kwargs['llm_model']
n_model = 1
if '&' not in model:

# If only 1 LLM is queried:
# If only "one" LLM is queried (the common case):
method = model_info[model]["fn_without_ui"]
return method(inputs, llm_kwargs, history, sys_prompt, observe_window, console_slience)
else:

# If several LLMs are queried at once: slightly more verbose, but the idea is the same; you need not read this else branch
# If "several" LLMs are queried at once: slightly more verbose, but the idea is the same; you need not read this else branch
executor = ThreadPoolExecutor(max_workers=4)
models = model.split('&')
n_model = len(models)
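Aside: this '&' fan-out is the "model mutex" mode the commit title refers to. A minimal self-contained sketch of the dispatch pattern, with a placeholder worker standing in for the real model_info[model]["fn_without_ui"] handlers and purely illustrative model names:

from concurrent.futures import ThreadPoolExecutor

def fan_out(model_string, inputs):
    # e.g. "gpt-3.5-turbo&chatglm" queries both backends concurrently
    models = model_string.split('&')
    executor = ThreadPoolExecutor(max_workers=4)
    def ask(model):
        # placeholder worker; the real code dispatches to fn_without_ui
        return f"[{model}] reply to: {inputs}"
    futures = [executor.submit(ask, m) for m in models]
    # join the per-model answers, as the real code does further down
    return '<br/><br/>\n\n---\n\n'.join(f.result() for f in futures)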
@@ -1001,8 +1006,26 @@ def mutex_manager(window_mutex, observe_window):
res = '<br/><br/>\n\n---\n\n'.join(return_string_collect)
return res


def predict(inputs:str, llm_kwargs:dict, *args, **kwargs):
# Adjust the model according to the ModelOverride field in the core-function area; used inside `predict`
import importlib
import core_functional
def execute_model_override(llm_kwargs, additional_fn, method):
functional = core_functional.get_core_functions()
if 'ModelOverride' in functional[additional_fn]:
# Hot-reload Prompt & ModelOverride
importlib.reload(core_functional)
functional = core_functional.get_core_functions()
model_override = functional[additional_fn]['ModelOverride']
if model_override not in model_info:
raise ValueError(f"ModelOverride '{model_override}' points to a model that is not currently supported; please check the configuration file.")
method = model_info[model_override]["fn_with_ui"]
llm_kwargs['llm_model'] = model_override
return llm_kwargs, additional_fn, method
# By default, return the original arguments unchanged
return llm_kwargs, additional_fn, method

def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot,
history:list=[], system_prompt:str='', stream:bool=True, additional_fn:str=None):
"""
Send the query to the LLM and fetch the output as a stream.
Used for basic chat functionality.
@@ -1021,6 +1044,11 @@ def predict(inputs:str, llm_kwargs:dict, *args, **kwargs):
"""

inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm")

method = model_info[llm_kwargs['llm_model']]["fn_with_ui"] # if this line raises, check the AVAIL_LLM_MODELS option in your config
yield from method(inputs, llm_kwargs, *args, **kwargs)

if additional_fn: # adjust the model according to the core-function area's ModelOverride field
llm_kwargs, additional_fn, method = execute_model_override(llm_kwargs, additional_fn, method)

yield from method(inputs, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, stream, additional_fn)
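End to end, the override added here behaves like the following self-contained sketch. The two-entry model_info table, the print-lambda handlers, and the "SummarizeDoc" key are illustrative stand-ins; the real code additionally calls importlib.reload(core_functional) first, so that edits to core_functional.py take effect without restarting the app:

# illustrative stand-ins for model_info (bridge_all.py) and
# core_functional.get_core_functions()
model_info = {
    "gpt-4":         {"fn_with_ui": lambda: print("handled by gpt-4")},
    "gpt-3.5-turbo": {"fn_with_ui": lambda: print("handled by gpt-3.5-turbo")},
}
functional = {"SummarizeDoc": {"ModelOverride": "gpt-3.5-turbo"}}

def resolve(llm_kwargs, additional_fn):
    # simplified version of the execute_model_override logic above
    method = model_info[llm_kwargs['llm_model']]["fn_with_ui"]
    if additional_fn and 'ModelOverride' in functional.get(additional_fn, {}):
        model_override = functional[additional_fn]['ModelOverride']
        if model_override not in model_info:
            raise ValueError(f"ModelOverride '{model_override}' is not a supported model")
        method = model_info[model_override]["fn_with_ui"]
        llm_kwargs['llm_model'] = model_override
    return method, llm_kwargs

method, kw = resolve({'llm_model': 'gpt-4'}, 'SummarizeDoc')
method()  # prints "handled by gpt-3.5-turbo": the button's override wins over the global model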
