From 73c6a2aeb0006a6241e687f7db2b2d5b896cf71a Mon Sep 17 00:00:00 2001
From: w_xiaolizu
Date: Fri, 21 Apr 2023 17:09:49 +0800
Subject: [PATCH] Add empty-input check for the basic function area
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 __main__.py                 |  1 -
 crazy_functions/辅助回答.py |  6 +++---
 toolbox.py                  | 12 ++++++++----
 3 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/__main__.py b/__main__.py
index 09df8e6..b464695 100644
--- a/__main__.py
+++ b/__main__.py
@@ -96,7 +96,6 @@ class ChatBot(ChatBotFrame):
     def draw_function_chat(self):
         with gr.Tab('Function'):
             with gr.Accordion("基础功能区", open=True) as self.area_basic_fn:
-                gr.Markdown('> 以下功能依赖输入区内容')
                 with gr.Row():
                     for k in functional:
                         variant = functional[k]["Color"] if "Color" in functional[k] else "secondary"
diff --git a/crazy_functions/辅助回答.py b/crazy_functions/辅助回答.py
index bbe548a..fe3eb3e 100644
--- a/crazy_functions/辅助回答.py
+++ b/crazy_functions/辅助回答.py
@@ -12,10 +12,10 @@ from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_
 def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
     if txt:
         show_say = txt
-        prompt = txt+'\nAfter answering the questions, list three more questions that users may ask.'
+        prompt = txt+'\n回答完问题后,再列出用户可能提出的三个问题。'
     else:
-        prompt = history[-1]+"\nAnalyze the above answers and list three more questions that users may ask."
-        show_say = 'Analyze the above answers and list three more questions that users may ask.'
+        prompt = history[-1]+"\n分析上述回答,再列出用户可能提出的三个问题。"
+        show_say = '分析上述回答,再列出用户可能提出的三个问题。'
     gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
         inputs=prompt, inputs_show_user=show_say,
diff --git a/toolbox.py b/toolbox.py
index c19f818..96308ba 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -32,10 +32,8 @@ def ArgsGeneralWrapper(f):
     装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
     """
     def decorated(cookies, max_length, llm_model, txt, top_p, temperature,
-                  chatbot, history, system_prompt, models, ipaddr:gr.Request, *args):
+                  chatbot, history, system_prompt, models, ipaddr: gr.Request, *args):
         """"""
-        txt_passon = txt
-        if 'input加密' in models: txt_passon = func_box.encryption_str(txt)
         # 引入一个有cookie的chatbot
         cookies.update({
             'top_p':top_p,
@@ -54,6 +52,13 @@ def ArgsGeneralWrapper(f):
         }
         chatbot_with_cookie = ChatBotWithCookies(cookies)
         chatbot_with_cookie.write_list(chatbot)
+        txt_passon = txt
+        if 'input加密' in models: txt_passon = func_box.encryption_str(txt)
+        if txt_passon == '' and len(args) > 1:
+            msgs = '### Warning 输入框为空\n' \
+                   'tips: 使用基础功能时,请在输入栏内输入需要处理的文本内容'
+            yield from update_ui(chatbot=chatbot_with_cookie, history=history, msg=msgs) # 刷新界面
+            return
         yield from f(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, *args)
     return decorated
 
@@ -106,7 +111,6 @@ def HotReload(f):
             yield from f_hot_reload(*args, **kwargs)
     return decorated
 
-
 ####################################### 其他小工具 #####################################
 
 def get_reduce_token_percent(text):
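
Note (not part of the patch): the guard added to ArgsGeneralWrapper is easy to lose inside the decorator plumbing, so below is a minimal stand-alone sketch of its behaviour, written under the assumption that the basic-function buttons pass extra positional arguments (hence the len(args) > 1 test). _stub_update_ui and _stub_handler are hypothetical stand-ins for toolbox.update_ui and the wrapped chat function f; only the guard itself mirrors the diff above.

# Minimal sketch of the new empty-input guard; stubs replace the real Gradio/UI plumbing.
def _stub_update_ui(chatbot, history, msg):
    # stand-in for toolbox.update_ui: the real one refreshes the Gradio interface
    yield chatbot, history, msg

def _stub_handler(txt, chatbot, history, *args):
    # stand-in for the wrapped chat function f
    yield chatbot + [(txt, 'model reply to: ' + txt)], history, 'ok'

def guarded(txt, chatbot, history, *args):
    txt_passon = txt
    if txt_passon == '' and len(args) > 1:   # assumed: basic-function buttons pass extra args
        msgs = '### Warning 输入框为空\n' \
               'tips: 使用基础功能时,请在输入栏内输入需要处理的文本内容'
        yield from _stub_update_ui(chatbot=chatbot, history=history, msg=msgs)
        return                               # stop before the model is ever called
    yield from _stub_handler(txt_passon, chatbot, history, *args)

if __name__ == '__main__':
    # empty input plus extra args: only the warning is yielded, the handler never runs
    print(list(guarded('', [], [], 'Summarise', 'secondary')))
    # non-empty input passes straight through to the handler
    print(list(guarded('你好', [], [], 'Summarise', 'secondary')))

In the real patch the check runs after the optional 'input加密' encryption step, since the txt_passon assignment is moved below the ChatBotWithCookies setup, so the warning is based on whatever text would actually be forwarded to the wrapped function.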