From 472b6a88b4771bd2858538bd5ea31a5e7066eb84 Mon Sep 17 00:00:00 2001 From: w_xiaolizu Date: Thu, 20 Apr 2023 16:19:48 +0800 Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E4=B8=8A=E4=BC=A0=E6=96=87?= =?UTF-8?q?=E4=BB=B6=E9=80=BB=E8=BE=91=EF=BC=9A=E4=B8=8D=E5=86=8D=E5=88=A0?= =?UTF-8?q?=E9=99=A4=E6=96=87=E4=BB=B6=E3=80=81=E4=BF=9D=E7=95=99=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E6=96=87=E4=BB=B6=EF=BD=9C=E5=A2=9E=E5=8A=A0=E7=94=A8?= =?UTF-8?q?=E6=88=B7=E8=8E=B7=E5=8F=96=E5=8E=86=E5=8F=B2=E4=B8=8A=E4=BC=A0?= =?UTF-8?q?=E8=AE=B0=E5=BD=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- __main__.py | 154 ++++++++++++--------- auto_functional.py | 6 + crazy_functional.py | 4 +- crazy_functions/理解PDF文档内容.py | 3 +- crazy_functions/{三千问.py => 辅助回答.py} | 0 test.py | 30 +++- toolbox.py | 49 ++++--- 7 files changed, 161 insertions(+), 85 deletions(-) create mode 100644 auto_functional.py rename crazy_functions/{三千问.py => 辅助回答.py} (100%) diff --git a/__main__.py b/__main__.py index b0ef843..8094fee 100644 --- a/__main__.py +++ b/__main__.py @@ -1,9 +1,11 @@ import os + os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染 import gradio as gr from request_llm.bridge_chatgpt import predict -from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, \ - DummyWith +from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_user_upload, get_conf, \ + ArgsGeneralWrapper, DummyWith + # 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到 proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY, AVAIL_LLM_MODELS = \ @@ -61,7 +63,7 @@ if LAYOUT == "TOP-DOWN": CHATBOT_HEIGHT /= 2 cancel_handles = [] -with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=False, css=advanced_css) as demo: +with gr.Blocks(title="ChatGPT For Tester", theme=set_theme, analytics_enabled=False, css=advanced_css) as demo: gr.HTML(title_html) cookies = gr.State({'api_key': API_KEY, 'llm_model': LLM_MODEL}) with gr_L1(): @@ -76,67 +78,96 @@ with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled= status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {proxy_info}") with gr_L2(scale=1): - with gr.Accordion("输入区", open=True) as area_input_primary: - with gr.Row(): - txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False) - with gr.Row(): - submitBtn = gr.Button("提交", variant="primary") - with gr.Row(): - resetBtn = gr.Button("重置", variant="secondary"); - resetBtn.style(size="sm") - stopBtn = gr.Button("停止", variant="secondary"); - stopBtn.style(size="sm") + with gr.Tab('对话模式'): + with gr.Accordion("输入区", open=True) as area_input_primary: + with gr.Row(): + txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False) + with gr.Row(): + submitBtn = gr.Button("提交", variant="primary") + with gr.Row(): + resetBtn = gr.Button("重置", variant="secondary"); + resetBtn.style(size="sm") + stopBtn = gr.Button("停止", variant="secondary"); + stopBtn.style(size="sm") - with gr.Tab('Function'): - with gr.Accordion("基础功能区", open=True) as area_basic_fn: - with gr.Row(): - for k in functional: - variant = functional[k]["Color"] if "Color" in functional[k] else "secondary" - functional[k]["Button"] = gr.Button(k, variant=variant) - with gr.Tab('Public'): + with gr.Tab('Function'): + with gr.Accordion("基础功能区", open=True) as area_basic_fn: + with gr.Row(): + for k in functional: 
+                            variant = functional[k]["Color"] if "Color" in functional[k] else "secondary"
+                            functional[k]["Button"] = gr.Button(k, variant=variant)
+                with gr.Tab('Public'):
+                    with gr.Box():
+                        with gr.Accordion("上传本地文件可供高亮函数插件调用",
+                                          open=False) as area_file_up:
+                            file_upload = gr.Files(label="任何文件, 但推荐上传压缩文件(zip, tar)",
+                                                   file_count="multiple")
+                            file_upload.style()
+                        with gr.Row():
+                            upload_history = gr.Button("Get Upload History", variant="primary")
+                    with gr.Accordion("函数插件区", open=True) as area_crazy_fn:
+                        with gr.Row():
+                            gr.Markdown("注意:以下“高亮”标识的函数插件需从输入区读取路径作为参数.")
+                        with gr.Row():
+                            for k in crazy_fns:
+                                if not crazy_fns[k].get("AsButton", True): continue
+                                variant = crazy_fns[k]["Color"] if "Color" in crazy_fns[k] else "secondary"
+                                crazy_fns[k]["Button"] = gr.Button(k, variant=variant)
+                                crazy_fns[k]["Button"].style(size="sm")
+                        with gr.Row():
+                            with gr.Accordion("更多函数插件", open=True):
+                                dropdown_fn_list = [k for k in crazy_fns.keys() if
+                                                    not crazy_fns[k].get("AsButton", True)]
+                                with gr.Column(scale=1):
+                                    dropdown = gr.Dropdown(dropdown_fn_list, value=r"打开插件列表", label="").style(
+                                        container=False)
+                                with gr.Column(scale=1):
+                                    switchy_bt = gr.Button(r"请先从插件列表中选择", variant="secondary")
+
+                with gr.Tab('Setting'):
+                    with gr.Accordion("展开SysPrompt & 交互界面布局 & Github地址", open=True):
+                        system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt",
+                                                   value=initial_prompt)
+                        top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01, interactive=True,
+                                          label="Top-p (nucleus sampling)", )
+                        temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True,
+                                                label="Temperature", )
+                        max_length_sl = gr.Slider(minimum=256, maximum=4096, value=512, step=1, interactive=True,
+                                                  label="MaxLength", )
+
+                        models_box = gr.CheckboxGroup(["input加密"],
+                                                      value=["input加密"], label="对话模式")
+                        md_dropdown = gr.Dropdown(AVAIL_LLM_MODELS, value=LLM_MODEL, label="更换LLM模型/请求源").style(
+                            container=False)
+
+                gr.Markdown(description)
+            with gr.Tab('Auto-GPT'):
                 with gr.Row():
-                with gr.Accordion("点击展开“文件上传区”。上传本地文件可供高亮函数插件调用。",
-                                  open=False) as area_file_up:
-                    file_upload = gr.Files(label="任何文件, 但推荐上传压缩文件(zip, tar)", file_count="multiple")
-            with gr.Accordion("函数插件区", open=True) as area_crazy_fn:
-                with gr.Row():
-                    gr.Markdown("注意:以下“高亮”标识的函数插件需从输入区读取路径作为参数.")
-                with gr.Row():
-                    for k in crazy_fns:
-                        if not crazy_fns[k].get("AsButton", True): continue
-                        variant = crazy_fns[k]["Color"] if "Color" in crazy_fns[k] else "secondary"
-                        crazy_fns[k]["Button"] = gr.Button(k, variant=variant)
-                        crazy_fns[k]["Button"].style(size="sm")
-                with gr.Row():
-                    with gr.Accordion("更多函数插件", open=True):
-                        dropdown_fn_list = [k for k in crazy_fns.keys() if not crazy_fns[k].get("AsButton", True)]
-                        with gr.Column(scale=1):
-                            dropdown = gr.Dropdown(dropdown_fn_list, value=r"打开插件列表", label="").style(
-                                container=False)
-                        with gr.Column(scale=1):
-                            switchy_bt = gr.Button(r"请先从插件列表中选择", variant="secondary")
-
-        with gr.Tab('Setting'):
-            with gr.Accordion("展开SysPrompt & 交互界面布局 & Github地址", open=True):
-                system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt",
-                                           value=initial_prompt)
-                top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01, interactive=True,
-                                  label="Top-p (nucleus sampling)", )
-                temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True,
-                                        label="Temperature", )
-                max_length_sl = gr.Slider(minimum=256, maximum=4096, value=512, step=1, interactive=True, label="MaxLength",)
-
-                    
models_box = gr.CheckboxGroup(["input加密"],
-                                                      value=["input加密"], label="对话模式")
-                    md_dropdown = gr.Dropdown(AVAIL_LLM_MODELS, value=LLM_MODEL, label="更换LLM模型/请求源").style(
+                    ai_name = gr.Textbox(show_label=False, placeholder="Give AI a name.").style(container=False)
+                with gr.Row():
+                    user_input = gr.Textbox(lines=5, show_label=False, placeholder="Describe your AI's role.").style(
                         container=False)
+                with gr.Box():
+                    with gr.Row() as goal_list:
+                        goal_array = []
+                        for text in range(4):
+                            goal_array.append(gr.Textbox(show_label=False, placeholder="Enter a goal.").style(container=False))
+                with gr.Row():
+                    submit_add = gr.Button("Add goals", variant="secondary")
+                with gr.Row():
+                    __l = [str(i) for i in range(10, 101, 10)]
+                    __l.insert(0, '1')
+                    submit_numer = gr.Dropdown(__l, value='1', interactive=True, label='Number of Next').style(
+                        container=False)
+                with gr.Row():
+                    submit_next = gr.Button("Next", variant="primary")
+                    submit_auto = gr.Button("Continuous", variant="secondary")
+                    submit_stop = gr.Button("Stop", variant="stop")
 
-    gr.Markdown(description)
-
-
-    # 整理反复出现的控件句柄组合
+    # 整理反复出现的控件句柄组合, # submitBtn.info
-    input_combo = [cookies, max_length_sl, md_dropdown, txt, top_p, temperature, chatbot, history, system_prompt, models_box]
+    input_combo = [cookies, max_length_sl, md_dropdown, txt, top_p, temperature, chatbot, history, system_prompt,
+                   models_box]
     output_combo = [cookies, chatbot, history, status]
     predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
     # 提交按钮、重置按钮
@@ -151,6 +182,7 @@ with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=
         cancel_handles.append(click_handle)
     # 文件上传区,接收文件后与chatbot的互动
    file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
+    upload_history.click(get_user_upload, [chatbot], outputs=[chatbot])
     # 函数插件-固定按钮区
     for k in crazy_fns:
         if not crazy_fns[k].get("AsButton", True): continue
@@ -164,8 +196,6 @@ with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=
     def on_dropdown_changed(k):
         variant = crazy_fns[k]["Color"] if "Color" in crazy_fns[k] else "secondary"
         return {switchy_bt: gr.update(value=k, variant=variant)}
-
-
 
     dropdown.select(on_dropdown_changed, [dropdown], [switchy_bt])
 
 
@@ -198,7 +228,7 @@ def auto_opentab_delay():
     threading.Thread(target=open, name="open-browser", daemon=True).start()
     threading.Thread(target=auto_update, name="self-upgrade", daemon=True).start()
-    #threading.Thread(target=warm_up_modules, name="warm-up", daemon=True).start()
+    # threading.Thread(target=warm_up_modules, name="warm-up", daemon=True).start()
 
 
 auto_opentab_delay()
diff --git a/auto_functional.py b/auto_functional.py
new file mode 100644
index 0000000..331aff9
--- /dev/null
+++ b/auto_functional.py
@@ -0,0 +1,6 @@
+#!
.\venv\
+# encoding: utf-8
+# @Time : 2023/4/20
+# @Author : Spike
+# @Descr :
+
diff --git a/crazy_functional.py b/crazy_functional.py
index d89679b..854bacc 100644
--- a/crazy_functional.py
+++ b/crazy_functional.py
@@ -19,9 +19,9 @@ def get_crazy_functions():
     from crazy_functions.解析项目源代码 import 解析一个Lua项目
     from crazy_functions.解析项目源代码 import 解析一个CSharp项目
     from crazy_functions.总结word文档 import 总结word文档
-    from crazy_functions.三千问 import 猜你想问
+    from crazy_functions.辅助回答 import 猜你想问
     function_plugins = {
-        "三千问:猜你想问": {
+        "猜你想问": {
             "Function": HotReload(猜你想问)
         },
         "解析整个Python项目": {
diff --git a/crazy_functions/理解PDF文档内容.py b/crazy_functions/理解PDF文档内容.py
index 5050864..ed0359b 100644
--- a/crazy_functions/理解PDF文档内容.py
+++ b/crazy_functions/理解PDF文档内容.py
@@ -51,9 +51,10 @@ def 解析PDF(file_name, llm_kwargs, plugin_kwargs, chatbot, history, system_pro
         )
         iteration_results.append(gpt_say)
         last_iteration_result = gpt_say
-
     ############################## <第 3 步,整理history> ##################################
     final_results.extend(iteration_results)
+    # 将摘要添加到历史中,方便"猜你想问"使用
+    history.extend([last_iteration_result])
     final_results.append(f'接下来,你是一名专业的学术教授,利用以上信息,使用中文回答我的问题。')
     # 接下来两句话只显示在界面上,不起实际作用
     i_say_show_user = f'接下来,你是一名专业的学术教授,利用以上信息,使用中文回答我的问题。'; gpt_say = "[Local Message] 收到。"
diff --git a/crazy_functions/三千问.py b/crazy_functions/辅助回答.py
similarity index 100%
rename from crazy_functions/三千问.py
rename to crazy_functions/辅助回答.py
diff --git a/test.py b/test.py
index 67576b4..29ad5f5 100644
--- a/test.py
+++ b/test.py
@@ -20,6 +20,32 @@ with gr.Blocks() as demo:
     btn = gr.Button(value="Submit")
     btn.click(sentence_builder, inputs=[txt, txt_2], outputs=[txt_3])
 
-if __name__ == "__main__":
-    demo.launch()
 
+class ChatGPTForTester:
+
+    def __init__(self):
+        self.demo = gr.Blocks()
+
+    def book(self):
+        with self.demo:
+            txt = gr.Textbox(label="Input", lines=2)
+            txt_2 = gr.CheckboxGroup(['USA', "Japan"], value=['USA'], label='你好呀')
+            txt_3 = gr.Textbox(value="", label="Output")
+            btn = gr.Button(value="Submit")
+            btn.click(sentence_builder, inputs=[txt, txt_2], outputs=[txt_3])
+
+    def book2(self):
+        with self.demo:
+            txt = gr.Textbox(label="Input", lines=2)
+            txt_2 = gr.CheckboxGroup(['USA', "Japan"], value=['USA'], label='我好呀')
+            txt_3 = gr.Textbox(value="", label="Output")
+            btn = gr.Button(value="Submit")
+            btn.click(sentence_builder, inputs=[txt, txt_2], outputs=[txt_3])
+
+    def main(self):
+        self.book2()
+        self.book()
+        self.demo.launch()
+
+if __name__ == "__main__":
+    ChatGPTForTester().main()
diff --git a/toolbox.py b/toolbox.py
index 1fda556..e7ecf75 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -8,6 +8,10 @@ import func_box
 from latex2mathml.converter import convert as tex2mathml
 from functools import wraps, lru_cache
 import logging
+import shutil
+import os
+import time
+import glob
 ############################### 插件输入输出接驳区 #######################################
 class ChatBotWithCookies(list):
     def __init__(self, cookie):
@@ -378,29 +382,32 @@ def find_recent_files(directory):
     return recent_files
 
 
-def on_file_uploaded(files, chatbot, txt):
+def get_user_upload(chatbot, ipaddr: gr.Request):
+    private_upload = './private_upload'
+    user_history = os.path.join(private_upload, ipaddr.client.host)
+    history = ''
+    for root, d, file in os.walk(user_history):
+        history += f'目录:{root} 文件: {file}\n'
+    chatbot.append(['我检查了之前上传的文件: ',
+                    '[Local Message] 请自行复制以下目录or目录/文件, 供以高亮插件使用\n'
+                    f'{history}'])
+    return chatbot
+
+def on_file_uploaded(files, chatbot, txt, ipaddr: gr.Request):
     if len(files) == 0:
         return chatbot, txt
-    
import shutil - import os - import time - import glob - from toolbox import extract_archive - try: - shutil.rmtree('./private_upload/') - except: - pass - time_tag = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) - os.makedirs(f'private_upload/{time_tag}', exist_ok=True) + private_upload = './private_upload' + # shutil.rmtree('./private_upload/') 不需要删除文件 + time_tag_path = os.path.join(private_upload, ipaddr.client.host, time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())) + os.makedirs(f'{time_tag_path}', exist_ok=True) err_msg = '' for file in files: file_origin_name = os.path.basename(file.orig_name) - shutil.copy(file.name, f'private_upload/{time_tag}/{file_origin_name}') - err_msg += extract_archive(f'private_upload/{time_tag}/{file_origin_name}', - dest_dir=f'private_upload/{time_tag}/{file_origin_name}.extract') - moved_files = [fp for fp in glob.glob( - 'private_upload/**/*', recursive=True)] - txt = f'private_upload/{time_tag}' + shutil.copy(file.name, f'{time_tag_path}/{file_origin_name}') + err_msg += extract_archive(f'{time_tag_path}/{file_origin_name}', + dest_dir=f'{time_tag_path}/{file_origin_name}.extract') + moved_files = [fp for fp in glob.glob(f'{time_tag_path}/**/*', recursive=True)] + txt = f'{time_tag_path}' moved_files_str = '\t\n\n'.join(moved_files) chatbot.append(['我上传了文件,请查收', f'[Local Message] 收到以下文件: \n\n{moved_files_str}' + @@ -510,3 +517,9 @@ class DummyWith(): def __exit__(self, exc_type, exc_value, traceback): return + + +if __name__ == '__main__': + private_upload = './private_upload' + for r, d, f in os.walk(private_upload): + print(r, f)
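
The patch above keys uploads to the requesting client: on_file_uploaded() now copies each upload into private_upload/<client-ip>/<timestamp>/ instead of wiping ./private_upload, and get_user_upload() rebuilds a history listing by walking that per-user subtree. Below is a minimal standard-library sketch of the same layout, kept outside the patch for reference; the demo directory name, sample IP address and file name are illustrative only and not taken from the project.

import os
import shutil
import time


def user_upload_dir(base, client_host):
    """Create and return a fresh per-user, per-upload directory."""
    time_tag = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    path = os.path.join(base, client_host, time_tag)
    os.makedirs(path, exist_ok=True)  # earlier uploads are kept, never deleted
    return path


def list_user_history(base, client_host):
    """Summarise everything a given client has uploaded so far."""
    history = ''
    for root, dirs, files in os.walk(os.path.join(base, client_host)):
        history += f'directory: {root}  files: {files}\n'
    return history


if __name__ == '__main__':
    base = './private_upload_demo'                 # separate tree, so the demo has no side effects
    target = user_upload_dir(base, '127.0.0.1')    # hypothetical client IP
    with open(os.path.join(target, 'example.txt'), 'w') as f:
        f.write('demo')                            # stand-in for an uploaded file
    print(list_user_history(base, '127.0.0.1'))
    shutil.rmtree(base)                            # clean up the demo tree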