From a1092d8f924420d243e36dca22ccfd3037df821a Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Sat, 1 Jul 2023 00:17:26 +0800
Subject: [PATCH 1/5] Provide an option to automatically clear the input box
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config.py | 3 +++
 main.py   | 9 +++++++--
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/config.py b/config.py
index 58e0e09..f187a0c 100644
--- a/config.py
+++ b/config.py
@@ -56,6 +56,9 @@ LOCAL_MODEL_DEVICE = "cpu" # 可选 "cuda"
 # 设置gradio的并行线程数(不需要修改)
 CONCURRENT_COUNT = 100
 
+# 是否在提交时自动清空输入框
+AUTO_CLEAR_TXT = False
+
 # 加一个live2d装饰
 ADD_WAIFU = False
 
diff --git a/main.py b/main.py
index 65e1f4c..f1b7f45 100644
--- a/main.py
+++ b/main.py
@@ -6,8 +6,8 @@ def main():
     from request_llm.bridge_all import predict
     from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, DummyWith
     # 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到
-    proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY, AVAIL_LLM_MODELS = \
-        get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT', 'API_KEY', 'AVAIL_LLM_MODELS')
+    proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY, AVAIL_LLM_MODELS, AUTO_CLEAR_TXT = \
+        get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT', 'API_KEY', 'AVAIL_LLM_MODELS', 'AUTO_CLEAR_TXT')
     # 如果WEB_PORT是-1, 则随机选取WEB端口
     PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
@@ -144,6 +144,11 @@ def main():
         resetBtn2.click(lambda: ([], [], "已重置"), None, [chatbot, history, status])
         clearBtn.click(lambda: ("",""), None, [txt, txt2])
         clearBtn2.click(lambda: ("",""), None, [txt, txt2])
+        if AUTO_CLEAR_TXT:
+            submitBtn.click(lambda: ("",""), None, [txt, txt2])
+            submitBtn2.click(lambda: ("",""), None, [txt, txt2])
+            txt.submit(lambda: ("",""), None, [txt, txt2])
+            txt2.submit(lambda: ("",""), None, [txt, txt2])
         # 基础功能区的回调函数注册
         for k in functional:
             if ("Visible" in functional[k]) and (not functional[k]["Visible"]): continue

From d7ac99f603bef9eeae26b22df4891d562cd829d4 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Sat, 1 Jul 2023 01:46:43 +0800
Subject: [PATCH 2/5] Correct the error message
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 crazy_functions/crazy_functions_test.py | 2 +-
 crazy_functions/latex_utils.py          | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/crazy_functions/crazy_functions_test.py b/crazy_functions/crazy_functions_test.py
index 60b6b87..a10f3c2 100644
--- a/crazy_functions/crazy_functions_test.py
+++ b/crazy_functions/crazy_functions_test.py
@@ -195,7 +195,7 @@ def test_Latex():
     # txt = r"https://arxiv.org/abs/2303.08774"
     # txt = r"https://arxiv.org/abs/2303.12712"
     # txt = r"C:\Users\fuqingxu\arxiv_cache\2303.12712\workfolder"
-    txt = r"C:\Users\fuqingxu\Desktop\9"
+    txt = r"2306.17157" # 这个paper有个input命令文件名大小写错误!
     for cookies, cb, hist, msg in (Latex翻译中文并重新编译PDF)(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
diff --git a/crazy_functions/latex_utils.py b/crazy_functions/latex_utils.py
index eb65a8a..be5a367 100644
--- a/crazy_functions/latex_utils.py
+++ b/crazy_functions/latex_utils.py
@@ -203,6 +203,7 @@ def merge_tex_files_(project_foler, main_file, mode):
                 c = fx.read()
         else:
             # e.g., \input{srcs/07_appendix}
+            assert os.path.exists(fp+'.tex'), f'既找不到{fp},也找不到{fp}.tex,Tex源文件缺失!'
             with open(fp+'.tex', 'r', encoding='utf-8', errors='replace') as fx:
                 c = fx.read()
         c = merge_tex_files_(project_foler, c, mode)

From 41c10f5688fe5e9993ec16723f1191b07798b9c0 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Sat, 1 Jul 2023 02:28:32 +0800
Subject: [PATCH 3/5] report image generation error in UI

---
 crazy_functions/图片生成.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/crazy_functions/图片生成.py b/crazy_functions/图片生成.py
index 5bf8bc4..1bf53f4 100644
--- a/crazy_functions/图片生成.py
+++ b/crazy_functions/图片生成.py
@@ -27,8 +27,10 @@ def gen_image(llm_kwargs, prompt, resolution="256x256"):
     }
     response = requests.post(url, headers=headers, json=data, proxies=proxies)
     print(response.content)
-    image_url = json.loads(response.content.decode('utf8'))['data'][0]['url']
-
+    try:
+        image_url = json.loads(response.content.decode('utf8'))['data'][0]['url']
+    except:
+        raise RuntimeError(response.content.decode())
     # 文件保存到本地
     r = requests.get(image_url, proxies=proxies)
     file_path = 'gpt_log/image_gen/'

From 5f7ffef2385786f91a741dc41de223492fe7a66a Mon Sep 17 00:00:00 2001
From: w_xiaolizu
Date: Fri, 21 Apr 2023 17:09:49 +0800
Subject: [PATCH 4/5] Add an empty-input check to the basic function
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 crazy_functions/辅助回答.py | 28 ++++++++++++++++++++++++++++
 1 file changed, 28 insertions(+)
 create mode 100644 crazy_functions/辅助回答.py

diff --git a/crazy_functions/辅助回答.py b/crazy_functions/辅助回答.py
new file mode 100644
index 0000000..b635f88
--- /dev/null
+++ b/crazy_functions/辅助回答.py
@@ -0,0 +1,28 @@
+# encoding: utf-8
+# @Time : 2023/4/19
+# @Author : Spike
+# @Descr :
+from toolbox import update_ui
+from toolbox import CatchException, report_execption, write_results_to_file
+from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive
+
+
+@CatchException
+def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
+    if txt:
+        show_say = txt
+        prompt = txt+'\n回答完问题后,再列出用户可能提出的三个问题。'
+    else:
+        prompt = history[-1]+"\n分析上述回答,再列出用户可能提出的三个问题。"
+        show_say = '分析上述回答,再列出用户可能提出的三个问题。'
+    gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
+        inputs=prompt,
+        inputs_show_user=show_say,
+        llm_kwargs=llm_kwargs,
+        chatbot=chatbot,
+        history=history,
+        sys_prompt=system_prompt
+    )
+    chatbot[-1] = (show_say, gpt_say)
+    history.extend([show_say, gpt_say])
+    yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
\ No newline at end of file

From 380952679e89c9b086197d10b98db2279caab9cc Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Sat, 1 Jul 2023 22:11:39 +0800
Subject: [PATCH 5/5] initialize UI improvement

---
 config.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/config.py b/config.py
index f187a0c..85bc810 100644
--- a/config.py
+++ b/config.py
@@ -29,6 +29,9 @@ DEFAULT_WORKER_NUM = 3
 # 对话窗的高度
 CHATBOT_HEIGHT = 1115
 
+# 主题
+THEME = "Default"
+
 # 代码高亮
 CODE_HIGHLIGHT = True
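
A usage note for anyone trying this series: the two new options can be switched on without editing config.py, via the config_private.py override that the comment in main.py already recommends. The snippet below is a minimal illustrative sketch only, not part of the patches; it assumes get_conf() prefers values defined in config_private.py, and only the names AUTO_CLEAR_TXT and THEME are taken from the diffs above.

# config_private.py -- minimal sketch, not part of the patch series
# Assumption: get_conf() reads overrides from this file before falling back to config.py.

AUTO_CLEAR_TXT = True    # clear the input boxes automatically after each submit (PATCH 1/5)
THEME = "Default"        # theme placeholder added in PATCH 5/5; "Default" is the only value the patch defines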