diff --git a/__main__.py b/__main__.py
index ec93935..8adc54c 100644
--- a/__main__.py
+++ b/__main__.py
@@ -395,8 +395,8 @@ def check_proxy_free():
         time.sleep(5)
 
 if __name__ == '__main__':
-    # PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
-    PORT = 7891 if WEB_PORT <= 0 else WEB_PORT
+    PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
+    # PORT = 7891 if WEB_PORT <= 0 else WEB_PORT
     check_proxy_free()
     ChatBot().main()
     gr.close_all()
diff --git a/crazy_functional.py b/crazy_functional.py
index d80a58f..3ee5202 100644
--- a/crazy_functional.py
+++ b/crazy_functional.py
@@ -118,11 +118,11 @@ def get_crazy_functions():
             "Function": HotReload(解析项目本身),
             "AsButton": False,  # add to the dropdown menu
         },
-        "[老旧的Demo] 把本项目源代码切换成全英文": {
-            # HotReload means hot reload: after editing a plugin's code, changes take effect without restarting the program
-            "AsButton": False,  # add to the dropdown menu
-            "Function": HotReload(全项目切换英文)
-        },
+        # "[老旧的Demo] 把本项目源代码切换成全英文": {
+        #     # HotReload means hot reload: after editing a plugin's code, changes take effect without restarting the program
+        #     "AsButton": False,  # add to the dropdown menu
+        #     "Function": HotReload(全项目切换英文)
+        # },
         "[插件demo] 历史上的今天": {
             # HotReload means hot reload: after editing a plugin's code, changes take effect without restarting the program
             "Function": HotReload(高阶功能模板函数)
diff --git a/crazy_functions/理解Jupyter.py b/crazy_functions/理解Jupyter.py
new file mode 100644
index 0000000..5158465
--- /dev/null
+++ b/crazy_functions/理解Jupyter.py
@@ -0,0 +1,30 @@
+#! .\venv\
+# encoding: utf-8
+# @Time : 2023/5/23
+# @Author : Spike
+# @Descr :
+import json
+from toolbox import CatchException, update_ui
+from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, input_clipping
+import func_box
+
+
+class ParseNoteBook:
+
+    def __init__(self, file):
+        self.file = file
+
+    def load_dict(self):
+        with open(self.file, 'r', encoding='utf-8', errors='replace') as f:
+            return json.load(f)
+
+
+@CatchException
+def 翻译理解jupyter(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
+    pass
+
+
+if __name__ == '__main__':
+    obj = ParseNoteBook('/Users/kilig/Desktop/jupy/NotarizedUpload.ipynb').load_dict()
+    print(obj['cells'])
+
diff --git a/crazy_functions/解析JupyterNotebook.py b/crazy_functions/解析JupyterNotebook.py
index b4bcd56..6fca0fc 100644
--- a/crazy_functions/解析JupyterNotebook.py
+++ b/crazy_functions/解析JupyterNotebook.py
@@ -144,3 +144,13 @@ def 解析ipynb文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p
         yield from update_ui(chatbot=chatbot, history=history)  # refresh the UI
         return
     yield from ipynb解释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, )
+
+
+if __name__ == '__main__':
+    import json
+    filename = ''
+    code = parseNotebook(filename)
+    print(code)
+    with open(filename, 'r', encoding='utf-8', errors='replace') as f:
+        notebook = f.read()
+    print(notebook)
\ No newline at end of file
diff --git a/func_box.py b/func_box.py
index 02fb412..661d4c9 100644
--- a/func_box.py
+++ b/func_box.py
@@ -4,6 +4,7 @@
 # @Author : Spike
 # @Descr :
 import hashlib
+import io
 import json
 import os.path
 import subprocess
@@ -480,8 +481,8 @@ def thread_write_chat(chatbot):
     """
     private_key = toolbox.get_conf('private_key')[0]
     chat_title = chatbot[0][0].split()
-    i_say = chatbot[-1][0].strip("<p></p>")
-    gpt_result = chatbot[-1][1].strip('<div></div>')
+    i_say = chatbot[-1][0].strip("<p></p>").strip('<div></div>')
+    gpt_result = chatbot[-1][1].strip("<p></p>").strip('<div></div>')
     if private_key in chat_title:
         SqliteHandle(f'ai_private_{chat_title[-2]}').inset_prompt({i_say: gpt_result})
     else:
@@ -526,17 +527,20 @@ class YamlHandle:
 
 
 class JsonHandle:
 
-    def __init__(self, file=os.path.join(prompt_path, 'prompts-PlexPt.json')):
-        if not os.path.exists(file):
-            Shell(f'touch {file}').read()
-        self.file = file
+    def __init__(self, file):
+        if os.path.exists(file):
+            with open(file=file, mode='r') as self.file_obj:
+                pass
+        else:
+            self.file_obj = io.StringIO()  # create an empty in-memory text object
+            self.file_obj.write('{}')  # write valid JSON data into the text object
+            self.file_obj.seek(0)  # reset the text object's cursor to the start
 
     def load(self):
-        with open(file=self.file, mode='r') as f:
-            data = json.load(f)
-            return data
+        data = json.load(self.file_obj)
+        return data
 
 
 if __name__ == '__main__':
-    pass
\ No newline at end of file
+    print(JsonHandle('/Users/kilig/Job/Python-project/academic_gpt/test.json').load())
diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index 49365ed..fdcb55f 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -282,7 +282,7 @@ def generate_payload(inputs, llm_kwargs, history, system_prompt, stream):
 
 if __name__ == '__main__':
     llm_kwargs = {
-        'api_key': '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',
+        'api_key': 'sk-',
         'llm_model': 'gpt-3.5-turbo',
         'top_p': 1,
         'max_length': 512,
diff --git a/theme.py b/theme.py
index 5ef7e96..8070cfa 100644
--- a/theme.py
+++ b/theme.py
@@ -33,7 +33,7 @@ def adjust_theme():
         set_theme = gr.themes.Default(
             primary_hue=gr.themes.utils.colors.orange,
             neutral_hue=gr.themes.utils.colors.gray,
-            font=["sans-serif", "Microsoft YaHei", "ui-sans-serif", "system-ui",
+            font=["sans-serif", "PingFang SC", "ui-sans-serif", "system-ui",
                   "sans-serif", gr.themes.utils.fonts.GoogleFont("Source Sans Pro")],
             font_mono=["ui-monospace", "Consolas", "monospace", gr.themes.utils.fonts.GoogleFont("IBM Plex Mono")])
         set_theme.set(
@@ -137,11 +137,15 @@ advanced_css = """
 }
 [data-testid = "bot"] {
     max-width: 95%;
+    color: #ccd2db !important;
+    letter-spacing: 0.5px;
+    font-weight: normal;
     /* width: auto !important; */
     border-bottom-left-radius: 0 !important;
 }
 [data-testid = "user"] {
     max-width: 100%;
+    letter-spacing: 0.5px;
     /* width: auto !important; */
     border-bottom-right-radius: 0 !important;
 }
@@ -154,7 +158,7 @@ advanced_css = """
     margin: 0 2px 0 2px;
     padding: .2em .4em .1em .4em;
     background-color: rgba(13, 17, 23, 0.95);
-    color: #c9d1d9;
+    color: #eff0f2;
 }
 
 .dark .markdown-body code {
diff --git a/toolbox.py b/toolbox.py
index 26e1972..6a340b4 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -249,10 +249,10 @@ def text_divide_paragraph(text):
         return text
     else:
         # wtf input
-        lines = text.split("\n")
+        # lines = text.split("\n")
         # for i, line in enumerate(lines):
         #     lines[i] = lines[i].replace(" ", "&nbsp;")
-        text = "</br>".join(lines)
+        # text = "</br>".join(lines)
         return text
 
 @lru_cache(maxsize=128)  # use lru_cache to speed up conversion
@@ -365,11 +365,11 @@ def format_io(self, y):
     if y is None or y == []:
         return []
     i_ask, gpt_reply = y[-1]
-    i_ask = text_divide_paragraph(i_ask)  # the input side is too free-form, preprocess it a bit
+    # i_ask = text_divide_paragraph(i_ask)  # the input side is too free-form, preprocess it a bit
     gpt_reply = close_up_code_segment_during_stream(gpt_reply)  # when a code block is cut off mid-stream, try to close it with a trailing ```
     y[-1] = (
-        None if i_ask is None else markdown.markdown(i_ask, extensions=['fenced_code', 'tables']),
-        #None if i_ask is None else markdown_convertion(i_ask),
+        # None if i_ask is None else markdown.markdown(i_ask, extensions=['fenced_code', 'tables']),
+        None if i_ask is None else markdown_convertion(i_ask),
         None if gpt_reply is None else markdown_convertion(gpt_reply)
     )
     return y