Merge branch 'wps_i18n' of https://github.com/Kilig947/gpt_academic into Kilig947-wps_i18n

binary-husky
2023-05-28 19:07:41 +08:00
102 changed files with 9322 additions and 88 deletions


@ -28,17 +28,16 @@ def gen_image(llm_kwargs, prompt, resolution="256x256"):
    response = requests.post(url, headers=headers, json=data, proxies=proxies)
    print(response.content)
    image_url = json.loads(response.content.decode('utf8'))['data'][0]['url']

    # Save the generated image to a local file
    r = requests.get(image_url, proxies=proxies)
    file_path = 'gpt_log/image_gen/'
    os.makedirs(file_path, exist_ok=True)
    file_name = 'Image' + time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + '.png'
-   with open(file_path+file_name, 'wb+') as f: f.write(r.content)
-   return image_url, file_path+file_name
+   with open(file_path + file_name, 'wb+') as f:
+       f.write(r.content)
+   return image_url, file_path + file_name

@CatchException
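The hunk above only reformats how the generated image is written to disk; the helper's interface is unchanged. As a rough sketch of how a caller might consume it, assuming `llm_kwargs` is the usual gpt_academic configuration dict (its contents are not shown in this diff) and relying on the signature and return value visible in the hunk:

# Illustrative sketch, not part of the commit: exercise gen_image() per the
# signature in the hunk header. llm_kwargs must already carry a valid API key
# and proxy configuration (assumption; not shown here).
image_url, local_path = gen_image(llm_kwargs, prompt="a watercolor fox", resolution="256x256")
print("remote URL:", image_url)    # URL returned by the image API
print("saved copy:", local_path)   # e.g. gpt_log/image_gen/Image2023-05-28-19-07-41.png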


@ -0,0 +1,30 @@
#! .\venv\
# encoding: utf-8
# @Time : 2023/5/23
# @Author : Spike
# @Descr :
import json
from toolbox import CatchException, update_ui
from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, input_clipping
import func_box


class ParseNoteBook:
    def __init__(self, file):
        self.file = file

    def load_dict(self):
        with open(self.file, 'r', encoding='utf-8', errors='replace') as f:
            return json.load(f)


@CatchException
def 翻译理解jupyter(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
    pass


if __name__ == '__main__':
    obj = ParseNoteBook('/Users/kilig/Desktop/jupy/NotarizedUpload.ipynb').load_dict()
    print(obj['cells'])
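`load_dict` simply deserializes the .ipynb file, so the object it returns follows the standard Jupyter notebook JSON layout: a top-level `cells` list whose entries carry `cell_type` and `source`. A small sketch of how those cells could be walked, using a placeholder path instead of the author's local test file above:

# Illustrative sketch, not part of the commit: iterate the cells returned by
# ParseNoteBook.load_dict(). Relies only on the standard .ipynb JSON schema:
# {"cells": [{"cell_type": ..., "source": [...]}, ...]}.
notebook = ParseNoteBook('example.ipynb').load_dict()   # 'example.ipynb' is a placeholder path
for cell in notebook['cells']:
    text = ''.join(cell['source'])                      # source is stored as a list of lines
    if cell['cell_type'] == 'code':
        print('[code cell]\n', text)
    else:
        print('[markdown cell]\n', text)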


@ -51,9 +51,10 @@ def 解析PDF(file_name, llm_kwargs, plugin_kwargs, chatbot, history, system_pro
        )
        iteration_results.append(gpt_say)
        last_iteration_result = gpt_say

    ############################## <Step 3: organize the history> ##################################
    final_results.extend(iteration_results)
    # Append the summary to the history so the "猜你想问" (follow-up suggestion) plugin can use it
    history.extend([last_iteration_result])
    final_results.append(f'接下来,你是一名专业的学术教授,利用以上信息,使用中文回答我的问题。')
    # The next two statements are only shown in the UI and have no real effect
    i_say_show_user = f'接下来,你是一名专业的学术教授,利用以上信息,使用中文回答我的问题。'; gpt_say = "[Local Message] 收到。"
@ -110,3 +111,4 @@ def 理解PDF文档内容标准文件输入(txt, llm_kwargs, plugin_kwargs, chat
    txt = file_manifest[0]
    # Start executing the task
    yield from 解析PDF(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt)
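The new `history.extend([last_iteration_result])` line is what lets the 猜你想问 plugin (added as a new file later in this commit) pick up the PDF summary: that plugin reads `history[-1]` when the user sends no new text. A rough sketch of the hand-off, combining the logic from both files in this diff:

# Sketch only, not part of the commit. last_iteration_result stands in for the
# GPT summary produced by the loop above.
last_iteration_result = "…summary of the final PDF section…"
history = []
history.extend([last_iteration_result])          # what 解析PDF now does

# 猜你想问 then builds its prompt from the most recent history entry when no
# new question is typed (see the new file further down in this commit):
show_say = '分析上述回答,再列出用户可能提出的三个问题。'
prompt = history[-1] + f"\n{show_say}"
print(prompt)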


@ -144,3 +144,13 @@ def 解析ipynb文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p
        yield from update_ui(chatbot=chatbot, history=history)  # Refresh the UI
        return
    yield from ipynb解释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, )


if __name__ == '__main__':
    import json
    filename = ''
    code = parseNotebook(filename)
    print(code)

    with open(filename, 'r', encoding='utf-8', errors='replace') as f:
        notebook = f.read()
    print(notebook)
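The `parseNotebook` helper called by this test block lives elsewhere in the same file and is not shown in the hunk above. A minimal stand-in that would satisfy the block (collecting code cells out of the notebook JSON) might look like the following; this is an assumption for illustration, not the repository's implementation:

import json

def parse_notebook_sketch(filename):
    # Hypothetical stand-in for parseNotebook(); the real helper is defined
    # earlier in 解析JupyterNotebook.py and may differ from this sketch.
    with open(filename, 'r', encoding='utf-8', errors='replace') as f:
        notebook = json.load(f)
    code_cells = [''.join(cell['source'])
                  for cell in notebook['cells']
                  if cell['cell_type'] == 'code']
    return '\n\n'.join(code_cells)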


@ -0,0 +1,31 @@
#! .\venv\
# encoding: utf-8
# @Time : 2023/4/19
# @Author : Spike
# @Descr :
from toolbox import update_ui
from toolbox import CatchException, report_execption, write_results_to_file
from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive


@CatchException
def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
    if txt:
        show_say = txt
        prompt = txt+'\n回答完问题后,再列出用户可能提出的三个问题。'
    else:
        show_say = '分析上述回答,再列出用户可能提出的三个问题。'
        try:
            prompt = history[-1]+f"\n{show_say}"
        except IndexError:
            prompt = system_prompt+"\n再列出用户可能提出的三个问题。"
    gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
        inputs=prompt,
        inputs_show_user=show_say,
        llm_kwargs=llm_kwargs,
        chatbot=chatbot,
        history=history,
        sys_prompt=system_prompt
    )
    chatbot.append([show_say, gpt_say])
    history.extend([show_say, gpt_say])
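To summarize the control flow of the new plugin: when the user types something, that text becomes both the displayed message and the prompt (with a request for three follow-up questions appended); when the input is empty, the last history entry is analyzed instead, and if there is no history at all the system prompt is used as a fallback. A condensed, self-contained restatement of that branch logic (illustrative only; the committed code is above, and the indentation of the try/except inside the else branch is inferred from the page):

# Condensed restatement of the prompt-selection logic above, for clarity.
def build_followup_prompt(txt, history, system_prompt):
    if txt:                                    # user asked a concrete question
        show_say = txt
        prompt = txt + '\n回答完问题后,再列出用户可能提出的三个问题。'
    else:                                      # no input: analyze the last answer
        show_say = '分析上述回答,再列出用户可能提出的三个问题。'
        try:
            prompt = history[-1] + f"\n{show_say}"
        except IndexError:                     # empty history: fall back to the system prompt
            prompt = system_prompt + "\n再列出用户可能提出的三个问题。"
    return prompt, show_say

# Example: no user input, one prior answer already in the history
p, s = build_followup_prompt('', ['前一次的回答内容'], 'You are a helpful assistant.')
print(p)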