Compare commits


3 Commits

SHA1        Message                                                              Date
ec1cfaadba  pip                                                                  2023-07-28 12:28:04 +08:00
2747c23868  Merge branch 'master' of github.com:binary-husky/chatgpt_academic    2023-07-28 10:35:50 +08:00
4216c5196e  verify ignore history practice                                       2023-07-27 22:30:55 +08:00
14 changed files with 91 additions and 65 deletions

View File

@@ -1,7 +1,7 @@
# The 'primary' color corresponds to primary_hue in theme.py
# The 'secondary' color corresponds to neutral_hue in theme.py
# The 'stop' color corresponds to color_er in theme.py
# The default button color is secondary
import importlib
from toolbox import clear_line_break
@@ -14,7 +14,12 @@ def get_core_functions():
                  r"Furthermore, list all modification and explain the reasons to do so in markdown table." + "\n\n",
        # Suffix text appended after the input
        "Suffix": r"",
        "Color": r"secondary",  # Button color
        # Button color (default: secondary)
        "Color": r"secondary",
        # Whether the button is visible (default: True, i.e. visible)
        "Visible": True,
        # Whether to clear the conversation history when triggered (default: False, i.e. keep the previous dialogue history)
        "AutoClearHistory": True
    },
    "中文学术润色": {
        "Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
@@ -76,3 +81,13 @@ def get_core_functions():
        "Suffix": r"",
    }
}


def handle_core_functionality(additional_fn, inputs, history):
    import core_functional
    importlib.reload(core_functional)    # Hot-reload the prompt definitions
    core_functional = core_functional.get_core_functions()
    if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
    inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
    history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history
    return inputs, history
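
For context, the new helper now owns the prefix/suffix wrapping, the optional PreProcess hook, and the AutoClearHistory reset that each bridge previously duplicated inline. A minimal sketch of calling it follows; it assumes the repository is importable and that the button key used here ("英语学术润色") is the entry shown above that sets AutoClearHistory to True, since the key itself sits outside the visible hunk.

# Hypothetical sketch: requires the repository on sys.path so that
# core_functional (and the toolbox module it imports) can be loaded.
from core_functional import handle_core_functionality

BUTTON = "英语学术润色"   # assumed key of the entry with AutoClearHistory=True above
raw_text = "This paper study a important problem."   # made-up user input
history = ["earlier question", "earlier answer"]      # made-up prior dialogue

inputs, history = handle_core_functionality(BUTTON, raw_text, history)

print(history)   # []  -> cleared, because the entry's AutoClearHistory is truthy
print(inputs)    # raw_text wrapped with the entry's Prefix and Suffix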

View File

@@ -144,11 +144,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []
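
The same three-line replacement is applied in every bridge below. For reference, here is a hedged skeleton of how a hypothetical new bridge might integrate the helper; only the additional_fn block reflects this commit, while the signature tail (system_prompt, stream) and the history packing are assumptions based on the surrounding context lines.

# Hypothetical new-bridge skeleton; not code from this commit.
def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[],
            system_prompt='', stream=True, additional_fn=None):
    if additional_fn is not None:
        # The refactored pattern introduced by this commit:
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)

    # Assumed packing of the (possibly cleared) history into question/answer pairs.
    history_feedin = []
    for i in range(len(history) // 2):
        history_feedin.append([history[2 * i], history[2 * i + 1]])
    # ... send `inputs` and `history_feedin` to the backend and stream the reply into `chatbot` ...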

View File

@@ -185,11 +185,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -129,11 +129,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    raw_input = inputs
    logging.info(f'[raw_input] {raw_input}')

View File

@@ -116,11 +116,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    raw_input = inputs
    logging.info(f'[raw_input] {raw_input}')

View File

@@ -290,11 +290,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        yield from update_ui(chatbot=chatbot, history=history)
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    # Process the history
    history_feedin = []

View File

@@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    history_feedin = []
    for i in range(len(history)//2):

View File

@@ -248,14 +248,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
        return
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]:
            inputs = core_functional[additional_fn]["PreProcess"](
                inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + \
            inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    history_feedin = []
    for i in range(len(history)//2):

View File

@@ -96,11 +96,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
    additional_fn indicates which button was clicked (the buttons are defined in functional.py)
    """
    if additional_fn is not None:
        import core_functional
        importlib.reload(core_functional)    # Hot-reload the prompt definitions
        core_functional = core_functional.get_core_functions()
        if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # Apply the pre-processing function, if one is defined
        inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history)
    raw_input = "What I would like to say is the following: " + inputs
    history.extend([inputs, ""])

setup.py (new file, 50 lines)
View File

@@ -0,0 +1,50 @@
import setuptools, glob, os, fnmatch

with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

def _process_requirements():
    packages = open('requirements.txt').read().strip().split('\n')
    requires = []
    for pkg in packages:
        if pkg.startswith('git+ssh'):
            return_code = os.system('pip install {}'.format(pkg))
            assert return_code == 0, 'error, status_code is: {}, exit!'.format(return_code)
        if pkg.startswith('./docs'):
            continue
        else:
            requires.append(pkg)
    return requires

def package_files(directory):
    import subprocess
    list_of_files = subprocess.check_output("git ls-files", shell=True).splitlines()
    return [str(k) for k in list_of_files]

extra_files = package_files('./')

setuptools.setup(
    name="void-terminal",
    version="0.0.0",
    author="Qingxu",
    author_email="505030475@qq.com",
    description="LLM based APIs",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/binary-husky/gpt-academic",
    project_urls={
        "Bug Tracker": "https://github.com/binary-husky/gpt-academic/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    package_dir={"": "."},
    package_data={"": extra_files},
    include_package_data=True,
    packages=setuptools.find_packages(where="."),
    python_requires=">=3.9",
    install_requires=_process_requirements(),
)
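
To make the dependency handling concrete, here is a small self-contained sketch of the same filtering rule applied to a made-up requirements list (the package names are illustrative, and the git+ssh branch that shells out to pip is omitted):

# Illustrative only: mirrors the ./docs filter in _process_requirements above.
sample_requirements = [
    "some_pypi_package>=1.0",   # hypothetical: kept as an ordinary requirement
    "./docs/some_local_pkg",    # hypothetical: skipped because it starts with ./docs
    "another_package",          # hypothetical: kept
]

requires = [pkg for pkg in sample_requirements if not pkg.startswith('./docs')]
print(requires)   # ['some_pypi_package>=1.0', 'another_package']

With this setup.py in place, the project can presumably be installed from a checkout with pip install . (or pip install -e . for development); that is standard setuptools behaviour rather than something stated in the diff. Note that package_files shells out to git ls-files, so building outside a git checkout would fail.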