Compare commits
3 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| ec1cfaadba | |||
| 2747c23868 | |||
| 4216c5196e |
@ -1,7 +1,7 @@
|
|||||||
# 'primary' 颜色对应 theme.py 中的 primary_hue
|
# 'primary' 颜色对应 theme.py 中的 primary_hue
|
||||||
# 'secondary' 颜色对应 theme.py 中的 neutral_hue
|
# 'secondary' 颜色对应 theme.py 中的 neutral_hue
|
||||||
# 'stop' 颜色对应 theme.py 中的 color_er
|
# 'stop' 颜色对应 theme.py 中的 color_er
|
||||||
# 默认按钮颜色是 secondary
|
import importlib
|
||||||
from toolbox import clear_line_break
|
from toolbox import clear_line_break
|
||||||
|
|
||||||
|
|
||||||
@ -14,7 +14,12 @@ def get_core_functions():
|
|||||||
r"Furthermore, list all modification and explain the reasons to do so in markdown table." + "\n\n",
|
r"Furthermore, list all modification and explain the reasons to do so in markdown table." + "\n\n",
|
||||||
# 后语
|
# 后语
|
||||||
"Suffix": r"",
|
"Suffix": r"",
|
||||||
"Color": r"secondary", # 按钮颜色
|
# 按钮颜色 (默认 secondary)
|
||||||
|
"Color": r"secondary",
|
||||||
|
# 按钮是否可见 (默认 True,即可见)
|
||||||
|
"Visible": True,
|
||||||
|
# 是否在触发时清除历史 (默认 False,即不处理之前的对话历史)
|
||||||
|
"AutoClearHistory": True
|
||||||
},
|
},
|
||||||
"中文学术润色": {
|
"中文学术润色": {
|
||||||
"Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
|
"Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
|
||||||
@ -76,3 +81,13 @@ def get_core_functions():
|
|||||||
"Suffix": r"",
|
"Suffix": r"",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def handle_core_functionality(additional_fn, inputs, history):
|
||||||
|
import core_functional
|
||||||
|
importlib.reload(core_functional) # 热更新prompt
|
||||||
|
core_functional = core_functional.get_core_functions()
|
||||||
|
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
||||||
|
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
||||||
|
history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history
|
||||||
|
return inputs, history
|
||||||
|
|||||||
@ -144,11 +144,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -185,11 +185,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -129,11 +129,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
raw_input = inputs
|
raw_input = inputs
|
||||||
logging.info(f'[raw_input] {raw_input}')
|
logging.info(f'[raw_input] {raw_input}')
|
||||||
|
|||||||
@ -116,11 +116,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
raw_input = inputs
|
raw_input = inputs
|
||||||
logging.info(f'[raw_input] {raw_input}')
|
logging.info(f'[raw_input] {raw_input}')
|
||||||
|
|||||||
@ -290,11 +290,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -154,11 +154,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
yield from update_ui(chatbot=chatbot, history=history)
|
yield from update_ui(chatbot=chatbot, history=history)
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
# 处理历史信息
|
# 处理历史信息
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
|
|||||||
@ -224,11 +224,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
for i in range(len(history)//2):
|
for i in range(len(history)//2):
|
||||||
|
|||||||
@ -248,14 +248,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
return
|
return
|
||||||
|
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]:
|
|
||||||
inputs = core_functional[additional_fn]["PreProcess"](
|
|
||||||
inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + \
|
|
||||||
inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
history_feedin = []
|
history_feedin = []
|
||||||
for i in range(len(history)//2):
|
for i in range(len(history)//2):
|
||||||
|
|||||||
@ -96,11 +96,8 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
|
|||||||
additional_fn代表点击的哪个按钮,按钮见functional.py
|
additional_fn代表点击的哪个按钮,按钮见functional.py
|
||||||
"""
|
"""
|
||||||
if additional_fn is not None:
|
if additional_fn is not None:
|
||||||
import core_functional
|
from core_functional import handle_core_functionality
|
||||||
importlib.reload(core_functional) # 热更新prompt
|
inputs, history = handle_core_functionality(additional_fn, inputs, history)
|
||||||
core_functional = core_functional.get_core_functions()
|
|
||||||
if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话)
|
|
||||||
inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
|
|
||||||
|
|
||||||
raw_input = "What I would like to say is the following: " + inputs
|
raw_input = "What I would like to say is the following: " + inputs
|
||||||
history.extend([inputs, ""])
|
history.extend([inputs, ""])
|
||||||
|
|||||||
50
setup.py
Normal file
50
setup.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# Packaging script for the void-terminal distribution.
# NOTE(review): glob and fnmatch are not referenced anywhere in this 50-line
# file; they are kept (one per line, PEP 8) rather than removed in case
# external tooling imports them transitively — confirm before deleting.
import fnmatch
import glob
import os

import setuptools

# Use the repository README as the long description shown on PyPI.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
|
||||||
|
|
||||||
|
|
||||||
|
def _process_requirements():
|
||||||
|
packages = open('requirements.txt').read().strip().split('\n')
|
||||||
|
requires = []
|
||||||
|
for pkg in packages:
|
||||||
|
if pkg.startswith('git+ssh'):
|
||||||
|
return_code = os.system('pip install {}'.format(pkg))
|
||||||
|
assert return_code == 0, 'error, status_code is: {}, exit!'.format(return_code)
|
||||||
|
if pkg.startswith('./docs'):
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
requires.append(pkg)
|
||||||
|
return requires
|
||||||
|
|
||||||
|
def package_files(directory):
    """Return every git-tracked file path, for use as package_data.

    Args:
        directory: unused — kept only so the existing call site
            ``package_files('./')`` keeps working. File discovery always
            delegates to ``git ls-files`` from the current directory.

    Returns:
        list[str]: repository-relative paths reported by ``git ls-files``.
    """
    import subprocess
    # check_output returns *bytes*; decode before splitting. The previous
    # str(k) on each bytes line produced literal "b'path'" strings, which
    # corrupted every package_data entry.
    output = subprocess.check_output("git ls-files", shell=True)
    return output.decode("utf-8").splitlines()
|
||||||
|
|
||||||
|
# Bundle every git-tracked file with the package so the installed copy
# mirrors the repository layout.
extra_files = package_files('./')


setuptools.setup(
    name="void-terminal",
    version="0.0.0",
    author="Qingxu",
    author_email="505030475@qq.com",
    description="LLM based APIs",
    # long_description is the README.md content read at module top.
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/binary-husky/gpt-academic",
    project_urls={
        "Bug Tracker": "https://github.com/binary-husky/gpt-academic/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # The repository root doubles as the package root.
    package_dir={"": "."},
    # Ship every git-tracked file as package data (see package_files above).
    package_data={"": extra_files},
    include_package_data=True,
    packages=setuptools.find_packages(where="."),
    python_requires=">=3.9",
    # Dependencies come from requirements.txt; git+ssh entries are installed
    # eagerly via pip inside _process_requirements.
    install_requires=_process_requirements(),
)
|
||||||
Reference in New Issue
Block a user