Fix PEP 8 warnings: E302 (expected 2 blank lines, found 1) and E303 (too many blank lines (4))

Author: kainstan
Date: 2023-06-06 08:29:26 +08:00
Parent: a019a64e65
Commit: 0fc8f740d0
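For context, the two pycodestyle rules named in the commit title are blank-line rules: E303 caps consecutive blank lines at two, and E302 requires exactly two blank lines before each top-level definition. The sketch below is a generic, hypothetical illustration of conforming code (the function names are invented, not taken from this repository):

import os


# E302: two blank lines must precede every top-level def/class; the warning
# "expected 2 blank lines, found 1" means only one blank line was present.
def first_helper():
    return os.getcwd()


# E303: "too many blank lines (4)" means more than two consecutive blank
# lines appeared; at most two are allowed, as shown above.
def second_helper():
    return 'ok'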


@@ -90,11 +90,11 @@ def trimmed_format_exc():
    replace_path = "."
    return str.replace(current_path, replace_path)
def CatchException(f):
    """
    Decorator that catches exceptions raised inside function f, wraps them into a generator, and displays them in the chat.
    """
    @wraps(f)
    def decorated(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT=-1):
        try:
@@ -149,6 +149,7 @@ def HotReload(f):
========================================================================
"""
def get_reduce_token_percent(text):
    """
    * This function will be deprecated in the future
@@ -208,8 +209,6 @@ def regular_txt_to_markdown(text):
    return text
def report_execption(chatbot, history, a, b):
    """
    Append an error message to the chatbot
@@ -234,6 +233,7 @@ def text_divide_paragraph(text):
    text = "</br>".join(lines)
    return text
@lru_cache(maxsize=128)  # use an LRU cache to speed up conversion
def markdown_convertion(txt):
    """
@@ -433,6 +433,7 @@ def find_recent_files(directory):
    return recent_files
def promote_file_to_downloadzone(file, rename_file=None):
    # copy the file into the download area
    import shutil
@@ -441,6 +442,7 @@ def promote_file_to_downloadzone(file, rename_file=None):
    if os.path.exists(new_path): os.remove(new_path)
    shutil.copyfile(file, new_path)
def on_file_uploaded(files, chatbot, txt, txt2, checkboxes):
    """
    Callback invoked when files are uploaded
@@ -488,17 +490,20 @@ def on_report_generated(files, chatbot):
    chatbot.append(['报告如何远程获取?', '报告已经添加到右侧“文件上传区”(可能处于折叠状态),请查收。'])
    return report_files, chatbot
def is_openai_api_key(key):
    API_MATCH_ORIGINAL = re.match(r"sk-[a-zA-Z0-9]{48}$", key)
    API_MATCH_AZURE = re.match(r"[a-zA-Z0-9]{32}$", key)
    return bool(API_MATCH_ORIGINAL) or bool(API_MATCH_AZURE)
def is_api2d_key(key):
    if key.startswith('fk') and len(key) == 41:
        return True
    else:
        return False
def is_any_api_key(key):
    if ',' in key:
        keys = key.split(',')
@@ -508,6 +513,7 @@ def is_any_api_key(key):
    else:
        return is_openai_api_key(key) or is_api2d_key(key)
def what_keys(keys):
    avail_key_list = {'OpenAI Key':0, "API2D Key":0}
    key_list = keys.split(',')
@@ -522,6 +528,7 @@ def what_keys(keys):
    return f"检测到: OpenAI Key {avail_key_list['OpenAI Key']}API2D Key {avail_key_list['API2D Key']}"
def select_api_key(keys, llm_model):
    import random
    avail_key_list = []
@@ -541,6 +548,7 @@ def select_api_key(keys, llm_model):
    api_key = random.choice(avail_key_list)  # random load balancing
    return api_key
def read_env_variable(arg, default_value):
    """
    The environment variable can be `GPT_ACADEMIC_CONFIG` (takes precedence) or simply `CONFIG`
@@ -595,6 +603,7 @@ def read_env_variable(arg, default_value):
    print亮绿(f"[ENV_VAR] 成功读取环境变量{arg}")
    return r
@lru_cache(maxsize=128)
def read_single_conf_with_lru_cache(arg):
    from colorful import print亮红, print亮绿, print亮蓝
@@ -659,6 +668,7 @@ class DummyWith():
    def __exit__(self, exc_type, exc_value, traceback):
        return
def run_gradio_in_subpath(demo, auth, port, custom_path):
    """
    Change the gradio serving address to the specified sub-path
@@ -753,6 +763,7 @@ def clip_history(inputs, history, tokenizer, max_token_limit):
========================================================================
"""
def zip_folder(source_folder, dest_folder, zip_name):
    import zipfile
    import os
@@ -784,15 +795,18 @@ def zip_folder(source_folder, dest_folder, zip_name):
    print(f"Zip file created at {zip_file}")
def zip_result(folder):
    import time
    t = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    zip_folder(folder, './gpt_log/', f'{t}-result.zip')
def gen_time_str():
    import time
    return time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
class ProxyNetworkActivate():
    """
    This defines an empty context manager named TempProxy, used to route a small block of code through a proxy
@@ -812,12 +826,14 @@ class ProxyNetworkActivate():
        if 'HTTPS_PROXY' in os.environ: os.environ.pop('HTTPS_PROXY')
        return
def objdump(obj, file='objdump.tmp'):
    import pickle
    with open(file, 'wb+') as f:
        pickle.dump(obj, f)
    return
def objload(file='objdump.tmp'):
    import pickle, os
    if not os.path.exists(file):