Merge branch 'master' into huggingface
Files changed:
- Dockerfile +6 -2
- README.md +3 -5
- crazy_functions/Latex全文润色.py +1 -3
- crazy_functions/Latex全文翻译.py +1 -1
- docker-compose.yml +1 -0
- main.py +4 -2
- request_llm/bridge_all.py +16 -13
- request_llm/edge_gpt_free.py +20 -20
- requirements.txt +1 -1
- toolbox.py +20 -1
- version +2 -2
Dockerfile
CHANGED
@@ -9,12 +9,16 @@ RUN echo '[global]' > /etc/pip.conf && \
 
 
 WORKDIR /gpt
-COPY requirements.txt .
-RUN pip3 install -r requirements.txt
 
+# Copy the project files
 COPY . .
 
+# Install dependencies
+RUN pip3 install -r requirements.txt
+
+
 # Optional step: warm up the modules
 RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
 
+# Launch
 CMD ["python3", "-u", "main.py"]
README.md
CHANGED
@@ -3,8 +3,6 @@ title: academic-chatgpt
 emoji: 😻
 colorFrom: blue
 colorTo: blue
-sdk: gradio
-sdk_version: 3.28.3
 python_version: 3.11
 app_file: main.py
 pinned: false
@@ -13,9 +11,9 @@ pinned: false
 # ChatGPT 学术优化
 > **Note**
 >
->
+> On May 27 the gradio dependency received substantial fixes and adjustments: the official Gradio was forked and a series of its bugs were resolved. If you updated on the 27th itself, the code may fail (missing dependencies, getting stuck on the loading screen, etc.); simply update to the **latest code** and reinstall the pip dependencies. We apologize for any inconvenience. When installing dependencies, strictly use the versions **pinned** in requirements.txt:
 >
-> `pip install -r requirements.txt -i https://
+> `pip install -r requirements.txt -i https://pypi.org/simple`
 >
 
 # <img src="docs/logo.png" width="40" > GPT 学术优化 (GPT Academic)
@@ -315,7 +313,7 @@ gpt_academic开发者QQ群-2:610599535
 
 - Known issues
 - Some browser translation plugins interfere with the front end of this software
--
+- The official Gradio currently has many compatibility bugs; be sure to install Gradio via requirements.txt
 
 ## References & Learning
 
crazy_functions/Latex全文润色.py
CHANGED
@@ -66,7 +66,7 @@ def 多文件润色(file_manifest, project_folder, llm_kwargs, plugin_kwargs, ch
 with open(fp, 'r', encoding='utf-8', errors='replace') as f:
     file_content = f.read()
     # Regular expression that matches TeX comments
-    comment_pattern = r'%.*'
+    comment_pattern = r'(?<!\\)%.*'
     # Find comments with the regex and replace them with an empty string
     clean_tex_content = re.sub(comment_pattern, '', file_content)
     # Record the text with comments removed
@@ -238,5 +238,3 @@ def Latex英文纠错(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p
         yield from update_ui(chatbot=chatbot, history=history) # refresh the UI
         return
     yield from 多文件润色(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, language='en', mode='proofread')
-
-
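The only substantive change here (mirrored below in crazy_functions/Latex全文翻译.py) swaps the comment pattern r'%.*' for r'(?<!\\)%.*', so a percent sign escaped as \% in the LaTeX source is no longer treated as the start of a comment. A standalone sketch of the difference, using an invented sample string:

    import re

    tex = r"Accuracy rose by 5\% this year. % TODO: cite the source"

    # Old pattern: matches from the escaped \% onward and mangles the sentence.
    print(re.sub(r'%.*', '', tex))
    # -> Accuracy rose by 5\

    # New pattern: the negative lookbehind (?<!\\) skips percent signs preceded
    # by a backslash, so only the real TeX comment is stripped.
    print(re.sub(r'(?<!\\)%.*', '', tex))
    # -> Accuracy rose by 5\% this year.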
crazy_functions/Latex全文翻译.py
CHANGED
@@ -46,7 +46,7 @@ def 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, ch
 with open(fp, 'r', encoding='utf-8', errors='replace') as f:
     file_content = f.read()
     # Regular expression that matches TeX comments
-    comment_pattern = r'%.*'
+    comment_pattern = r'(?<!\\)%.*'
     # Find comments with the regex and replace them with an empty string
     clean_tex_content = re.sub(comment_pattern, '', file_content)
     # Record the text with comments removed
docker-compose.yml
CHANGED
@@ -99,6 +99,7 @@ services:
 command: >
   bash -c " echo '[gpt-academic] 正在从github拉取最新代码...' &&
   git pull &&
+  pip install -r requirements.txt &&
   echo '[jittorllms] 正在从github拉取最新代码...' &&
   git --git-dir=request_llm/jittorllms/.git --work-tree=request_llm/jittorllms pull --force &&
   python3 -u main.py"
main.py
CHANGED
@@ -2,6 +2,7 @@ import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染
 
 def main():
     import gradio as gr
+    if gr.__version__ not in ['3.28.3','3.32.2']: assert False, "请用 pip install -r requirements.txt 安装依赖"
     from request_llm.bridge_all import predict
     from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, DummyWith
     # Tip: copy your secrets (API keys, proxy URLs) into a config_private.py so they are not accidentally pushed to GitHub
@@ -197,7 +198,7 @@ def main():
     threading.Thread(target=warm_up_modules, name="warm-up", daemon=True).start()
 
     auto_opentab_delay()
-    demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=False, favicon_path="docs/logo.png")
+    demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=False, favicon_path="docs/logo.png", blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile"])
 
     # To run under a secondary URL path
     # CUSTOM_PATH, = get_conf('CUSTOM_PATH')
@@ -205,7 +206,8 @@ def main():
     # from toolbox import run_gradio_in_subpath
     # run_gradio_in_subpath(demo, auth=AUTHENTICATION, port=PORT, custom_path=CUSTOM_PATH)
     # else:
-    # demo.launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION, favicon_path="docs/logo.png"
+    # demo.launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION, favicon_path="docs/logo.png",
+    # blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile"])
 
 if __name__ == "__main__":
     main()
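The two launch-time additions above pin the accepted Gradio builds and pass blocked_paths so Gradio's file route cannot serve configuration files. A minimal sketch of the same pattern, assuming a Gradio 3.x install whose Blocks.launch accepts blocked_paths (the tiny demo app below is illustrative, not from the repository):

    import gradio as gr

    # Refuse to start on an unexpected Gradio build (mirrors the assert added in main.py).
    if gr.__version__ not in ['3.28.3', '3.32.2']:
        raise RuntimeError("Please install the pinned dependencies: pip install -r requirements.txt")

    with gr.Blocks() as demo:
        gr.Markdown("Hello")

    # blocked_paths keeps sensitive files out of the file route served by Gradio.
    demo.queue(concurrency_count=2).launch(
        server_name="0.0.0.0",
        blocked_paths=["config.py", "config_private.py"],
    )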
request_llm/bridge_all.py
CHANGED
@@ -202,19 +202,22 @@ if "stack-claude" in AVAIL_LLM_MODELS:
         }
     })
 if "newbing-free" in AVAIL_LLM_MODELS:
-    from .bridge_newbingfree import predict_no_ui_long_connection as newbingfree_noui
-    from .bridge_newbingfree import predict as newbingfree_ui
-    # claude
-    model_info.update({
-        "newbing-free": {
-            "fn_with_ui": newbingfree_ui,
-            "fn_without_ui": newbingfree_noui,
-            "endpoint": newbing_endpoint,
-            "max_token": 4096,
-            "tokenizer": tokenizer_gpt35,
-            "token_cnt": get_token_num_gpt35,
-        }
-    })
+    try:
+        from .bridge_newbingfree import predict_no_ui_long_connection as newbingfree_noui
+        from .bridge_newbingfree import predict as newbingfree_ui
+        # claude
+        model_info.update({
+            "newbing-free": {
+                "fn_with_ui": newbingfree_ui,
+                "fn_without_ui": newbingfree_noui,
+                "endpoint": newbing_endpoint,
+                "max_token": 4096,
+                "tokenizer": tokenizer_gpt35,
+                "token_cnt": get_token_num_gpt35,
+            }
+        })
+    except:
+        print(trimmed_format_exc())
 
 def LLM_CATCH_EXCEPTION(f):
     """
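The hunk above wraps the newbing-free registration in try/except, so a failed optional import is logged via trimmed_format_exc() instead of breaking start-up. A rough sketch of the same registration pattern for some other optional backend, meant to sit alongside the block above (the module, model name and endpoint here are hypothetical, not part of the repository):

    # Hypothetical registration following the model_info pattern shown above;
    # "my-model", .bridge_mymodel and mymodel_endpoint do not exist in the repo.
    try:
        from .bridge_mymodel import predict_no_ui_long_connection as mymodel_noui
        from .bridge_mymodel import predict as mymodel_ui
        model_info.update({
            "my-model": {
                "fn_with_ui": mymodel_ui,        # the UI-facing predict function
                "fn_without_ui": mymodel_noui,   # the predict_no_ui_long_connection variant
                "endpoint": mymodel_endpoint,
                "max_token": 4096,
                "tokenizer": tokenizer_gpt35,
                "token_cnt": get_token_num_gpt35,
            }
        })
    except:
        print(trimmed_format_exc())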
request_llm/edge_gpt_free.py
CHANGED
@@ -196,9 +196,9 @@ class _ChatHubRequest:
         self,
         prompt: str,
         conversation_style: CONVERSATION_STYLE_TYPE,
-        options
-        webpage_context
-        search_result
+        options = None,
+        webpage_context = None,
+        search_result = False,
     ) -> None:
         """
         Updates request object
@@ -294,9 +294,9 @@ class _Conversation:
 
     def __init__(
         self,
-        proxy
-        async_mode
-        cookies
+        proxy = None,
+        async_mode = False,
+        cookies = None,
     ) -> None:
         if async_mode:
             return
@@ -350,8 +350,8 @@ class _Conversation:
 
     @staticmethod
     async def create(
-        proxy
-        cookies
+        proxy = None,
+        cookies = None,
     ):
         self = _Conversation(async_mode=True)
         self.struct = {
@@ -418,11 +418,11 @@ class _ChatHub:
     def __init__(
         self,
         conversation: _Conversation,
-        proxy
-        cookies
+        proxy = None,
+        cookies = None,
     ) -> None:
-        self.session
-        self.wss
+        self.session = None
+        self.wss = None
         self.request: _ChatHubRequest
         self.loop: bool
         self.task: asyncio.Task
@@ -441,7 +441,7 @@ class _ChatHub:
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         raw: bool = False,
         options: dict = None,
-        webpage_context
+        webpage_context = None,
         search_result: bool = False,
     ) -> Generator[str, None, None]:
         """
@@ -611,10 +611,10 @@ class Chatbot:
 
     def __init__(
         self,
-        proxy
-        cookies
+        proxy = None,
+        cookies = None,
     ) -> None:
-        self.proxy
+        self.proxy = proxy
         self.chat_hub: _ChatHub = _ChatHub(
             _Conversation(self.proxy, cookies=cookies),
             proxy=self.proxy,
@@ -623,8 +623,8 @@ class Chatbot:
 
     @staticmethod
    async def create(
-        proxy
-        cookies
+        proxy = None,
+        cookies = None,
     ):
         self = Chatbot.__new__(Chatbot)
         self.proxy = proxy
@@ -641,7 +641,7 @@ class Chatbot:
         wss_link: str = "wss://sydney.bing.com/sydney/ChatHub",
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         options: dict = None,
-        webpage_context
+        webpage_context = None,
         search_result: bool = False,
     ) -> dict:
         """
@@ -667,7 +667,7 @@ class Chatbot:
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         raw: bool = False,
         options: dict = None,
-        webpage_context
+        webpage_context = None,
         search_result: bool = False,
     ) -> Generator[str, None, None]:
         """
requirements.txt
CHANGED
@@ -1,4 +1,4 @@
-gradio
+gradio-stable-fork
 tiktoken>=0.3.3
 requests[socks]
 transformers
toolbox.py
CHANGED
@@ -764,4 +764,23 @@ def zip_folder(source_folder, dest_folder, zip_name):
 
 def gen_time_str():
     import time
-    return time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
+    return time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
+
+
+class ProxyNetworkActivate():
+    """
+    This defines an empty context manager named TempProxy, used to route a short block of code through the configured proxy
+    """
+    def __enter__(self):
+        from toolbox import get_conf
+        proxies, = get_conf('proxies')
+        if 'no_proxy' in os.environ: os.environ.pop('no_proxy')
+        os.environ['HTTP_PROXY'] = proxies['http']
+        os.environ['HTTPS_PROXY'] = proxies['https']
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        os.environ['no_proxy'] = '*'
+        if 'HTTP_PROXY' in os.environ: os.environ.pop('HTTP_PROXY')
+        if 'HTTPS_PROXY' in os.environ: os.environ.pop('HTTPS_PROXY')
+        return
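The new ProxyNetworkActivate context manager temporarily replaces the global no_proxy='*' setting with the HTTP(S) proxy configured under get_conf('proxies'), then restores the proxy-free environment on exit. A minimal usage sketch, assuming config.py provides a proxies dict with 'http' and 'https' keys (the URL below is only an illustration):

    import requests
    from toolbox import ProxyNetworkActivate

    # Only code inside the with-block sees HTTP_PROXY/HTTPS_PROXY; afterwards
    # no_proxy='*' is restored, matching the default set at the top of main.py.
    with ProxyNetworkActivate():
        r = requests.get("https://example.com", timeout=10)
    print(r.status_code)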
version
CHANGED
@@ -1,5 +1,5 @@
 {
-    "version": 3.
+    "version": 3.37,
     "show_feature": true,
-    "new_feature": "Fixed PDF translation bugs; added HTML side-by-side Chinese/English comparison <-> Added an OpenAI image generation plugin <-> Added an OpenAI audio-to-text summary plugin <-> Added Claude support via Slack <-> Added support for Fudan's MOSS model (extra dependencies required) <-> Provided a docker-compose setup compatible with LLaMA, Pangu, RWKV and other backends <-> Added Live2D decoration <-> Improved saving/loading/deleting of conversation history <-> Conversation saving feature"
+    "new_feature": "Fixed the gradio copy-button bug <-> Fixed PDF translation bugs; added HTML side-by-side Chinese/English comparison <-> Added an OpenAI image generation plugin <-> Added an OpenAI audio-to-text summary plugin <-> Added Claude support via Slack <-> Added support for Fudan's MOSS model (extra dependencies required) <-> Provided a docker-compose setup compatible with LLaMA, Pangu, RWKV and other backends <-> Added Live2D decoration <-> Improved saving/loading/deleting of conversation history <-> Conversation saving feature"
 }
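The version file remains plain JSON, so the bumped fields can be read directly; a minimal sketch (run from the repository root, field names as shown in the diff above):

    import json

    with open('version', 'r', encoding='utf-8') as f:
        meta = json.load(f)
    print(meta['version'])       # e.g. 3.37
    print(meta['new_feature'])   # the "<->"-separated changelog string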