Tuchuanhuhuhu committed
Commit 6a88a02 · 1 parent: 0aaa1a4
Update dependency versions (更新依赖版本)
Files changed:
- ChuanhuChatbot.py +2 -0
- modules/llama_func.py +4 -4
- modules/models/base_model.py +3 -0
- requirements.txt +3 -3
ChuanhuChatbot.py
CHANGED
@@ -287,6 +287,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
         chatbot,
         use_streaming_checkbox,
         use_websearch_checkbox,
+        autogpt_mode,
         index_files,
         language_select_dropdown,
     ],
@@ -349,6 +350,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
         chatbot,
         use_streaming_checkbox,
         use_websearch_checkbox,
+        autogpt_mode,
         index_files,
         language_select_dropdown,
     ],
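Both hunks do the same thing: they add a new autogpt_mode component to the list of Gradio inputs passed to the chat handlers, so the flag reaches the Python callback as an extra positional argument. The following is a minimal, hypothetical sketch of that wiring pattern only; the handler body and component labels are illustrative and are not taken from the repository.

import gradio as gr

def predict(chatbot, use_streaming, use_websearch, autogpt_mode, index_files, language):
    # Illustrative handler: report which optional features are switched on.
    flags = [name for name, on in [("streaming", use_streaming),
                                   ("websearch", use_websearch),
                                   ("autogpt", autogpt_mode)] if on]
    chatbot = chatbot + [["(user input)", f"enabled: {', '.join(flags) or 'none'}"]]
    return chatbot

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    use_streaming_checkbox = gr.Checkbox(label="Stream output")
    use_websearch_checkbox = gr.Checkbox(label="Use web search")
    autogpt_mode = gr.Checkbox(label="AutoGPT mode")  # the newly added input
    index_files = gr.Files(label="Index files")
    language_select_dropdown = gr.Dropdown(["中文", "English"], value="中文")
    send = gr.Button("Send")
    # Every component in `inputs` arrives as one positional argument of predict,
    # so adding autogpt_mode here must be mirrored in the handler signature.
    send.click(
        predict,
        inputs=[chatbot, use_streaming_checkbox, use_websearch_checkbox,
                autogpt_mode, index_files, language_select_dropdown],
        outputs=[chatbot],
    )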
modules/llama_func.py
CHANGED
@@ -107,7 +107,7 @@ def construct_index(
 ):
     from langchain.chat_models import ChatOpenAI
     from langchain.embeddings.huggingface import HuggingFaceEmbeddings
-    from llama_index import …
+    from llama_index import GPTVectorStoreIndex, ServiceContext, LangchainEmbedding, OpenAIEmbedding

     if api_key:
         os.environ["OPENAI_API_KEY"] = api_key
@@ -129,7 +129,7 @@ def construct_index(
     index_name = get_index_name(file_src)
     if os.path.exists(f"./index/{index_name}.json"):
         logging.info("找到了缓存的索引文件,加载中……")
-        return …
+        return GPTVectorStoreIndex.load_from_disk(f"./index/{index_name}.json")
     else:
         try:
             documents = get_documents(file_src)
@@ -144,12 +144,12 @@ def construct_index(
             chunk_size_limit=chunk_size_limit,
             embed_model=embed_model,
         )
-        index = …
+        index = GPTVectorStoreIndex.from_documents(
             documents, service_context=service_context
         )
         logging.debug("索引构建完成!")
         os.makedirs("./index", exist_ok=True)
-        index.…
+        index.storage_context.persist(f"./index/{index_name}")
         logging.debug("索引已保存至本地!")
         return index

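These hunks move construct_index onto the llama_index 0.6 storage API: indices are built with GPTVectorStoreIndex.from_documents() and written out via index.storage_context.persist(). Below is a minimal build/persist/reload sketch, assuming llama_index==0.6.x and that its 0.6-era loading helpers (StorageContext, load_index_from_storage) are available; the document paths and chunk size are placeholders, and this is not the repository's own load path (the commit still calls load_from_disk for the cached-JSON case).

import os
from llama_index import (
    GPTVectorStoreIndex,
    ServiceContext,
    StorageContext,
    SimpleDirectoryReader,
    load_index_from_storage,
)

persist_dir = "./index/example"  # hypothetical location, mirroring ./index/{index_name}

if os.path.isdir(persist_dir):
    # Reload a previously persisted index from its storage directory.
    storage_context = StorageContext.from_defaults(persist_dir=persist_dir)
    index = load_index_from_storage(storage_context)
else:
    # Build a fresh index and persist it to disk.
    documents = SimpleDirectoryReader("./docs").load_data()
    service_context = ServiceContext.from_defaults(chunk_size_limit=512)
    index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context)
    index.storage_context.persist(persist_dir=persist_dir)

print(index.as_query_engine().query("What is this document about?"))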
modules/models/base_model.py
CHANGED
@@ -277,6 +277,7 @@ class BaseLLMModel:
         chatbot,
         stream=False,
         use_websearch=False,
+        autogpt_mode=False,
         files=None,
         reply_language="中文",
         should_check_token_count=True,
@@ -383,6 +384,7 @@ class BaseLLMModel:
         chatbot,
         stream=False,
         use_websearch=False,
+        autogpt_mode=False,
         files=None,
         reply_language="中文",
     ):
@@ -402,6 +404,7 @@ class BaseLLMModel:
         chatbot,
         stream=stream,
         use_websearch=use_websearch,
+        autogpt_mode=autogpt_mode,
         files=files,
         reply_language=reply_language,
     )
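The three hunks add the same autogpt_mode=False keyword to two method signatures and forward it unchanged at the internal call site, so the flag travels from the UI layer down to the model call. A stripped-down, hypothetical version of that pattern follows; the class and method names are illustrative stand-ins, not the repository's real BaseLLMModel code.

class BaseLLMModelSketch:
    """Hypothetical stand-in showing how a new keyword flag is threaded through."""

    def predict(
        self,
        inputs,
        chatbot,
        stream=False,
        use_websearch=False,
        autogpt_mode=False,   # newly introduced flag, defaulting to off
        files=None,
        reply_language="中文",
    ):
        mode = "autogpt" if autogpt_mode else "chat"
        chatbot.append((inputs, f"[{mode}] reply placeholder"))
        return chatbot

    def retry(
        self,
        chatbot,
        stream=False,
        use_websearch=False,
        autogpt_mode=False,   # must also appear here, or the wrapper cannot accept it
        files=None,
        reply_language="中文",
    ):
        last_input = "last user message"  # illustrative; the real code re-reads history
        # Forward every keyword unchanged so the wrapper stays a thin pass-through.
        return self.predict(
            last_input,
            chatbot,
            stream=stream,
            use_websearch=use_websearch,
            autogpt_mode=autogpt_mode,
            files=files,
            reply_language=reply_language,
        )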
requirements.txt
CHANGED
@@ -1,4 +1,4 @@
-gradio==3.…
+gradio==3.30.0
 gradio_client==0.1.4
 mdtex2html
 pypinyin
@@ -8,8 +8,8 @@ tqdm
 colorama
 duckduckgo_search==2.9.5
 Pygments
-llama_index==0.…
-langchain
+llama_index==0.6.8
+langchain==0.0.170
 markdown
 PyPDF2
 pdfplumber
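The commit pins gradio, llama_index, and langchain to exact versions, which is what keeps the API changes in modules/llama_func.py reproducible. A small, hypothetical sanity check that the installed packages match those pins (importlib.metadata is standard library on Python 3.8+; recent versions treat "-" and "_" in distribution names interchangeably):

from importlib.metadata import version, PackageNotFoundError

# Pins taken from the updated requirements.txt.
PINNED = {
    "gradio": "3.30.0",
    "gradio_client": "0.1.4",
    "duckduckgo_search": "2.9.5",
    "llama_index": "0.6.8",
    "langchain": "0.0.170",
}

def check_pins(pins):
    ok = True
    for name, expected in pins.items():
        try:
            installed = version(name)
        except PackageNotFoundError:
            print(f"{name}: not installed (expected {expected})")
            ok = False
            continue
        if installed != expected:
            print(f"{name}: installed {installed}, expected {expected}")
            ok = False
    return ok

if __name__ == "__main__":
    print("all pins satisfied" if check_pins(PINNED) else "version mismatch detected")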