Wendyy committed on
Commit
b4901e6
·
1 Parent(s): 0566212

Update modules/chat_func.py

Browse files
Files changed (1) hide show
  1. modules/chat_func.py +0 -1
modules/chat_func.py CHANGED
@@ -271,7 +271,6 @@ def predict(
271
  from llama_index.indices.query.schema import QueryBundle
272
  from langchain.llms import OpenAIChat
273
 
274
- files = [{'name': 'database/cuc-pure.txt'}]
275
  logging.info("输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
276
  if should_check_token_count:
277
  yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts
 
271
  from llama_index.indices.query.schema import QueryBundle
272
  from langchain.llms import OpenAIChat
273
 
 
274
  logging.info("输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
275
  if should_check_token_count:
276
  yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts