Spaces:
Sleeping
Sleeping
user
committed on
Commit
·
ecb7b4d
1
Parent(s):
b024377
Address tokenizer FutureWarning and clean up tokenization spaces
Browse files
app.py
CHANGED
@@ -5,6 +5,8 @@ import faiss
|
|
5 |
import numpy as np
|
6 |
import os
|
7 |
import pickle
|
|
|
|
|
8 |
|
9 |
@st.cache_resource
|
10 |
def load_models():
|
@@ -124,8 +126,6 @@ if prompt := st.chat_input("What would you like to ask the Muse?"):
|
|
124 |
with st.spinner("The Muse is contemplating..."):
|
125 |
try:
|
126 |
response = generate_response(prompt, tokenizer, generation_model, embedding_model, index, chunks)
|
127 |
-
if not response or response.isspace():
|
128 |
-
raise ValueError("Generated response is empty or contains only whitespace")
|
129 |
except Exception as e:
|
130 |
response = f"I apologize, but I encountered an error: {str(e)}"
|
131 |
|
|
|
5 |
import numpy as np
|
6 |
import os
|
7 |
import pickle
|
8 |
+
import warnings
|
9 |
+
warnings.filterwarnings("ignore", category=FutureWarning, module="transformers")
|
10 |
|
11 |
@st.cache_resource
|
12 |
def load_models():
|
|
|
126 |
with st.spinner("The Muse is contemplating..."):
|
127 |
try:
|
128 |
response = generate_response(prompt, tokenizer, generation_model, embedding_model, index, chunks)
|
|
|
|
|
129 |
except Exception as e:
|
130 |
response = f"I apologize, but I encountered an error: {str(e)}"
|
131 |
|