Update app.py
app.py CHANGED
@@ -4,7 +4,7 @@ from transformers import pipeline
 from sklearn.metrics.pairwise import cosine_similarity
 from sentence_transformers import SentenceTransformer
 import numpy as np
-
+
 # Set modern page configuration
 st.set_page_config(page_title="News Analyzer", layout="wide")
 
@@ -156,23 +156,24 @@ with col2:
 # Button & Answer Display
 if st.button("🔮 Get Answer"):
     if user_question.strip() and uploaded_file is not None:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        with st.spinner("⏳ Wait, our agent will look into that..."):
+            # Extract the 1st column as context (0-indexed)
+            context = df.iloc[:, 0].dropna().tolist()
+
+            # Generate embeddings for the context rows and the question
+            context_embeddings = sentence_model.encode(context)
+            question_embedding = sentence_model.encode([user_question])
+
+            # Calculate cosine similarity
+            similarities = cosine_similarity(question_embedding, context_embeddings)
+            top_indices = similarities[0].argsort()[-5:][::-1]  # Get top 5 similar rows
+
+            # Prepare the top 5 similar context rows
+            top_context = "\n".join([context[i] for i in top_indices])
+
+            # Get answer from Hugging Face model using top context
+            result = pipe(question=user_question, context=top_context)
+            answer = result['answer']
     else:
         answer = "⚠️ Please upload a valid file first!"
 
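Note: the hunk relies on df, sentence_model, and pipe, which are defined earlier in app.py and not shown in this diff. A rough sketch of the assumed setup, where the model checkpoints and widget labels are placeholders rather than values taken from the Space:

import pandas as pd
import streamlit as st
from sentence_transformers import SentenceTransformer
from transformers import pipeline

# Assumed setup; the real app may cache these and use different checkpoints.
sentence_model = SentenceTransformer("all-MiniLM-L6-v2")  # embedding model (assumption)
pipe = pipeline("question-answering", model="distilbert-base-cased-distilled-squad")  # QA model (assumption)

user_question = st.text_input("Ask a question about the uploaded news")  # hypothetical label
uploaded_file = st.file_uploader("Upload a CSV of news articles", type="csv")  # hypothetical label
if uploaded_file is not None:
    df = pd.read_csv(uploaded_file)  # the first column is used as the context rows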
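The argsort()[-5:][::-1] idiom in the new code ranks the context rows by cosine similarity to the question and keeps up to five best matches, highest first. A minimal sketch of just that selection step, using toy vectors instead of real sentence embeddings:

import numpy as np
from sklearn.metrics.pairwise import cosine_similarity

# Toy 2-D "embeddings": three context rows and one question vector.
context_embeddings = np.array([[1.0, 0.0], [0.0, 1.0], [0.9, 0.1]])
question_embedding = np.array([[1.0, 0.1]])

# cosine_similarity returns shape (1, n_rows); rank row 0, then reverse for descending order.
similarities = cosine_similarity(question_embedding, context_embeddings)
top_indices = similarities[0].argsort()[-5:][::-1]  # most similar rows first
print(top_indices)  # [2 0 1] -- with fewer than 5 rows, all of them are returned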