raghavNCI committed
Commit: 43cf665
Parent(s): 9952378
changes v17
Files changed: question.py (+15 -6)
question.py
CHANGED
@@ -33,6 +33,11 @@ def extract_last_keywords(raw: str, max_keywords=6):
         return parts
     return []
 
+def is_relevant(article, keywords):
+    text = f"{article.get('title', '')} {article.get('content', '')}".lower()
+    return any(kw.lower() in text for kw in keywords)
+
+
 def mistral_generate(prompt: str, max_new_tokens=128):
     payload = {
         "inputs": prompt,
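A quick sketch of how the new is_relevant helper behaves; the sample article and keyword lists below are invented for illustration and are not part of the repo.

def is_relevant(article, keywords):
    # Same logic as the helper added above: case-insensitive substring match
    # against the article's title and content.
    text = f"{article.get('title', '')} {article.get('content', '')}".lower()
    return any(kw.lower() in text for kw in keywords)

article = {"title": "ECB holds interest rates steady", "content": "Policymakers left borrowing costs unchanged."}
print(is_relevant(article, ["interest rates"]))      # True: "interest rates" appears in the title
print(is_relevant(article, ["football", "tennis"]))  # False: no keyword appears anywhere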
@@ -93,10 +98,12 @@ async def ask_question(input: QuestionInput):
 
     print("Fetched articles:", articles)
 
+    relevant_articles = [a for a in articles if is_relevant(a, keywords)]
+
     context = "\n\n".join([
-        …
-        for …
-    ])[:…
+        a.get("content") or ""
+        for a in relevant_articles
+    ])[:15000]
 
     if not context.strip():
         return {
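Putting the new pieces together, a minimal, self-contained sketch of the filter-then-join step; the articles and keywords are made up, and note that the [:15000] slice caps the context by characters, not tokens.

# Hypothetical inputs, invented for illustration.
keywords = ["rates"]
articles = [
    {"title": "Rate decision", "content": "The central bank held rates steady."},
    {"title": "Cup final recap", "content": "The match ended 2-1."},
]

# Same shape as the new ask_question code (the predicate inlines is_relevant):
# filter the articles, join their bodies, and cap the result at 15,000 characters.
relevant_articles = [
    a for a in articles
    if any(kw.lower() in f"{a.get('title', '')} {a.get('content', '')}".lower() for kw in keywords)
]
context = "\n\n".join([a.get("content") or "" for a in relevant_articles])[:15000]
print(context)  # -> "The central bank held rates steady."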
@@ -107,9 +114,11 @@ async def ask_question(input: QuestionInput):
 
     # Step 3: Ask Mistral to answer using the context
     answer_prompt = (
-        f"…
-        f"If not …
-        f"Context:\n{context}\n\…
+        f"You are a concise news assistant. Answer the user's question clearly using the context below if relevant. "
+        f"If the context is not helpful, you may rely on your own knowledge, but do not mention the context or question again.\n\n"
+        f"Context:\n{context}\n\n"
+        f"Question: {question}\n\n"
+        f"Answer:"
     )
     answer = mistral_generate(answer_prompt, max_new_tokens=256)
     if not answer:
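The diff only shows the first lines of mistral_generate, so for orientation here is a minimal sketch of a compatible implementation against the Hugging Face Inference API; the endpoint URL, model id, token handling, and response parsing are assumptions, not the repo's actual code.

import os
import requests

# Hypothetical sketch; only the "inputs" key of the payload appears in the diff.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
HEADERS = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}

def mistral_generate(prompt: str, max_new_tokens=128):
    payload = {
        "inputs": prompt,
        "parameters": {"max_new_tokens": max_new_tokens, "return_full_text": False},
    }
    resp = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)
    if resp.status_code != 200:
        return None  # the caller does "if not answer:", so None signals failure
    data = resp.json()
    # The Inference API usually returns a list of {"generated_text": ...} dicts.
    if isinstance(data, list) and data and "generated_text" in data[0]:
        return data[0]["generated_text"].strip()
    return None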