raghavNCI committed
Commit 8573cc3
1 Parent(s): 6f68e08
changes v10
question.py +8 -6
question.py
CHANGED
@@ -62,28 +62,30 @@ async def ask_question(input: QuestionInput):
 
     # Call HF Inference API manually
     hf_api_url = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
+    prompt = f"<s>[INST] Use the context below to answer the question. If not enough information is available, say 'Cannot answer'.\n\nContext:\n{context}\n\nQuestion: {question} [/INST]"
     headers = {
         "Authorization": f"Bearer {HF_TOKEN}",
         "Content-Type": "application/json"
     }
 
     payload = {
-        "inputs": {
-            "past_user_inputs": [],
-            "generated_responses": [],
-            "text": f"You are an assistant that answers questions based on recent news.\n\nContext:\n{context}\n\nQuestion: {question}"
-        },
+        "inputs": prompt,
         "parameters": {
             "max_new_tokens": 256,
             "temperature": 0.7
         }
     }
 
+
     try:
         response = requests.post(hf_api_url, headers=headers, data=json.dumps(payload), timeout=30)
         response.raise_for_status()
         hf_response = response.json()
-
+        if isinstance(hf_response, list) and len(hf_response) > 0:
+            answer = hf_response[0].get("generated_text", "").strip()
+        else:
+            answer = "Cannot answer – model did not return a valid response."
+
     except Exception as e:
         return {"error": f"Hugging Face API error: {str(e)}"}
 
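For reference, a minimal standalone sketch of the request/response flow this diff converges on. It assumes a valid HF_TOKEN and uses placeholder values for context and question; in question.py those come from the incoming request, so everything hard-coded below is illustrative only, not part of the commit.

# Illustrative sketch (not part of the commit). Assumes a valid HF_TOKEN and
# placeholder context/question values; question.py derives these from the request.
import json
import requests

HF_TOKEN = "hf_xxx"                   # assumption: your own Hugging Face token
context = "Example news snippet."     # placeholder
question = "What happened?"           # placeholder

hf_api_url = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
prompt = (
    "<s>[INST] Use the context below to answer the question. "
    "If not enough information is available, say 'Cannot answer'.\n\n"
    f"Context:\n{context}\n\nQuestion: {question} [/INST]"
)

payload = {"inputs": prompt, "parameters": {"max_new_tokens": 256, "temperature": 0.7}}
headers = {"Authorization": f"Bearer {HF_TOKEN}", "Content-Type": "application/json"}

response = requests.post(hf_api_url, headers=headers, data=json.dumps(payload), timeout=30)
response.raise_for_status()
hf_response = response.json()

# Text-generation responses usually arrive as a list of {"generated_text": ...} dicts,
# which is what the added isinstance/len check in the diff guards against.
if isinstance(hf_response, list) and len(hf_response) > 0:
    answer = hf_response[0].get("generated_text", "").strip()
else:
    answer = "Cannot answer – model did not return a valid response."
print(answer)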