Update app.py
app.py CHANGED
@@ -79,26 +79,34 @@ def generate_response(
 
     context = search_relevant_text(message)  # Get relevant content from PDF
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    # Format the prompt for Mistral
+    prompt = f"""<s>[INST] {system_message}
+
+Context from the PDF:
+{context}
+
+User Question: {message} [/INST]"""
+
+    # Add conversation history if it exists
+    for prev_msg, prev_response in history:
+        prompt += f" {prev_response} </s>[INST] {prev_msg} [/INST]"
+
+    try:
+        response = client.text_generation(
+            prompt,
+            max_new_tokens=max_tokens,
+            temperature=temperature,
+            top_p=top_p,
+            stream=True
+        )
+
+        full_response = ""
+        for chunk in response:
+            full_response += chunk
+        return full_response
+    except Exception as e:
+        print(f"Error generating response: {str(e)}")
+        return "I apologize, but I encountered an error while generating the response. Please try again."
 
 @app.route('/')
 def index():
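A note on the prompt construction in this change: Mistral's instruct format conventionally places past exchanges before the current question, each as an [INST] user [/INST] answer</s> pair, while the loop above appends history after the current question and puts prev_response ahead of prev_msg. A minimal sketch of the conventional ordering, assuming history is a list of (user_message, assistant_response) tuples as the loop's unpacking suggests (the helper name is hypothetical):

def build_mistral_prompt(system_message, context, history, message):
    # Hypothetical helper: past turns come first, the current question last.
    prompt = "<s>"
    for prev_msg, prev_response in history:
        # Each completed exchange: [INST] question [/INST] answer</s>
        prompt += f"[INST] {prev_msg} [/INST] {prev_response}</s>"
    # Current turn: system message, retrieved PDF context, then the question.
    prompt += (
        f"[INST] {system_message}\n\n"
        f"Context from the PDF:\n{context}\n\n"
        f"User Question: {message} [/INST]"
    )
    return prompt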
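For reference, a self-contained sketch of the streaming call this commit introduces, assuming client is a huggingface_hub.InferenceClient (the model ID and default parameter values below are assumptions, not taken from the repo). With stream=True and the default details=False, text_generation yields the generated text chunk by chunk, which is why the code accumulates chunks into one string:

from huggingface_hub import InferenceClient

# Assumed model ID; the Space may point at a different Mistral endpoint.
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")

def stream_completion(prompt, max_tokens=512, temperature=0.7, top_p=0.95):
    # stream=True makes text_generation return an iterator of text chunks
    # rather than a single string, so the chunks are joined before returning.
    response = client.text_generation(
        prompt,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    )
    return "".join(chunk for chunk in response)

Note that because generate_response joins the chunks and returns the full string, streaming here only shortens time-to-first-token inside the function; the browser still receives the complete response at once.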