Spaces:
Sleeping
Sleeping
Change the client to Groq and pass share=True to launch()
Browse files
app.py
CHANGED
@@ -89,7 +89,7 @@ model = SentenceTransformer('paraphrase-MiniLM-L6-v2')
|
|
89 |
# Normalize user query using Groq VM
|
90 |
def normalize_query(user_query):
|
91 |
try:
|
92 |
-
response =
|
93 |
model="llama-3.1-70b-versatile",
|
94 |
messages=[{
|
95 |
"role": "user",
|
@@ -171,7 +171,7 @@ def generate_response(user_query, top_places):
|
|
171 |
"""
|
172 |
|
173 |
# Generate the response using the model
|
174 |
-
response =
|
175 |
model="llama-3.1-70b-versatile",
|
176 |
messages=[
|
177 |
{"role": "system", "content": system_prompt}, # System prompt defines behavior
|
@@ -208,4 +208,4 @@ iface = gr.Interface(
|
|
208 |
|
209 |
# Launch the Gradio App
|
210 |
if __name__ == "__main__":
|
211 |
-
iface.launch()
|
|
|
89 |
# Normalize user query using Groq VM
|
90 |
def normalize_query(user_query):
|
91 |
try:
|
92 |
+
response = client.chat.completions.create(
|
93 |
model="llama-3.1-70b-versatile",
|
94 |
messages=[{
|
95 |
"role": "user",
|
|
|
171 |
"""
|
172 |
|
173 |
# Generate the response using the model
|
174 |
+
response = client.chat.completions.create(
|
175 |
model="llama-3.1-70b-versatile",
|
176 |
messages=[
|
177 |
{"role": "system", "content": system_prompt}, # System prompt defines behavior
|
|
|
208 |
|
209 |
# Launch the Gradio App
|
210 |
if __name__ == "__main__":
|
211 |
+
iface.launch(share=True)
|