Update app.py
app.py
CHANGED
@@ -143,14 +143,10 @@ def chat_groq(user_message, model='llama-3.3-70b-versatile', system_prompt="You
     response = requests.post("https://organizedprogrammers-bettergroqinterface.hf.space/chat", json=payload)

     if response.status_code == 200:
-        return response.json()
+        return response['content'][0]['message']['content'].json()
     else:
         raise Exception(f"API request failed with status {response.status_code}: {response.text}")

-    print(response['content'][0]['message']['content'])
-
-    return response['content'][0]['message']['content']
-
 def ask_llm(user_message, model='llama-3.3-70b-versatile', system_prompt="You are a helpful assistant."):
     return chat_groq(user_message, model=model, system_prompt=system_prompt)

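Note: the added line indexes into `response` before calling `.json()`, but a `requests.Response` object is not subscriptable, so that line would raise a `TypeError` at runtime. Below is a minimal sketch of the likely intent: parse the JSON body once, then extract the message text. The payload construction and the exact response shape (`content[0]['message']['content']`) are assumptions taken from the removed dead code, not confirmed by this hunk.

import requests

def chat_groq(user_message, model='llama-3.3-70b-versatile',
              system_prompt="You are a helpful assistant."):
    # Payload shape is an assumption; the hunk does not show how it is built.
    payload = {
        "model": model,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_message},
        ],
    }
    response = requests.post(
        "https://organizedprogrammers-bettergroqinterface.hf.space/chat",
        json=payload,
    )

    if response.status_code == 200:
        # Parse the body once, then index into the resulting dict.
        # Calling .json() on the indexed value (as the committed line does)
        # fails because the Response object itself cannot be indexed.
        data = response.json()
        return data['content'][0]['message']['content']
    else:
        raise Exception(
            f"API request failed with status {response.status_code}: {response.text}"
        )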