James MacQuillan committed · 32faf89 · Parent(s): 6c1699a
Commit message: push
app.py CHANGED
@@ -404,8 +404,8 @@ def detect_data_request(user_input, history):
         # Send the data to the LLM for further processing
         final_response = ""
         response = client.chat_completion(
-            model="
-            messages=[{"role": "user", "content": f"answer {user_input} with {human_readable_text}"}],
+            model="Qwen/Qwen2.5-72B-Instruct",
+            messages=[{"role": "user", "content": f"answer {user_input} with {human_readable_text} as IM.B an AI chatbot built for investing by automatedstockmining.org"}],
             max_tokens=1500,
             stream=False
         )
@@ -413,7 +413,7 @@ def detect_data_request(user_input, history):
         full_response = response.choices[0].message['content']
         history.append(("You: " + user_input, "IM.B: " + full_response))

-
+
     except:
         if requestfromfp == []:
             history.append(("You: " + user_input, "IM.B: " + 'sorry the response was empty'))
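For reference, here is a minimal standalone sketch of the corrected chat_completion call, assuming the Space uses huggingface_hub's InferenceClient with a token read from an HF_TOKEN environment variable; user_input and human_readable_text are placeholders standing in for the values detect_data_request assembles earlier in the function:

# Standalone sketch of the corrected chat_completion call (assumption: the Space
# builds its client as huggingface_hub.InferenceClient with a token from the environment).
import os
from huggingface_hub import InferenceClient

client = InferenceClient(token=os.environ.get("HF_TOKEN"))

# Placeholders for the values detect_data_request assembles before this point.
user_input = "summarise the latest price data"
human_readable_text = "example market data fetched earlier in the function"

response = client.chat_completion(
    model="Qwen/Qwen2.5-72B-Instruct",
    messages=[{
        "role": "user",
        "content": f"answer {user_input} with {human_readable_text} "
                   "as IM.B an AI chatbot built for investing by automatedstockmining.org",
    }],
    max_tokens=1500,
    stream=False,
)

# Attribute access on the returned dataclass; the diff's ['content'] indexing
# is also accepted by recent huggingface_hub versions.
full_response = response.choices[0].message.content
print(full_response)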