Update app.py
app.py CHANGED
@@ -45,7 +45,7 @@ def run_conversation(user_prompt, messages, model_option, max_tokens):
     ]
     response = client.chat.completions.create(
         model=model_option,
-        messages=messages,
+        messages=[{"role": m["role"], "content": str(m["content"])} for m in messages],
         tools=tools,
         tool_choice="auto",
         max_tokens=max_tokens
@@ -77,12 +77,12 @@ def run_conversation(user_prompt, messages, model_option, max_tokens):
 
         second_response = client.chat.completions.create(
             model=model_option,
-            messages=messages
+            messages=[{"role": m["role"], "content": str(m["content"])} for m in messages]
         )
 
         return second_response.choices[0].message.content
     else:
-        return response_message
+        return response_message.content
 
 
 # Initialize chat history and selected model
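In short, the commit sanitizes the chat history before each client.chat.completions.create call, keeping only the "role" and "content" keys and coercing the content to a string, and it changes the no-tool-call branch to return response_message.content instead of the whole message object. Below is a minimal sketch of that sanitization pattern; the helper name sanitize_messages is hypothetical and not part of the commit.

# Sketch of the message-sanitization pattern used in this commit.
# Each history entry is reduced to a plain {"role", "content"} dict and the
# content is stringified, so non-string payloads (e.g. dicts returned by a
# tool call) do not break the chat completions request.
def sanitize_messages(messages):
    return [{"role": m["role"], "content": str(m["content"])} for m in messages]

if __name__ == "__main__":
    history = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What's the weather like?"},
        {"role": "tool", "content": {"temperature": 21, "unit": "C"}},  # non-string content
    ]
    # Every "content" value comes back as a string, ready to send to the API.
    print(sanitize_messages(history))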