Update app.py
app.py
CHANGED
@@ -20,28 +20,24 @@ def gpt_call(history, user_message,
              temperature=0.7,
              top_p=0.95):
     """
-    OpenAI ChatCompletion API
-    - history: [(user_text, assistant_text), ...]
-    - user_message: the message the user just entered
+    OpenAI ChatCompletion API function.
     """
-
+
+    # Ensure history is formatted correctly
     messages = [{"role": "system", "content": MAIN_PROMPT}]
-
-
-
-
-
-
-
-
-
-
-
-
-    # 3) Finally, append the current user's input
+
+    for exchange in history:
+        if isinstance(exchange, (list, tuple)) and len(exchange) == 2:
+            user_text, assistant_text = exchange
+            if isinstance(user_text, str) and user_text.strip():
+                messages.append({"role": "user", "content": user_text})
+            if isinstance(assistant_text, str) and assistant_text.strip():
+                messages.append({"role": "assistant", "content": assistant_text})
+
+    # Add latest user input
     messages.append({"role": "user", "content": user_message})
 
-    #
+    # Call OpenAI API
     completion = client.chat.completions.create(
         model=model,
         messages=messages,
@@ -49,6 +45,7 @@ def gpt_call(history, user_message,
         temperature=temperature,
         top_p=top_p
     )
+
     return completion.choices[0].message.content
 
 def respond(user_message, history):
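
For context, the updated gpt_call reads roughly as in the sketch below. Only the body between the docstring and the return statement comes from the diff; the OpenAI client setup, MAIN_PROMPT, the default model argument, and the example usage are assumptions added here to make the sketch self-contained, not part of the commit.

# Sketch of gpt_call after this commit. Assumed pieces (not in the diff):
# client/MAIN_PROMPT setup, the default model value, and the usage example.
import os

from openai import OpenAI

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])  # assumed client setup
MAIN_PROMPT = "You are a helpful assistant."            # placeholder system prompt

def gpt_call(history, user_message,
             model="gpt-4o-mini",  # assumed default; not visible in the diff
             temperature=0.7,
             top_p=0.95):
    """
    OpenAI ChatCompletion API function.
    """
    # Ensure history is formatted correctly
    messages = [{"role": "system", "content": MAIN_PROMPT}]

    for exchange in history:
        if isinstance(exchange, (list, tuple)) and len(exchange) == 2:
            user_text, assistant_text = exchange
            if isinstance(user_text, str) and user_text.strip():
                messages.append({"role": "user", "content": user_text})
            if isinstance(assistant_text, str) and assistant_text.strip():
                messages.append({"role": "assistant", "content": assistant_text})

    # Add latest user input
    messages.append({"role": "user", "content": user_message})

    # Call OpenAI API
    completion = client.chat.completions.create(
        model=model,
        messages=messages,
        temperature=temperature,
        top_p=top_p
    )

    return completion.choices[0].message.content

# Example: Gradio-style history as (user, assistant) tuples.
if __name__ == "__main__":
    history = [("Hi", "Hello! How can I help?")]
    print(gpt_call(history, "Summarize our chat so far."))

The isinstance and strip checks added in this commit mean malformed or empty history entries (for example, None placeholders left by a streaming UI) are skipped rather than sent to the API.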