Update app.py
app.py CHANGED
@@ -80,20 +80,20 @@ def predict(message, history):
     for human, assistant in history:
         if (switch_to.format(model=gpt3_turbo).lower() in human.lower()):
             model = gpt3_turbo
-        if (switch_to.format(model=gpt4_turbo).lower() in human.lower()):
-            model = gpt4_turbo
         if (switch_to.format(model=gpt4).lower() in human.lower()):
             model = gpt4
+        if (switch_to.format(model=gpt4_turbo).lower() in human.lower()):
+            model = gpt4_turbo
 
         history_openai_format.append({"role": "user", "content": human })
         history_openai_format.append({"role": "assistant", "content": assistant})
 
     if (switch_to.format(model=gpt3_turbo).lower() in message.lower()):
         model = gpt3_turbo
-    if (switch_to.format(model=gpt4_turbo).lower() in message.lower()):
-        model = gpt4_turbo
     if (switch_to.format(model=gpt4).lower() in message.lower()):
-        model = gpt4
+        model = gpt4
+    if (switch_to.format(model=gpt4_turbo).lower() in message.lower()):
+        model = gpt4_turbo
 
     history_openai_format.append({"role": "user", "content": prompt})
 
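The reorder matters when one model's switch phrase is a substring of another's. Below is a minimal sketch of the selection logic after this commit, with assumed values for switch_to, gpt3_turbo, gpt4 and gpt4_turbo (their real definitions live elsewhere in app.py and are not shown in this diff); it is an illustration of the ordering, not the app's actual code.

# Minimal sketch of the check ordering this commit settles on; the switch
# phrase and model identifiers below are assumptions, not the values
# actually defined in app.py.
switch_to = "switch to {model}"
gpt3_turbo = "gpt-3.5-turbo"
gpt4 = "gpt-4"
gpt4_turbo = "gpt-4-turbo"

def pick_model(message: str, current: str = gpt3_turbo) -> str:
    model = current
    # Check gpt4 before gpt4_turbo: "switch to gpt-4" is a substring of
    # "switch to gpt-4-turbo", so the longer, more specific match must
    # come last to take precedence.
    if switch_to.format(model=gpt3_turbo).lower() in message.lower():
        model = gpt3_turbo
    if switch_to.format(model=gpt4).lower() in message.lower():
        model = gpt4
    if switch_to.format(model=gpt4_turbo).lower() in message.lower():
        model = gpt4_turbo
    return model

print(pick_model("please switch to gpt-4-turbo"))  # gpt-4-turbo, not gpt-4
print(pick_model("please switch to gpt-4"))        # gpt-4

With the pre-commit ordering (gpt4_turbo checked before gpt4), the first call above would end up on gpt-4 instead, which appears to be the behavior this reorder fixes.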