Update app.py
app.py CHANGED
@@ -3,8 +3,6 @@ from openai import OpenAI
 from os import getenv
 import json
 
-MODEL_ID = "mistralai/mixtral-8x22b-instruct"
-
 client = OpenAI(
     base_url="https://openrouter.ai/api/v1",
     api_key=getenv("OPENROUTER_API_KEY"),
@@ -14,9 +12,9 @@ client = OpenAI(
 with gr.Blocks() as app:
     dialog_state_var = gr.State([])
 
-    def api_call(prompt):
+    def api_call(prompt, model_id):
         completion = client.chat.completions.create(
-            model=MODEL_ID,
+            model=model_id,
             messages=[
                 {
                     "role": "user",
@@ -27,7 +25,7 @@ with gr.Blocks() as app:
         answer = completion.choices[0].message.content
         return answer
 
-    def generate_next(intro, setting, topic, student_identity, teacher_identity, student_start, student_base, teacher_base, dialog_state):
+    def generate_next(intro, setting, topic, student_identity, teacher_identity, student_start, student_base, teacher_base, dialog_state, llm_teacher, llm_student):
 
         dialog_prev = "\n\n".join(dialog_state)
 
@@ -37,7 +35,7 @@ with gr.Blocks() as app:
             prompt_student+=student_start
         else:
             prompt_student+=student_base
-        student = api_call(prompt_student)
+        student = api_call(prompt_student, llm_student)
         print(student)
         student = json.loads(student)
         analysis_student = student["analyse"]
@@ -47,7 +45,7 @@ with gr.Blocks() as app:
 
         # TEACHER
         prompt_teacher = intro+"\n\Verlauf des Dialogs: "+setting+"\n\nThema, bei dem der Schüler Hilfe braucht:"+topic+"\n\nBeschreibung des Schülers:"+student_identity+"\n\nBeschreibung des Tutors:"+teacher_identity+"\n\nBisheriger Dialog:\n"+dialog_prev+"\n\n"+teacher_base
-        teacher = api_call(prompt_teacher)
+        teacher = api_call(prompt_teacher, llm_teacher)
         print(teacher)
         teacher = json.loads(teacher)
         analysis_teacher = teacher["analyse"]
@@ -67,6 +65,8 @@ with gr.Blocks() as app:
 
 
     # Text boxes for user inputs
+    txt_llm_teacher = gr.Textbox(label="LLM Tutor (https://openrouter.ai/models)", lines=4, value = "mistralai/mixtral-8x22b-instruct")
+    txt_llm_student = gr.Textbox(label="LLM Schüler (https://openrouter.ai/models)", lines=4, value = "mistralai/mixtral-8x22b-instruct")
     txt_intro = gr.Textbox(label="Einleitung", lines=4, value = "Ich möchte einen möglichst realistischen Dialog zwischen einem Schüler und einem Tutor simulieren.")
     txt_setting = gr.Textbox(label="Dialogverlauf", lines=4, value = "Der Schüler kommt bei seinen Hausaufgaben nicht weiter und bittet den Tutorum Hilfe. Der Tutor diagnostiziert welches Vorwissen der Schülers hat und welche Art von Hilfe er genau braucht. Dann erabeiten Tutor und Schüler Schritt für Schritt das Thema.")
     txt_topic = gr.Textbox(label="Thema des Dialogs", lines=4, value="Zellteilung - Die Phasen der Mitose")
@@ -89,7 +89,7 @@ with gr.Blocks() as app:
     # Define the layout and how components interact
     btn_submit.click(
         fn=generate_next,
-        inputs=[txt_intro, txt_setting, txt_topic, txt_student_identity, txt_teacher_identity,txt_student_start, txt_student_base, txt_teacher_base, dialog_state_var],
+        inputs=[txt_intro, txt_setting, txt_topic, txt_student_identity, txt_teacher_identity,txt_student_start, txt_student_base, txt_teacher_base, dialog_state_var,txt_llm_teacher,txt_llm_student],
         outputs=[prompt_student,prompt_teacher, chat_interface, dialog_state_var, analysis_student, analysis_teacher],
     )
 
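For reference, a minimal, self-contained sketch of the pattern the new version adopts: the OpenRouter model id is passed into api_call per request instead of being read from a module-level MODEL_ID constant. This mirrors the new side of the diff; the type hints, docstring, and comments are editorial additions, and it assumes OPENROUTER_API_KEY is set in the environment.

from os import getenv

from openai import OpenAI

# OpenRouter exposes an OpenAI-compatible endpoint, so the stock OpenAI client works;
# only the base_url and the API key differ from a direct OpenAI setup.
client = OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=getenv("OPENROUTER_API_KEY"),
)

def api_call(prompt: str, model_id: str) -> str:
    """Send a single user message to the given OpenRouter model and return the raw reply text."""
    completion = client.chat.completions.create(
        model=model_id,  # e.g. "mistralai/mixtral-8x22b-instruct", or any id from https://openrouter.ai/models
        messages=[{"role": "user", "content": prompt}],
    )
    return completion.choices[0].message.content

Because the tutor turn and the student turn both go through this one helper, pointing them at different models is now just a matter of passing different model ids.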
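On the Gradio side, the two new Textbox components are appended to the inputs list of btn_submit.click, so their current values arrive as the trailing llm_teacher and llm_student parameters of generate_next. A stripped-down sketch of that wiring (txt_prompt, txt_output, btn_submit, and the shortened generate_next here are placeholders standing in for the app's full component list, not the actual code):

import gradio as gr

def generate_next(prompt, llm_teacher, llm_student):
    # Gradio maps the `inputs` list to the function parameters by position.
    return f"prompt={prompt!r}, tutor model={llm_teacher}, student model={llm_student}"

with gr.Blocks() as app:
    txt_prompt = gr.Textbox(label="Prompt")
    txt_llm_teacher = gr.Textbox(label="LLM Tutor (https://openrouter.ai/models)", value="mistralai/mixtral-8x22b-instruct")
    txt_llm_student = gr.Textbox(label="LLM Schüler (https://openrouter.ai/models)", value="mistralai/mixtral-8x22b-instruct")
    txt_output = gr.Textbox(label="Output")
    btn_submit = gr.Button("Start")
    btn_submit.click(
        fn=generate_next,
        inputs=[txt_prompt, txt_llm_teacher, txt_llm_student],
        outputs=txt_output,
    )

app.launch()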