Spaces:
Sleeping
Sleeping
Update for AAP use case
Browse files
app.py
CHANGED
@@ -12,16 +12,15 @@ import base64
|
|
12 |
with open("Iso_Logotipo_Ceibal.png", "rb") as image_file:
|
13 |
encoded_image = base64.b64encode(image_file.read()).decode()
|
14 |
|
15 |
-
|
16 |
openai.api_key = os.environ['OPENAI_API_KEY']
|
17 |
|
18 |
def clear_chat(message, chat_history):
|
19 |
return "", []
|
20 |
|
21 |
-
def add_new_message(message,
|
22 |
new_chat = []
|
23 |
|
24 |
-
new_chat.append({"role": "system", "content": 'Sos {} y tendrás que responder preguntas que te harán niños de escuela, las respuestas tienen que ser cómo si tu fueras {} y responder con la información de su vida. Las respuestas tienen que estar orientadas a niños entre 9 y 10 años. No respondas preguntas hasta que el usuario te pregunte sobre algún tema.'.format(
|
25 |
|
26 |
for turn in chat_history:
|
27 |
user, bot = turn
|
@@ -29,11 +28,9 @@ def add_new_message(message,person, chat_history):
|
|
29 |
new_chat.append({"role": "assistant","content":bot})
|
30 |
new_chat.append({"role": "user","content":message})
|
31 |
return new_chat
|
32 |
-
|
33 |
-
|
34 |
|
35 |
-
def respond(message,
|
36 |
-
prompt = add_new_message(message,
|
37 |
# stream = client.generate_stream(prompt,
|
38 |
# max_new_tokens=1024,
|
39 |
# stop_sequences=["\nUser:", "<|endoftext|>"],
|
@@ -79,27 +76,24 @@ with gr.Blocks() as demo:
|
|
79 |
</h1>
|
80 |
<img src='data:image/jpg;base64,{}' width=200px>
|
81 |
<h3>
|
82 |
-
|
83 |
</h3>
|
84 |
</center>
|
85 |
""".format(encoded_image))
|
86 |
with gr.Row():
|
87 |
-
|
88 |
-
with gr.Row():
|
89 |
-
chatbot = gr.Chatbot( height=550) #just to fit the notebook
|
90 |
with gr.Row():
|
91 |
with gr.Row():
|
92 |
with gr.Column(scale=4):
|
93 |
-
msg = gr.Textbox(label="Texto de entrada")
|
94 |
with gr.Column(scale=1):
|
95 |
btn = gr.Button("Enviar")
|
96 |
-
clear = gr.ClearButton(components=[msg, chatbot], value="Borrar chat")
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
|
101 |
-
btn.click(respond, inputs=[msg,
|
102 |
-
msg.submit(respond, inputs=[msg,
|
103 |
clear.click(clear_chat,inputs=[msg, chatbot], outputs=[msg, chatbot])
|
104 |
demo.queue()
|
105 |
demo.launch()
|
|
|
12 |
with open("Iso_Logotipo_Ceibal.png", "rb") as image_file:
|
13 |
encoded_image = base64.b64encode(image_file.read()).decode()
|
14 |
|
|
|
15 |
openai.api_key = os.environ['OPENAI_API_KEY']
|
16 |
|
17 |
def clear_chat(message, chat_history):
|
18 |
return "", []
|
19 |
|
20 |
+
def add_new_message(message, rubrica, chat_history):
|
21 |
new_chat = []
|
22 |
|
23 |
+
new_chat.append({"role": "system", "content": 'Sos {} y tendrás que responder preguntas que te harán niños de escuela, las respuestas tienen que ser cómo si tu fueras {} y responder con la información de su vida. Las respuestas tienen que estar orientadas a niños entre 9 y 10 años. No respondas preguntas hasta que el usuario te pregunte sobre algún tema.'.format(rubrica,estilo)})
|
24 |
|
25 |
for turn in chat_history:
|
26 |
user, bot = turn
|
|
|
28 |
new_chat.append({"role": "assistant","content":bot})
|
29 |
new_chat.append({"role": "user","content":message})
|
30 |
return new_chat
|
|
|
|
|
31 |
|
32 |
+
def respond(message, rubrica, chat_history):
|
33 |
+
prompt = add_new_message(message, rubrica, chat_history)
|
34 |
# stream = client.generate_stream(prompt,
|
35 |
# max_new_tokens=1024,
|
36 |
# stop_sequences=["\nUser:", "<|endoftext|>"],
|
|
|
76 |
</h1>
|
77 |
<img src='data:image/jpg;base64,{}' width=200px>
|
78 |
<h3>
|
79 |
+
Este espacio permite probar la generación mediante IA de devoluciones en base a una rúbrica.
|
80 |
</h3>
|
81 |
</center>
|
82 |
""".format(encoded_image))
|
83 |
with gr.Row():
|
84 |
+
rubrica = gr.Textbox(lines=5, label="Escribe la rúbrica que quieres usar para generar la devolución.")
|
|
|
|
|
85 |
with gr.Row():
|
86 |
with gr.Row():
|
87 |
with gr.Column(scale=4):
|
88 |
+
msg = gr.Textbox(lines=5, label="Texto de entrada para ser evaluado y generar devolución.")
|
89 |
with gr.Column(scale=1):
|
90 |
btn = gr.Button("Enviar")
|
91 |
+
clear = gr.ClearButton(components=[msg, chatbot], value="Borrar chat")
|
92 |
+
with gr.Row():
|
93 |
+
chatbot = gr.Chatbot(lines=10) #just to fit the notebook
|
|
|
94 |
|
95 |
+
btn.click(respond, inputs=[msg, rubrica, chatbot], outputs=[msg, chatbot])
|
96 |
+
msg.submit(respond, inputs=[msg, rubrica,chatbot], outputs=[msg, chatbot]) #Press enter to submit
|
97 |
clear.click(clear_chat,inputs=[msg, chatbot], outputs=[msg, chatbot])
|
98 |
demo.queue()
|
99 |
demo.launch()
|