gnosticdev committed on
Commit
ec0bc03
·
verified ·
1 Parent(s): 606a0d4

Create app.py

Files changed (1)
  1. app.py +28 -0
app.py ADDED
@@ -0,0 +1,28 @@
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+ import gradio as gr
+ import torch
+
+ # Load the model and tokenizer
+ tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
+ model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")
+
+ # Initialize the conversation history
+ chat_history_ids = None
+
+ def chat_with_bot(user_input):
+     global chat_history_ids
+     # Encode the user input and append the end-of-sequence token
+     new_user_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors='pt')
+
+     # Concatenate the user input with the conversation history
+     bot_input_ids = torch.cat([chat_history_ids, new_user_input_ids], dim=-1) if chat_history_ids is not None else new_user_input_ids
+
+     # Generate a response, capping the total sequence length at 1000 tokens
+     chat_history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)
+
+     # Decode and return only the newly generated tokens
+     return tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
+
+ # Create the Gradio interface
+ iface = gr.Interface(fn=chat_with_bot, inputs="text", outputs="text", title="Chatbot with DialoGPT")
+ iface.launch()
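
For reference, a minimal command-line sanity check of the same generation loop (a sketch, assuming the same transformers/torch environment as app.py; it is kept as a separate script so importing app.py does not start the Gradio server, and the two sample prompts are purely illustrative):

from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-large")
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-large")

history = None
for turn in ["Hello!", "What are you up to today?"]:  # illustrative prompts
    # Encode the new turn, append it to the running history, and generate a reply
    new_ids = tokenizer.encode(turn + tokenizer.eos_token, return_tensors="pt")
    inputs = torch.cat([history, new_ids], dim=-1) if history is not None else new_ids
    history = model.generate(inputs, max_length=1000, pad_token_id=tokenizer.eos_token_id)
    reply = tokenizer.decode(history[:, inputs.shape[-1]:][0], skip_special_tokens=True)
    print(f"> {turn}\n{reply}")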