123LETSPLAY committed on
Commit 5299cbe · verified · 1 Parent(s): 64098c3

Create app.py

Files changed (1)
  1. app.py +42 -0
app.py ADDED
@@ -0,0 +1,42 @@
+ from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
+ import gradio as gr
+
+ # Load the model and tokenizer
+ model_name = "facebook/blenderbot-400M-distill"
+ tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
+ model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
+
+ # Running transcript of user and bot messages (stored for reference only; it is not fed back into the model)
+ conversation_history = []
+
+ # Chat handler: gr.Interface with a single Textbox input passes exactly one argument
+ def vanilla_chatbot(message):
+     global conversation_history
+
+     # Append the user message to the history
+     conversation_history.append(message)
+
+     # Tokenize the user message and return PyTorch tensors
+     inputs = tokenizer([message], return_tensors='pt')
+
+     # Generate the bot response
+     reply_ids = model.generate(**inputs)
+     bot_response = tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]
+
+     # Append the bot response to the history
+     conversation_history.append(bot_response)
+
+     # Return the generated response
+     return bot_response
+
+ # Create a Gradio interface around the chat handler
+ demo_chatbot = gr.Interface(
+     fn=vanilla_chatbot,
+     inputs=gr.Textbox(lines=2, placeholder="Enter your message here..."),
+     outputs=gr.Textbox(placeholder="Bot response will appear here..."),
+     title="Mashdemy Chatbot",
+     description="Enter text to start chatting."
+ )
+
+ # Launch the Gradio interface
+ demo_chatbot.launch(share=True)