Update app.py
app.py CHANGED
@@ -1,5 +1,5 @@
 from fastai.text.all import *
-from transformers import
+from transformers import BlenderbotForConditionalGeneration, BlenderbotTokenizer
 import torch
 import gradio as gr
 
@@ -31,20 +31,20 @@ def classify_psychiatric_text(txt):
     probabilities = torch.softmax(logits, dim=1).squeeze().tolist()
     return dict(zip(psychiatric_labels, probabilities))
 
-# Load
-
-
-
+# Load BlenderBot for Lifestyle and Nutrition Chatbot
+blender_model_name = "facebook/blenderbot-3B"  # Pre-trained BlenderBot 3B model
+blender_tokenizer = BlenderbotTokenizer.from_pretrained(blender_model_name)
+blender_model = BlenderbotForConditionalGeneration.from_pretrained(blender_model_name)
 
 # Chat function for Lifestyle and Nutrition
 chat_history = []
 
 def chatbot_response(user_input):
     global chat_history
-    new_input_ids =
+    new_input_ids = blender_tokenizer.encode(user_input + blender_tokenizer.eos_token, return_tensors='pt')
     bot_input_ids = torch.cat([chat_history, new_input_ids], dim=-1) if chat_history else new_input_ids
-    chat_history =
-    response =
+    chat_history = blender_model.generate(bot_input_ids, max_length=1000, pad_token_id=blender_tokenizer.eos_token_id)
+    response = blender_tokenizer.decode(chat_history[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True)
     return response
 
 def clear_chat():
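For context, the snippet below is a minimal, self-contained sketch (not part of this commit) of the Blenderbot generation pattern as documented in transformers. BlenderBot is a sequence-to-sequence model, so model.generate() returns only the decoder's reply tokens rather than echoing the prompt, and the DialoGPT-style slicing of the input length used in chatbot_response above is not needed. The lighter facebook/blenderbot-400M-distill checkpoint is assumed here only to keep the example cheap to run; the Space itself loads facebook/blenderbot-3B.

    # Standalone sketch of typical Blenderbot usage, not the Space's code.
    from transformers import BlenderbotForConditionalGeneration, BlenderbotTokenizer
    import torch

    model_name = "facebook/blenderbot-400M-distill"  # smaller checkpoint, assumed for illustration
    tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
    model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

    def reply(user_input: str) -> str:
        # The encoder consumes the user turn; generate() produces only the
        # reply tokens, so no slicing by the input length is required.
        inputs = tokenizer([user_input], return_tensors="pt")
        with torch.no_grad():
            reply_ids = model.generate(**inputs, max_length=128)
        return tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]

    print(reply("What are some healthy breakfast ideas?"))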