# from transformers import pipeline
# insurance_context = """
# Insurance is a contract, represented by a policy, in which an individual or entity receives financial protection against losses. 
# Common types include life insurance, health insurance, auto insurance, and home insurance. 
# Life insurance provides a sum of money to beneficiaries upon the insured's death, while health insurance covers medical expenses.
# Auto insurance offers protection against vehicle-related accidents and damages. Home insurance covers damages to one’s property.
# """
# qa_pipeline = pipeline("question-answering", model="distilbert-base-uncased-distilled-squad")
# def ask_question(question, context=insurance_context):
#     # Use the model to answer the question based on the context
#     response = qa_pipeline({
#         'question': question,
#         'context': context
#     })
#     return response['answer']

# def chat():
#     print("Hello! I'm your insurance Q&A chatbot. Ask me anything about insurance.")
#     while True:
#         user_input = input("You: ")
        
#         if user_input.lower() in ["exit", "quit"]:
#             print("Thank you for using the chatbot. Goodbye!")
#             break
#         answer = ask_question(user_input)
#         print("Bot:", answer)
# chat()


# import streamlit as st
# from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

# # Load BlenderBot model and tokenizer
# model_name = "facebook/blenderbot-400M-distill"
# tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
# model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

# # Function to generate a response from BlenderBot
# def get_blenderbot_response(input_text):
#     inputs = tokenizer(input_text, return_tensors="pt")
#     reply_ids = model.generate(**inputs)
#     response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
#     return response

# # Streamlit app
# st.title("Insurance Q&A Chatbot")
# st.write("Ask any question about insurance, and I'll do my best to help!")

# # Chat history
# if "history" not in st.session_state:
#     st.session_state.history = []

# # Input text box for user
# user_input = st.text_input("You:", "")

# # Respond to user input
# if user_input:
#     # Add user question to history
#     st.session_state.history.append({"user": user_input})

#     # Generate bot response
#     response = get_blenderbot_response(user_input)
#     st.session_state.history.append({"bot": response})

# # Display chat history
# for message in st.session_state.history:
#     if "user" in message:
#         st.write("**You:**", message["user"])
#     if "bot" in message:
#         st.write("**Bot:**", message["bot"])

import asyncio

import streamlit as st
from rasa.core.agent import Agent
from rasa.core.interpreter import RasaNLUInterpreter
from rasa.core.tracker_store import InMemoryTrackerStore
from rasa.shared.core.domain import Domain

# Load the trained Rasa model. These module paths assume a Rasa 2.x install;
# in Rasa 3.x the interpreter argument is gone and Agent.load("insurance_model")
# on its own is enough.
domain = Domain.load("insurance_domain.yml")
interpreter = RasaNLUInterpreter("insurance_nlu.pkl")
tracker_store = InMemoryTrackerStore(domain)
agent = Agent.load("insurance_model", interpreter=interpreter, tracker_store=tracker_store)

# Define Streamlit app
st.title("Insurance Chatbot")

user_input = st.text_area("You:", height=200)

if st.button("Send") and user_input.strip():
    # handle_text is a coroutine; it resolves to a list of bot replies,
    # each a dict such as {"recipient_id": ..., "text": ...}.
    responses = asyncio.run(agent.handle_text(user_input))
    bot_reply = responses[0]["text"] if responses else "Sorry, I don't have an answer for that."
    st.text_area("Bot:", value=bot_reply, height=200)