# Insurance / app.py
import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM

# Load the fine-tuned LLaMA-style model and tokenizer.
# A causal language model head is required for the "text-generation" pipeline;
# a sequence-classification head cannot generate text.
model_name = "sujra/insurance_Model"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Generate a response for the given prompt using the instruction-tuned [INST] prompt format.
def generate_text(prompt):
    pipe = pipeline(task="text-generation", model=model, tokenizer=tokenizer, max_length=200)
    result = pipe(f"<s>[INST] {prompt} [/INST]")
    generated_text = result[0]['generated_text']
    return generated_text
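# Optional optimization (a sketch, not part of the original app): the pipeline above is
# rebuilt on every call. Streamlit's st.cache_resource could memoize it so it is
# constructed only once per process, e.g.:
#
# @st.cache_resource
# def get_pipeline():
#     return pipeline(task="text-generation", model=model, tokenizer=tokenizer, max_length=200)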
st.title("Insurance Response Generation")
prompt_input = st.text_input("Enter your prompt:")
if st.button("Generate Response"):
if prompt_input:
with st.spinner("Generating response..."): # Display a spinner while generating response
response = generate_text(prompt_input)
st.write("Generated Response:")
st.write(response)
else:
st.write("Please enter a prompt.")