# CaGBERT / app.py
from transformers import pipeline, AutoTokenizer
import streamlit as st
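# A minimal setup note (assuming this file is saved as app.py): the
# token-classification pipeline below requires `transformers` plus a backend
# such as `torch` in addition to `streamlit`. The app is launched with:
#   streamlit run app.py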
def context_text(text):
    """Wrap the user prompt in the context/answer template expected by the model."""
    return f"### Context\n{text}\n\n### Answer"
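# For example, context_text("some prompt") returns the string
# "### Context\nsome prompt\n\n### Answer".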
@st.cache_resource
def load_pipe():
    """Load the token-classification pipeline and its tokenizer once and cache them."""
    model_name = "MSey/pbt_CaBERT_7_c10731"
    return pipeline("token-classification", model=model_name), AutoTokenizer.from_pretrained(model_name)
pipe, tokenizer = load_pipe()
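# The token-classification pipeline returns a list of dicts, one per tagged token,
# typically with keys such as "entity", "score", "index", "word", "start" and "end";
# only "entity" and "word" are used below.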
st.header("Test Environment for pbt_CaBERT_7_c10731")
user_input = st.text_input("Enter your Prompt here:", "")
contexted_input = context_text(user_input)
context_len = len(contexted_input)
if user_input:
    with st.spinner('Generating response...'):
        response = pipe(contexted_input)
        st.write("Response:")
        lines = ""
        # Collect each recognised entity as a tab-separated word/label pair
        for entity in response:
            label = entity["entity"]
            word = entity["word"]
            lines += f"{word}\t{label}\n"
        # Display the word/label pairs as plain, fixed-width text
        st.text(lines)
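# Illustrative output: one tab-separated "word<TAB>label" line per token, e.g.
# (labels here are hypothetical; the actual tag set depends on the fine-tuned model):
#   Example   B-TAG
#   token     O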