from transformers import pipeline
import streamlit as st

# Access the secret token
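# Expects an hf_token entry under a [default] section in .streamlit/secrets.toml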
hf_token = st.secrets["default"]["hf_token"]

@st.cache_resource
def load_model():
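    # Cache the pipeline so the model is downloaded and loaded only once per session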
    return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022", use_auth_token=hf_token)

model = load_model()

st.header("Test Environment for tiny_CaLL_r10_O1_f10_LT_c1022")
user_input = st.text_input("Enter your Prompt here:", "")

if user_input:
    with st.spinner('Generating response...'):
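        # max_length counts prompt plus generated tokens; request a single sequence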
        response = model(user_input, max_length=50, num_return_sequences=1)
        generated_text = response[0]['generated_text']
        st.write("Generated Text:")
        st.write(generated_text)