from transformers import pipeline
import streamlit as st

def context_text(text):
    # Wrap the raw prompt in the "### Context / ### Answer" template the model expects.
    return f"### Context\n{text}\n\n### Answer"

@st.cache_resource
def load_model():
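    # Cache the pipeline with st.cache_resource so the model is loaded only once per session.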
    return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022")

model = load_model()

st.header("Test Environment for tiny_CaLL_r10_O1_f10_LT_c1022")
user_input = st.text_input("Enter your Prompt here:", "")
contexted_input = context_text(user_input)
context_len = len(contexted_input)

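# Generate a completion only after the user has entered a prompt.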
if user_input:
    with st.spinner('Generating response...'):
        response = model(contexted_input, max_new_tokens=200, num_return_sequences=1)
        # The pipeline echoes the prompt, so slice off the context prefix.
        generated_text = response[0]['generated_text'][context_len:]
        st.write("Generated Text:")
        st.markdown(generated_text)  # rendered output
        st.text(generated_text)      # raw output for comparison