MSey90 committed on
Commit
6dbe383
·
unverified ·
1 Parent(s): 605dd29

include private model

Browse files
Files changed (1) hide show
  1. app.py +17 -1
app.py CHANGED
@@ -1,6 +1,22 @@
1
  from transformers import pipeline
2
  from huggingface_hub import login as hf_login
3
  import streamlit as st
 
 
 
 
 
 
 
 
 
 
4
  st.header("Test Environment for tiny_CaLL_r10_O1_f10_c1022")
5
  user_input = st.text_input("Enter your Prompt here:", "")
6
- st.write(user_input)
 
 
 
 
 
 
 
1
from transformers import pipeline
from huggingface_hub import login as hf_login
import streamlit as st

# Hugging Face access token, read from Streamlit secrets, so the private
# model can be downloaded. Raises KeyError if the secret is not configured.
hf_token = st.secrets["default"]["hf_token"]


# NOTE: st.cache(allow_output_mutation=True) is deprecated (removed in
# Streamlit >= 1.18); st.cache_resource is the supported way to cache a
# heavyweight, non-serializable object such as a model pipeline.
@st.cache_resource
def load_model():
    """Load and cache the private text-generation pipeline."""
    # `use_auth_token` is deprecated in transformers; `token` replaces it.
    return pipeline(
        "text-generation",
        model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022",
        token=hf_token,
    )


model = load_model()

st.header("Test Environment for tiny_CaLL_r10_O1_f10_c1022")
user_input = st.text_input("Enter your Prompt here:", "")

# Only run generation once the user has typed something.
if user_input:
    with st.spinner('Generating response...'):
        # Single completion, capped at 50 tokens.
        response = model(user_input, max_length=50, num_return_sequences=1)
        generated_text = response[0]['generated_text']
        st.write("Generated Text:")
        st.write(generated_text)