MSey90 commited on
Commit
8fe4b6c
·
unverified ·
1 Parent(s): 3c264e5

increase max length

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -4,6 +4,7 @@ import streamlit as st
4
  # Access the secret token
5
  hf_token = st.secrets["default"]["hf_token"]
6
  st.write(hf_token)
 
7
  @st.cache_resource
8
  def load_model():
9
  return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022")
@@ -15,7 +16,7 @@ user_input = st.text_input("Enter your Prompt here:", "")
15
 
16
  if user_input:
17
  with st.spinner('Generating response...'):
18
- response = model(user_input, max_length=50, num_return_sequences=1)
19
  generated_text = response[0]['generated_text']
20
  st.write("Generated Text:")
21
  st.write(generated_text)
 
4
  # Access the secret token
5
  hf_token = st.secrets["default"]["hf_token"]
6
  st.write(hf_token)
7
+
8
  @st.cache_resource
9
  def load_model():
10
  return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022")
 
16
 
17
  if user_input:
18
  with st.spinner('Generating response...'):
19
+ response = model(user_input, max_length=200, num_return_sequences=1)
20
  generated_text = response[0]['generated_text']
21
  st.write("Generated Text:")
22
  st.write(generated_text)