MSey90 committed
Commit c447bbf · unverified · 1 Parent(s): 6a3ee71

Update app.py

Files changed (1): app.py +3 -3
app.py CHANGED
@@ -5,10 +5,10 @@ import streamlit as st
 def context_text(text): return f"### Context\n{text}\n\n### Answer"
 
 @st.cache_resource
-def load_model():
+def load_pipe():
     return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022")
 
-model = load_model()
+pipe = load_pipe()
 
 st.header("Test Environment for tiny_CaLL_r10_O1_f10_LT_c1022")
 user_input = st.text_input("Enter your Prompt here:", "")
@@ -17,7 +17,7 @@ context_len = len(contexted_ipnut)
 
 if user_input:
     with st.spinner('Generating response...'):
-        response = model(contexted_ipnut, max_new_tokens = 200, num_return_sequences=1)
+        response = pipe(contexted_ipnut, max_new_tokens = 200, num_return_sequences=1)
         generated_text = response[0]['generated_text'][context_len:]
         st.write("Generated Text:")
         st.markdown(generated_text)
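
For orientation, below is a minimal sketch of the full app.py as it stands after this commit. Only the lines visible in the two hunks above are certain; the import lines and the contexted_ipnut assignment are assumptions inferred from the hunk-header context (import streamlit as st and context_len = len(contexted_ipnut)), not part of the diff.

# Sketch of app.py after commit c447bbf; lines marked "assumed" are not shown in the diff.
from transformers import pipeline   # assumed: required by the pipeline() call below
import streamlit as st              # assumed: appears only in the hunk header context

def context_text(text): return f"### Context\n{text}\n\n### Answer"

@st.cache_resource
def load_pipe():
    return pipeline("text-generation", model="MSey/tiny_CaLL_r10_O1_f10_LT_c1022")

pipe = load_pipe()

st.header("Test Environment for tiny_CaLL_r10_O1_f10_LT_c1022")
user_input = st.text_input("Enter your Prompt here:", "")
contexted_ipnut = context_text(user_input)  # assumed: inferred from context_len = len(contexted_ipnut)
context_len = len(contexted_ipnut)

if user_input:
    with st.spinner('Generating response...'):
        response = pipe(contexted_ipnut, max_new_tokens=200, num_return_sequences=1)
        generated_text = response[0]['generated_text'][context_len:]
        st.write("Generated Text:")
        st.markdown(generated_text)

The rename from load_model/model to load_pipe/pipe only changes names: the cached object is a transformers text-generation pipeline rather than a bare model, and the new names reflect that.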