lagy committed
Commit f5e0ea2 (verified)
1 parent: bc1f031

Update app.py

Files changed (1)
1. app.py +13 -2
app.py CHANGED
@@ -50,6 +50,17 @@ def generate(model,tokenizer,instruction,context,streamer):
     output = output.split("###")[3].split("<|endoftext|>", 1)[0]
     return output
 
+
+@st.experimental_singleton
+def get_model():
+    # Create a model of the specified type.
+    return AutoModelForCausalLM.from_pretrained("lagy/carballo_finetuned")
+
+@st.experimental_singleton
+def get_tokenizer():
+    # Create a tokenizer of the specified type.
+    return AutoTokenizer.from_pretrained("lagy/carballo_finetuned")
+
 st.write(" ")
 
 instruction = st.text_area('Instrucción')
@@ -57,8 +68,8 @@ context = st.text_area('Contexto')
 max_length = st.number_input('Max generation length',value=10)
 
 
-model = AutoModelForCausalLM.from_pretrained("lagy/carballo_finetuned")
-tokenizer = AutoTokenizer.from_pretrained("lagy/carballo_finetuned")
+model = get_model()
+tokenizer = get_tokenizer()
 model.eval()
 
 
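
For context, a minimal self-contained sketch of the caching pattern this commit adopts: Streamlit re-runs the whole script on every widget interaction, so without caching the model and tokenizer would be reloaded from disk each time. This sketch assumes a Streamlit version that still ships st.experimental_singleton (newer releases expose the same behaviour as st.cache_resource); the MODEL_ID constant is introduced here only for readability and is not part of the commit.

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "lagy/carballo_finetuned"  # repo id taken from the diff above


@st.experimental_singleton
def get_model():
    # Loaded once per server process and reused across reruns and sessions.
    return AutoModelForCausalLM.from_pretrained(MODEL_ID)


@st.experimental_singleton
def get_tokenizer():
    # Same caching behaviour for the tokenizer.
    return AutoTokenizer.from_pretrained(MODEL_ID)


model = get_model()          # cache hit after the first run
tokenizer = get_tokenizer()
model.eval()                 # inference mode, as in the app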