jucendrero committed on
Commit 6951118 · 1 Parent(s): 5b21f8a

Second functional version

Files changed (1)
  1. app.py +6 -11
app.py CHANGED
@@ -111,18 +111,13 @@ def make_recipe(input_ingredients):
     while rerun_model_output(pre_output):
         if i == 3:
             return frame_html_response(error_html_response)
-        # output = model.generate(**tokenized_input,
-        #                         max_length=600,
-        #                         do_sample=True,
-        #                         top_p=0.92,
-        #                         top_k=50,
-        #                         # no_repeat_ngram_size=2,
-        #                         num_return_sequences=3)
         output = model.generate(**tokenized_input,
-                                max_length=600,
-                                num_beams=5,
-                                no_repeat_ngram_size=3,
-                                early_stopping=True)
+                                max_length=600,
+                                do_sample=True,
+                                top_p=0.92,
+                                top_k=50,
+                                # no_repeat_ngram_size=3,
+                                num_return_sequences=3)
         pre_output = tokenizer.decode(output[0], skip_special_tokens=False)
         i += 1
     pre_output_trimmed = pre_output[:pre_output.find('<RECIPE_END>')]
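
For context, this change replaces deterministic beam-search decoding (num_beams=5, no_repeat_ngram_size=3, early_stopping=True) with stochastic top-p/top-k sampling. Below is a minimal, self-contained sketch of the new generation call under stated assumptions: the hunk does not show how model, tokenizer, or the prompt are built in app.py, so the "gpt2" checkpoint, the AutoModel/AutoTokenizer loaders, and the prompt string here are placeholders, not the Space's actual setup.

# Minimal sketch of the sampling-based decoding introduced by this commit.
# Assumptions: a GPT-2-style causal LM; checkpoint name and prompt are placeholders.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")      # placeholder checkpoint
model = AutoModelForCausalLM.from_pretrained("gpt2")   # placeholder checkpoint

prompt = "Ingredients: chicken, rice."                 # placeholder prompt
tokenized_input = tokenizer(prompt, return_tensors="pt")

# Top-p (nucleus) plus top-k sampling instead of beam search,
# mirroring the parameters added in the diff.
output = model.generate(**tokenized_input,
                        max_length=600,
                        do_sample=True,
                        top_p=0.92,
                        top_k=50,
                        num_return_sequences=3)

# Although three sequences are returned, only the first is decoded,
# mirroring the app's use of output[0].
pre_output = tokenizer.decode(output[0], skip_special_tokens=False)

Note that with do_sample=True each run yields a different recipe, which is presumably why app.py keeps the retry loop (rerun_model_output with at most 3 attempts) around the generate call.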