swcrazyfan committed on
Commit
7a70724
·
1 Parent(s): 88e2ab4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -5,9 +5,9 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
5
# Load the fine-tuned "KingJamesify" T5-large checkpoint and its matching
# tokenizer; both come from the same Hub repo, so vocabulary and weights agree.
MODEL_REPO = 'swcrazyfan/KingJamesify-T5-large'
model = T5ForConditionalGeneration.from_pretrained(MODEL_REPO)
tokenizer = T5Tokenizer.from_pretrained(MODEL_REPO)
7
 
8
def king_jamesify(input_text):
    """Render *input_text* in King-James-style English.

    Encodes the text with the module-level T5 tokenizer, runs beam-search
    generation with fixed decoding settings, and returns the decoded string.
    """
    # Tokenize to a PyTorch tensor of token ids; the explicit int64 cast is
    # presumably a no-op (encode already yields int64 ids) — kept for parity.
    token_ids = tokenizer.encode(input_text, return_tensors='pt').to(torch.int64)
    # Fixed decoding configuration: 4-beam search capped at 100 tokens.
    output_ids = model.generate(
        input_ids=token_ids,
        max_length=100,
        num_beams=4,
        repetition_penalty=2.5,
        length_penalty=1.0,
        temperature=1.0,
        top_k=50,
        top_p=1.0,
        no_repeat_ngram_size=3,
    )
    # Decode only the first (best) sequence, dropping special tokens.
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
13
 
 
5
# Fetch the KingJamesify T5-large weights and their paired tokenizer from the
# Hugging Face Hub (same repo for both, so ids map to the trained embeddings).
tokenizer = T5Tokenizer.from_pretrained('swcrazyfan/KingJamesify-T5-large')
model = T5ForConditionalGeneration.from_pretrained('swcrazyfan/KingJamesify-T5-large')
7
 
8
def king_jamesify(input_text, temperature, max_length, num_beams, repetition_penalty, length_penalty, top_k, top_p, no_repeat_ngram_size, do_sample=False):
    """Render *input_text* in King-James-style English with tunable decoding.

    Args:
        input_text: The modern-English text to translate.
        temperature: Sampling temperature — only takes effect when
            ``do_sample=True``; ``generate`` ignores it under pure beam search.
        max_length: Maximum number of generated tokens.
        num_beams: Beam width for beam search.
        repetition_penalty: Penalty applied to repeated tokens.
        length_penalty: Exponential length penalty for beam scoring.
        top_k: Top-k filtering cutoff — only active when ``do_sample=True``.
        top_p: Nucleus-sampling cutoff — only active when ``do_sample=True``.
        no_repeat_ngram_size: Forbid repeating n-grams of this size.
        do_sample: Enable sampling so temperature/top_k/top_p actually apply.
            Defaults to False, preserving the original deterministic behavior.

    Returns:
        The generated King-James-style string (best sequence, special tokens
        stripped).
    """
    # Tokenize to a PyTorch tensor of token ids; the explicit int64 cast is
    # presumably a no-op (encode already yields int64 ids) — kept for parity.
    input_ids = tokenizer.encode(input_text, return_tensors='pt').to(torch.int64)
    # Inference only — skip autograd bookkeeping to save memory.
    with torch.no_grad():
        generated_ids = model.generate(
            input_ids=input_ids,
            max_length=max_length,
            num_beams=num_beams,
            repetition_penalty=repetition_penalty,
            length_penalty=length_penalty,
            temperature=temperature,
            top_k=top_k,
            top_p=top_p,
            no_repeat_ngram_size=no_repeat_ngram_size,
            do_sample=do_sample,
        )
    result = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
    return result
13