Spaces:
Sleeping
Sleeping
Commit
·
1ca58e2
1
Parent(s):
67a7715
Update app.py
Browse files
app.py
CHANGED
@@ -5,8 +5,8 @@ from transformers import T5Tokenizer, T5ForConditionalGeneration
|
|
5 |
model = T5ForConditionalGeneration.from_pretrained('swcrazyfan/KingJamesify-T5-large')
|
6 |
tokenizer = T5Tokenizer.from_pretrained('swcrazyfan/KingJamesify-T5-large')
|
7 |
|
8 |
-
def king_jamesify(input_text,
|
9 |
-
input_ids = tokenizer.encode(input_text, return_tensors='pt').to(torch.int64)
|
10 |
generated_ids = model.generate(input_ids=input_ids, max_length=max_length, num_beams=num_beams, temperature=temperature)
|
11 |
result = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
|
12 |
return result
|
|
|
5 |
model = T5ForConditionalGeneration.from_pretrained('swcrazyfan/KingJamesify-T5-large')
|
6 |
tokenizer = T5Tokenizer.from_pretrained('swcrazyfan/KingJamesify-T5-large')
|
7 |
|
8 |
+
def king_jamesify(input_text, num_beams, max_length, temperature):
    """Render modern English text in King James style using the T5 model.

    Prepends the model's "kingify: " task prefix, runs beam-search
    generation with the module-level ``model``/``tokenizer``, and decodes
    the best candidate sequence.

    Args:
        input_text: Modern-English source string to translate.
        num_beams: Beam width passed to ``model.generate``.
        max_length: Maximum generated sequence length in tokens.
        temperature: Passed through to ``generate``.
            NOTE(review): with pure beam search (no ``do_sample=True``)
            Hugging Face ``generate`` ignores ``temperature`` — confirm
            whether sampling was intended.

    Returns:
        The generated King James-style string with special tokens stripped.
    """
    # The fine-tuned checkpoint expects the "kingify: " task prefix.
    # NOTE(review): token ids from encode() are already int64, so the
    # .to(torch.int64) cast is a no-op — possibly .to(device) was intended.
    input_ids = tokenizer.encode("kingify: " + input_text, return_tensors='pt').to(torch.int64)
    generated_ids = model.generate(
        input_ids=input_ids,
        max_length=max_length,
        num_beams=num_beams,
        temperature=temperature,
    )
    result = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
    return result
|