Update app.py
app.py CHANGED
@@ -3,14 +3,14 @@ from transformers import GPT2LMHeadModel, GPT2Tokenizer
 import torch
 
 # Initialize the model and tokenizer
-tokenizer = GPT2Tokenizer.from_pretrained("Loewolf/GPT_1")
-model = GPT2LMHeadModel.from_pretrained("Loewolf/GPT_1")
+tokenizer = GPT2Tokenizer.from_pretrained("Loewolf/L-GPT_1")
+model = GPT2LMHeadModel.from_pretrained("Loewolf/L-GPT_1")
 
 def generate_text(prompt):
     input_ids = tokenizer.encode(prompt, return_tensors="pt")
     attention_mask = torch.ones(input_ids.shape, dtype=torch.long)
 
-    max_length = model.config.n_positions if len(input_ids[0]) > model.config.n_positions else len(input_ids[0]) +
+    max_length = model.config.n_positions if len(input_ids[0]) > model.config.n_positions else len(input_ids[0]) + 90
     beam_output = model.generate(
         input_ids,
         attention_mask=attention_mask,
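For context, below is a minimal sketch of how the updated lines fit into a complete generate_text script. Only the model name Loewolf/L-GPT_1 and the max_length clamping come from the diff itself; the beam-search arguments (num_beams, early_stopping, pad_token_id), the final decode step, and the example prompt are illustrative assumptions, since the visible hunk cuts off after attention_mask.

from transformers import GPT2LMHeadModel, GPT2Tokenizer
import torch

# Load the tokenizer and model from the Hugging Face Hub (repo name taken from the diff).
tokenizer = GPT2Tokenizer.from_pretrained("Loewolf/L-GPT_1")
model = GPT2LMHeadModel.from_pretrained("Loewolf/L-GPT_1")

def generate_text(prompt):
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    attention_mask = torch.ones(input_ids.shape, dtype=torch.long)

    # Clamp to the model's context window; otherwise allow 90 new tokens beyond the prompt.
    max_length = (
        model.config.n_positions
        if len(input_ids[0]) > model.config.n_positions
        else len(input_ids[0]) + 90
    )

    # Beam-search settings below are assumptions, not shown in the visible hunk.
    beam_output = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_length=max_length,
        num_beams=5,
        early_stopping=True,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(beam_output[0], skip_special_tokens=True)

print(generate_text("Hallo, wie geht es dir?"))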