Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ def generate_text(prompt):
     input_ids = tokenizer.encode(prompt, return_tensors="pt")
     attention_mask = torch.ones(input_ids.shape, dtype=torch.bool)
 
-    max_length = model.config.n_positions if len(input_ids[0]) > model.config.n_positions else len(input_ids[0]) +
+    max_length = model.config.n_positions if len(input_ids[0]) > model.config.n_positions else len(input_ids[0]) + 50
     beam_output = model.generate(
         input_ids,
         attention_mask=attention_mask,
@@ -20,8 +20,8 @@ def generate_text(prompt):
         no_repeat_ngram_size=2,
         early_stopping=True,
         temperature=0.9,
-        top_p=0.
-        top_k=
+        top_p=0.95,
+        top_k=70,
         length_penalty=2.0,
         do_sample=True,
         eos_token_id=tokenizer.eos_token_id,
@@ -30,7 +30,7 @@ def generate_text(prompt):
 
     text = tokenizer.decode(beam_output[0], skip_special_tokens=True)
     return text
-
+h1 = "Löwolf Chat"
 css = """
 body { font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; }
 .gradio_app { max-width: 600px; margin: 50px auto; }
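For context, here is a minimal sketch of how app.py reads after this commit, assembled from the hunks above. Only the lines shown in the diff come from the commit; the checkpoint name, the arguments hidden between the hunks (max_length, num_beams), the pad_token_id, and the Gradio wiring at the bottom are assumptions for illustration.

import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # assumption: the Space's actual checkpoint is not visible in this diff
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_text(prompt):
    input_ids = tokenizer.encode(prompt, return_tensors="pt")
    attention_mask = torch.ones(input_ids.shape, dtype=torch.bool)

    # Clamp to the model's context window; otherwise allow 50 new tokens past the prompt.
    max_length = model.config.n_positions if len(input_ids[0]) > model.config.n_positions else len(input_ids[0]) + 50
    beam_output = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_length=max_length,   # assumption: this argument sits in the lines hidden between the hunks
        num_beams=5,             # assumption: same hidden region
        no_repeat_ngram_size=2,
        early_stopping=True,
        temperature=0.9,
        top_p=0.95,
        top_k=70,
        length_penalty=2.0,
        do_sample=True,
        eos_token_id=tokenizer.eos_token_id,
        pad_token_id=tokenizer.eos_token_id,  # assumption: silences the missing-pad-token warning
    )

    text = tokenizer.decode(beam_output[0], skip_special_tokens=True)
    return text

h1 = "Löwolf Chat"
css = """
body { font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; }
.gradio_app { max-width: 600px; margin: 50px auto; }
"""

# Assumed wiring: the diff does not show how the Space builds its interface.
demo = gr.Interface(fn=generate_text, inputs="text", outputs="text", title=h1, css=css)

if __name__ == "__main__":
    demo.launch()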