Update app.py
app.py CHANGED
@@ -6,7 +6,7 @@ import spaces
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 DESCRIPTION = """\
-# Llama
+# Llama backend
 
 This is a demo of text completion with AI LLM's.
 
@@ -33,7 +33,7 @@ def generate(
     message: str,
     max_new_tokens: int = 1024,
     temperature: float = 0.6,
-    top_p: float = 0.
+    top_p: float = 0.1,
     top_k: int = 50,
     repetition_penalty: float = 1.2,
 ) -> Iterator[str]:
@@ -87,10 +87,10 @@ with gr.Blocks(css="style.css", fill_height=True) as demo:
         )
         temperature = gr.Slider(
             label="Temperature",
-            minimum=0.
+            minimum=0.1,
             maximum=1.0,
             step=0.1,
-            value=0.
+            value=0.1,
         )
         top_p = gr.Slider(
             label="Top-p (nucleus sampling)",