Update app.py
app.py
CHANGED
@@ -16,13 +16,18 @@ def format_prompt(message, history):
     return prompt
 
 def generate(
-    prompt, history, system_prompt, max_new_tokens=100, temperature=0.7, repetition_penalty=1.2, top_p=0.95, top_k=1,
+    prompt, history, system_prompt, max_new_tokens=100, temperature=0.7, repetition_penalty=1.2, top_p=0.95, top_k=1, random_seed=False, manual_seed=42,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
     top_p = float(top_p)
 
+    if random_seed:
+        seed = random.randint(1, 100000)
+    else:
+        seed = manual_seed
+
     generate_kwargs = dict(
         temperature=temperature,
         max_new_tokens=max_new_tokens,
@@ -30,7 +35,7 @@ def generate(
         top_k=top_k,
         repetition_penalty=repetition_penalty,
         do_sample=True,
-        seed=42,
+        seed=seed,
     )
 
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
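Note: the new branch relies on the standard-library random module being imported at the top of app.py, which is outside this diff. A minimal standalone sketch of the toggle, with an illustrative helper name (pick_seed is not part of the actual file):

import random  # assumed to already be imported in app.py; needed for random.randint

def pick_seed(random_seed: bool, manual_seed: int) -> int:
    # Mirrors the added logic: a fresh random seed when the checkbox is ticked,
    # otherwise the user-supplied manual seed for reproducible generations.
    # gr.Number may deliver a float unless precision=0, so cast defensively.
    if random_seed:
        return random.randint(1, 100000)
    return int(manual_seed)

Unchecking "Use Random Seed" therefore makes repeated runs with the same prompt and settings reproducible, while checking it varies the output on every call.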
@@ -93,6 +98,17 @@ additional_inputs=[
         interactive=True,
         info="Higher k means more diverse outputs by considering a range of tokens",
     ),
+    gr.Checkbox(
+        label="Use Random Seed",
+        value=False,
+        info="Use a random starting point to initiate the generation process instead of the manual one",
+    ),
+    gr.Number(
+        label="Manual Seed",
+        value=42,
+        minimum=1,
+        info="Use a manual starting point to initiate the generation process",
+    ),
 ]
 
 examples=[["I'm planning a vacation to Japan. Can you suggest a one-week itinerary including must-visit places and local cuisines to try?", None, None, None, None, None, None],
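gr.ChatInterface passes additional_inputs to the chat function positionally, after the message and history, so the new Checkbox and Number must stay at the end of the list in the same order as random_seed and manual_seed in generate()'s signature. A rough sketch of that wiring; the existing sliders' ranges and labels are assumed here rather than taken from this diff:

import gradio as gr

# Order must mirror generate(prompt, history, system_prompt, max_new_tokens,
# temperature, repetition_penalty, top_p, top_k, random_seed, manual_seed).
additional_inputs = [
    gr.Textbox(label="System prompt", value=""),                                 # system_prompt
    gr.Slider(minimum=1, maximum=1024, value=100, label="Max new tokens"),       # max_new_tokens
    gr.Slider(minimum=0.0, maximum=1.0, value=0.7, label="Temperature"),         # temperature
    gr.Slider(minimum=1.0, maximum=2.0, value=1.2, label="Repetition penalty"),  # repetition_penalty
    gr.Slider(minimum=0.0, maximum=1.0, value=0.95, label="Top-p"),              # top_p
    gr.Slider(minimum=1, maximum=100, value=1, label="Top-k"),                   # top_k
    gr.Checkbox(label="Use Random Seed", value=False),                           # random_seed
    gr.Number(label="Manual Seed", value=42, minimum=1),                         # manual_seed
]

Each examples row also has to supply one entry per additional input (or None), so adding two inputs means the example rows may need two more trailing None values.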
@@ -108,7 +124,7 @@ gr.ChatInterface(
     chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
     additional_inputs=additional_inputs,
     title="Mixtral 8x7b Instruct v0.1 Chatbot",
-    description="Chatbot space with customizable options for model: https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1",
+    description="Chatbot space with customizable options for model: https://huggingface.co/mistralai/Mixtral-8x7B-Instruct-v0.1\nIf you get an error, Max_New_Tokens is set too high or your system prompt plus prompt is too long; shorten one of them",
     examples=examples,
     concurrency_limit=20,
 ).launch(show_api=False)
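The generate_kwargs dict, including the new seed, is presumably unpacked into the streaming text-generation call further down in app.py, which this diff does not touch. Assuming the Space uses huggingface_hub's InferenceClient (the usual setup for Mixtral chat Spaces), that call would look roughly like this:

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

# InferenceClient.text_generation accepts a `seed` keyword, so the dict built in
# generate() (temperature, max_new_tokens, top_p, top_k, repetition_penalty,
# do_sample, seed) can be unpacked into it directly.
stream = client.text_generation(
    formatted_prompt,
    **generate_kwargs,
    details=True,
    stream=True,
    return_full_text=False,
)

With do_sample=True and a fixed seed, the server-side sampler is seeded deterministically, which is what makes the "Manual Seed" option reproducible.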