Spaces: Running on Zero
Locutusque committed: Update app.py
app.py CHANGED
@@ -19,12 +19,16 @@ def generate(
     pipe = load_model(model_name)
 
     # Set tokenize correctly. Otherwise ticking the box breaks it.
-    prompt = f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
+    if model_name == "Locutusque/TinyMistral-248M-v3":
+        prompt = user_input
+        gr.Interface.notify("Warning: Locutusque/TinyMistral-248M-v3 is an experimental preview model, performance is not guaranteed", type="warning")
+    else:
+        prompt = f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"
     outputs = pipe(prompt, max_new_tokens=max_new_tokens, do_sample=True,
                    temperature=temperature, top_k=top_k, top_p=top_p, repetition_penalty=1.10)
     return outputs[0]["generated_text"]
 
-model_choices = ["Locutusque/UltraQwen-7B", "Locutusque/UltraQwen-1_8B", "Locutusque/TinyMistral-248M-v2.5-Instruct", "M4-ai/TinyMistral-6x248M-Instruct", "Locutusque/Hercules-1.0-Mistral-7B", "Locutusque/Hercules-2.0-Mistral-7B"]
+model_choices = ["Locutusque/TinyMistral-248M-v3", "Locutusque/UltraQwen-7B", "Locutusque/UltraQwen-1_8B", "Locutusque/TinyMistral-248M-v2.5-Instruct", "M4-ai/TinyMistral-6x248M-Instruct", "Locutusque/Hercules-1.0-Mistral-7B", "Locutusque/Hercules-2.0-Mistral-7B"]
 # What are the best options?
 g = gr.Interface(
     fn=generate,
@@ -38,7 +42,7 @@ g = gr.Interface(
     ],
     outputs=[gr.Textbox(lines=10, label="Output")],
     title="Locutusque's Language Models",
-    description="Try out Locutusque's language models here! Credit goes to Mediocreatmybest for this space.",
+    description="Try out Locutusque's language models here! Credit goes to Mediocreatmybest for this space. You may also find some experimental preview models that have not been made public here.",
     concurrency_limit=1
 )
 
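Two things in this hunk are easy to trip over when adapting it: load_model is called but defined outside the shown context, and gr.Interface.notify is not a documented Gradio method (Gradio's built-in toasts are gr.Info, gr.Warning, and gr.Error). Below is a rough sketch of the same generate logic, assuming load_model simply caches a transformers text-generation pipeline and that generate's parameters arrive in this order; both are assumptions, not the Space's actual code, and the warning is raised with gr.Warning instead.

# Sketch only: load_model is assumed to wrap transformers.pipeline with caching;
# the Space's real helper is not shown in this diff.
from functools import lru_cache

import gradio as gr
from transformers import pipeline


@lru_cache(maxsize=1)  # keep only the most recently requested model in memory
def load_model(model_name: str):
    return pipeline("text-generation", model=model_name)


def generate(model_name, user_input, max_new_tokens, temperature, top_k, top_p):
    pipe = load_model(model_name)

    if model_name == "Locutusque/TinyMistral-248M-v3":
        # Preview model: send the raw input and surface a toast in the UI.
        prompt = user_input
        gr.Warning("Locutusque/TinyMistral-248M-v3 is an experimental preview model, "
                   "performance is not guaranteed")
    else:
        # ChatML-style template used by the instruct-tuned models.
        prompt = f"<|im_start|>user\n{user_input}<|im_end|>\n<|im_start|>assistant\n"

    outputs = pipe(prompt, max_new_tokens=max_new_tokens, do_sample=True,
                   temperature=temperature, top_k=top_k, top_p=top_p,
                   repetition_penalty=1.10)
    return outputs[0]["generated_text"]

The maxsize=1 cache keeps only the most recently loaded pipeline in memory, which matters on a ZeroGPU Space where several 7B checkpoints are selectable from the same dropdown.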