Update app.py
app.py
CHANGED
@@ -16,7 +16,7 @@ def format_prompt(message, history, system_prompt=""):
     return prompt
 
 def generate(
-    prompt, history, system_prompt="", temperature=0.9, max_new_tokens=
+    prompt, history, system_prompt="", temperature=0.9, max_new_tokens=512, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -91,13 +91,13 @@ css = """
 """
 
 with gr.Blocks(css=css) as demo:
-    gr.HTML("<h1><center>Mistral 7B Instruct<h1><center>")
+    gr.HTML("<h1><center>Mistral 7B Instruct running on CPU Including System Prompt<h1><center>")
     gr.HTML("<h3><center>In this demo, you can chat with <a href='https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.1'>Mistral-7B-Instruct</a> model. 💬<h3><center>")
     gr.HTML("<h3><center>Learn more about the model <a href='https://huggingface.co/docs/transformers/main/model_doc/mistral'>here</a>. 📚<h3><center>")
     gr.ChatInterface(
         generate,
         additional_inputs=additional_inputs,
-        examples=[["
+        examples=[["How to make Money with AI?"], ["Write me a recipe for pancakes."]]
     )
 
 demo.queue().launch(debug=True)
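
The first hunk's context line references format_prompt, whose body is not part of this diff. For orientation only, a minimal sketch of such a helper, assuming the standard Mistral-7B-Instruct [INST] ... [/INST] turn format and assuming the system prompt is simply prepended to the newest user message (neither detail is confirmed by the diff):

def format_prompt(message, history, system_prompt=""):
    # history is the list of (user, assistant) pairs that gr.ChatInterface passes in.
    prompt = "<s>"
    for user_turn, bot_turn in history:
        prompt += f"[INST] {user_turn} [/INST] {bot_turn}</s> "
    # Hypothetical handling: fold the system prompt into the latest user turn.
    current = f"{system_prompt} {message}".strip() if system_prompt else message
    prompt += f"[INST] {current} [/INST]"
    return prompt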
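The second hunk passes additional_inputs to gr.ChatInterface, but its definition is not among the changed lines. A plausible definition that lines up with the new generate signature (system_prompt, temperature, max_new_tokens, top_p, repetition_penalty), built from standard Gradio components, might look like the sketch below; the labels and ranges are illustrative, not taken from the original file:

import gradio as gr

additional_inputs = [
    gr.Textbox(label="System prompt", value="", lines=2),
    gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05),
    gr.Slider(label="Max new tokens", value=512, minimum=64, maximum=1024, step=64),
    gr.Slider(label="Top-p (nucleus sampling)", value=0.95, minimum=0.0, maximum=1.0, step=0.05),
    gr.Slider(label="Repetition penalty", value=1.0, minimum=1.0, maximum=2.0, step=0.05),
]

gr.ChatInterface forwards these components' values after (message, history), so their order must match the order of the extra parameters in generate.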