attempt8 fix
app.py
CHANGED
@@ -84,7 +84,7 @@ def generate(
 chat_interface = gr.ChatInterface(
     fn=generate,
     additional_inputs=[
-        gr.Dropdown(label="Model ID", choices=["Nekochu/Luminia-13B-v3", "Nekochu/Llama-2-13B-German-ORPO"]
+        gr.Dropdown(label="Model ID", choices=["Nekochu/Luminia-13B-v3", "Nekochu/Llama-2-13B-German-ORPO"]),
         gr.Textbox(label="System prompt", lines=6),
         gr.Slider(
             label="Max new tokens",
@@ -123,9 +123,10 @@ chat_interface = gr.ChatInterface(
         ),
     ],
     stop_btn=None,
+    outputs="text",
     examples=[
-        ["### Instruction: Create stable diffusion metadata based on the given english description. Luminia ### Input: favorites and popular SFW ### Response:"],
-        ["### Instruction: Provide tips on stable diffusion to optimize low token prompts and enhance quality include prompt example. ### Response:"],
+        ["Nekochu/Luminia-13B-v3", "### Instruction: Create stable diffusion metadata based on the given english description. Luminia ### Input: favorites and popular SFW ### Response:"],
+        ["Nekochu/Llama-2-13B-German-ORPO", "### Instruction: Provide tips on stable diffusion to optimize low token prompts and enhance quality include prompt example. ### Response:"],
     ],
 )
 
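For context, a minimal self-contained sketch of the pattern this commit works toward: a gr.Dropdown for the model ID passed through additional_inputs, with example rows that also supply values for those inputs. This is not the Space's actual app.py; echo_generate, the slider bounds, and the example values are placeholders, and it assumes Gradio 4.x, where each example row is the user message followed by the additional-input values in order.

# Minimal sketch only; echo_generate stands in for the Space's real generate().
import gradio as gr

MODEL_IDS = ["Nekochu/Luminia-13B-v3", "Nekochu/Llama-2-13B-German-ORPO"]

def echo_generate(message, history, model_id, system_prompt, max_new_tokens):
    # Placeholder for the real model call: echo the settings and the prompt.
    return f"[{model_id} | max_new_tokens={max_new_tokens}] {message}"

chat_interface = gr.ChatInterface(
    fn=echo_generate,
    additional_inputs=[
        gr.Dropdown(label="Model ID", choices=MODEL_IDS, value=MODEL_IDS[0]),
        gr.Textbox(label="System prompt", lines=6),
        gr.Slider(label="Max new tokens", minimum=1, maximum=2048, step=1, value=512),
    ],
    stop_btn=None,
    # Assumes Gradio 4.x: each example row is [message, *additional_input_values].
    examples=[
        ["### Instruction: Create stable diffusion metadata based on the given english description. Luminia ### Input: favorites and popular SFW ### Response:",
         MODEL_IDS[0], "", 512],
    ],
)

if __name__ == "__main__":
    chat_interface.launch()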