Hugging Face Space (status: Running on Zero)
Commit: change default model — Browse files
File changed: app.py
@@ -63,7 +63,7 @@ if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"

 if torch.cuda.is_available():
-    model_id = "…"  [old default model id — truncated in the page extraction]
+    model_id = "rubra-ai/Meta-Llama-3-8B-Instruct" # Default model
     model = None
     tokenizer = None

@@ -266,7 +266,7 @@ def create_chat_interface():
         model_dropdown = gr.Dropdown(
             choices=model_choices,
             label="Select Model",
-            value="…"  [old default value — truncated in the page extraction]
+            value="rubra-ai/Meta-Llama-3-8B-Instruct"
         )
         model_dropdown.change(load_model, inputs=[model_dropdown])