Spaces: Running on Zero
Update app.py
app.py CHANGED

@@ -15,7 +15,7 @@ llm = None
 llm_model = None
 # hf_hub_download(repo_id="bartowski/dolphin-2.9.1-yi-1.5-34b-GGUF", filename="dolphin-2.9.1-yi-1.5-34b-Q6_K.gguf", local_dir = "./models")
 # hf_hub_download(repo_id="crusoeai/dolphin-2.9.1-llama-3-70b-GGUF", filename="dolphin-2.9.1-llama-3-70b.Q3_K_M.gguf", local_dir = "./models")
-hf_hub_download(repo_id="mradermacher/Dolphin3.0-Mistral-24B-GGUF", filename="Dolphin3.0-Mistral-24B.
+hf_hub_download(repo_id="mradermacher/Dolphin3.0-Mistral-24B-GGUF", filename="Dolphin3.0-Mistral-24B.Q6_K.gguf", local_dir = "./models")
 # hf_hub_download(repo_id="kroonen/dolphin-2.9.2-Phi-3-Medium-GGUF", filename="dolphin-2.9.2-Phi-3-Medium-Q6_K.gguf", local_dir = "./models")
 hf_hub_download(repo_id="cognitivecomputations/dolphin-2.9.2-qwen2-72b-gguf", filename="qwen2-Q3_K_M.gguf", local_dir = "./models")

@@ -84,9 +84,9 @@ demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Dropdown([
-            'Dolphin3.0-Mistral-24B.
+            'Dolphin3.0-Mistral-24B.Q6_K.gguf',
             'qwen2-Q3_K_M.gguf'
-        ], value="Dolphin3.0-Mistral-24B.
+        ], value="Dolphin3.0-Mistral-24B.Q6_K.gguf", label="Model"),
         gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
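For orientation, here is a minimal, hedged sketch of how the pieces touched by this diff typically fit together in a llama-cpp-python Space. Only the hf_hub_download calls, the global names llm / llm_model, and the gr.ChatInterface inputs are taken from the diff above; the respond() body, the n_ctx value, and the pair-style history handling are assumptions about the surrounding code, not the Space's actual implementation.

```python
import gradio as gr
from huggingface_hub import hf_hub_download
from llama_cpp import Llama

llm = None
llm_model = None

# Fetch the GGUF weights referenced in the diff into ./models at startup.
hf_hub_download(
    repo_id="mradermacher/Dolphin3.0-Mistral-24B-GGUF",
    filename="Dolphin3.0-Mistral-24B.Q6_K.gguf",
    local_dir="./models",
)
hf_hub_download(
    repo_id="cognitivecomputations/dolphin-2.9.2-qwen2-72b-gguf",
    filename="qwen2-Q3_K_M.gguf",
    local_dir="./models",
)


def respond(message, history, model, max_tokens, temperature):
    """Stream a reply; reload the Llama instance only when the dropdown selection changes."""
    global llm, llm_model
    if llm is None or llm_model != model:
        # n_ctx here is an assumed value, matching the 8192-token slider maximum.
        llm = Llama(model_path=f"./models/{model}", n_ctx=8192)
        llm_model = model

    # Assumes the pair-style chat history ([user, assistant] tuples) used by older Gradio versions.
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    for chunk in llm.create_chat_completion(
        messages=messages, max_tokens=max_tokens, temperature=temperature, stream=True
    ):
        delta = chunk["choices"][0]["delta"]
        if "content" in delta:
            response += delta["content"]
            yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(
            ["Dolphin3.0-Mistral-24B.Q6_K.gguf", "qwen2-Q3_K_M.gguf"],
            value="Dolphin3.0-Mistral-24B.Q6_K.gguf",
            label="Model",
        ),
        gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
    ],
)

if __name__ == "__main__":
    demo.launch()
```

Caching the loaded model in the llm / llm_model globals avoids re-reading a multi-gigabyte GGUF file on every request; the model is only reloaded when the dropdown value actually changes.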