Update app.py
app.py CHANGED
@@ -13,9 +13,9 @@ import ollama
 
 # Model from run.sh
 MODEL_ID_MAP = {
-    "(LiquidAI)LFM2-1.2B": 'LFM2-1.2B-GGUF:Q5_K_M',
-    "(LiquidAI)LFM2-700M": 'LFM2-700M-GGUF:Q5_K_M',
-    "(LiquidAI)LFM2-350M": 'LFM2-350M-GGUF:Q5_K_M',
+    "(LiquidAI)LFM2-1.2B": 'hf.co/LiquidAI/LFM2-1.2B-GGUF:Q5_K_M',
+    "(LiquidAI)LFM2-700M": 'hf.co/LiquidAI/LFM2-700M-GGUF:Q5_K_M',
+    "(LiquidAI)LFM2-350M": 'hf.co/LiquidAI/LFM2-350M-GGUF:Q5_K_M',
     "(Google)Gemma-3-270M-it-qat": 'hf.co/unsloth/gemma-3-270m-it-qat-GGUF:Q5_K_M',
     "(Alibaba)Qwen3-4B-Instruct-2507": 'hf.co/bartowski/Qwen_Qwen3-4B-Instruct-2507-GGUF:Q4_K_M',
     "(IBM)Granite3.3-2B": 'granite3.3:2b',
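For context on why the keys now map to full hf.co/... IDs: Ollama can pull a GGUF build directly from a Hugging Face repo when given that form. Below is a minimal sketch of how app.py presumably consumes the map, assuming it resolves the selected display name through MODEL_ID_MAP and runs one chat turn via the ollama client. The helper name chat_once and the abbreviated map are illustrative assumptions, not the actual app code.

import ollama

# Abbreviated copy of the map from the diff above (illustrative only).
MODEL_ID_MAP = {
    "(LiquidAI)LFM2-1.2B": 'hf.co/LiquidAI/LFM2-1.2B-GGUF:Q5_K_M',
    "(IBM)Granite3.3-2B": 'granite3.3:2b',
}

def chat_once(display_name: str, prompt: str) -> str:
    # Hypothetical helper: resolve the UI display name to an Ollama model ID,
    # make sure the model is available locally, then run a single chat turn.
    model_id = MODEL_ID_MAP[display_name]
    ollama.pull(model_id)  # pulls the GGUF build if it is not cached yet
    response = ollama.chat(
        model=model_id,
        messages=[{"role": "user", "content": prompt}],
    )
    return response["message"]["content"]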