Update app.py
app.py
CHANGED
@@ -50,8 +50,10 @@ def load_llm():
     if model_instance is None:
         print("Cargando el modelo...")
         model_instance = AutoModelForCausalLM.from_pretrained(
-            "TheBloke/Llama-2-7B-Chat-GGUF",
-            model_file='llama-2-7b-chat.Q5_K_M.gguf',
+            #"TheBloke/Llama-2-7B-Chat-GGUF",
+            "TheBloke/phi-2-GGUF",
+            #model_file='llama-2-7b-chat.Q5_K_M.gguf',
+            model_file='phi-2.Q4_K_M.gguf',
             model_type='llama',
             max_new_tokens=2048,
             context_length = 4096,
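For context, a minimal sketch of what the updated load_llm() could look like after this commit, assuming the app uses ctransformers' AutoModelForCausalLM (the model_file, model_type, max_new_tokens and context_length keyword arguments match that library's from_pretrained/config API) and caches the model in a module-level model_instance variable, as the diff suggests:

# Minimal sketch (not the full app.py): assumes ctransformers is the loader
# behind AutoModelForCausalLM, which is what the model_file / model_type /
# context_length keyword arguments in the diff correspond to.
from ctransformers import AutoModelForCausalLM

model_instance = None  # module-level cache so the GGUF weights load only once

def load_llm():
    global model_instance
    if model_instance is None:
        print("Cargando el modelo...")  # "Loading the model..."
        model_instance = AutoModelForCausalLM.from_pretrained(
            "TheBloke/phi-2-GGUF",           # quantized Phi-2 repo on the Hub
            model_file='phi-2.Q4_K_M.gguf',  # Q4_K_M-quantized GGUF weights
            model_type='llama',              # backend type kept as in the diff
            max_new_tokens=2048,
            context_length=4096,
        )
    return model_instance

# Example call (hypothetical prompt): the returned model is callable on a string.
# print(load_llm()("Instruct: Say hello.\nOutput:"))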