Update app.py
app.py CHANGED
@@ -51,9 +51,9 @@ def load_llm():
     print("Cargando el modelo...")
     model_instance = AutoModelForCausalLM.from_pretrained(
         #"TheBloke/Llama-2-7B-Chat-GGUF",
-        "
+        "TheBloke/Llama-2-7B-Chat-GGUF",
         #model_file='llama-2-7b-chat.Q5_K_M.gguf',
-        model_file='
+        model_file='llama-2-7b-chat.Q3_K_M.gguf',
         model_type='llama',
         max_new_tokens=2048,
         context_length = 4096,
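For context, the commit replaces the truncated model arguments with the full Hugging Face repo id and a smaller Q3_K_M quantized GGUF file (Q3_K_M is a lower-bit quantization than the commented-out Q5_K_M, so it needs less RAM). Below is a minimal sketch of how the resulting call loads and runs the model, assuming the Space uses the ctransformers library rather than transformers, since from_pretrained() is passed model_file, model_type, and context_length; the prompt string and usage at the end are illustrative only:

# Minimal sketch (assumption: the app imports AutoModelForCausalLM from ctransformers).
from ctransformers import AutoModelForCausalLM

def load_llm():
    print("Cargando el modelo...")  # "Loading the model..."
    # Downloads llama-2-7b-chat.Q3_K_M.gguf from the repo and loads it on CPU.
    model_instance = AutoModelForCausalLM.from_pretrained(
        "TheBloke/Llama-2-7B-Chat-GGUF",
        model_file='llama-2-7b-chat.Q3_K_M.gguf',
        model_type='llama',
        max_new_tokens=2048,
        context_length=4096,
    )
    return model_instance

# Hypothetical usage: a ctransformers model instance is callable on a prompt string.
llm = load_llm()
print(llm("Q: What is a GGUF file? A:"))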