change to tinyllama
app.py CHANGED
@@ -1,10 +1,10 @@
 import gradio as gr
 from llama_cpp import Llama
 
-model = "
+model = "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF"
 llm = Llama.from_pretrained(
     repo_id=model,
-    filename="
+    filename="tinyllama-1.1b-chat-v1.0.Q8_0.gguf",
     verbose=True,
     use_mmap=False,
     use_mlock=True,
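For context, a minimal sketch of how the updated Llama.from_pretrained call could sit inside a Gradio chat app. Only the model constant and the loader arguments come from this commit; the respond handler, the create_chat_completion parameters, and the gr.ChatInterface wiring are assumptions about the rest of app.py, which the diff does not show.

import gradio as gr
from llama_cpp import Llama

# Model repo and GGUF filename as set by this commit.
model = "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF"

llm = Llama.from_pretrained(
    repo_id=model,
    filename="tinyllama-1.1b-chat-v1.0.Q8_0.gguf",
    verbose=True,    # log llama.cpp load details
    use_mmap=False,  # load the weights into memory instead of memory-mapping
    use_mlock=True,  # lock model memory so it is not swapped out
)

def respond(message, history):
    # Assumed chat handler; the remainder of app.py is not part of the diff.
    messages = [{"role": "user", "content": message}]
    result = llm.create_chat_completion(messages=messages, max_tokens=256)
    return result["choices"][0]["message"]["content"]

gr.ChatInterface(respond).launch()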