Update olapp.py
olapp.py
CHANGED
@@ -1,35 +1,12 @@
 from http.server import HTTPServer, BaseHTTPRequestHandler
 from urllib.parse import urlparse
-#import os
 import json
-#from huggingface_hub.file_download import http_get
 from llama_cpp import Llama
 
-
-#directory = "/home/oluser/olapp/"
-#model_url = "https://huggingface.co/IlyaGusev/saiga_mistral_7b_gguf/resolve/main/model-q8_0.gguf"
-#model_name = "model-q8_0.gguf"
-#final_model_path = os.path.join(directory, model_name)
-
-#print("Downloading all files...")
-#rm_files = [os.path.join(directory, f) for f in os.listdir(directory)]
-#for f in rm_files:
-#    if os.path.isfile(f):
-#        os.remove(f)
-#    else:
-#        shutil.rmtree(f)
-
-#if not os.path.exists(final_model_path):
-#    with open(final_model_path, "wb") as f:
-#        http_get(model_url, f)
-#os.chmod(final_model_path, 0o777)
-#print("Files downloaded!")
-
 print("Loading model...")
 
-
+llm = Llama(
     model_path="/home/oluser/olapp/model-q4_K.gguf",
-#    model_path=final_model_path,
     n_ctx=4096,
     n_parts=1,
)
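The deleted block fetched the GGUF file by hand with huggingface_hub's internal http_get helper. For reference, a minimal sketch of the same download using the public hf_hub_download API; the repo_id and filename are taken from the commented-out model_url and model_name, and the local_dir value carries over the old directory variable, so treat them as assumptions rather than part of this commit:

# Sketch only: equivalent of the removed download block, using the public
# hf_hub_download API instead of the internal http_get helper.
from huggingface_hub import hf_hub_download

model_file = hf_hub_download(
    repo_id="IlyaGusev/saiga_mistral_7b_gguf",  # from the removed model_url
    filename="model-q8_0.gguf",                 # from the removed model_name
    local_dir="/home/oluser/olapp",             # from the removed directory variable
)
print("Files downloaded!")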
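The diff only shows the model setup, but the imports (HTTPServer, BaseHTTPRequestHandler, urlparse, json) imply that the rest of olapp.py serves the loaded model over HTTP. Below is a minimal sketch of how llm could be wired into such a handler; the OlappHandler name, the /generate path, and port 7860 are assumptions for illustration, not taken from this file:

# Hypothetical continuation, not part of the commit: wires the loaded `llm`
# into the HTTP server machinery that olapp.py already imports.
class OlappHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        # Only handle a hypothetical /generate endpoint.
        if urlparse(self.path).path != "/generate":
            self.send_response(404)
            self.end_headers()
            return
        length = int(self.headers.get("Content-Length", 0))
        payload = json.loads(self.rfile.read(length) or b"{}")
        prompt = payload.get("prompt", "")
        # llama-cpp-python: calling the Llama object runs a completion.
        result = llm(prompt, max_tokens=256)
        body = json.dumps({"text": result["choices"][0]["text"]}).encode("utf-8")
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == "__main__":
    # 7860 is the conventional Hugging Face Spaces port; the real app may differ.
    HTTPServer(("0.0.0.0", 7860), OlappHandler).serve_forever()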