Spaces: Running on CPU Upgrade
Update app.py
app.py CHANGED
@@ -57,18 +57,17 @@ repo_name = "IlyaGusev/saiga_13b_lora_llamacpp"
 model_name = "ggml-model-q4_1.bin"
 embedder_name = "sentence-transformers/paraphrase-multilingual-mpnet-base-v2"
 
-
-
-
-
-
-
+rm_files = [os.path.join("/data", f) for f in os.listdir("/data")]
+for f in rm_files:
+    if os.path.isfile(f):
+        os.remove(f)
+    else:
+        shutil.rmtree(f)
 
 print("Downloading all files...")
-snapshot_download(repo_id=repo_name, local_dir="/data/saiga_13b_lora_llamacpp", allow_patterns=model_name,
-snapshot_download(repo_id=embedder_name, local_dir="/data/paraphrase-multilingual-mpnet-base-v2", cache_dir="/data")
+snapshot_download(repo_id=repo_name, local_dir="/data/saiga_13b_lora_llamacpp", allow_patterns=model_name, local_dir_use_symlinks=False)
+# snapshot_download(repo_id=embedder_name, local_dir="/data/paraphrase-multilingual-mpnet-base-v2", cache_dir="/data")
 print("Files downloaded!")
-print(os.listdir("/data/saiga_13b_lora_llamacpp"))
 
 model = Llama(
     model_path=f"/data/saiga_13b_lora_llamacpp/{model_name}",
@@ -79,7 +78,7 @@ model = Llama(
 print("Model loaded!")
 
 max_new_tokens = 1500
-embeddings = HuggingFaceEmbeddings(model_name=
+embeddings = HuggingFaceEmbeddings(model_name=embedder_name)
 
 def get_uuid():
     return str(uuid4())