Update main.py
main.py
CHANGED
@@ -2,7 +2,8 @@ from flask import Flask, request, jsonify
 from langchain_community.llms import LlamaCpp
 from sentence_transformers import SentenceTransformer
 from transformers import AutoTokenizer, AutoModel
-from huggingface_hub import hf_hub_download
+from huggingface_hub import hf_hub_download
+from huggingface_hub.utils import HfHubHTTPError
 
 # cosine_similarity
 import torch
@@ -15,16 +16,22 @@ n_gpu_layers = 0
 n_batch = 1024
 
 
-
-
-
-
-
-
-
-
-
+try:
+    model_path = hf_hub_download(repo_id="repo_name", filename="model_file_name", force_download=True)
+except HfHubHTTPError as e:
+    print(f"Error downloading the model: {e}")
+    model_path = None
+
+# Make sure the model was downloaded successfully
+if model_path:
+    llm = LlamaCpp(
+        model_path=model_path,  # path to GGUF file
+        temperature=0.1,
+        n_gpu_layers=n_gpu_layers,
+        n_batch=n_batch,
+        verbose=True,
+        n_ctx=4096
+    )
 
 model0 = AutoModel.from_pretrained('sentence-transformers/paraphrase-TinyBERT-L6-v2')
 model = SentenceTransformer('sentence-transformers/paraphrase-TinyBERT-L6-v2')
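For reference, a minimal sketch of how the guarded llm object could be consumed from the Flask app in this Space. The endpoint name, the "prompt" request field, and the 503 fallback are assumptions for illustration and are not part of this commit; the sketch only relies on the model_path / llm globals created above and on LangChain's standard .invoke() call.

# Hypothetical usage sketch (not part of the commit): assumes the Flask
# imports and the model_path / llm globals defined in main.py above.
app = Flask(__name__)

@app.route("/generate", methods=["POST"])    # endpoint name is an assumption
def generate():
    # If the download failed, model_path is None and llm was never created,
    # so refuse the request instead of raising a NameError.
    if model_path is None:
        return jsonify({"error": "model not available"}), 503
    prompt = request.json.get("prompt", "")  # request field name is an assumption
    answer = llm.invoke(prompt)              # LlamaCpp exposes LangChain's Runnable .invoke()
    return jsonify({"answer": answer})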