Update app.py
app.py
CHANGED
@@ -2,39 +2,27 @@ from fastapi import FastAPI, Request
 from llama_cpp import Llama
 from huggingface_hub import hf_hub_download
 import os
-
+os.system("ulimit -l unlimited")
 
 app = FastAPI()
 
-
-
-    try:
-        data = await request.json()
-        prompt = data["prompt"]
-        mode = data['mode']
-        if mode == 'f':
-            return requests.post("https://mrali-codexpert-computing.hf.space/api", json={"prompt": prompt}).json()
-        else:
-            res = model_l(
-                prompt,
-                temperature=0.6,
-                echo=False,
-                max_tokens=32,
-            )
+hf_hub_download("TheBloke/deepseek-coder-1.3b-base-GGUF", "deepseek-coder-1.3b-base.Q5_K_M.gguf", local_dir="./")
+model_l = Llama(model_path="./deepseek-coder-1.3b-base.Q5_K_M.gguf", n_ctx=16192, n_gpu_layers=0, n_threads=2, use_mlock=True)
 
-        return {"responses": res["choices"]}
-    except Exception as j:
-        print(j)
-        return {"responses": "Error!"}
 
+@app.post("/api")
+async def completion(request: Request):
+    data = await request.json()
+    prompt = data["prompt"]
+
+    res = model_l(
+        prompt,
+        temperature=0.6,
+        echo=False,
+        max_tokens=41,
+    )
+    return {"responses": res["choices"]}
 
 if __name__ == "__main__":
-    os.system("ulimit -l unlimited")
-
-    hf_hub_download("TheBloke/deepseek-coder-1.3b-base-GGUF", "deepseek-coder-1.3b-base.Q5_K_M.gguf", local_dir="./")
-    global model_l
-
-    model_l = Llama(model_path="./deepseek-coder-1.3b-base.Q5_K_M.gguf", n_ctx=16192, n_gpu_layers=0, n_threads=2, use_mlock=True)
-
     import uvicorn
     uvicorn.run(app, host="0.0.0.0", port=7860)
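In short, this commit moves the model download and the Llama initialization out of the __main__ block to import time, drops the mode == 'f' branch that forwarded prompts to https://mrali-codexpert-computing.hf.space/api together with the surrounding try/except, and leaves a single /api route that always runs the local model, with max_tokens raised from 32 to 41.

Read together, the added and unchanged lines give roughly the following app.py after the commit (reconstructed from the diff above; the comments are editorial, not part of the commit):

```python
from fastapi import FastAPI, Request
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
import os

# Attempt to lift the locked-memory limit for use_mlock. Note that os.system
# runs this in a child shell, so it does not change the limit of this process.
os.system("ulimit -l unlimited")

app = FastAPI()

# Download the quantized GGUF weights from the Hub and load them with
# llama.cpp on CPU (n_gpu_layers=0) at import time, so the model is ready
# before the first request arrives.
hf_hub_download("TheBloke/deepseek-coder-1.3b-base-GGUF",
                "deepseek-coder-1.3b-base.Q5_K_M.gguf", local_dir="./")
model_l = Llama(model_path="./deepseek-coder-1.3b-base.Q5_K_M.gguf",
                n_ctx=16192, n_gpu_layers=0, n_threads=2, use_mlock=True)


@app.post("/api")
async def completion(request: Request):
    # Expect a JSON body of the form {"prompt": "..."}.
    data = await request.json()
    prompt = data["prompt"]

    # Run a short completion with the locally loaded model.
    res = model_l(
        prompt,
        temperature=0.6,
        echo=False,
        max_tokens=41,
    )
    return {"responses": res["choices"]}


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)
```

A minimal client-side check of the endpoint could look like the sketch below; it assumes the server above is running locally on port 7860, and the prompt string and the use of the requests library are illustrative rather than part of the commit:

```python
import requests

# POST a prompt to the /api route and print the llama_cpp completion choices.
resp = requests.post(
    "http://localhost:7860/api",
    json={"prompt": "def fibonacci(n):"},
)
resp.raise_for_status()
print(resp.json()["responses"])
```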