Update main.py
main.py CHANGED
@@ -1,5 +1,9 @@
 from flask import Flask, request, jsonify
 from langchain_community.llms import LlamaCpp
+from llama_cpp import Llama
+
+
+
 import os
 app = Flask(__name__)
 
@@ -7,13 +11,11 @@ n_gpu_layers = 0
 n_batch = 1024
 
 
-llm = LlamaCpp(
-
-
-    n_gpu_layers=n_gpu_layers,
-    n_batch=n_batch,
-    n_ctx=4096
+llm = Llama(
+    model_path="Phi-3-mini-4k-instruct-q4.gguf",  # path to the GGUF model file
+    n_gpu_layers=0,  # number of layers to offload to GPU; set to 0 if no GPU acceleration is available
 )
+
 file_size = os.stat('Phi-3-mini-4k-instruct-q4.gguf')
 print("model size ====> :", file_size.st_size, "bytes")
 
@@ -27,10 +29,9 @@ def get_skills():
         f"<|user|>\n{cv_body}<|end|>\n<|assistant|>Can you list the skills mentioned in the CV?<|end|>",
         max_tokens=256,  # Generate up to 256 tokens
         stop=["<|end|>"],
-        echo=True,  # Whether to echo the prompt
     )
 
-    return jsonify({'skills': output})
+    return jsonify({'skills': output['choices'][0]['text']})
 
 if __name__ == '__main__':
     app.run()
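One thing the diff makes easy to miss: the removed LlamaCpp call set n_ctx=4096 and n_batch=1024, but the new Llama(...) constructor passes neither, and llama-cpp-python defaults n_ctx to 512, so long CV bodies may get truncated at the context limit. A minimal sketch of the constructor with those values restored (n_ctx and n_batch are real llama-cpp-python parameters; the values are simply carried over from the removed code, not part of this commit):

from llama_cpp import Llama

llm = Llama(
    model_path="Phi-3-mini-4k-instruct-q4.gguf",
    n_gpu_layers=0,   # CPU-only inference
    n_batch=1024,     # prompt-processing batch size, as in the removed LlamaCpp call
    n_ctx=4096,       # Phi-3-mini-4k context window; llama-cpp-python defaults to 512
)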
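The change to the return statement follows from the switch of libraries: calling a llama_cpp.Llama instance for a completion returns an OpenAI-style dict rather than a plain string, so the generated text sits under choices[0]['text']. Dropping echo=True also means the prompt is no longer prepended to that text. Roughly, the shape is (field values illustrative):

output = llm(
    f"<|user|>\n{cv_body}<|end|>\n<|assistant|>Can you list the skills mentioned in the CV?<|end|>",
    max_tokens=256,
    stop=["<|end|>"],
)
# output is approximately:
# {
#     "id": "cmpl-...",
#     "object": "text_completion",
#     "choices": [{"text": " Python, SQL, ...", "index": 0, "finish_reason": "stop"}],
#     "usage": {"prompt_tokens": ..., "completion_tokens": ..., "total_tokens": ...},
# }
skills = output["choices"][0]["text"]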
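For a quick smoke test of the endpoint after this change, something like the following should work. The route path and the JSON field name are assumptions inferred from the function name and the cv_body variable; the @app.route decorator and request parsing sit outside the hunks shown:

import requests

# Hypothetical client call; '/get_skills' and 'cv_body' are guesses, not shown in the diff.
resp = requests.post("http://127.0.0.1:5000/get_skills", json={"cv_body": "...paste CV text here..."})
print(resp.json()["skills"])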