Ahmed007 committed (verified)
Commit 85f5ead · Parent: 915fa6c

Update main.py

Files changed (1): main.py (+9 -6)
main.py CHANGED
@@ -15,7 +15,7 @@ n_batch = 1024
 callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])
 
 llm = LlamaCpp(
-    model_path="Meta-Llama-3-8B-Instruct.Q5_K_M.gguf",
+    model_path="Phi-3-mini-4k-instruct-fp16.gguf",
     temperature=0.1,
     n_gpu_layers=n_gpu_layers,
     n_batch=n_batch,
@@ -27,16 +27,19 @@ llm = LlamaCpp(
 @app.route('/', methods=['POST'])
 def get_skills():
     cv_body = request.json.get('cv_body')
+    question = "What are his best skills? write in points"
 
-    template = """[INST] <<SYS>>
-    You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.
-    <</SYS>>
-    {cv_body}[/INST]"""
+    template = """
+    <|user|>
+    I am analyzing cv this cv {cv_body} , {question} <|end|>
+    <|assistant|>
+
+    """
 
     prompt = PromptTemplate(template=template, input_variables=["text"])
     chain = prompt | llm | StrOutputParser()
 
-    ans = chain.invoke({"question": "What are his best skills? write in points","cv_body":cv_body},
+    ans = chain.invoke({"question":question ,"cv_body":cv_body},
         config={
             # "callbacks": [ConsoleCallbackHandler()]
         })
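
For context, below is a minimal sketch of how main.py could read after this commit. Only the lines shown in the hunks above come from the actual file; the Flask app object, the imports, the n_gpu_layers value, the callback wiring into LlamaCpp, the JSON response, and the port are assumptions filled in to make the example self-contained. The sketch also declares the placeholders the template actually uses, since the committed input_variables=["text"] does not match the {cv_body}/{question} variables that chain.invoke supplies.

# Hypothetical reconstruction of main.py after commit 85f5ead.
# Assumed pieces are marked in comments; only the diff hunks above are authoritative.
from flask import Flask, request, jsonify
from langchain_community.llms import LlamaCpp
from langchain_core.callbacks import CallbackManager, StreamingStdOutCallbackHandler
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

app = Flask(__name__)      # assumed: created earlier in main.py
n_gpu_layers = -1          # assumed value; the real one is set above the first hunk
n_batch = 1024             # from the first hunk's context line

callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])

llm = LlamaCpp(
    model_path="Phi-3-mini-4k-instruct-fp16.gguf",  # model swapped in by this commit
    temperature=0.1,
    n_gpu_layers=n_gpu_layers,
    n_batch=n_batch,
    callback_manager=callback_manager,  # assumed: how the manager is wired in
    verbose=True,                       # assumed
)

@app.route('/', methods=['POST'])
def get_skills():
    cv_body = request.json.get('cv_body')
    question = "What are his best skills? write in points"

    # Phi-3 chat template introduced by this commit (wording lightly tidied)
    template = """
    <|user|>
    I am analyzing this CV: {cv_body}
    {question} <|end|>
    <|assistant|>
    """

    # Declare the placeholders the template really uses; the committed code
    # passes input_variables=["text"], which does not match them.
    prompt = PromptTemplate(template=template, input_variables=["cv_body", "question"])
    chain = prompt | llm | StrOutputParser()

    ans = chain.invoke({"question": question, "cv_body": cv_body})
    return jsonify({"answer": ans})  # assumed response shape

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860)  # assumed: default Hugging Face Spaces port

With the server running, a request such as curl -X POST http://localhost:7860/ -H "Content-Type: application/json" -d '{"cv_body": "..."}' would then return the model's bullet-point skill summary (host and port are part of the assumed setup).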