Ahmed007 committed on
Commit
867b22a
·
verified ·
1 Parent(s): a1eb076

Upload 4 files

Browse files
Files changed (4) hide show
  1. Dockerfile +14 -0
  2. app.py +90 -0
  3. main.yml +27 -0
  4. requirements.txt +14 -0
Dockerfile ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

FROM python:3.9

WORKDIR /code

# Copy requirements first so the pip layer is cached unless deps change.
COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

COPY . .

# Exec-form CMD. The original had a trailing comma inside the JSON array
# ("app:app", ]) which is invalid JSON, so Docker silently fell back to
# shell form (wrapping the command in /bin/sh -c).
CMD ["gunicorn", "-b", "0.0.0.0:7860", "app:app"]
app.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from flask import Flask, request, jsonify
import os
import nltk
from langchain.llms import LlamaCpp
from langchain.callbacks.manager import CallbackManager
from langchain.prompts import PromptTemplate
from langchain.schema.output_parser import StrOutputParser

# Tokenizer data required at runtime; fetched once at startup.
nltk.download('punkt')

app = Flask(__name__)

# Model file: downloaded on first start only.
MODEL_PATH = "phi-2.Q4_K_M.gguf"
MODEL_URL = "https://huggingface.co/TheBloke/phi-2-GGUF/resolve/main/phi-2.Q4_K_M.gguf"
if not os.path.exists(MODEL_PATH):
    # os.system returns the shell exit status; fail fast with a clear error
    # instead of letting LlamaCpp die later on a missing/partial file.
    status = os.system(f"wget {MODEL_URL}")
    if status != 0:
        raise RuntimeError(f"Model download failed (exit status {status}): {MODEL_URL}")

# Disable GPU usage — this Space runs CPU-only inference.
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

# Callback manager setup (no callbacks registered).
callback_manager = CallbackManager([])

# Creating LlamaCpp instance
llm = LlamaCpp(
    model_path=MODEL_PATH,
    temperature=0.1,
    n_gpu_layers=0,  # CPU only, matches CUDA_VISIBLE_DEVICES=-1 above
    n_batch=1024,
    callback_manager=callback_manager,
    verbose=True,
    n_ctx=2048,
)

# Prompt templates keyed by task; each expects {text} and {question}.
templates = {
    "work_experience": """Instruction:
Extract and summarize the work experience mentioned in the CV provided below. Focus solely on the details related to work history, including job titles, companies, and duration.
Text: {text}
Question: {question}
Output:""",

    "certification": """Instruction:
Extract and summarize the certification history mentioned in the CV provided below. Include details such as degrees earned, institutions attended, and graduation years.
Text: {text}
Question: {question}
Output:""",

    "contact_info": """Instruction:
Extract and provide the contact information mentioned in the CV provided below. Include details such as phone number, email address, and any other relevant contact links.
Text: {text}
Question: {question}
Output:""",

    "skills": """Instruction:
Focus solely on extracting the skills mentioned in the text below, excluding any other details or context. Your answer should consist of concise skills.
Text: {text}
Question: {question}
Output:""",
}
# Maps the exact question strings accepted by the API to a template key.
# A dict dispatch replaces the original four-way if/elif chain: same
# behavior, one obvious place to add new supported questions.
QUESTION_TO_TEMPLATE = {
    "Please summarize the work experience mentioned in the CV.": "work_experience",
    "Please summarize the certification history mentioned in the CV without repeating the output only once.": "certification",
    "Please extract the contact information mentioned in the CV once.": "contact_info",
    "What are the 6 skills? Please provide a concise short answer of the only(skills) mentioned in the text without repeating the answer.": "skills",
}


@app.route('/', methods=['POST'])
def generate_text():
    """Run the LLM over a CV using the prompt template matching the question.

    Expects a JSON body {"question": ..., "text": ...}. Returns
    {"generated_text": ...} on success, or a 400 JSON error when either
    field is missing or the question is not one of the supported strings.
    """
    data = request.get_json()
    question = data.get('question')
    text = data.get('text')

    if not question or not text:
        return jsonify({"error": "Both 'question' and 'text' fields are required."}), 400

    template_key = QUESTION_TO_TEMPLATE.get(question)
    if template_key is None:
        return jsonify({"error": "Invalid question provided."}), 400

    prompt = PromptTemplate(template=templates[template_key], input_variables=["question", "text"])
    chain = prompt | llm | StrOutputParser()
    response = chain.invoke({"question": question, "text": text})

    return jsonify({"generated_text": response})
if __name__ == '__main__':
    # Dev entry point only — production serves via gunicorn (see Dockerfile).
    # Bug fix: the original computed `port` from the PORT env var and then
    # ignored it, hardcoding app.run(port=8000). Honor the env var.
    port = int(os.environ.get("PORT", 8000))
    app.run(port=port)
main.yml ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
name: Python application

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      # checkout@v2 / setup-python@v2 run on a deprecated Node runtime;
      # use the current major versions.
      - uses: actions/checkout@v4

      - name: Set up Python 3.x
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt

      # "python app.py" would start the Flask server and block this job
      # until the runner times out. A byte-compile check verifies the app
      # is syntactically valid without running it.
      - name: Check the app compiles
        run: python -m py_compile app.py
requirements.txt ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
flask
langchain
matplotlib
numpy
nltk
gensim
scikit-learn
llama-cpp-python
huggingface-hub
langchain-experimental
scipy==1.10.1
gunicorn
langchain-community