sanket09 committed · verified
Commit 20eae19 · 1 Parent(s): 6896c15

Update app.py

Files changed (1)
  1. app.py +26 -2
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
+import fitz  # PyMuPDF
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
@@ -7,6 +8,18 @@ For more information on `huggingface_hub` Inference API support, please check th
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 
+def extract_text_from_pdf(pdf_path):
+    # Open the provided PDF file
+    doc = fitz.open(pdf_path)
+    text = ""
+
+    # Extract text from each page
+    for page in doc:
+        text += page.get_text()
+
+    return text
+
+
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -39,12 +52,23 @@ def respond(
         response += token
         yield response
 
+
+def process_resume_and_respond(pdf_file, message, history, system_message, max_tokens, temperature, top_p):
+    # Extract text from the PDF file
+    resume_text = extract_text_from_pdf(pdf_file.name)
+    # Combine the resume text with the user message
+    combined_message = f"Resume:\n{resume_text}\n\nUser message:\n{message}"
+    # Respond using the combined message
+    return respond(combined_message, history, system_message, max_tokens, temperature, top_p)
+
+
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
 """
 demo = gr.ChatInterface(
-    respond,
+    process_resume_and_respond,
     additional_inputs=[
+        gr.File(label="Upload Resume PDF"),
         gr.Textbox(value="You are a Job Advisor Chatbot.", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
@@ -60,4 +84,4 @@ demo = gr.ChatInterface(
 
 
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
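
The new extract_text_from_pdf helper relies on PyMuPDF's fitz.open and Page.get_text. A quick local sanity check of that extraction, assuming PyMuPDF is installed (pip install pymupdf) and using "resume.pdf" as a placeholder path:

import fitz  # PyMuPDF

# Placeholder path for illustration; point this at any local PDF.
with fitz.open("resume.pdf") as doc:
    page_count = doc.page_count
    text = "".join(page.get_text() for page in doc)

print(f"Extracted {len(text)} characters from {page_count} pages")
print(text[:300])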
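
One wiring detail to keep in mind when reading the gr.ChatInterface call: Gradio passes each component listed in additional_inputs to the chat function after message and history, in declaration order. Below is a minimal standalone sketch of that convention; the echo callback and variable names are illustrative only and not part of this Space.

import gradio as gr

def echo(message, history, uploaded_file, system_message):
    # gr.ChatInterface invokes this as fn(message, history, *additional_inputs),
    # so uploaded_file and system_message arrive after message and history.
    return f"[{system_message}] received {message!r} (upload: {uploaded_file!r})"

sketch = gr.ChatInterface(
    echo,
    additional_inputs=[
        gr.File(label="Upload Resume PDF"),
        gr.Textbox(value="You are a Job Advisor Chatbot.", label="System message"),
    ],
)

if __name__ == "__main__":
    sketch.launch()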
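
For context on the `response += token` / `yield response` fragments in the hunks above: this Space starts from the stock huggingface_hub ChatInterface template, whose streaming loop looks roughly like the sketch below. This is an assumption based on that template rather than the verbatim respond body; the stream_reply name and the default top_p value are illustrative.

from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

def stream_reply(messages, max_tokens=512, temperature=0.7, top_p=0.95):
    # Stream chat completions from zephyr-7b-beta and yield the growing
    # partial response after each token, which is the pattern the
    # respond() fragments above belong to.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content
        yield response

# Example call (needs network access to the Inference API):
# for partial in stream_reply([{"role": "user", "content": "Hello"}]):
#     print(partial)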