sanket09 committed
Commit b7bd810 · verified · 1 Parent(s): 75ef3f7

Update app.py

Files changed (1):
  1. app.py +2 -7
app.py CHANGED
@@ -7,7 +7,6 @@ For more information on `huggingface_hub` Inference API support, please check th
 """
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
-
 def extract_text_from_pdf(pdf_path):
     # Open the provided PDF file
     doc = fitz.open(pdf_path)
@@ -17,9 +16,9 @@ def extract_text_from_pdf(pdf_path):
     for page in doc:
         text += page.get_text()
 
+    doc.close()  # Ensure the PDF file is closed
     return text
 
-
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -50,9 +49,9 @@ def respond(
         token = message.choices[0].delta.content
 
         response += token
+        print(f"Token: {token}")  # Debugging statement to trace tokens
         yield response
 
-
 def process_resume_and_respond(pdf_file, message, history, system_message, max_tokens, temperature, top_p):
     # Extract text from the PDF file
     resume_text = extract_text_from_pdf(pdf_file.name)
@@ -63,7 +62,6 @@ def process_resume_and_respond(pdf_file, message, history, system_message, max_t
     response = "".join([token for token in response_gen])
     return response
 
-
 # Store the uploaded PDF content globally
 uploaded_resume_text = ""
 
@@ -72,7 +70,6 @@ def upload_resume(pdf_file):
     uploaded_resume_text = extract_text_from_pdf(pdf_file.name)
     return "Resume uploaded successfully!"
 
-
 def respond_with_resume(message, history, system_message, max_tokens, temperature, top_p):
     global uploaded_resume_text
     # Combine the uploaded resume text with the user message
@@ -82,7 +79,6 @@ def respond_with_resume(message, history, system_message, max_tokens, temperatur
     response = "".join([token for token in response_gen])
     return response
 
-
 """
 For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
 """
@@ -113,6 +109,5 @@ demo = gr.TabbedInterface(
     ["Upload Resume", "Chat with Job Advisor"]
 )
 
-
 if __name__ == "__main__":
     demo.launch()
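The two added lines in this commit are the doc.close() call in extract_text_from_pdf and the token-level print in respond. For context, here is a minimal sketch of how the PDF helper likely reads after this change, assuming fitz is PyMuPDF and that the text accumulator is initialised in the lines elided between the two hunks:

import fitz  # PyMuPDF; assumed to be imported near the top of app.py

def extract_text_from_pdf(pdf_path):
    # Open the provided PDF file
    doc = fitz.open(pdf_path)
    text = ""  # accumulator; assumed from the lines not shown in this diff
    # Concatenate the text of every page
    for page in doc:
        text += page.get_text()
    doc.close()  # Ensure the PDF file is closed
    return text

Closing the document explicitly releases the file handle as soon as extraction finishes instead of waiting for garbage collection.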
 
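The second addition, the print call, sits inside the streaming loop of respond, whose body is only partially visible in this diff. Below is a sketch of the surrounding function under the assumption that it follows the stock huggingface_hub / Gradio chat template suggested by the message.choices[0].delta.content line; the message-building section and the chunk variable name (renamed from the template's shadowed message) are assumptions, not part of this commit:

from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

def respond(message, history: list[tuple[str, str]], system_message, max_tokens, temperature, top_p):
    # Assemble the chat history in the format expected by chat_completion (assumed template code)
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # Stream the completion and yield the growing response after every token
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        response += token
        print(f"Token: {token}")  # Debugging statement to trace tokens
        yield response

Since process_resume_and_respond and respond_with_resume consume this generator with "".join(...), the debug print also runs on those code paths, not only in the interactive chat tab.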