# Screening Assistant Chatbot — resume/JD screening interview bot
# built with Azure OpenAI (via LangChain) and a Gradio UI.
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import HumanMessage
from langchain_core.messages import AIMessage
from langchain_community.chat_message_histories import ChatMessageHistory
from pypdf import PdfReader
import os
import gradio as gr
from langchain_openai import AzureChatOpenAI
# Shared Azure OpenAI chat client; the "GPT-4o" deployment name must exist
# in the Azure resource configured via environment variables.
client = AzureChatOpenAI(azure_deployment="GPT-4o")
def extract_text(pdf_path):
    """Return the concatenated text of every page of the PDF at *pdf_path*.

    Parameters
    ----------
    pdf_path : str | os.PathLike
        Path to a PDF file readable by pypdf.

    Returns
    -------
    str
        All extractable page text joined together (may be "" for
        image-only/scanned PDFs).
    """
    reader = PdfReader(pdf_path)
    # page.extract_text() can return None for pages with no extractable
    # text (e.g. scanned images); coerce to "" so joining never raises.
    # "".join also avoids the quadratic cost of repeated string +=.
    return "".join(page.extract_text() or "" for page in reader.pages)
def get_response(candidate, chat_history, resume, jd):
    """Generate the interviewer's next question for the Gradio chat.

    Parameters
    ----------
    candidate : str
        The candidate's latest message from the textbox.
    chat_history : list[tuple[str, str]]
        Gradio chatbot history; mutated in place with the new turn.
    resume, jd : gradio file objects (have a ``.name`` path) or None
        The uploaded resume and job-description PDFs.

    Returns
    -------
    tuple[str, list]
        ("", updated history) — the empty string clears the textbox.
    """
    # Robustness: without both uploads, resume.name/jd.name would raise
    # AttributeError on None; respond with a prompt to upload instead.
    if resume is None or jd is None:
        chat_history.append(
            (candidate, "Please upload both a resume and a job description first.")
        )
        return "", chat_history

    # NOTE(review): PDFs are re-extracted on every chat turn — works, but
    # could be cached per session if latency becomes a problem.
    resume = extract_text(resume.name)
    jd = extract_text(jd.name)
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                """Your Task is Perform as intelligent interviewer, Your Task is ask question to the resume's candidate by following candidate Answer.
at the end exit with greeting to the candidate.
**Ask question follow up on the candidate response. get chat history.**
""",
            ),
            MessagesPlaceholder(variable_name="messages"),
        ]
    )
    # LCEL pipeline: rendered prompt feeds straight into the Azure chat model.
    chain = prompt | client
    answer = chain.invoke(
        {
            "messages": [
                HumanMessage(
                    content=f" job description :{jd}\n Resume :{resume}"
                ),
                # Prior turns are serialized into an AIMessage so the model
                # can follow up on earlier answers.
                AIMessage(content=f"""Perform as intelligent interviewer, Your Task is ask question to the resume's candidate by following candidate Answer.
chat history : {chat_history}"""),
                HumanMessage(content=candidate),
            ],
        }
    )
    result = answer.content
    chat_history.append((candidate, result))
    print("chat_history", chat_history)
    # Return "" first so the submit handler clears the input textbox.
    return "", chat_history
def gradio_interface() -> None:
    """Create and launch the Gradio UI for the screening chatbot.

    Layout: two file inputs (resume, job description), a chatbot pane,
    a question textbox, and a clear button. Blocks until the server stops.
    """
    with gr.Blocks(css="style.css", theme="shivi/calm_seafoam") as demo:
        gr.HTML("""<center class="darkblue" text-align:center;padding:30px;'></center>
            <center>
            <br><h1 style="color:#006e49">Screening Assistant Chatbot</h1></center>""")
        with gr.Row():
            # Gradio 4.x requires integer `scale`; equal columns → 1:1
            # (fractional values like 0.50 raise/warn in current Gradio).
            with gr.Column(scale=1):
                resume = gr.File(label="Resume", elem_classes="resume")
            with gr.Column(scale=1):
                jd = gr.File(label="Job Description", elem_classes="jd")
        with gr.Row():
            with gr.Column():
                chatbot = gr.Chatbot()
        with gr.Row():
            # 4:1 preserves the original 0.80/0.20 width ratio.
            with gr.Column(scale=4):
                msg = gr.Textbox(label="Question", show_label=False, placeholder="Question...")
            with gr.Column(scale=1):
                clear = gr.ClearButton([msg, chatbot], elem_classes="clear")
        # Enter in the textbox sends (message, history, files) to the model
        # and writes back (cleared textbox, updated history).
        msg.submit(get_response, [msg, chatbot, resume, jd], [msg, chatbot])
    demo.launch(debug=True, share=True)
gradio_interface() |