# Job_Match / app.py
import gradio as gr
from huggingface_hub import InferenceClient
from PyPDF2 import PdfReader
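# Runtime dependencies implied by the imports above (typically pinned in the Space's
# requirements.txt, which is not shown here): gradio, huggingface_hub, PyPDF2.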
# Models Setup
models = {
    "Job Consultant (Zephyr)": {
        "client": InferenceClient(model="HuggingFaceH4/zephyr-7b-beta"),
    },
    "PDF Summarizer (T5)": {
        "client": InferenceClient(model="aaliyaan/t5-small-finetuned-career"),
    },
    # Note: the label reads "T0pp", but the repo id below points to bigscience/T0p.
    "Broken Answer (T0pp)": {
        "client": InferenceClient(model="bigscience/T0p"),
    },
}
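# A minimal sketch, assuming the Space exposes an HF_TOKEN secret (this is not part
# of the original app): building clients with an explicit token avoids anonymous
# rate limits on the serverless Inference API. The helper below is illustrative and
# is not used by the rest of the app.
def make_authenticated_client(model_id):
    import os  # local import so the sketch stays self-contained
    token = os.environ.get("HF_TOKEN")  # None falls back to anonymous access
    return InferenceClient(model=model_id, token=token)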
# Chat Function with Context
def chat_with_model(model_choice, user_message, chat_history, file=None):
    # For the PDF summarizer, fold the extracted PDF text into the user message.
    # The key must match the corresponding entry in `models`.
    if model_choice == "PDF Summarizer (T5)" and file is not None:
        pdf_text = extract_text_from_pdf(file)
        user_message += f"\n\nPDF Content:\n{pdf_text}"

    if not user_message.strip():
        return chat_history, ""

    model_info = models[model_choice]
    client = model_info["client"]

    # Prepare messages for the InferenceClient, including the chat history
    messages = [{"role": "system", "content": "You are a helpful assistant."}]

    # Add the previous conversation to the messages
    for user_msg, bot_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})

    # Add the current user message
    messages.append({"role": "user", "content": user_message})

    # Generate the response by streaming tokens from the Inference API
    response = ""
    for message in client.chat_completion(
        messages,
        max_tokens=150,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    ):
        token = message.choices[0].delta.content
        if token:  # streamed chunks can carry an empty or None delta
            response += token

    # Update Chat History
    chat_history.append((user_message, response))
    return chat_history, ""
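# A minimal sketch, not wired into the UI: encoder-decoder models such as the T5 and
# T0 entries above may not expose a chat-completion endpoint on the serverless API,
# in which case a plain text-generation call is the usual fallback. The function name
# and default below are illustrative assumptions, not part of the original app.
def generate_plain_text(client, prompt, max_new_tokens=150):
    # With stream=False (the default), text_generation() returns the generated string.
    return client.text_generation(prompt, max_new_tokens=max_new_tokens)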
# Function to Extract Text from PDF
def extract_text_from_pdf(file):
    reader = PdfReader(file.name)
    # Skip pages where text extraction returns None or an empty string.
    text = "\n".join(page.extract_text() for page in reader.pages if page.extract_text())
    return text
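# Optional alternative, assuming the pypdf package is available: PyPDF2 is no longer
# maintained and pypdf is its drop-in successor with the same PdfReader interface.
# This helper is a sketch and is not called anywhere in the app.
def extract_text_with_pypdf(file):
    from pypdf import PdfReader as PypdfReader  # local import keeps pypdf optional
    reader = PypdfReader(file.name)
    return "\n".join(page.extract_text() for page in reader.pages if page.extract_text())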
# Interface Setup
def create_chat_interface():
    with gr.Blocks(css="""
        .chatbox {
            background-color: #f7f7f8;
            border-radius: 12px;
            padding: 16px;
            font-family: 'Segoe UI', Tahoma, sans-serif;
        }
        .chat-title {
            font-size: 24px;
            font-weight: bold;
            text-align: center;
            margin-bottom: 12px;
            color: #3a9fd6;
        }
    """) as interface:
        gr.Markdown("<div class='chat-title'>Job Consultant AI</div>")

        with gr.Row():
            model_choice = gr.Dropdown(
                choices=list(models.keys()),
                value="Job Consultant (Zephyr)",
                label="Select Model"
            )

        chat_history = gr.Chatbot(label="Chat History", elem_classes="chatbox")

        user_message = gr.Textbox(
            placeholder="Type your message here and press Enter...",
            show_label=False,
            elem_classes="chatbox",
        )

        file_input = gr.File(label="Upload PDF", visible=False, file_types=[".pdf"])

        # Show the PDF upload box only when the PDF summarizer model is selected.
        def toggle_pdf_input(selected_model):
            return gr.update(visible=(selected_model == "PDF Summarizer (T5)"))

        model_choice.change(fn=toggle_pdf_input, inputs=model_choice, outputs=file_input)

        # Link the input box to send messages on Enter
        user_message.submit(
            chat_with_model,
            inputs=[model_choice, user_message, chat_history, file_input],
            outputs=[chat_history, user_message],
        )

    return interface
if __name__ == "__main__":
    interface = create_chat_interface()
    interface.launch(server_name="0.0.0.0", server_port=7860)
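# A possible refinement, not part of the original app: enabling Gradio's request
# queue before launch() helps when several users hit the Space at once. The
# max_size value below is an illustrative assumption.
#
#     interface.queue(max_size=16)
#     interface.launch(server_name="0.0.0.0", server_port=7860)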