|
import gradio as gr |
|
from huggingface_hub import InferenceClient |
|
from PyPDF2 import PdfReader |
|
|
|
|
|
# Registry of selectable backends, keyed by the label shown in the UI dropdown.
# NOTE(review): chat_with_model and toggle_pdf_input compare against these
# exact key strings — any mismatch silently disables the corresponding feature.
models = {

    "Job Consultant (Zephyr)": {

        "client": InferenceClient(model="HuggingFaceH4/zephyr-7b-beta"),

    },

    "PDF Summarizer (T5)": {

        "client": InferenceClient(model="aaliyaan/t5-small-finetuned-career"),

    },

    # NOTE(review): label says "T0pp" but the model id is "bigscience/T0p";
    # given the "Broken Answer" label this may be deliberate — confirm.
    "Broken Answer (T0pp)": {

        "client": InferenceClient(model="bigscience/T0p"),

    },

}
|
|
|
|
|
def chat_with_model(model_choice, user_message, chat_history, file=None):
    """Generate a streamed chat reply for the selected model.

    Args:
        model_choice: Key into the module-level ``models`` registry.
        user_message: Text the user typed; when the PDF summarizer model is
            selected and a file is supplied, the PDF's text is appended.
        chat_history: List of ``(user, assistant)`` message pairs; mutated in
            place and also returned (Gradio Chatbot state).
        file: Optional uploaded file object with a ``.name`` path
            (Gradio File component).

    Returns:
        Tuple of ``(updated chat_history, "")`` — the empty string clears the
        input textbox in the UI.
    """
    # BUG FIX: the original compared against "Resume Summarizer (T5)", a key
    # that does not exist in `models`, so the PDF branch could never run.
    if model_choice == "PDF Summarizer (T5)" and file is not None:
        pdf_text = extract_text_from_pdf(file)
        user_message += f"\n\nPDF Content:\n{pdf_text}"

    # Ignore empty / whitespace-only submissions.
    if not user_message.strip():
        return chat_history, ""

    client = models[model_choice]["client"]

    # Rebuild the full conversation in chat-completion message format.
    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    for user_msg, bot_msg in chat_history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": user_message})

    response = ""
    for message in client.chat_completion(
        messages,
        max_tokens=150,
        stream=True,
        temperature=0.7,
        top_p=0.95,
    ):
        token = message.choices[0].delta.content
        # BUG FIX: streamed deltas may carry content=None (e.g. the final
        # chunk); concatenating None to a str raises TypeError.
        if token:
            response += token

    chat_history.append((user_message, response))
    return chat_history, ""
|
|
|
|
|
def extract_text_from_pdf(file):
    """Extract and concatenate the text of every page of an uploaded PDF.

    Args:
        file: Uploaded file object whose ``.name`` attribute holds the path
            on disk (as provided by Gradio's File component).

    Returns:
        Page texts joined with newlines; pages with no extractable text
        (empty string or None) are skipped.
    """
    reader = PdfReader(file.name)
    # PERF FIX: call extract_text() once per page — the original called it
    # twice (once in the filter, once in the join), doubling the parse work.
    return "\n".join(
        text for page in reader.pages if (text := page.extract_text())
    )
|
|
|
|
|
def create_chat_interface():
    """Build and return the Gradio Blocks UI for the chat application.

    Returns:
        A ``gr.Blocks`` instance wiring the model dropdown, chat history,
        message textbox, and (conditionally visible) PDF upload together.
    """
    with gr.Blocks(css="""
        .chatbox {
            background-color: #f7f7f8;
            border-radius: 12px;
            padding: 16px;
            font-family: 'Segoe UI', Tahoma, sans-serif;
        }
        .chat-title {
            font-size: 24px;
            font-weight: bold;
            text-align: center;
            margin-bottom: 12px;
            color: #3a9fd6;
        }
    """) as interface:
        gr.Markdown("<div class='chat-title'>Job Consultant AI</div>")

        with gr.Row():
            model_choice = gr.Dropdown(
                choices=list(models.keys()),
                value="Job Consultant (Zephyr)",
                label="Select Model"
            )

        chat_history = gr.Chatbot(label="Chat History", elem_classes="chatbox")

        user_message = gr.Textbox(
            placeholder="Type your message here and press Enter...",
            show_label=False,
            elem_classes="chatbox",
        )

        # Hidden unless the PDF summarizer model is selected.
        file_input = gr.File(label="Upload PDF", visible=False, file_types=[".pdf"])

        def toggle_pdf_input(selected_model):
            # BUG FIX: the original compared against "Resume Summarizer (T5)",
            # which is not a dropdown choice (the `models` key is
            # "PDF Summarizer (T5)"), so the upload widget could never appear.
            return gr.update(visible=(selected_model == "PDF Summarizer (T5)"))

        model_choice.change(fn=toggle_pdf_input, inputs=model_choice, outputs=file_input)

        # Submitting the textbox sends the message and clears the box.
        user_message.submit(
            chat_with_model,
            inputs=[model_choice, user_message, chat_history, file_input],
            outputs=[chat_history, user_message],
        )

    return interface
|
|
|
if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable from outside a container.
    app = create_chat_interface()
    app.launch(server_name="0.0.0.0", server_port=7860)
|
|