# LLM_FinetuneR / app.py
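"""Gradio front end for the LLM_FinetuneR fine-tuning pipeline.

Collects a PDF, a system prompt, and basic training hyperparameters, packages
them into a JSON job, runs handler.py in a subprocess, and displays the
handler's JSON result in the UI.
"""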
import gradio as gr
import json
import subprocess


def run_pipeline(pdf_file, system_prompt, max_step, learning_rate, epochs, model_name):
    # Construct job input
    data = {
        "input": {
            "pdf_file": pdf_file.name,
            "system_prompt": system_prompt,
            "max_step": max_step,
            "learning_rate": learning_rate,
            "epochs": epochs,
            "model_name": model_name
        }
    }
    try:
        # Call handler.py using the constructed input
        input_json = json.dumps(data)
        process = subprocess.Popen(
            ['python3', 'handler.py', '--test_input', input_json],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True
        )
        stdout, stderr = process.communicate()

        # Extract JSON object from the output
        output_lines = stdout.splitlines()
        handler_output = None
        for line in output_lines:
            try:
                parsed_line = json.loads(line)
                if isinstance(parsed_line, dict) and "status" in parsed_line:
                    handler_output = parsed_line
                    break
            except json.JSONDecodeError:
                continue

        if handler_output is None:
            return {"status": "error", "details": f"No valid JSON found in output: {stdout}"}

        # Check the status in the parsed JSON
        if handler_output.get("status") == "success":
            # Extract and format the result
            model_name = handler_output.get("model_name", "N/A")
            processing_time = handler_output.get("processing_time", "N/A")
            evaluation_results = handler_output.get("evaluation_results", {})
            return {
                "model_name": model_name,
                "processing_time": processing_time,
                "evaluation_results": evaluation_results
            }
        else:
            # Return error details from the handler output
            return handler_output
    except FileNotFoundError:
        return {"status": "error", "details": "Handler script not found"}
    except Exception as e:
        return {"status": "error", "details": str(e)}
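
# For reference, run_pipeline() scans handler.py's stdout for a single JSON line
# carrying a "status" key. The exact schema is defined by handler.py; a purely
# illustrative success line (field values here are hypothetical) might look like:
#
#   {"status": "success", "model_name": "my-finetuned-model",
#    "processing_time": "00:14:32", "evaluation_results": {"eval_loss": 0.87}}
#
# Any other stdout lines (progress logs, warnings) are skipped, and a non-"success"
# status object is returned to the UI unchanged.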
# Define Gradio interface
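# Note: the CSS below targets Gradio's internal class names (.gr-row, .gr-textbox,
# .gr-json, ...). These selectors are not a stable public API and can change between
# Gradio releases; if the styling stops applying, attaching elem_id/elem_classes to
# the components is a more robust hook.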
with gr.Blocks(css='''
    .gradio-container {
        background-color: #121212; /* Dark background */
        color: #f1f1f1; /* Light text color */
        padding: 20px;
        font-family: 'Arial', sans-serif;
    }
    .gr-row {
        margin-bottom: 20px;
    }
    /* Styling for Textboxes and Numbers */
    input[type="text"], input[type="number"], textarea {
        background-color: #f0f0f0; /* Light grey background for inputs */
        border: 1px solid #ccc; /* Light grey border */
        color: #000; /* Black text inside the inputs */
        border-radius: 8px;
        padding: 10px;
        font-size: 16px;
        width: 100%;
        box-sizing: border-box;
    }
    /* Styling specific to textarea placeholder */
    textarea::placeholder {
        color: #999; /* Slightly darker grey placeholder text */
    }
    /* Button styling */
    button {
        background-color: #4CAF50; /* Green button */
        color: white;
        border: none;
        padding: 12px 20px;
        cursor: pointer;
        font-weight: bold;
        font-size: 16px;
        transition: background-color 0.3s ease;
        border-radius: 8px;
    }
    button:hover {
        background-color: #3e8e41; /* Darker green hover effect */
    }
    /* Styling for JSON output */
    .gr-json {
        background-color: #333; /* Dark background for JSON output */
        border: 1px solid #444; /* Slightly lighter border */
        padding: 12px;
        font-size: 14px;
        max-height: 300px;
        overflow-y: auto;
        margin-top: 10px;
        color: #f1f1f1; /* Light text color */
    }
    /* Adjust margins for all inputs */
    .gr-row .gr-textbox, .gr-row .gr-number {
        margin-bottom: 15px;
    }
''') as demo:
    # Add Heading at the top
    gr.Markdown(
        '<h2 style="color: #87CEEB; text-align: center;">🤖 Fine-tuning Pipeline Configurator</h2>'
    )

    # Layout structure with improved spacing
    with gr.Row():
        with gr.Column(scale=2):
            pdf_file = gr.File(label="Upload PDF File", file_types=[".pdf"])  # Accepts PDF uploads only
        with gr.Column(scale=3):
            system_prompt = gr.Textbox(
                label="System Prompt",
                placeholder="Enter system instructions or context",
                value="You are a helpful assistant that provides detailed information based on the provided text."
            )
        with gr.Column(scale=2):
            max_step = gr.Number(label="Max Steps", value=150)
        with gr.Column(scale=2):
            learning_rate = gr.Number(label="Learning Rate", value=2e-4)
        with gr.Column(scale=2):
            epochs = gr.Number(label="Epochs", value=10)
        with gr.Column(scale=3):
            model_name = gr.Textbox(label="Model Name", placeholder="Enter the model name")

    result_output = gr.JSON(label="Pipeline Results")
    run_button = gr.Button("Run Pipeline")

    # Trigger the function when the button is clicked
    run_button.click(
        run_pipeline,
        inputs=[pdf_file, system_prompt, max_step, learning_rate, epochs, model_name],
        outputs=[result_output]
    )
# Run Gradio app
demo.launch()
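
# Note: on Hugging Face Spaces the default launch() arguments are sufficient; for
# local testing, standard Gradio options such as demo.launch(share=True) or
# demo.launch(server_name="0.0.0.0") can be used instead.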