# Detectify / app.py
# NOTE: the following Hugging Face Space page artifacts were scraped into the
# source and are not Python code; kept here as comments:
#   arpit13's picture
#   Update app.py
#   e8e5865 verified
import gradio as gr
import cv2
import torch
import numpy as np
# Fetch the pretrained YOLOv5-small checkpoint via torch.hub
# (downloads and caches the ultralytics/yolov5 repo on first run).
model = torch.hub.load(
    'ultralytics/yolov5',
    'yolov5s',
    pretrained=True,
)
# Function to run inference on an image and count objects
def run_inference(image):
    """Run YOLOv5 detection on a PIL image.

    Parameters
    ----------
    image : PIL.Image.Image
        Input image supplied by the Gradio `Image(type="pil")` component.

    Returns
    -------
    tuple[numpy.ndarray, str]
        The annotated RGB image and a newline-separated per-class count
        summary (or a placeholder message when nothing is detected).
    """
    # PIL -> numpy gives an RGB array, which is exactly what YOLOv5's
    # AutoShape wrapper expects for numpy inputs.
    image = np.array(image)
    # Run YOLOv5 inference
    results = model(image)
    # Extract detection results as a DataFrame (xmin, ymin, xmax, ymax,
    # confidence, class, name).
    detections = results.pandas().xyxy[0]
    # Count objects by category
    object_counts = detections['name'].value_counts()
    # Create a formatted string to show object counts; fall back to an
    # explicit message so the textbox is never silently empty.
    if object_counts.empty:
        count_text = "No objects detected"
    else:
        count_text = "\n".join(f"{obj}: {count}" for obj, count in object_counts.items())
    # results.render() draws the boxes onto the (already RGB) input array.
    # BUG FIX: the previous cv2.cvtColor(..., COLOR_BGR2RGB) call swapped the
    # red/blue channels of an image that was never BGR, so Gradio displayed
    # wrong colors. No conversion is needed here.
    annotated_image = results.render()[0]
    return annotated_image, count_text
# Create the Gradio interface with enhanced UI.
# Wires run_inference to one PIL image input and two outputs (annotated
# image + per-class count text), with custom CSS for the Space's look.
interface = gr.Interface(
fn=run_inference,
inputs=gr.Image(type="pil"),
outputs=[
gr.Image(type="pil"),
gr.Textbox(label="Object Counts", lines=5, interactive=False) # Display counts as text
],
title="DTECTIFY, The Object Detection with Counts",
description="Upload an image and let Detectify detect and count objects in real-time. Get annotated results with bounding boxes and an instant count of objects by category. Fast, accurate, and easy-to-use for all your object detection needs!",
# Custom CSS string passed straight to Gradio: animated gradient page
# background, translucent rounded container, styled buttons/textbox, and
# responsive tweaks for mobile/desktop breakpoints.
css="""
/* General body and background settings */
body {
font-family: 'Arial', sans-serif;
background: linear-gradient(135deg, #FF6F61, #FF9F9F, #FFEB3B);
animation: gradientBG 5s ease infinite;
margin: 0;
padding: 0;
color: white;
height: 100vh;
display: flex;
justify-content: center;
align-items: center;
text-align: center;
overflow-y: auto;
}
@keyframes gradientBG {
0% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
100% { background-position: 0% 50%; }
}
/* Main container styles */
.gradio-container {
background: rgba(0, 0, 0, 0.5);
border-radius: 20px;
padding: 30px;
width: 100%;
max-width: 800px;
box-shadow: 0 8px 15px rgba(0, 0, 0, 0.5);
overflow-y: auto;
animation: fadeIn 1s ease-out;
}
/* Fade in effect */
@keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
/* Header styling */
.gradio-header {
font-size: 2.5rem;
font-weight: bold;
color: #FFEB3B;
}
/* Description styling */
.gradio-description {
font-size: 1.2rem;
color: #ffffff;
margin-top: 10px;
font-style: italic;
max-width: 700px;
margin-left: auto;
margin-right: auto;
}
/* Button styling with hover effect */
.gr-button {
background: linear-gradient(90deg, #4CAF50, #FFC107);
color: white;
padding: 1rem 2rem;
font-size: 1.2rem;
border-radius: 12px;
border: none;
cursor: pointer;
transition: transform 0.3s ease, background 0.3s ease, box-shadow 0.3s ease;
box-shadow: 0 5px 10px rgba(0, 0, 0, 0.2);
margin-top: 20px;
width: 200px; /* Fixed width for consistency */
}
.gr-button:hover {
background: linear-gradient(90deg, #FFC107, #4CAF50);
transform: scale(1.05);
box-shadow: 0 8px 20px rgba(0, 0, 0, 0.3);
}
.gr-button:active {
background: linear-gradient(90deg, #4CAF50, #FFC107);
transform: scale(1.1);
box-shadow: 0 10px 25px rgba(0, 0, 0, 0.4);
}
/* Image container styling */
.gr-image-container {
margin-top: 20px;
border-radius: 15px;
box-shadow: 0 5px 10px rgba(0, 0, 0, 0.3);
}
/* Textbox styling */
.gr-textbox {
background-color: #333;
color: #FFEB3B;
border: none;
padding: 10px;
border-radius: 10px;
font-size: 1rem;
width: 100%;
text-align: left;
}
.gr-textbox:focus {
outline: none;
border: 2px solid #FF6F61;
}
/* Button text formatting */
.gradio-button {
text-transform: uppercase;
}
/* Mobile responsiveness */
@media screen and (max-width: 768px) {
.gradio-container {
padding: 15px;
width: 90%;
}
.gradio-header {
font-size: 2rem;
}
.gr-button {
width: 100%;
}
}
/* Desktop and larger screen adjustments */
@media screen and (min-width: 1024px) {
.gr-button {
width: 250px;
}
}
"""
)
# Launch the app (blocks until the Gradio server is stopped).
interface.launch()