import torch
from transformers import pipeline
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import gradio as gr
from random import choice
import io
# Load the detection pipeline; the explicit task string is added here because
# the rest of the script consumes object-detection style predictions.
model = pipeline("object-detection", model="jaimin/ObjectDetect")
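# The object-detection pipeline returns a list of dicts shaped like
# (values below are illustrative only):
#   {"score": 0.97, "label": "cat",
#    "box": {"xmin": 12, "ymin": 34, "xmax": 256, "ymax": 310}}
# get_figure() below relies on exactly these keys.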
COLORS = ["#ff7f7f", "#ff7fbf", "#ff7fff", "#bf7fff",
          "#7f7fff", "#7fbfff", "#7fffff", "#7fffbf",
          "#7fff7f", "#bfff7f", "#ffff7f", "#ffbf7f"]

fdic = {
    "family": "Impact",
    "style": "italic",
    "size": 15,
    "color": "yellow",
    "weight": "bold"
}
def get_figure(in_pil_img, in_results):
    """Draw each predicted bounding box and its label on top of the input image."""
    plt.figure(figsize=(16, 10))
    plt.imshow(in_pil_img)
    ax = plt.gca()

    for prediction in in_results:
        selected_color = choice(COLORS)
        box = prediction["box"]
        x, y = box["xmin"], box["ymin"]
        w, h = box["xmax"] - box["xmin"], box["ymax"] - box["ymin"]
        ax.add_patch(plt.Rectangle((x, y), w, h, fill=False, color=selected_color, linewidth=3))
        ax.text(x, y, f"{prediction['label']}: {round(prediction['score'] * 100, 1)}%", fontdict=fdic)

    plt.axis("off")
    return plt.gcf()
def infer(in_pil_img):
    # Run detection with the global pipeline, render the annotated figure,
    # and return it to Gradio as a PIL image.
    results = model(in_pil_img)
    figure = get_figure(in_pil_img, results)

    buf = io.BytesIO()
    figure.savefig(buf, bbox_inches="tight")
    plt.close(figure)
    buf.seek(0)
    output_pil_img = Image.open(buf)

    return output_pil_img
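# Optional sanity check before wiring up the UI (the filename is only an
# example; point it at any local image):
#   infer(Image.open("test.jpg")).save("annotated.jpg")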
# The deprecated gr.inputs / gr.outputs components are replaced with the
# current gr.Image API, and the output is an image (the function returns a
# PIL image, not a caption string). fn must reference infer, which is the
# function actually defined above.
input_image = gr.Image(label="Upload your Image", type="pil")
output_image = gr.Image(label="Detected objects", type="pil")

interface = gr.Interface(
    fn=infer,
    inputs=input_image,
    outputs=output_image,
)

interface.launch(debug=True)
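# Environment note (an assumption, not verified against the original Space):
# for the pipeline above to load, the Space's requirements.txt would need at
# least transformers, torch, gradio, matplotlib and Pillow, and possibly timm,
# which some detection checkpoints depend on.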