import gradio as gr
from transformers import pipeline
from PIL import Image
# Load the image classification pipeline
model = pipeline("image-classification", model="google/vit-base-patch16-224")
def classify_image(uploaded_image):
    # Convert if the uploaded image is not a PIL Image object
    if not isinstance(uploaded_image, Image.Image):
        # If the uploaded image is not a PIL Image object, add handling logic here,
        # e.g. uploaded_image = Image.open(io.BytesIO(uploaded_image))
        raise ValueError("Uploaded image is not a PIL Image object.")
    predictions = model(uploaded_image)
    return {prediction['label']: prediction['score'] for prediction in predictions}
# Create the Gradio interface
iface = gr.Interface(fn=classify_image,
                     inputs=gr.Image(type="pil"),  # deliver the upload as a PIL Image
                     outputs=gr.Label(num_top_classes=3),
                     title="Image Classifier",
                     description="Upload an image and it will recognize the object and show the top 3 classification results.")
# Launch the interface
iface.launch()
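
# Optional local sanity check (commented out; "example.jpg" is a hypothetical file name).
# The pipeline returns a list of {'label': ..., 'score': ...} dicts, which classify_image
# reshapes into the label -> score mapping that gr.Label expects, e.g.:
#
#   print(classify_image(Image.open("example.jpg")))
#   # {'Egyptian cat': 0.52, 'tabby, tabby cat': 0.21, 'tiger cat': 0.11}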