import cv2
import gradio as gr
from ultralytics import YOLO

# Load the custom weights once at startup so they are not reloaded on every request.
model = YOLO("best.pt")


def fonk(video_path):
    cap = cv2.VideoCapture(video_path)
    frame_width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    frame_height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    size = (frame_width, frame_height)
    # Keep the source frame rate so playback speed is preserved; fall back to
    # 10 fps if the container does not report one.
    fps = cap.get(cv2.CAP_PROP_FPS) or 10

    # Write to an .mp4 file with a matching mp4v codec (XVID belongs in an .avi container).
    output_path = "filename.mp4"
    output_video = cv2.VideoWriter(output_path, cv2.VideoWriter_fourcc(*"mp4v"), fps, size)

    while True:
        ret, frame = cap.read()
        if not ret:
            break
        results = model(frame)
        for result in results:
            # Draw every detected box, not just the first one.
            for box in result.boxes:
                x1, y1, x2, y2 = map(int, box.xyxy[0])
                print(x1, y1, x2, y2)  # debug output: box coordinates
                frame = cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
        output_video.write(frame)

    # Release both handles so the file is flushed to disk, then return its path:
    # gr.Video expects a filepath, not a cv2.VideoWriter object.
    cap.release()
    output_video.release()
    return output_path


demo = gr.Interface(
    fonk,
    inputs=gr.Video(),
    outputs=gr.Video(),
    examples=["cow-video-cows-mooing-and-grazing-in-a-field.mp4"],
    title="cows",
    cache_examples=True,
)
demo.launch()
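

# --- Optional sketch (not part of the original script): label each box with the
# predicted class name and confidence score. This assumes the usual Ultralytics
# result fields (box.cls, box.conf) and the model.names mapping; the label format
# and text placement are illustrative choices, not the original author's. To use
# it, define it above fonk and call it inside the frame loop in place of the plain
# cv2.rectangle call. It relies on the cv2 import at the top of this file.
def draw_labeled_boxes(frame, result, names):
    for box in result.boxes:
        x1, y1, x2, y2 = map(int, box.xyxy[0])
        label = f"{names[int(box.cls[0])]} {float(box.conf[0]):.2f}"
        cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
        cv2.putText(frame, label, (x1, max(y1 - 5, 10)),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
    return frame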