mennamostafa55555 committed
Commit c75d65a · 1 Parent(s): 2f8eb6f

Update app.py

Files changed (1)
  1. app.py +8 -12
app.py CHANGED
@@ -2,7 +2,7 @@ import supervision as sv
 import gradio as gr
 from ultralytics import YOLO
 import sahi
-
+import numpy as np
 
 
 # Images
@@ -25,23 +25,19 @@ sahi.utils.file.download_from_url(
 annotatorbbox = sv.BoxAnnotator()
 annotatormask=sv.MaskAnnotator()
 
-
 def yolov8_inference(
     image: gr.inputs.Image = None,
-    model_name: gr.inputs.Dropdown = None,
-    image_size: gr.inputs.Slider = 360,
-    conf_threshold: gr.inputs.Slider = 0.25,
+    conf_threshold: gr.inputs.Slider = 0.5,
     iou_threshold: gr.inputs.Slider = 0.45,
 ):
 
-
-    model = YOLO("https://huggingface.co/spaces/devisionx/Final_demo/blob/main/best_weights.pt")
-
-    results = model(image,conf=conf_threshold,iou=iou_threshold ,imgsz=360)[0]
+    image=image[:, :, ::-1].astype(np.uint8)
+    model = YOLO("/content/best_weights.pt")
+    results = model(image,imgsz=360)[0]
+    image=image[:, :, ::-1].astype(np.uint8)
     detections = sv.Detections.from_yolov8(results)
-    annotated_image = annotatorbbox.annotate(scene=image, detections=detections)
-    annotated_image = annotatormask.annotate(scene=annotated_image, detections=detections)
-
+    annotated_image = annotatormask.annotate(scene=image, detections=detections)
+    annotated_image = annotatorbbox.annotate(scene=annotated_image , detections=detections)
 
 
 
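For reference, below is a minimal, self-contained sketch of the app.py inference path as it stands after this commit. It is an illustration, not the file's full contents: the return statement, the Gradio interface wiring, and the names introduced here (demo, the Slider labels) are assumptions outside the hunks shown above, and the deprecated gr.inputs type annotations are dropped so the sketch runs on current Gradio. Note that after this change conf_threshold and iou_threshold are still accepted by the function but are no longer forwarded to the model call.

import numpy as np
import gradio as gr
import supervision as sv
from ultralytics import YOLO

annotatorbbox = sv.BoxAnnotator()
annotatormask = sv.MaskAnnotator()

def yolov8_inference(image, conf_threshold=0.5, iou_threshold=0.45):
    # RGB -> BGR before inference, then back again before annotation,
    # mirroring the two channel flips introduced in this commit.
    image = image[:, :, ::-1].astype(np.uint8)
    model = YOLO("/content/best_weights.pt")  # local weights path used by the commit
    results = model(image, imgsz=360)[0]
    image = image[:, :, ::-1].astype(np.uint8)
    detections = sv.Detections.from_yolov8(results)
    # Masks are drawn first, boxes on top, matching the reordered annotate() calls.
    annotated_image = annotatormask.annotate(scene=image, detections=detections)
    annotated_image = annotatorbbox.annotate(scene=annotated_image, detections=detections)
    return annotated_image  # assumed return value; not visible in the diff hunks

# Assumed wiring; the gr.Interface call sits outside the hunks shown above.
demo = gr.Interface(
    fn=yolov8_inference,
    inputs=[
        gr.Image(type="numpy"),
        gr.Slider(0.0, 1.0, value=0.5, label="Confidence threshold"),
        gr.Slider(0.0, 1.0, value=0.45, label="IoU threshold"),
    ],
    outputs=gr.Image(type="numpy"),
)

if __name__ == "__main__":
    demo.launch()

Drawing the masks before the boxes keeps the box outlines visible on top of the filled mask regions, which is the practical effect of swapping the two annotate() calls in this commit.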