SivaResearch committed on
Commit
0bed063
·
1 Parent(s): 06de5c0

Added file for initial run

Browse files
Files changed (5) hide show
  1. app.py +104 -0
  2. requirements.txt +2 -0
  3. yolov8n-cls.pt +3 -0
  4. yolov8n-seg.pt +3 -0
  5. yolov8n.pt +3 -0
app.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from ultralyticsplus import YOLO, render_result, postprocess_classify_output
3
+
4
+
5
def classification(image, threshold):
    """Run YOLOv8 image classification on *image*.

    Args:
        image: Input image as supplied by the Gradio ``Image`` component.
        threshold: Confidence threshold in [0, 1] applied via the model's
            ``overrides['conf']`` setting.

    Returns:
        The output of ``postprocess_classify_output`` for the first result —
        presumably a class-name -> probability mapping consumed by
        ``gr.Label`` (TODO confirm against ultralyticsplus docs).
    """
    # Load the weights once and reuse the model across calls; reloading
    # the checkpoint from disk on every inference is needlessly slow.
    if not hasattr(classification, "_model"):
        classification._model = YOLO('yolov8n-cls.pt')
    model = classification._model
    # The threshold is re-applied each call so slider changes take effect.
    model.overrides['conf'] = threshold
    result = model.predict(image)
    return postprocess_classify_output(model=model, result=result[0])
13
+
14
+
15
def detection(image, threshold):
    """Run YOLOv8 object detection and return an annotated image.

    Args:
        image: Input image as supplied by the Gradio ``Image`` component.
        threshold: Confidence threshold in [0, 1] applied via the model's
            ``overrides['conf']`` setting.

    Returns:
        The rendered image produced by ``render_result`` with the detected
        boxes drawn on top of *image*.
    """
    # Load the weights once and reuse the model across calls; reloading
    # the checkpoint from disk on every inference is needlessly slow.
    if not hasattr(detection, "_model"):
        detection._model = YOLO('yolov8n.pt')
    model = detection._model
    # The threshold is re-applied each call so slider changes take effect.
    model.overrides['conf'] = threshold
    results = model.predict(image)
    return render_result(model=model, image=image, result=results[0])
22
+
23
+
24
def segmentation(image, threshold):
    """Run YOLOv8 instance segmentation and return an annotated image.

    Args:
        image: Input image as supplied by the Gradio ``Image`` component.
        threshold: Confidence threshold in [0, 1] applied via the model's
            ``overrides['conf']`` setting.

    Returns:
        The rendered image produced by ``render_result`` with the predicted
        masks drawn on top of *image*.
    """
    # Load the weights once and reuse the model across calls; reloading
    # the checkpoint from disk on every inference is needlessly slow.
    if not hasattr(segmentation, "_model"):
        segmentation._model = YOLO('yolov8n-seg.pt')
    model = segmentation._model
    # The threshold is re-applied each call so slider changes take effect.
    model.overrides['conf'] = threshold
    results = model.predict(image)
    return render_result(model=model, image=image, result=results[0])
31
+
32
+
33
def _threshold_slider():
    """Build the confidence-threshold slider shared by all three tabs."""
    return gr.Slider(
        maximum=1,
        step=0.01,
        value=0.25,
        label="Threshold:",
        interactive=True)


# Three-tab demo: detection, segmentation, and classification, each with
# an image input, a confidence slider, and a rendered-prediction output.
with gr.Blocks() as demo:

    with gr.Tab("Detection"):
        with gr.Row():
            with gr.Column():
                detect_input = gr.Image()
                detect_threshold = _threshold_slider()
                detect_button = gr.Button("Detect!")
            with gr.Column():
                detect_output = gr.Image(
                    label="Predictions:", interactive=False)

    with gr.Tab("Segmentation"):
        with gr.Row():
            with gr.Column():
                segment_input = gr.Image()
                segment_threshold = _threshold_slider()
                segment_button = gr.Button("Segment!")
            with gr.Column():
                segment_output = gr.Image(
                    label="Predictions:", interactive=False)

    with gr.Tab("Classification"):
        with gr.Row():
            with gr.Column():
                classify_input = gr.Image()
                classify_threshold = _threshold_slider()
                classify_button = gr.Button("Classify!")
            with gr.Column():
                classify_output = gr.Label(
                    label="Predictions:", show_label=True, num_top_classes=5)

    # Wire each button to its inference function.
    detect_button.click(
        detection,
        inputs=[detect_input, detect_threshold],
        outputs=detect_output,
        api_name="Detect")

    segment_button.click(
        segmentation,
        inputs=[segment_input, segment_threshold],
        outputs=segment_output,
        api_name="Segmentation")

    classify_button.click(
        classification,
        inputs=[classify_input, classify_threshold],
        outputs=classify_output,
        api_name="classify")


demo.launch(debug=True, enable_queue=True)
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ ultralyticsplus==0.0.28
2
+ gradio==3.23.0
yolov8n-cls.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f5079cd980628313a2e62cd09d358e5c8debf0d8f75b6e8be7973d94e3a5da9f
3
+ size 5533216
yolov8n-seg.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d39e867b2c3a5dbc1aa764411544b475cb14727bf6af1ec46c238f8bb1351ab9
3
+ size 7054355
yolov8n.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:31e20dde3def09e2cf938c7be6fe23d9150bbbe503982af13345706515f2ef95
3
+ size 6534387