hb-setosys committed on
Commit d211767 · verified · 1 Parent(s): a210028

Update app.py

Files changed (1)
  1. app.py +25 -25
app.py CHANGED
@@ -1,7 +1,6 @@
 import cv2
 import numpy as np
 import torch
-import gradio as gr
 from ultralytics import YOLO
 from sort import Sort

@@ -15,17 +14,23 @@ TRUCK_CLASS_ID = 7  # "truck"
 # Initialize SORT tracker
 tracker = Sort()

+# Minimum confidence threshold for detection
+CONFIDENCE_THRESHOLD = 0.5
+
+# Distance threshold to avoid duplicate counts
+DISTANCE_THRESHOLD = 50
+
 def count_unique_trucks(video_path):
     cap = cv2.VideoCapture(video_path)
     if not cap.isOpened():
         return "Error: Unable to open video file."

     unique_truck_ids = set()
-    frame_skip = 5  # Process every 5th frame for efficiency
-    truck_tracker_history = {}  # To store truck ID with frame count and position
-    min_distance_threshold = 50  # Minimum distance between truck positions to be counted as different
+    truck_history = {}

+    frame_skip = 5  # Process every 5th frame for efficiency
     frame_count = 0
+
     while True:
         ret, frame = cap.read()
         if not ret:
@@ -45,7 +50,7 @@ def count_unique_trucks(video_path):
             confidence = float(box.conf.item())  # Get confidence score

             # Track only trucks
-            if class_id == TRUCK_CLASS_ID and confidence > 0.5:
+            if class_id == TRUCK_CLASS_ID and confidence > CONFIDENCE_THRESHOLD:
                 x1, y1, x2, y2 = map(int, box.xyxy[0])  # Get bounding box
                 detections.append([x1, y1, x2, y2, confidence])

@@ -57,30 +62,24 @@ def count_unique_trucks(video_path):
             truck_id = int(obj[4])  # Unique ID assigned by SORT
             x1, y1, x2, y2 = obj[:4]  # Get the bounding box coordinates

-            # Compute the center of the truck for better tracking
-            truck_center = ((x1 + x2) / 2, (y1 + y2) / 2)
+            truck_center = (x1 + x2) / 2, (y1 + y2) / 2  # Calculate the center of the truck

-            # Only add truck ID if it's sufficiently far from previously tracked trucks
-            if truck_id not in truck_tracker_history:
-                truck_tracker_history[truck_id] = {
-                    "frame_count": frame_count,
-                    "position": truck_center
-                }
-                unique_truck_ids.add(truck_id)  # Add the truck as a unique truck
-            else:
-                last_truck = truck_tracker_history[truck_id]
-                last_position = last_truck["position"]
-
-                # Calculate the distance between the current and last positions
+            # If truck is already in history, check the movement distance
+            if truck_id in truck_history:
+                last_position = truck_history[truck_id]["position"]
                 distance = np.linalg.norm(np.array(truck_center) - np.array(last_position))

-                # If the distance between truck positions is greater than the threshold, it's a new truck detection
-                if distance > min_distance_threshold:
+                if distance > DISTANCE_THRESHOLD:
+                    # If the truck moved significantly, count as new
                     unique_truck_ids.add(truck_id)
-                    truck_tracker_history[truck_id] = {
-                        "frame_count": frame_count,
-                        "position": truck_center
-                    }
+
+            else:
+                # If truck is not in history, add it
+                truck_history[truck_id] = {
+                    "frame_count": frame_count,
+                    "position": truck_center
+                }
+                unique_truck_ids.add(truck_id)

     cap.release()

@@ -92,6 +91,7 @@ def analyze_video(video_file):
     return "\n".join([f"{key}: {value}" for key, value in result.items()])

 # Define Gradio interface
+import gradio as gr
 iface = gr.Interface(
     fn=analyze_video,
     inputs=gr.Video(label="Upload Video"),
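
The substance of this commit is a distance gate layered on top of the SORT track IDs: a truck is counted the first time its ID appears, and an existing ID is only re-added to the set after its center has moved more than DISTANCE_THRESHOLD pixels from the stored position. A minimal, self-contained sketch of that gating idea (the helper name should_count and the sample coordinates are illustrative, not taken from app.py):

import numpy as np

DISTANCE_THRESHOLD = 50  # pixels; mirrors the constant added in this commit

def should_count(track_id, center, history):
    """Return True when a track should be counted; records new IDs in history."""
    if track_id in history:
        # Known ID: count again only if its center jumped past the threshold
        moved = np.linalg.norm(np.array(center) - np.array(history[track_id]))
        return moved > DISTANCE_THRESHOLD
    # First sighting of this ID: remember its position and count it
    history[track_id] = center
    return True

history, unique_ids = {}, set()
for tid, center in [(1, (100, 100)), (1, (110, 104)), (2, (400, 220))]:
    if should_count(tid, center, history):
        unique_ids.add(tid)

print(len(unique_ids))  # 2: the small movement of ID 1 is not double-counted

Because the counted IDs live in a set, re-adding an ID that has already been counted leaves the total unchanged, so uniqueness ultimately rests on the IDs that SORT assigns.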