import logging
import multiprocessing
import threading
import time

import cv2

import imgcomparison
from detector import InfiniteCounter

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

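
# VideoGet splits the input video into contiguous frame ranges (one per CPU core
# by default) and scans each range on its own thread: consecutive frames are
# compared with imgcomparison's AbsDiffHistComparator, and the frames collected
# around each detected transition are appended, per segment, to self.results.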
class VideoGet:
    def __init__(self, src, segments=multiprocessing.cpu_count()):
        self.device = src
        self.results = []
        self.results_lock = threading.Lock()

        # Probe the source once for its total frame count, then release the handle.
        probe = cv2.VideoCapture(src)
        self.total_frames = int(probe.get(cv2.CAP_PROP_FRAME_COUNT))
        probe.release()

        # Guard against a zero step when the video has fewer frames than segments.
        self.segment_length = max(1, self.total_frames // segments)
        self.segments = self._split_into_segments()
        self.threads = []
        self.comparator = imgcomparison.AbsDiffHistComparator(0.99)
        logging.info(f"VideoGet initialized with {segments} segments")

    def _split_into_segments(self):
        segments = []
        for start_frame in range(0, self.total_frames, self.segment_length):
            end_frame = min(start_frame + self.segment_length, self.total_frames)
            segments.append((start_frame, end_frame))
        logging.info(f"Video split into {len(segments)} segments")
        return segments

    def start(self):
        logging.info("Starting video processing")
        for segment in self.segments:
            thread = threading.Thread(target=self.process_segment, args=(segment,))
            thread.start()
            self.threads.append(thread)

    def check_transition(self, local_stream):
        # Generator that yields (frame_index, frame) each time a transition has
        # settled, and finally (frame_index, None) when the stream is exhausted.
        _, prev_frame = local_stream.read()
        if prev_frame is None:
            logging.warning(f"{threading.current_thread().name} | Initial frame is None")
            return
        yield 0, prev_frame

        frame_counter = InfiniteCounter()
        for frame_count in frame_counter.count():
            _, frame = local_stream.read()

            if frame is None:
                logging.info(f"{threading.current_thread().name} | End of segment reached")
                break
            elif not self.comparator.are_same(prev_frame, frame):
                logging.info(f"{threading.current_thread().name} | Transition detected at frame {frame_count}")

                # Skip ahead until consecutive frames match again, i.e. the
                # transition has finished.
                while not self.comparator.are_same(prev_frame, frame):
                    prev_frame = frame
                    _, frame = local_stream.read()
                    if frame is None:
                        # Stream ended mid-transition; stop before comparing against None.
                        break
                    frame_counter.increment()
                yield frame_count, frame

            prev_frame = frame

        yield frame_count, None

    def process_segment(self, segment):
        start_frame, end_frame = segment
        logging.info(f"{threading.current_thread().name} | Processing segment: Start frame {start_frame}, End frame {end_frame}")

        # Each thread opens its own capture; a cv2.VideoCapture must not be shared across threads.
        local_stream = cv2.VideoCapture(self.device)
        local_stream.set(cv2.CAP_PROP_POS_FRAMES, start_frame)

        qualifying_frames = []
        last_transition_frame = start_frame

        # Collect frames up to each detected transition, stopping at the segment boundary.
        for transition_frame, frame in self.check_transition(local_stream):
            # The generator counts frames relative to the segment start, so convert
            # to an absolute frame index before comparing with the segment bounds.
            if transition_frame is not None and last_transition_frame < end_frame:
                absolute_transition = start_frame + transition_frame
                while last_transition_frame <= absolute_transition and last_transition_frame < end_frame:
                    grabbed, current_frame = local_stream.read()
                    if not grabbed:
                        break
                    qualifying_frames.append(current_frame)
                    last_transition_frame += 1
            if transition_frame is None or start_frame + transition_frame >= end_frame:
                break

        # Drain whatever remains of the segment after the last transition.
        while last_transition_frame < end_frame:
            grabbed, frame = local_stream.read()
            if not grabbed:
                break
            qualifying_frames.append(frame)
            last_transition_frame += 1

        local_stream.release()
        logging.info(f"{threading.current_thread().name} | Segment processed. Start frame: {start_frame}, End frame: {end_frame}")

        # Threads finish in arbitrary order, so self.results is not sorted by segment.
        with self.results_lock:
            self.results.append(qualifying_frames)

    def stop(self):
        for thread in self.threads:
            thread.join()
            logging.info("Thread joined")
        logging.info("Stopping video processing")


if __name__ == '__main__':
    start_time = time.time()
    # 'video.mp4' is a placeholder path; point it at the video to be processed.
    video_get = VideoGet('video.mp4')
    video_get.start()
    video_get.stop()
    end_time = time.time()
    total_time = end_time - start_time
    logging.info(f"Total video processing time: {total_time:.2f} seconds")
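
    # A minimal sketch (not part of the original script) of how the collected
    # frames might be consumed afterwards: write every frame of every segment to
    # a placeholder "frames/" directory with cv2.imwrite.
    import os

    os.makedirs("frames", exist_ok=True)
    for segment_index, frames in enumerate(video_get.results):
        for frame_index, segment_frame in enumerate(frames):
            if segment_frame is not None:
                out_path = os.path.join("frames", f"seg{segment_index:02d}_frame{frame_index:05d}.png")
                cv2.imwrite(out_path, segment_frame)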