# YOLO video-analytics demo (Gradio app for Hugging Face Spaces).
# Third-party dependencies: Gradio UI, OpenCV video I/O, Ultralytics YOLO.
import gradio as gr
import cv2
from ultralytics import YOLO, solutions

# Detection/tracking model shared by every request; loaded once at startup
# (downloads the weights on first run).
model = YOLO("yolov8s.pt")
def process_video(video_path, analytics_type):
    """Run YOLO tracking over a video and render per-frame analytics charts.

    Args:
        video_path: Path to the input video file.
        analytics_type: One of ``"line"``, ``"multiple_line"``, ``"pie"`` or
            ``"area"`` — selects which Ultralytics analytics chart is drawn.

    Returns:
        Filename of the rendered output video (AVI container, MJPG codec).

    Raises:
        IOError: If the input video cannot be opened.
    """
    cap = cv2.VideoCapture(video_path)
    # Raise instead of `assert`: asserts are stripped under `python -O`.
    if not cap.isOpened():
        raise IOError("Error reading video file")

    # Input geometry / frame rate, reused for the writer and the chart canvas.
    w, h, fps = (
        int(cap.get(prop))
        for prop in (
            cv2.CAP_PROP_FRAME_WIDTH,
            cv2.CAP_PROP_FRAME_HEIGHT,
            cv2.CAP_PROP_FPS,
        )
    )
    output_filename = f"{analytics_type}_output.avi"
    out = cv2.VideoWriter(
        output_filename, cv2.VideoWriter_fourcc(*"MJPG"), fps, (w, h)
    )

    # NOTE(review): keyword names (`type`, `writer`, `im0_shape`) follow the
    # Analytics API of the Ultralytics version this was written against —
    # confirm against the pinned ultralytics release.
    analytics = solutions.Analytics(
        type=analytics_type,
        writer=out,
        im0_shape=(w, h),
        view_img=False,
    )

    frame_count = 0
    try:
        while cap.isOpened():
            success, frame = cap.read()
            if not success:
                break
            frame_count += 1
            results = model.track(frame, persist=True, verbose=False)

            # Per-frame class histogram; only tracked detections (with an
            # assigned ID) are counted. The unused per-box coordinates from
            # the original code are no longer extracted.
            clswise_count = {}
            if results[0].boxes.id is not None:
                for cls in results[0].boxes.cls.cpu().tolist():
                    name = model.names[int(cls)]
                    clswise_count[name] = clswise_count.get(name, 0) + 1

            # Feed the chosen chart type with this frame's counts.
            if analytics_type == "line":
                analytics.update_line(frame_count, sum(clswise_count.values()))
            elif analytics_type == "multiple_line":
                analytics.update_multiple_lines(
                    clswise_count, list(clswise_count.keys()), frame_count
                )
            elif analytics_type == "pie":
                analytics.update_pie(clswise_count)
            elif analytics_type == "area":
                analytics.update_area(frame_count, clswise_count)
    finally:
        # Always release OS handles, even if tracking/plotting fails mid-run.
        cap.release()
        out.release()

    return output_filename
def gradio_app(video, analytics_type):
    """Gradio callback: process the uploaded video and return the result path.

    Args:
        video: The uploaded video — a filepath ``str`` on current Gradio
            versions, or a tempfile-like object with a ``.name`` attribute
            on older ones.
        analytics_type: Chart type forwarded to :func:`process_video`.

    Returns:
        Path of the processed output video for the output component.
    """
    # Accept both upload representations; `video.name` alone breaks on
    # modern Gradio, which passes a plain filepath string.
    video_path = video if isinstance(video, str) else video.name
    return process_video(video_path, analytics_type)
# Gradio interface: upload a video, pick a chart type, view the rendered result.
with gr.Blocks() as demo:
    gr.Markdown("# YOLO Video Processing App")
    with gr.Row():
        # FIX: `gr.Video` takes no `type` kwarg (raises TypeError on current
        # Gradio); the component hands the callback the uploaded file itself.
        video_input = gr.Video(label="Upload Video")
        analytics_dropdown = gr.Dropdown(
            ["line", "multiple_line", "pie", "area"],
            label="Select Analytics Type",
            value="line",
        )
    output_video = gr.Video(label="Processed Output")
    submit_btn = gr.Button("Process Video")
    # Wire the button to the processing callback.
    submit_btn.click(
        gradio_app,
        inputs=[video_input, analytics_dropdown],
        outputs=output_video,
    )

# Launch the Gradio app (blocking call; kept at module level so the script
# behaves the same when executed directly, as on Spaces).
demo.launch()