YOLOv8-Plots / app.py
prithivMLmods's picture
Update app.py
063ca5e verified
raw
history blame contribute delete
No virus
3.43 kB
import gradio as gr
import cv2
from ultralytics import YOLO
# Load the YOLOv8-small weights once at module import; this single model
# instance is shared by every process_video() call below.
model = YOLO("yolov8s.pt")
def process_video(video_path, analytics_type):
    """Run YOLO tracking over a video and write an annotated copy to disk.

    Args:
        video_path: Path to the input video file.
        analytics_type: One of "line", "multiple_line", "pie", "area";
            selects which text overlay is drawn on each frame.

    Returns:
        Filename of the annotated output video ("{analytics_type}_output.avi").

    Raises:
        IOError: If the input video cannot be opened.
    """
    cap = cv2.VideoCapture(video_path)
    # Explicit exception instead of `assert` — asserts are stripped under -O.
    if not cap.isOpened():
        raise IOError(f"Error reading video file: {video_path}")

    # Source geometry and frame rate for the writer.
    w, h, fps = (int(cap.get(x)) for x in
                 (cv2.CAP_PROP_FRAME_WIDTH, cv2.CAP_PROP_FRAME_HEIGHT, cv2.CAP_PROP_FPS))
    # Some containers report 0 FPS; a zero-FPS VideoWriter produces no playable
    # output, so fall back to a sane default.
    fps = fps or 30

    output_filename = f"{analytics_type}_output.avi"
    out = cv2.VideoWriter(output_filename, cv2.VideoWriter_fourcc(*"MJPG"), fps, (w, h))

    frame_count = 0
    try:
        while True:
            success, frame = cap.read()
            if not success:
                break
            frame_count += 1

            # Per-frame class-wise detection counts (reset each iteration).
            clswise_count = {}
            results = model.track(frame, persist=True, verbose=False)
            if results[0].boxes.id is not None:
                for cls in results[0].boxes.cls.cpu().tolist():
                    name = model.names[int(cls)]
                    clswise_count[name] = clswise_count.get(name, 0) + 1

            if analytics_type == "line":
                # Single line: total detections in this frame.
                cv2.putText(frame,
                            f"Frame {frame_count}: Detections - {sum(clswise_count.values())}",
                            (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2, cv2.LINE_AA)
            elif analytics_type == "multiple_line":
                # One overlay line per detected class.
                for i, (cls_name, count) in enumerate(clswise_count.items()):
                    cv2.putText(frame, f"{cls_name}: {count}",
                                (10, 30 + (i + 1) * 30), cv2.FONT_HERSHEY_SIMPLEX,
                                1, (0, 255, 0), 2, cv2.LINE_AA)
            elif analytics_type == "pie":
                # Placeholder for pie chart (can implement later with matplotlib and overlay on frame)
                pass
            elif analytics_type == "area":
                # Placeholder for area graph (implement as needed)
                pass

            out.write(frame)
    finally:
        # Release capture and writer even if tracking raises mid-stream;
        # otherwise the output file handle leaks and the video is truncated.
        cap.release()
        out.release()

    return output_filename  # Return the output video file
def gradio_app(video, analytics_type):
    """Gradio click handler: annotate the uploaded video and return its path.

    `video` is already a local file path (Gradio materializes uploads on
    disk), so it can be fed straight to process_video; the returned path
    is rendered by the output gr.Video component.
    """
    return process_video(video, analytics_type)
# Gradio interface: one row of inputs (video upload + analytics selector),
# an output video player, and a button that triggers processing.
with gr.Blocks() as demo:
    gr.Markdown("# YOLO Video Processing App")
    with gr.Row():
        video_input = gr.Video(label="Upload Video")
        analytics_dropdown = gr.Dropdown(
            ["line", "multiple_line", "pie", "area"],
            label="Select Analytics Type",
            value="line"
        )
    output_video = gr.Video(label="Processed Output")
    # Button to start processing
    submit_btn = gr.Button("Process Video")
    # Define the output when the button is clicked
    submit_btn.click(gradio_app, inputs=[video_input, analytics_dropdown], outputs=output_video)
# Launch the Gradio app with a public link
demo.launch(share=True)