followme.py
#!/usr/bin/env python3
import pyrealsense2 as rs
import numpy as np
import cv2
from ultralytics import YOLO
import rclpy
from rclpy.node import Node
from geometry_msgs.msg import Twist


class PersonFollower(Node):
    def __init__(self):
        super().__init__('person_follower')

        # Initialize RealSense pipeline with matched color and depth streams
        self.pipeline = rs.pipeline()
        config = rs.config()
        config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
        config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
        self.pipeline.start(config)

        # Initialize YOLO model
        self.model = YOLO('yolov8n.pt')

        # Initialize ROS2 publisher for robot control
        self.cmd_vel_publisher = self.create_publisher(
            Twist,
            '/robot532f2c9fdf664d13b62dc872703e5448/cmd_vel',
            10
        )

        # Create timer for main control loop (10 Hz)
        self.timer = self.create_timer(0.1, self.control_loop)

        # Parameters
        self.min_distance = 1.0   # meters
        self.max_velocity = 0.2   # 20% of max velocity

        self.get_logger().info('Person follower node initialized')

    def control_loop(self):
        # Wait for a coherent pair of frames
        frames = self.pipeline.wait_for_frames()
        color_frame = frames.get_color_frame()
        depth_frame = frames.get_depth_frame()
        if not color_frame or not depth_frame:
            return

        # Convert images to numpy arrays
        color_image = np.asanyarray(color_frame.get_data())
        depth_image = np.asanyarray(depth_frame.get_data())

        # Run YOLO detection
        results = self.model(color_image)

        # Initialize twist message (fields default to 0.0, so no
        # detection means the robot is commanded to stop)
        twist = Twist()

        # Process detections
        for result in results:
            boxes = result.boxes
            for box in boxes:
                # Check if the detected object is a person (class 0 in COCO dataset)
                if int(box.cls) == 0:  # person class
                    # Get bounding box coordinates
                    x1, y1, x2, y2 = box.xyxy[0].cpu().numpy()
                    x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2)

                    # Calculate center of the bounding box
                    center_x = (x1 + x2) // 2
                    center_y = (y1 + y2) // 2

                    # Get depth at the center of the person
                    # (returns 0.0 where the sensor has no valid depth)
                    depth = depth_frame.get_distance(center_x, center_y)

                    # Draw bounding box and distance
                    cv2.rectangle(color_image, (x1, y1), (x2, y2), (0, 255, 0), 2)
                    cv2.putText(color_image, f'Distance: {depth:.2f}m',
                                (x1, y1 - 10), cv2.FONT_HERSHEY_SIMPLEX,
                                0.5, (0, 255, 0), 2)

                    # Control robot based on distance
                    if depth > self.min_distance:
                        # Move forward
                        twist.linear.x = self.max_velocity
                        # Add some angular velocity to center the person
                        image_center = color_image.shape[1] // 2
                        angular_velocity = (center_x - image_center) / image_center
                        twist.angular.z = -angular_velocity * 0.5
                    else:
                        # Stop if too close
                        twist.linear.x = 0.0
                        twist.angular.z = 0.0
                    break  # Only follow the first person detected

        # Publish twist message
        self.cmd_vel_publisher.publish(twist)

        # Display the image
        cv2.imshow('Person Following', color_image)
        cv2.waitKey(1)

    def __del__(self):
        self.pipeline.stop()
        cv2.destroyAllWindows()


def main(args=None):
    rclpy.init(args=args)
    follower = PersonFollower()
    try:
        rclpy.spin(follower)
    except KeyboardInterrupt:
        pass
    finally:
        follower.destroy_node()
        rclpy.shutdown()


if __name__ == '__main__':
    main()
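
One known weakness of the script above is that it samples depth from a single pixel with get_distance(), which on a D435 can be 0.0 or noisy at that exact point. A common mitigation (an assumption on my part, not part of the gist) is to take the median over a small window of the raw depth_image the script already computes. The depth_scale lookup shown is the standard pyrealsense2 way to convert raw z16 values to meters:

import numpy as np

# Robustness sketch (not in the original gist): median depth over a
# (2*half+1)^2 window around the person's center, ignoring zero pixels,
# which RealSense uses to mark "no valid depth".
def median_depth(depth_image, depth_scale, cx, cy, half=5):
    h, w = depth_image.shape
    window = depth_image[max(0, cy - half):min(h, cy + half + 1),
                         max(0, cx - half):min(w, cx + half + 1)]
    valid = window[window > 0]
    if valid.size == 0:
        return 0.0  # no usable depth in the window
    return float(np.median(valid)) * depth_scale

# depth_scale comes from the device; pipeline.start() returns the profile:
# profile = self.pipeline.start(config)
# depth_scale = profile.get_device().first_depth_sensor().get_depth_scale()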
@chrismatthieu (Author)

Here's the prompt:

I would like to create a follow-the-person Python script for this robot. It's running ROS 2 Jazzy and has an Intel RealSense D435 stereo depth camera. Please use the Python pyrealsense2-beta pip package for interacting with the camera. Use YOLOv8 for object detection. Use the following ROS 2 topic for sending Twist messages: /robot532f2c9fdf664d13b62dc872703e5448/cmd_vel

Please have the robot perform object detection looking for a human. If a human is detected, move the robot forward at 20% velocity and continue moving forward as long as the robot is more than 1 meter away from the human. Stop the robot if it is less than 1 meter away.

Please draw a rectangle around the detected human and overlay their distance from the robot on the video.

--
Make the robot go faster the further away the human is from the robot.
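
The script above uses a fixed forward speed, so this follow-up isn't implemented in it. A minimal sketch of distance-proportional speed (the v_max and gain values here are illustrative assumptions, not from the gist) would replace the twist.linear.x = self.max_velocity line with a helper like this:

# Sketch for the follow-up request: forward speed grows with distance.
# v_max and gain are assumed values; tune them for the actual robot.
def speed_for_distance(depth_m, min_distance=1.0, v_max=0.5, gain=0.25):
    """Linear ramp: 0 m/s at min_distance, capped at v_max."""
    if depth_m <= min_distance:
        return 0.0  # still stop inside the 1 m threshold
    return min(v_max, gain * (depth_m - min_distance))

# Inside control_loop, instead of twist.linear.x = self.max_velocity:
# twist.linear.x = speed_for_distance(depth)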
