Implement ArUco polygon definition and ball coordinates relative to its centroid; add a roslaunch file for the pipeline.

Cagdas Aras CIBLAK 2024-03-05 16:51:15 +01:00
parent 7b42d5325c
commit cfc811f636
8 changed files with 209 additions and 179 deletions

View File

@ -0,0 +1,11 @@
<launch>
<!-- Camera Publisher Node -->
<node name="camera_publisher" pkg="ball_tracking" type="camera_publisher.py" output="screen" />
<!-- ArUco Tag Detection Node -->
<node name="aruco_area" pkg="ball_tracking" type="aruco_area.py" output="screen" />
<!-- Ball Tracking Node -->
<node name="tracker_aruco" pkg="ball_tracking" type="tracker_aruco.py" output="screen" />
</launch>
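Assuming the three scripts are marked executable (chmod +x) and this file is saved under the ball_tracking package's launch/ directory, the whole pipeline can be started with a single roslaunch call, e.g. roslaunch ball_tracking tracking.launch (the launch file's name is not shown in this diff; tracking.launch is a placeholder).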

View File

@ -0,0 +1,72 @@
#!/usr/bin/env python3
import rospy
from geometry_msgs.msg import Point
from sensor_msgs.msg import Image
import cv2
import cv2.aruco as aruco
import numpy as np
from cv_bridge import CvBridge
def calculate_marker_centers(corners):
    """Return the (x, y) pixel center of each detected ArUco marker."""
    centers = []
    for corner in corners:
        # corner has shape (1, 4, 2): the four corner points of one marker.
        cx = float(np.mean(corner[0][:, 0]))
        cy = float(np.mean(corner[0][:, 1]))
        centers.append((cx, cy))
    return centers

def find_polygon_centroid(centers):
    """Return the centroid (vertex mean) of the polygon formed by the marker centers."""
    centroid_x = float(np.mean([center[0] for center in centers]))
    centroid_y = float(np.mean([center[1] for center in centers]))
    return centroid_x, centroid_y
def image_callback(msg):
global pub
bridge = CvBridge()
frame = bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
corners, ids, rejected = aruco.detectMarkers(gray, arucoDict)
if ids is not None and len(ids) == 4: # Ensure exactly four markers are detected
marker_centers = calculate_marker_centers(corners)
centerX, centerY = find_polygon_centroid(marker_centers)
if centerX is not None and centerY is not None:
origin = Point()
origin.x = centerX
origin.y = centerY
origin.z = 0
pub.publish(origin)
# Optional: Visualize the centroid for debugging
cv2.circle(frame, (int(centerX), int(centerY)), 5, (0, 255, 0), -1)
# Also, draw detected markers for visualization
aruco.drawDetectedMarkers(frame, corners, ids)
# Display the frame with detected markers and the calculated centroid
cv2.imshow('Frame', frame)
cv2.waitKey(1)
def main():
global pub
rospy.init_node('aruco_origin_publisher', anonymous=True)
pub = rospy.Publisher('/aruco_origin', Point, queue_size=10)
# Initialize the ArUco dictionary
global arucoDict
arucoDict = aruco.getPredefinedDictionary(aruco.DICT_4X4_50)
# Subscribe to the camera_image topic
rospy.Subscriber('camera_image', Image, image_callback)
rospy.spin()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
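The helper functions above are pure numpy and can be sanity-checked without a camera or a ROS master. A minimal sketch, using fabricated corner arrays in the (1, 4, 2) layout that aruco.detectMarkers returns (all pixel values are made up for illustration):

import numpy as np

# Four fake markers, one per table corner, each shaped (1, 4, 2) like the
# entries of the `corners` list returned by aruco.detectMarkers.
corners = [
    np.array([[[10, 10], [20, 10], [20, 20], [10, 20]]], dtype=np.float32),
    np.array([[[90, 10], [100, 10], [100, 20], [90, 20]]], dtype=np.float32),
    np.array([[[90, 90], [100, 90], [100, 100], [90, 100]]], dtype=np.float32),
    np.array([[[10, 90], [20, 90], [20, 100], [10, 100]]], dtype=np.float32),
]

# Same math as calculate_marker_centers / find_polygon_centroid.
centers = [(float(np.mean(c[0][:, 0])), float(np.mean(c[0][:, 1]))) for c in corners]
origin = (float(np.mean([p[0] for p in centers])), float(np.mean([p[1] for p in centers])))

print(centers)  # [(15.0, 15.0), (95.0, 15.0), (95.0, 95.0), (15.0, 95.0)]
print(origin)   # (55.0, 55.0)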

View File

@ -0,0 +1,39 @@
#!/usr/bin/env python3
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge
import cv2
def main():
rospy.init_node('camera_publisher', anonymous=True)
# Create a publisher on the 'camera_image' topic, sending Image messages
image_pub = rospy.Publisher('camera_image', Image, queue_size=10)
bridge = CvBridge()
cap = cv2.VideoCapture(0)
if not cap.isOpened():
rospy.logerr("Error: Could not open camera.")
return
rate = rospy.Rate(30) # Adjust the rate as needed
while not rospy.is_shutdown():
ret, frame = cap.read()
if not ret:
rospy.logerr("Error: Can't receive frame (stream end?). Exiting ...")
break
# Convert the OpenCV image to a ROS Image message
image_msg = bridge.cv2_to_imgmsg(frame, "bgr8")
image_pub.publish(image_msg)
rate.sleep()
cap.release()
if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        # rate.sleep() raises this on shutdown; exit quietly.
        pass
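Before the ArUco and tracking nodes are wired in, it can help to confirm that frames actually arrive on camera_image. A throwaway probe node, sketched below under the topic name used above (the node name is illustrative):

#!/usr/bin/env python3
import rospy
from sensor_msgs.msg import Image

def on_image(msg):
    # Log the resolution and encoding of incoming frames, at most once per second.
    rospy.loginfo_throttle(1.0, "frame %dx%d (%s)" % (msg.width, msg.height, msg.encoding))

if __name__ == '__main__':
    rospy.init_node('camera_image_probe', anonymous=True)
    rospy.Subscriber('camera_image', Image, on_image)
    rospy.spin()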

View File

@ -1,69 +0,0 @@
#!/usr/bin/env python3
import rospy
import cv2
import numpy as np
import cv_bridge
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
class BallTracker:
def __init__(self):
self.bridge = cv_bridge.CvBridge()
self.image_sub = rospy.Subscriber("/camera/image_raw", Image, self.image_callback)
self.coord_pub = rospy.Publisher("/ball/coordinates", Point, queue_size=10)
def image_callback(self, msg):
# Convert the ROS Image message to a CV2 image
        try:
            cv_image = self.bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
        except cv_bridge.CvBridgeError as e:
            rospy.logerr(e)
            return
# Process the cv_image here to find the ball and its coordinates
# For example, using color detection, contour detection, etc.
ball_center = self.find_ball(cv_image)
# If a ball is detected, publish its coordinates
if ball_center is not None:
point = Point()
point.x = ball_center[0]
point.y = ball_center[1]
point.z = 0 # Assuming the table is a 2D plane
self.coord_pub.publish(point)
def find_ball(self, image):
# Convert the image from BGR to HSV color space
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
# Define the HSV range for orange color
        lower_bound = np.array([0, 100, 100])  # Adjust to the ball's color under your lighting conditions
upper_bound = np.array([22, 255, 255])
# Threshold the HSV image to get only the orange colors
mask = cv2.inRange(hsv, lower_bound, upper_bound)
# Perform morphological operations to remove small noises and fill in the holes
mask = cv2.erode(mask, None, iterations=2)
mask = cv2.dilate(mask, None, iterations=2)
# Find contours in the masked image
contours, _ = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
if contours:
# Find the largest contour in the mask, then use it to compute the minimum enclosing circle
c = max(contours, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
# Only proceed if the radius meets a minimum size, indicating a significant object
if radius > 10: # This threshold might need adjustment
return (int(x), int(y))
return None
if __name__ == '__main__':
rospy.init_node('ball_tracker', anonymous=True)
bt = BallTracker()
try:
rospy.spin()
except KeyboardInterrupt:
print("Shutting down")

View File

@ -1,74 +0,0 @@
#!/usr/bin/env python3
import rospy
import cv2
import numpy as np
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
class BallTracker:
def __init__(self):
# Initialize the node
rospy.init_node('ball_tracker', anonymous=True)
self.ball_coord_pub = rospy.Publisher("/ball/coordinates", Point, queue_size=10)
# OpenCV part for capturing video from the webcam
self.cap = cv2.VideoCapture(0)
if not self.cap.isOpened():
rospy.logerr("Failed to open camera")
exit()
self.bridge = CvBridge()
def find_ball(self, image):
# Convert BGR to HSV
hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
# Define range for orange color and create a mask
lower_orange = np.array([5, 50, 50])
upper_orange = np.array([15, 255, 255])
mask = cv2.inRange(hsv, lower_orange, upper_orange)
# Find contours in the mask
contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
if contours:
# Find the largest contour and its center
c = max(contours, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
if radius > 10: # Adjust the threshold as needed
return (int(x), int(y)), radius
return None, None
def run(self):
while not rospy.is_shutdown():
ret, frame = self.cap.read()
if not ret:
rospy.logerr("Failed to capture image")
break
ball_position, radius = self.find_ball(frame)
if ball_position:
cv2.circle(frame, ball_position, int(radius), (0, 255, 0), 2) # Draw the circle around the ball
cv2.putText(frame, f"Coords: {ball_position}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
point_msg = Point()
point_msg.x = ball_position[0]
point_msg.y = ball_position[1]
point_msg.z = 0 # Assuming the ball is always on the same plane
self.ball_coord_pub.publish(point_msg)
# Display the image with detected ball and coordinates
cv2.imshow('Ball Tracking', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
self.cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
ball_tracker = BallTracker()
ball_tracker.run()

View File

@ -1,35 +0,0 @@
#!/usr/bin/env python3
import rospy
import cv2
from cv_bridge import CvBridge, CvBridgeError
from sensor_msgs.msg import Image
class CameraPublisher:
def __init__(self):
        rospy.init_node('camera_publisher', anonymous=True)  # Initialize the node before creating publishers
        self.camera = cv2.VideoCapture(0)  # Adjust the device index if needed
        self.bridge = CvBridge()
        self.publisher = rospy.Publisher("/camera/image_raw", Image, queue_size=10)
        self.rate = rospy.Rate(10)  # 10 Hz
def publish_frames(self):
while not rospy.is_shutdown():
ret, frame = self.camera.read()
if ret:
try:
ros_image = self.bridge.cv2_to_imgmsg(frame, "bgr8")
self.publisher.publish(ros_image)
                except CvBridgeError as e:
print(e)
self.rate.sleep()
if __name__ == '__main__':
camera_publisher = CameraPublisher()
try:
camera_publisher.publish_frames()
except rospy.ROSInterruptException:
pass
finally:
camera_publisher.camera.release()

View File

@ -0,0 +1,86 @@
#!/usr/bin/env python3
import rospy
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
import cv2
import numpy as np
from cv_bridge import CvBridge
def nothing(x):
pass
def origin_callback(msg):
global origin
origin = (msg.x, msg.y)
def image_callback(msg):
global origin
bridge = CvBridge()
frame = bridge.imgmsg_to_cv2(msg, desired_encoding='bgr8')
# HSV thresholding and ball tracking
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lh = cv2.getTrackbarPos('Lower-H', 'settings')
ls = cv2.getTrackbarPos('Lower-S', 'settings')
lv = cv2.getTrackbarPos('Lower-V', 'settings')
uh = cv2.getTrackbarPos('Upper-H', 'settings')
us = cv2.getTrackbarPos('Upper-S', 'settings')
uv = cv2.getTrackbarPos('Upper-V', 'settings')
lower_orange = np.array([lh, ls, lv])
upper_orange = np.array([uh, us, uv])
mask = cv2.inRange(hsv, lower_orange, upper_orange)
res = cv2.bitwise_and(frame, frame, mask=mask)
contours, _ = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
center = None
if contours:
c = max(contours, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
if radius > 10:
center = (int(x), int(y))
cv2.circle(frame, center, int(radius), (0, 255, 255), 2)
cv2.circle(frame, center, 5, (0, 0, 255), -1)
# Publish the ball's coordinates adjusted based on the origin
if origin:
adjusted_x = x - origin[0]
adjusted_y = y - origin[1]
ball_point = Point(x=adjusted_x, y=adjusted_y, z=0)
else:
ball_point = Point(x=x, y=y, z=0)
coord_pub.publish(ball_point)
# Display the frames for debugging
cv2.imshow('frame', frame)
# cv2.imshow('mask', mask)
# cv2.imshow('res', res)
cv2.waitKey(1)
origin = None # Global variable to hold the origin
# Initialize the ROS node
rospy.init_node('ball_tracking_node', anonymous=True)

# Trackbars setup for HSV thresholding (created before the subscribers so the
# image callback never queries a trackbar that does not exist yet)
cv2.namedWindow('settings')
cv2.createTrackbar('Lower-H', 'settings', 0, 179, nothing)
cv2.createTrackbar('Lower-S', 'settings', 198, 255, nothing)
cv2.createTrackbar('Lower-V', 'settings', 100, 255, nothing)
cv2.createTrackbar('Upper-H', 'settings', 22, 179, nothing)
cv2.createTrackbar('Upper-S', 'settings', 255, 255, nothing)
cv2.createTrackbar('Upper-V', 'settings', 255, 255, nothing)

# ROS Publisher for the ball coordinates (created before the image subscriber,
# whose callback publishes on it)
coord_pub = rospy.Publisher('/ball_coordinates', Point, queue_size=10)

# ROS Subscribers
rospy.Subscriber('/aruco_origin', Point, origin_callback)
rospy.Subscriber('camera_image', Image, image_callback)

rospy.spin()  # Keep the node running
cv2.destroyAllWindows()
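Downstream consumers only need the /ball_coordinates topic and never touch OpenCV. A minimal listener sketch (the node name is illustrative) that simply logs the ball position relative to the ArUco-polygon origin:

#!/usr/bin/env python3
import rospy
from geometry_msgs.msg import Point

def on_ball(msg):
    # Pixel coordinates, relative to the marker-polygon centroid once /aruco_origin is available.
    rospy.loginfo("ball at (%.1f, %.1f) px", msg.x, msg.y)

if __name__ == '__main__':
    rospy.init_node('ball_coordinate_listener', anonymous=True)
    rospy.Subscriber('/ball_coordinates', Point, on_ball)
    rospy.spin()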

View File

@ -92,4 +92,4 @@ while not rospy.is_shutdown():
# When everything done, release the capture and destroy all windows
cap.release()
cv2.destroyAllWindows()
#Minabebis :3