latest pushes

Ronaldson Bellande 2024-07-23 01:28:20 -04:00
parent 819c217a97
commit 59e333c038
62 changed files with 1534 additions and 415 deletions

View File

@@ -0,0 +1,77 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
cmake_minimum_required(VERSION 3.0.2)
project(humanoid_robot_intelligence_control_system_object_detector)
if($ENV{ROS_VERSION} EQUAL 1)
find_package(catkin REQUIRED COMPONENTS
rospy
std_msgs
sensor_msgs
geometry_msgs
cv_bridge
)
catkin_package(
CATKIN_DEPENDS
rospy
std_msgs
sensor_msgs
geometry_msgs
cv_bridge
)
else()
find_package(ament_cmake REQUIRED)
find_package(ament_cmake_python REQUIRED)
find_package(rclpy REQUIRED)
find_package(std_msgs REQUIRED)
find_package(sensor_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)
find_package(cv_bridge REQUIRED)
endif()
if($ENV{ROS_VERSION} EQUAL 1)
catkin_python_setup()
catkin_install_python(PROGRAMS
src/face_tracker.py
src/face_follower.py
src/object_tracker.py
src/object_follower.py
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
install(DIRECTORY config launch rviz
DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)
else()
ament_python_install_package(${PROJECT_NAME})
install(PROGRAMS
src/face_tracker.py
src/face_follower.py
src/object_tracker.py
src/object_follower.py
DESTINATION lib/${PROJECT_NAME}
)
install(DIRECTORY config launch rviz
DESTINATION share/${PROJECT_NAME}
)
ament_package()
endif()

View File

@@ -0,0 +1,114 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#!/usr/bin/env python3
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_object_detector", "object_follow.launch"] + args
roslaunch_command.extend([
"usb_cam", "usb_cam_node", "name:=camera",
"video_device:=/dev/video0",
"image_width:=640",
"image_height:=480",
"pixel_format:=yuyv",
"camera_frame_id:=usb_cam",
"io_method:=mmap"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_object_detector", "object_detection_processor.py", "name:=object_detection_processor_node"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_object_detector", "object_follower.py", "name:=object_follower_node"
])
roslaunch_command.extend([
"rviz", "rviz", "name:=rviz",
"args:=-d $(find humanoid_robot_intelligence_control_system_object_detector)/rviz/object_follow_visualization.rviz"
])
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
nodes_to_launch = []
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='camera',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 640,
'image_height': 480,
'pixel_format': 'yuyv',
'camera_frame_id': 'usb_cam',
'io_method': 'mmap'
}]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_detection_processor.py',
name='object_detection_processor_node',
output='screen',
parameters=[{'yaml_path': '$(find humanoid_robot_intelligence_control_system_object_detector)/config/object_detection_params.yaml'}]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_follower.py',
name='object_follower_node',
output='screen',
parameters=[{
'p_gain': 0.4,
'i_gain': 0.0,
'd_gain': 0.0
}]
))
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz',
arguments=['-d', '$(find humanoid_robot_intelligence_control_system_object_detector)/rviz/object_follow_visualization.rviz']
))
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)
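
The ROS 2 branch above reuses the ROS 1 "$(find pkg)" substitution inside its parameters and RViz arguments, which ROS 2 launch does not expand. A minimal sketch of resolving those paths with ament_index_python instead; the package and file names mirror the ones used above, and the installed layout is an assumption.

# Sketch only: resolve share-directory paths for the ROS 2 nodes above.
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch_ros.actions import Node

def generate_launch_description():
    share = get_package_share_directory(
        'humanoid_robot_intelligence_control_system_object_detector')
    rviz_config = os.path.join(share, 'rviz', 'object_follow_visualization.rviz')
    params_yaml = os.path.join(share, 'config', 'object_detection_params.yaml')
    return LaunchDescription([
        Node(
            package='humanoid_robot_intelligence_control_system_object_detector',
            executable='object_detection_processor.py',
            name='object_detection_processor_node',
            output='screen',
            parameters=[{'yaml_path': params_yaml}],
        ),
        Node(
            package='rviz2',
            executable='rviz2',
            name='rviz',
            arguments=['-d', rviz_config],
        ),
    ])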

View File

@@ -0,0 +1,112 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#!/usr/bin/env python3
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_configure", "object_tracker.launch"] + args
roslaunch_command.extend([
"usb_cam", "usb_cam_node", "name:=camera",
"video_device:=/dev/video0",
"image_width:=640",
"image_height:=480",
"pixel_format:=yuyv",
"camera_frame_id:=usb_cam",
"io_method:=mmap"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_object_detector", "object_detection_processor.py", "name:=object_detection_processor_node"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_configure", "object_tracker.py", "name:=object_tracker_node"
])
roslaunch_command.extend([
"rviz", "rviz", "name:=rviz",
"args:=-d $(find humanoid_robot_intelligence_control_system_object_detector)/rviz/object_tracking_visualization.rviz"
])
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
nodes_to_launch = []
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='camera',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 640,
'image_height': 480,
'pixel_format': 'yuyv',
'camera_frame_id': 'usb_cam',
'io_method': 'mmap'
}]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_detection_processor.py',
name='object_detection_processor_node',
output='screen',
parameters=[{'yaml_path': '$(find humanoid_robot_intelligence_control_system_object_detector)/config/object_detection_params.yaml'}]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_tracker.py',
name='object_tracker_node',
output='screen',
parameters=[{
'p_gain': 0.4,
'i_gain': 0.0,
'd_gain': 0.0
}]
))
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz',
arguments=['-d', '$(find humanoid_robot_intelligence_control_system_object_detector)/rviz/object_tracking_visualization.rviz']
))
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)
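
"ros2 launch" discovers launch files through a module-level generate_launch_description() function; calling ros2_launch_description() directly from __main__ builds the description but never starts the launch service. A small sketch, assuming the intent is to make the ROS 2 branch loadable by the launch system while keeping the script-style entry point for ROS 1:

# Sketch: expose the description to "ros2 launch", which imports this module itself
# and calls this function; the installed file name for invocation is an assumption.
def generate_launch_description():
    return ros2_launch_description()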

View File

@@ -0,0 +1,108 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_object_detector", "object_detector_processor.launch"] + args
roslaunch_command.extend([
"usb_cam", "usb_cam_node", "name:=camera",
"video_device:=/dev/video0",
"image_width:=640",
"image_height:=480",
"pixel_format:=yuyv",
"camera_frame_id:=usb_cam",
"io_method:=mmap"
])
roslaunch_command.extend([
"ros_web_api_bellande_2d_computer_vision", "bellande_2d_computer_vision_object_detection.py", "name:=object_detection_node"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_ball_detector", "object_detection_processor.py", "name:=object_detection_processor_node"
])
roslaunch_command.extend([
"rviz", "rviz", "name:=rviz",
"args:=-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz"
])
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
nodes_to_launch = []
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='camera',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 640,
'image_height': 480,
'pixel_format': 'yuyv',
'camera_frame_id': 'usb_cam',
'io_method': 'mmap'
}]
))
nodes_to_launch.append(Node(
package='ros_web_api_bellande_2d_computer_vision',
executable='bellande_2d_computer_vision_object_detection.py',
name='object_detection_node',
output='screen',
remappings=[('camera/image_raw', '/usb_cam/image_raw')]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_detection_processor.py',
name='object_detection_processor_node',
output='screen',
parameters=[{'yaml_path': '$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml'}]
))
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz',
arguments=['-d', '$(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz']
))
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)

View File

@@ -0,0 +1,108 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_object_detector", "object_detector_processor.launch"] + args
roslaunch_command.extend([
"usb_cam", "usb_cam_node", "name:=camera",
"video_device:=/dev/video0",
"image_width:=640",
"image_height:=480",
"pixel_format:=yuyv",
"camera_frame_id:=usb_cam",
"io_method:=mmap"
])
roslaunch_command.extend([
"ros_web_api_bellande_2d_computer_vision", "bellande_2d_computer_vision_object_detection.py", "name:=object_detection_node"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_ball_detector", "object_detection_processor.py", "name:=object_detection_processor_node"
])
roslaunch_command.extend([
"rviz", "rviz", "name:=rviz",
"args:=-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz"
])
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
nodes_to_launch = []
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='camera',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 640,
'image_height': 480,
'pixel_format': 'yuyv',
'camera_frame_id': 'usb_cam',
'io_method': 'mmap'
}]
))
nodes_to_launch.append(Node(
package='ros_web_api_bellande_2d_computer_vision',
executable='bellande_2d_computer_vision_object_detection.py',
name='object_detection_node',
output='screen',
remappings=[('camera/image_raw', '/usb_cam/image_raw')]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_detection_processor.py',
name='object_detection_processor_node',
output='screen',
parameters=[{'yaml_path': '$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml'}]
))
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz',
arguments=['-d', '$(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz']
))
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)

View File

@@ -0,0 +1,41 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-->
<launch>
<!-- Launch USB camera -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen">
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="usb_cam" />
<param name="io_method" value="mmap"/>
</node>
<!-- Launch object detection node -->
<node name="object_detection_node" pkg="ros_web_api_bellande_2d_computer_vision" type="bellande_2d_computer_vision_object_detection.py" output="screen">
<remap from="camera/image_raw" to="/usb_cam/image_raw"/>
</node>
<!-- Launch object detection processor node -->
<node name="object_detection_processor_node" pkg="humanoid_robot_intelligence_control_system_ball_detector" type="object_detection_processor.py" output="screen">
<param name="yaml_path" value="$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml"/>
</node>
<!-- Launch RViz -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz" />
</launch>
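
The processor node only receives a yaml_path parameter here; the processor source itself is not part of this excerpt. A hypothetical sketch of how such a node might read that parameter and load the YAML, with names and defaults as assumptions:

# Hypothetical sketch of yaml_path handling inside object_detection_processor.py
# (that file is not shown in this commit); parameter names and defaults are assumptions.
import rospy
import yaml

def load_detection_params():
    yaml_path = rospy.get_param("~yaml_path", "")
    if not yaml_path:
        rospy.logwarn("No yaml_path parameter set; falling back to defaults.")
        return {}
    with open(yaml_path, "r") as f:
        return yaml.safe_load(f) or {}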

View File

@@ -0,0 +1,41 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-->
<launch>
<!-- Launch USB camera -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen">
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="usb_cam" />
<param name="io_method" value="mmap"/>
</node>
<!-- Launch object detection node -->
<node name="object_detection_node" pkg="ros_web_api_bellande_2d_computer_vision" type="bellande_2d_computer_vision_object_detection.py" output="screen">
<remap from="camera/image_raw" to="/usb_cam/image_raw"/>
</node>
<!-- Launch object detection processor node -->
<node name="object_detection_processor_node" pkg="humanoid_robot_intelligence_control_system_ball_detector" type="object_detection_processor.py" output="screen">
<param name="yaml_path" value="$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml"/>
</node>
<!-- Launch RViz -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz" />
</launch>

View File

@@ -0,0 +1,41 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-->
<launch>
<!-- Launch USB camera -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen">
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="usb_cam" />
<param name="io_method" value="mmap"/>
</node>
<!-- Launch object detection node -->
<node name="object_detection_node" pkg="ros_web_api_bellande_2d_computer_vision" type="bellande_2d_computer_vision_object_detection.py" output="screen">
<remap from="camera/image_raw" to="/usb_cam/image_raw"/>
</node>
<!-- Launch object detection processor node -->
<node name="object_detection_processor_node" pkg="humanoid_robot_intelligence_control_system_ball_detector" type="object_detection_processor.py" output="screen">
<param name="yaml_path" value="$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml"/>
</node>
<!-- Launch RViz -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz" />
</launch>

View File

@@ -0,0 +1,41 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-->
<launch>
<!-- Launch USB camera -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen">
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="usb_cam" />
<param name="io_method" value="mmap"/>
</node>
<!-- Launch object detection node -->
<node name="object_detection_node" pkg="ros_web_api_bellande_2d_computer_vision" type="bellande_2d_computer_vision_object_detection.py" output="screen">
<remap from="camera/image_raw" to="/usb_cam/image_raw"/>
</node>
<!-- Launch object detection processor node -->
<node name="object_detection_processor_node" pkg="humanoid_robot_intelligence_control_system_ball_detector" type="object_detection_processor.py" output="screen">
<param name="yaml_path" value="$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml"/>
</node>
<!-- Launch RViz -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz" />
</launch>

View File

@@ -0,0 +1,61 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
-->
<package format="3">
<name>humanoid_robot_intelligence_control_system_configure</name>
<version>0.0.1</version>
<description>
This package provides configuration for the tracker and follower nodes.
</description>
<license>Apache 2.0</license>
<maintainer email="ronaldsonbellande@gmail.com">Ronaldson Bellande</maintainer>
<buildtool_depend condition="$ROS_VERSION == 1">catkin</buildtool_depend>
<buildtool_depend condition="$ROS_VERSION == 2">ament_cmake</buildtool_depend>
<buildtool_depend condition="$ROS_VERSION == 2">ament_cmake_python</buildtool_depend>
<!-- ROS 1 dependencies -->
<depend condition="$ROS_VERSION == 1">rospy</depend>
<depend condition="$ROS_VERSION == 1">std_msgs</depend>
<depend condition="$ROS_VERSION == 1">sensor_msgs</depend>
<depend condition="$ROS_VERSION == 1">geometry_msgs</depend>
<depend condition="$ROS_VERSION == 1">cv_bridge</depend>
<depend condition="$ROS_VERSION == 1">image_transport</depend>
<depend condition="$ROS_VERSION == 1">dynamic_reconfigure</depend>
<depend condition="$ROS_VERSION == 1">message_generation</depend>
<depend condition="$ROS_VERSION == 1">message_runtime</depend>
<!-- ROS 2 dependencies -->
<depend condition="$ROS_VERSION == 2">rclpy</depend>
<depend condition="$ROS_VERSION == 2">std_msgs</depend>
<depend condition="$ROS_VERSION == 2">sensor_msgs</depend>
<depend condition="$ROS_VERSION == 2">geometry_msgs</depend>
<depend condition="$ROS_VERSION == 2">cv_bridge</depend>
<depend condition="$ROS_VERSION == 2">image_transport</depend>
<depend condition="$ROS_VERSION == 2">rosidl_default_generators</depend>
<depend condition="$ROS_VERSION == 2">rosidl_default_runtime</depend>
<!-- Common dependencies -->
<depend>python3-opencv</depend>
<depend>python3-yaml</depend>
<depend>usb_cam</depend>
<export>
<build_type condition="$ROS_VERSION == 1">catkin</build_type>
<build_type condition="$ROS_VERSION == 2">ament_cmake</build_type>
</export>
</package>

View File

@@ -0,0 +1,26 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
scripts=['src/face_tracker.py', "src/face_follower.py", "src/object_tracker.py", "src/object_follower.py"],
packages=['humanoid_robot_intelligence_control_system_configure'],
package_dir={'': 'src'},
)
setup(**setup_args)
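
distutils is deprecated and removed in Python 3.12, while generate_distutils_setup works equally well with setuptools. An equivalent sketch under that assumption:

# Same setup expressed with setuptools instead of the deprecated distutils;
# assumes the identical package layout as above.
from setuptools import setup
from catkin_pkg.python_setup import generate_distutils_setup

setup_args = generate_distutils_setup(
    scripts=['src/face_tracker.py', 'src/face_follower.py',
             'src/object_tracker.py', 'src/object_follower.py'],
    packages=['humanoid_robot_intelligence_control_system_configure'],
    package_dir={'': 'src'},
)
setup(**setup_args)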

View File

@@ -0,0 +1,173 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import rospy
import math
from sensor_msgs.msg import JointState
from std_msgs.msg import String, Bool
from geometry_msgs.msg import Point
from humanoid_robot_intelligence_control_system_walking_module_msgs.msg import WalkingParam
from humanoid_robot_intelligence_control_system_walking_module_msgs.srv import GetWalkingParam
class FaceFollower:
def __init__(self):
rospy.init_node('face_follower')
self.FOV_WIDTH = 35.2 * math.pi / 180
self.FOV_HEIGHT = 21.6 * math.pi / 180
self.count_not_found = 0
self.count_to_approach = 0
self.on_following = False
self.approach_face_position = "NotFound"
self.CAMERA_HEIGHT = 0.46
self.NOT_FOUND_THRESHOLD = 50
self.MAX_FB_STEP = 40.0 * 0.001
self.MAX_RL_TURN = 15.0 * math.pi / 180
self.IN_PLACE_FB_STEP = -3.0 * 0.001
self.MIN_FB_STEP = 5.0 * 0.001
self.MIN_RL_TURN = 5.0 * math.pi / 180
self.UNIT_FB_STEP = 1.0 * 0.001
self.UNIT_RL_TURN = 0.5 * math.pi / 180
self.SPOT_FB_OFFSET = 0.0 * 0.001
self.SPOT_RL_OFFSET = 0.0 * 0.001
self.SPOT_ANGLE_OFFSET = 0.0
self.hip_pitch_offset = 7.0
self.current_pan = -10
self.current_tilt = -10
self.current_x_move = 0.005
self.current_r_angle = 0
self.curr_period_time = 0.6
self.accum_period_time = 0.0
self.DEBUG_PRINT = False
self.current_joint_states_sub = rospy.Subscriber(
"/humanoid_robot_intelligence_control_system/goal_joint_states", JointState, self.current_joint_states_callback)
self.set_walking_command_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/walking/command", String, queue_size=1)
self.set_walking_param_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/walking/set_params", WalkingParam, queue_size=1)
self.get_walking_param_client = rospy.ServiceProxy(
"/humanoid_robot_intelligence_control_system/walking/get_params", GetWalkingParam)
self.face_position_sub = rospy.Subscriber(
"/face_detector/face_position", Point, self.face_position_callback)
self.prev_time = rospy.Time.now()
self.current_walking_param = WalkingParam()
def start_following(self):
self.on_following = True
rospy.loginfo("Start Face following")
self.set_walking_command("start")
result = self.get_walking_param()
if result:
self.hip_pitch_offset = self.current_walking_param.hip_pitch_offset
self.curr_period_time = self.current_walking_param.period_time
else:
self.hip_pitch_offset = 7.0 * math.pi / 180
self.curr_period_time = 0.6
def stop_following(self):
self.on_following = False
self.count_to_approach = 0
rospy.loginfo("Stop Face following")
self.set_walking_command("stop")
def current_joint_states_callback(self, msg):
for i, name in enumerate(msg.name):
if name == "head_pan":
self.current_pan = msg.position[i]
elif name == "head_tilt":
self.current_tilt = msg.position[i]
def face_position_callback(self, msg):
if self.on_following:
self.process_following(msg.x, msg.y, msg.z)
def calc_footstep(self, target_distance, target_angle, delta_time):
next_movement = self.current_x_move
target_distance = max(0, target_distance)
fb_goal = min(target_distance * 0.1, self.MAX_FB_STEP)
self.accum_period_time += delta_time
if self.accum_period_time > (self.curr_period_time / 4):
self.accum_period_time = 0.0
if (target_distance * 0.1 / 2) < self.current_x_move:
next_movement -= self.UNIT_FB_STEP
else:
next_movement += self.UNIT_FB_STEP
fb_goal = min(next_movement, fb_goal)
fb_move = max(fb_goal, self.MIN_FB_STEP)
rl_goal = 0.0
if abs(target_angle) * 180 / math.pi > 5.0:
rl_offset = abs(target_angle) * 0.2
rl_goal = min(rl_offset, self.MAX_RL_TURN)
rl_goal = max(rl_goal, self.MIN_RL_TURN)
rl_angle = min(abs(self.current_r_angle) + self.UNIT_RL_TURN, rl_goal)
if target_angle < 0:
rl_angle *= -1
else:
rl_angle = 0
return fb_move, rl_angle
def process_following(self, x_angle, y_angle, face_size):
curr_time = rospy.Time.now()
delta_time = (curr_time - self.prev_time).to_sec()
self.prev_time = curr_time
self.count_not_found = 0
if self.current_tilt == -10 and self.current_pan == -10:
rospy.logerr("Failed to get current angle of head joints.")
self.set_walking_command("stop")
self.on_following = False
self.approach_face_position = "NotFound"
return False
self.approach_face_position = "OutOfRange"
distance_to_face = self.CAMERA_HEIGHT * math.tan(math.pi * 0.5 + self.current_tilt - self.hip_pitch_offset - face_size)
distance_to_face = abs(distance_to_face)
distance_to_approach = 0.5 # Adjust this value as needed
if (distance_to_face < distance_to_approach) and (abs(x_angle) < 25.0):
self.count_to_approach += 1
if self.count_to_approach > 20:
self.set_walking_command("stop")
self.on_following = False
self.approach_face_position = "OnLeft" if x_angle > 0 else "OnRight"
return True
elif self.count_to_approach > 15:
self.set_walking_param(self.IN_PLACE_FB_STEP, 0, 0)
return False
else:
self.count_to_approach = 0
distance_to_walk = distance_to_face - distance_to_approach
fb_move, rl_angle = self.calc_footstep(distance_to_walk, self.current_pan, delta_time)
self.set_walking_param(fb_move, 0, rl_angle)
return False
def set_walking_command(self, command):
if command == "start":
self.get_walking_param()
self.set_walking_param(self.IN_PLACE_FB_STEP, 0, 0, True)
msg = String()
msg.data = command
self.set_walking_command_pub.publish(msg)

View File

@@ -0,0 +1,179 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import rospy
import math
from sensor_msgs.msg import JointState
from std_msgs.msg import String
from geometry_msgs.msg import Point
class FaceTracker:
def __init__(self):
rospy.init_node('face_tracker')
self.FOV_WIDTH = 35.2 * math.pi / 180
self.FOV_HEIGHT = 21.6 * math.pi / 180
self.NOT_FOUND_THRESHOLD = 50
self.WAITING_THRESHOLD = 5
self.use_head_scan = True
self.count_not_found = 0
self.on_tracking = False
self.current_face_pan = 0
self.current_face_tilt = 0
self.x_error_sum = 0
self.y_error_sum = 0
self.tracking_status = "NotFound"
self.DEBUG_PRINT = False
self.p_gain = rospy.get_param('~p_gain', 0.4)
self.i_gain = rospy.get_param('~i_gain', 0.0)
self.d_gain = rospy.get_param('~d_gain', 0.0)
rospy.loginfo(f"Face tracking Gain : {self.p_gain}, {self.i_gain}, {self.d_gain}")
self.head_joint_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/head_control/set_joint_states", JointState, queue_size=1)
self.head_scan_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/head_control/scan_command", String, queue_size=1)
self.face_position_sub = rospy.Subscriber(
"/face_detector/face_position", Point, self.face_position_callback)
self.face_tracking_command_sub = rospy.Subscriber(
"/face_tracker/command", String, self.face_tracker_command_callback)
self.face_position = Point()
self.prev_time = rospy.Time.now()
def face_position_callback(self, msg):
self.face_position = msg
def face_tracker_command_callback(self, msg):
if msg.data == "start":
self.start_tracking()
elif msg.data == "stop":
self.stop_tracking()
elif msg.data == "toggle_start":
if not self.on_tracking:
self.start_tracking()
else:
self.stop_tracking()
def start_tracking(self):
self.on_tracking = True
rospy.loginfo("Start Face tracking")
def stop_tracking(self):
self.go_init()
self.on_tracking = False
rospy.loginfo("Stop Face tracking")
self.current_face_pan = 0
self.current_face_tilt = 0
self.x_error_sum = 0
self.y_error_sum = 0
def process_tracking(self):
if not self.on_tracking:
self.face_position.z = 0
self.count_not_found = 0
return "NotFound"
if self.face_position.z <= 0:
self.count_not_found += 1
if self.count_not_found < self.WAITING_THRESHOLD:
tracking_status = "Waiting"
elif self.count_not_found > self.NOT_FOUND_THRESHOLD:
self.scan_face()
self.count_not_found = 0
tracking_status = "NotFound"
else:
tracking_status = "NotFound"
else:
self.count_not_found = 0
tracking_status = "Found"
if tracking_status != "Found":
self.tracking_status = tracking_status
return tracking_status
x_error = -math.atan(self.face_position.x * math.tan(self.FOV_WIDTH))
y_error = -math.atan(self.face_position.y * math.tan(self.FOV_HEIGHT))
curr_time = rospy.Time.now()
delta_time = (curr_time - self.prev_time).to_sec()
self.prev_time = curr_time
x_error_diff = (x_error - self.current_face_pan) / delta_time
y_error_diff = (y_error - self.current_face_tilt) / delta_time
self.x_error_sum += x_error
self.y_error_sum += y_error
x_error_target = x_error * self.p_gain + x_error_diff * self.d_gain + self.x_error_sum * self.i_gain
y_error_target = y_error * self.p_gain + y_error_diff * self.d_gain + self.y_error_sum * self.i_gain
if self.DEBUG_PRINT:
rospy.loginfo(f"Error: {x_error * 180 / math.pi} | {y_error * 180 / math.pi}")
rospy.loginfo(f"Error diff: {x_error_diff * 180 / math.pi} | {y_error_diff * 180 / math.pi} | {delta_time}")
rospy.loginfo(f"Error sum: {self.x_error_sum * 180 / math.pi} | {self.y_error_sum * 180 / math.pi}")
rospy.loginfo(f"Error target: {x_error_target * 180 / math.pi} | {y_error_target * 180 / math.pi}")
self.publish_head_joint(x_error_target, y_error_target)
self.current_face_pan = x_error
self.current_face_tilt = y_error
self.tracking_status = tracking_status
return tracking_status
def publish_head_joint(self, pan, tilt):
min_angle = 1 * math.pi / 180
if abs(pan) < min_angle and abs(tilt) < min_angle:
return
head_angle_msg = JointState()
head_angle_msg.name = ["head_pan", "head_tilt"]
head_angle_msg.position = [pan, tilt]
self.head_joint_pub.publish(head_angle_msg)
def go_init(self):
head_angle_msg = JointState()
head_angle_msg.name = ["head_pan", "head_tilt"]
head_angle_msg.position = [0.0, 0.0]
self.head_joint_pub.publish(head_angle_msg)
def scan_face(self):
if not self.use_head_scan:
return
scan_msg = String()
scan_msg.data = "scan"
self.head_scan_pub.publish(scan_msg)
def run(self):
rate = rospy.Rate(30) # 30Hz
while not rospy.is_shutdown():
self.process_tracking()
rate.sleep()
if __name__ == '__main__':
try:
tracker = FaceTracker()
tracker.run()
except rospy.ROSInterruptException:
pass
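
The tracker consumes geometry_msgs/Point messages on /face_detector/face_position, treating x and y as offsets from the image center scaled against the field of view and z as a detection size, with z <= 0 meaning "no face". A small hedged test publisher for exercising it by hand; the numeric values are arbitrary examples:

# Test publisher sketch (not part of this commit) for driving the face tracker manually.
# Assumes the Point convention implied by process_tracking(): x/y are normalized offsets,
# z <= 0 means "no detection".
import rospy
from geometry_msgs.msg import Point

if __name__ == "__main__":
    rospy.init_node("face_position_test_publisher")
    pub = rospy.Publisher("/face_detector/face_position", Point, queue_size=1)
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        pub.publish(Point(x=0.2, y=-0.1, z=0.05))  # a detection slightly off center
        rate.sleep()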

View File

@@ -0,0 +1,218 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import rospy
import math
from sensor_msgs.msg import JointState
from std_msgs.msg import String
from humanoid_robot_intelligence_control_system_walking_module_msgs.msg import WalkingParam
from humanoid_robot_intelligence_control_system_walking_module_msgs.srv import GetWalkingParam
from geometry_msgs.msg import Point
class ObjectFollower:
def __init__(self):
rospy.init_node('object_follower')
self.FOV_WIDTH = 35.2 * math.pi / 180
self.FOV_HEIGHT = 21.6 * math.pi / 180
self.count_not_found = 0
self.count_to_approach = 0
self.on_tracking = False
self.approach_object_position = "NotFound"
self.CAMERA_HEIGHT = 0.46
self.NOT_FOUND_THRESHOLD = 50
self.MAX_FB_STEP = 40.0 * 0.001
self.MAX_RL_TURN = 15.0 * math.pi / 180
self.IN_PLACE_FB_STEP = -3.0 * 0.001
self.MIN_FB_STEP = 5.0 * 0.001
self.MIN_RL_TURN = 5.0 * math.pi / 180
self.UNIT_FB_STEP = 1.0 * 0.001
self.UNIT_RL_TURN = 0.5 * math.pi / 180
self.SPOT_FB_OFFSET = 0.0 * 0.001
self.SPOT_RL_OFFSET = 0.0 * 0.001
self.SPOT_ANGLE_OFFSET = 0.0
self.hip_pitch_offset = 7.0
self.current_pan = -10
self.current_tilt = -10
self.current_x_move = 0.005
self.current_r_angle = 0
self.curr_period_time = 0.6
self.accum_period_time = 0.0
self.DEBUG_PRINT = False
self.current_joint_states_sub = rospy.Subscriber(
"/humanoid_robot_intelligence_control_system/goal_joint_states", JointState, self.current_joint_states_callback)
self.set_walking_command_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/walking/command", String, queue_size=1)
self.set_walking_param_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/walking/set_params", WalkingParam, queue_size=1)
self.get_walking_param_client = rospy.ServiceProxy(
"/humanoid_robot_intelligence_control_system/walking/get_params", GetWalkingParam)
# Subscribe to object detection results
self.object_detection_sub = rospy.Subscriber(
"/object_detection_result", Point, self.object_detection_callback)
self.prev_time = rospy.Time.now()
self.current_walking_param = WalkingParam()
def start_following(self):
self.on_tracking = True
rospy.loginfo("Start Object following")
self.set_walking_command("start")
result = self.get_walking_param()
if result:
self.hip_pitch_offset = self.current_walking_param.hip_pitch_offset
self.curr_period_time = self.current_walking_param.period_time
else:
self.hip_pitch_offset = 7.0 * math.pi / 180
self.curr_period_time = 0.6
def stop_following(self):
self.on_tracking = False
self.count_to_approach = 0
rospy.loginfo("Stop Object following")
self.set_walking_command("stop")
def current_joint_states_callback(self, msg):
for i, name in enumerate(msg.name):
if name == "head_pan":
self.current_pan = msg.position[i]
elif name == "head_tilt":
self.current_tilt = msg.position[i]
def calc_footstep(self, target_distance, target_angle, delta_time):
next_movement = self.current_x_move
target_distance = max(0, target_distance)
fb_goal = min(target_distance * 0.1, self.MAX_FB_STEP)
self.accum_period_time += delta_time
if self.accum_period_time > (self.curr_period_time / 4):
self.accum_period_time = 0.0
if (target_distance * 0.1 / 2) < self.current_x_move:
next_movement -= self.UNIT_FB_STEP
else:
next_movement += self.UNIT_FB_STEP
fb_goal = min(next_movement, fb_goal)
fb_move = max(fb_goal, self.MIN_FB_STEP)
rl_goal = 0.0
if abs(target_angle) * 180 / math.pi > 5.0:
rl_offset = abs(target_angle) * 0.2
rl_goal = min(rl_offset, self.MAX_RL_TURN)
rl_goal = max(rl_goal, self.MIN_RL_TURN)
rl_angle = min(abs(self.current_r_angle) + self.UNIT_RL_TURN, rl_goal)
if target_angle < 0:
rl_angle *= -1
else:
rl_angle = 0
return fb_move, rl_angle
def process_following(self, x_angle, y_angle, object_size):
curr_time = rospy.Time.now()
delta_time = (curr_time - self.prev_time).to_sec()
self.prev_time = curr_time
self.count_not_found = 0
if self.current_tilt == -10 and self.current_pan == -10:
rospy.logerr("Failed to get current angle of head joints.")
self.set_walking_command("stop")
self.on_tracking = False
self.approach_object_position = "NotFound"
return False
self.approach_object_position = "OutOfRange"
distance_to_object = self.CAMERA_HEIGHT * math.tan(math.pi * 0.5 + self.current_tilt - self.hip_pitch_offset - object_size)
distance_to_object = abs(distance_to_object)
distance_to_approach = 0.22
if (distance_to_object < distance_to_approach) and (abs(x_angle) < 25.0):
self.count_to_approach += 1
if self.count_to_approach > 20:
self.set_walking_command("stop")
self.on_tracking = False
self.approach_object_position = "OnLeft" if x_angle > 0 else "OnRight"
return True
elif self.count_to_approach > 15:
self.set_walking_param(self.IN_PLACE_FB_STEP, 0, 0)
return False
else:
self.count_to_approach = 0
distance_to_walk = distance_to_object - distance_to_approach
fb_move, rl_angle = self.calc_footstep(distance_to_walk, self.current_pan, delta_time)
self.set_walking_param(fb_move, 0, rl_angle)
return False
def decide_object_position(self, x_angle, y_angle):
if self.current_tilt == -10 and self.current_pan == -10:
self.approach_object_position = "NotFound"
return
object_x_angle = self.current_pan + x_angle
self.approach_object_position = "OnLeft" if object_x_angle > 0 else "OnRight"
def wait_following(self):
self.count_not_found += 1
if self.count_not_found > self.NOT_FOUND_THRESHOLD * 0.5:
self.set_walking_param(0.0, 0.0, 0.0)
def set_walking_command(self, command):
if command == "start":
self.get_walking_param()
self.set_walking_param(self.IN_PLACE_FB_STEP, 0, 0, True)
msg = String()
msg.data = command
self.set_walking_command_pub.publish(msg)
def set_walking_param(self, x_move, y_move, rotation_angle, balance=False):
self.current_walking_param.balance_enable = balance
self.current_walking_param.x_move_amplitude = x_move + self.SPOT_FB_OFFSET
self.current_walking_param.y_move_amplitude = y_move + self.SPOT_RL_OFFSET
self.current_walking_param.angle_move_amplitude = rotation_angle + self.SPOT_ANGLE_OFFSET
self.set_walking_param_pub.publish(self.current_walking_param)
self.current_x_move = x_move
self.current_r_angle = rotation_angle
def get_walking_param(self):
try:
response = self.get_walking_param_client()
self.current_walking_param = response.parameters
return True
except rospy.ServiceException as e:
rospy.logerr("Failed to get walking parameters: %s" % e)
return False
def object_detection_callback(self, msg):
if self.on_tracking:
x_angle = msg.x
y_angle = msg.y
object_size = msg.z
self.process_following(x_angle, y_angle, object_size)
else:
self.wait_following()
if __name__ == '__main__':
try:
follower = ObjectFollower()
follower.start_following()
rospy.spin()
except rospy.ROSInterruptException:
pass
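
process_following() estimates the ground distance to the object from the fixed camera height and the current head tilt: distance = CAMERA_HEIGHT * tan(pi/2 + tilt - hip_pitch_offset - object_size). A standalone illustrative sketch of that formula with the same constants; the follower above remains the actual logic.

# Illustrative sketch of the distance estimate used in process_following();
# constants copied from ObjectFollower, the 30-degree tilt below is just an example.
import math

CAMERA_HEIGHT = 0.46                     # meters
HIP_PITCH_OFFSET = 7.0 * math.pi / 180   # radians

def estimate_distance(head_tilt, object_size):
    angle = math.pi * 0.5 + head_tilt - HIP_PITCH_OFFSET - object_size
    return abs(CAMERA_HEIGHT * math.tan(angle))

print(estimate_distance(-30.0 * math.pi / 180, 0.05))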

View File

@@ -0,0 +1,194 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import rospy
import math
from sensor_msgs.msg import JointState
from std_msgs.msg import String, Float64MultiArray
from humanoid_robot_intelligence_control_system_ball_detector.msg import CircleSetStamped
from geometry_msgs.msg import Point
class ObjectTracker:
def __init__(self):
rospy.init_node('object_tracker')
self.FOV_WIDTH = 35.2 * math.pi / 180
self.FOV_HEIGHT = 21.6 * math.pi / 180
self.NOT_FOUND_THRESHOLD = 50
self.WAITING_THRESHOLD = 5
self.use_head_scan = True
self.count_not_found = 0
self.on_tracking = False
self.current_object_pan = 0
self.current_object_tilt = 0
self.x_error_sum = 0
self.y_error_sum = 0
self.current_object_bottom = 0
self.tracking_status = "NotFound"
self.DEBUG_PRINT = False
self.p_gain = rospy.get_param("~p_gain", 0.4)
self.i_gain = rospy.get_param("~i_gain", 0.0)
self.d_gain = rospy.get_param("~d_gain", 0.0)
rospy.loginfo(f"Object tracking Gain : {self.p_gain}, {self.i_gain}, {self.d_gain}")
self.head_joint_offset_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/head_control/set_joint_states_offset", JointState, queue_size=0)
self.head_joint_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/head_control/set_joint_states", JointState, queue_size=0)
self.head_scan_pub = rospy.Publisher(
"/humanoid_robot_intelligence_control_system/head_control/scan_command", String, queue_size=0)
self.object_position_sub = rospy.Subscriber(
"/object_detector_node/object_position", Point, self.object_position_callback)
self.object_tracking_command_sub = rospy.Subscriber(
"/object_tracker/command", String, self.object_tracker_command_callback)
self.object_position = Point()
self.prev_time = rospy.Time.now()
def object_position_callback(self, msg):
self.object_position = msg
def object_tracker_command_callback(self, msg):
if msg.data == "start":
self.start_tracking()
elif msg.data == "stop":
self.stop_tracking()
elif msg.data == "toggle_start":
if not self.on_tracking:
self.start_tracking()
else:
self.stop_tracking()
def start_tracking(self):
self.on_tracking = True
rospy.loginfo("Start Object tracking") if self.DEBUG_PRINT else None
def stop_tracking(self):
self.go_init()
self.on_tracking = False
rospy.loginfo("Stop Object tracking") if self.DEBUG_PRINT else None
self.current_object_pan = 0
self.current_object_tilt = 0
self.x_error_sum = 0
self.y_error_sum = 0
def set_using_head_scan(self, use_scan):
self.use_head_scan = use_scan
def process_tracking(self):
if not self.on_tracking:
self.object_position.z = 0
self.count_not_found = 0
return "NotFound"
if self.object_position.z <= 0:
self.count_not_found += 1
if self.count_not_found < self.WAITING_THRESHOLD:
if self.tracking_status in ["Found", "Waiting"]:
tracking_status = "Waiting"
else:
tracking_status = "NotFound"
elif self.count_not_found > self.NOT_FOUND_THRESHOLD:
self.scan_object()
self.count_not_found = 0
tracking_status = "NotFound"
else:
tracking_status = "NotFound"
else:
self.count_not_found = 0
tracking_status = "Found"
if tracking_status != "Found":
self.tracking_status = tracking_status
self.current_object_pan = 0
self.current_object_tilt = 0
self.x_error_sum = 0
self.y_error_sum = 0
return tracking_status
x_error = -math.atan(self.object_position.x * math.tan(self.FOV_WIDTH))
y_error = -math.atan(self.object_position.y * math.tan(self.FOV_HEIGHT))
object_size = self.object_position.z
curr_time = rospy.Time.now()
delta_time = (curr_time - self.prev_time).to_sec()
self.prev_time = curr_time
x_error_diff = (x_error - self.current_object_pan) / delta_time
y_error_diff = (y_error - self.current_object_tilt) / delta_time
self.x_error_sum += x_error
self.y_error_sum += y_error
x_error_target = x_error * self.p_gain + x_error_diff * self.d_gain + self.x_error_sum * self.i_gain
y_error_target = y_error * self.p_gain + y_error_diff * self.d_gain + self.y_error_sum * self.i_gain
if self.DEBUG_PRINT:
rospy.loginfo(f"Error: {x_error * 180 / math.pi} | {y_error * 180 / math.pi}")
rospy.loginfo(f"Error diff: {x_error_diff * 180 / math.pi} | {y_error_diff * 180 / math.pi} | {delta_time}")
rospy.loginfo(f"Error sum: {self.x_error_sum * 180 / math.pi} | {self.y_error_sum * 180 / math.pi}")
rospy.loginfo(f"Error target: {x_error_target * 180 / math.pi} | {y_error_target * 180 / math.pi}")
self.publish_head_joint(x_error_target, y_error_target)
self.current_object_pan = x_error
self.current_object_tilt = y_error
self.current_object_bottom = object_size
self.object_position.z = 0
self.tracking_status = tracking_status
return tracking_status
def publish_head_joint(self, pan, tilt):
min_angle = 1 * math.pi / 180
if abs(pan) < min_angle and abs(tilt) < min_angle:
return
head_angle_msg = JointState()
head_angle_msg.name = ["head_pan", "head_tilt"]
head_angle_msg.position = [pan, tilt]
self.head_joint_offset_pub.publish(head_angle_msg)
def go_init(self):
head_angle_msg = JointState()
head_angle_msg.name = ["head_pan", "head_tilt"]
head_angle_msg.position = [0.0, 0.0]
self.head_joint_pub.publish(head_angle_msg)
def scan_object(self):
if not self.use_head_scan:
return
scan_msg = String()
scan_msg.data = "scan"
self.head_scan_pub.publish(scan_msg)
if __name__ == '__main__':
try:
tracker = ObjectTracker()
rate = rospy.Rate(30)
while not rospy.is_shutdown():
tracker.process_tracking()
rate.sleep()
except rospy.ROSInterruptException:
pass
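
The tracking error converts the normalized detection position into head-joint offsets with x_error = -atan(x * tan(FOV_WIDTH)) and the analogous expression for tilt, then scales them by the PID gains (only the P term is non-zero with the defaults above). A self-contained sketch of that mapping for a pure P controller:

# Self-contained sketch of the pan/tilt error law in process_tracking(); FOV constants
# copied from ObjectTracker, p_gain default matches the launch files, P-only for brevity.
import math

FOV_WIDTH = 35.2 * math.pi / 180
FOV_HEIGHT = 21.6 * math.pi / 180

def pan_tilt_offsets(x_norm, y_norm, p_gain=0.4):
    x_error = -math.atan(x_norm * math.tan(FOV_WIDTH))
    y_error = -math.atan(y_norm * math.tan(FOV_HEIGHT))
    return x_error * p_gain, y_error * p_gain

print(pan_tilt_offsets(0.5, 0.0))  # detection halfway from center along x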

View File

@@ -1,28 +0,0 @@
<?xml version="1.0"?>
<launch>
<arg name="face_cascade_name_0"
default="$(find face_detection)/include/face_detection/HaarCascades/haarcascade_frontalface_alt.xml" />
<arg name="face_cascade_name_1"
default="$(find face_detection)/include/face_detection/HaarCascades/haarcascade_frontalface_alt2.xml" />
<arg name="face_cascade_name_2"
default="$(find face_detection)/include/face_detection/HaarCascades/haarcascade_frontalface_alt_tree.xml" />
<arg name="face_cascade_name_3"
default="$(find face_detection)/include/face_detection/HaarCascades/haarcascade_frontalface_default.xml" />
<arg name="face_cascade_name_4"
default="$(find face_detection)/include/face_detection/lbpCascades/lbpcascade_frontalface.xml" />
<node pkg="face_detection" type="face_tracking" name="face_tracking"
args="$(arg face_cascade_name_0)
$(arg face_cascade_name_1)
$(arg face_cascade_name_2)
$(arg face_cascade_name_3)
$(arg face_cascade_name_4)"
output="screen">
<param name="imageInput" type="string" value="/usb_cam_node/image_raw" />
<param name="displayed_Image" type="int" value="0" />
<!-- <param name="publish" type="int" value="2" /> -->
<param name="publish" type="int" value="3" />
<param name="start_condition" type="bool" value="false" />
</node>
</launch>

View File

@@ -1,23 +0,0 @@
# combination action page number and mp3 file path
action_and_sound:
4 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Thank you.mp3"
41: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Introduction.mp3"
24: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Wow.mp3"
23: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Yes go.mp3"
15: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Sit down.mp3"
1: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Stand up.mp3"
54: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Clap please.mp3"
27: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Oops.mp3"
38: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Bye bye.mp3"
# 101 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Oops.mp3"
110 : ""
111 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Intro01.mp3"
115 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Intro02.mp3"
118 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Intro03.mp3"
# play list
prev_default: [4, 41, 24, 23, 15, 1, 54, 27, 38]
default: [4, 110, 111, 115, 118, 24, 54, 27, 38]
# example of play list
#certification: [101]

View File

@@ -1,18 +0,0 @@
# combination action page number and mp3 file path
action_and_sound:
4 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Thank you.mp3"
41: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Introduction.mp3"
24: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Wow.mp3"
23: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Yes go.mp3"
15: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Sit down.mp3"
1: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Stand up.mp3"
54: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Clap please.mp3"
27: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Oops.mp3"
38: "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Bye bye.mp3"
101 : "/home/humanoid_robot_intelligence_control_system/catkin_ws/src/ROBOTIS-HUMANOID_ROBOT/ROBOTIS-HUMANOID_ROBOT-Demo/humanoid_robot_intelligence_control_system_demo/data/mp3/Oops.mp3"
# play list
default: [4, 41, 24, 23, 15, 1, 54, 27, 38]
# example of play list
certification: [101]

View File

@ -1,346 +0,0 @@
/*******************************************************************************
* Copyright 2017 ROBOTIS CO., LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* Author: Kayman Jung */
#include "humanoid_robot_intelligence_control_system_demo/action_demo.h"
namespace humanoid_robot_intelligence_control_system_op {
ActionDemo::ActionDemo()
: SPIN_RATE(30), DEBUG_PRINT(false), play_index_(0),
play_status_(StopAction) {
enable_ = false;
ros::NodeHandle nh(ros::this_node::getName());
std::string default_path =
ros::package::getPath("humanoid_robot_intelligence_control_system_demo") + "/list/action_script.yaml";
script_path_ = nh.param<std::string>("action_script", default_path);
std::string default_play_list = "default";
play_list_name_ =
nh.param<std::string>("action_script_play_list", default_play_list);
demo_command_sub_ = nh.subscribe("/humanoid_robot_intelligence_control_system/demo_command", 1,
&ActionDemo::demoCommandCallback, this);
parseActionScript(script_path_);
boost::thread queue_thread =
boost::thread(boost::bind(&ActionDemo::callbackThread, this));
boost::thread process_thread =
boost::thread(boost::bind(&ActionDemo::processThread, this));
}
ActionDemo::~ActionDemo() {}
void ActionDemo::setDemoEnable() {
setModuleToDemo("action_module");
enable_ = true;
ROS_INFO_COND(DEBUG_PRINT, "Start ActionScript Demo");
playAction(InitPose);
startProcess(play_list_name_);
}
void ActionDemo::setDemoDisable() {
stopProcess();
enable_ = false;
ROS_WARN("Set Action demo disable");
play_list_.resize(0);
}
void ActionDemo::process() {
switch (play_status_) {
case PlayAction: {
if (play_list_.size() == 0) {
ROS_WARN("Play List is empty.");
return;
}
// action is not running
if (isActionRunning() == false) {
// play
bool result_play = playActionWithSound(play_list_.at(play_index_));
ROS_INFO_COND(!result_play, "Fail to play action script.");
// add play index
int index_to_play = (play_index_ + 1) % play_list_.size();
play_index_ = index_to_play;
} else {
// wait
return;
}
break;
}
case PauseAction: {
stopMP3();
brakeAction();
play_status_ = ReadyAction;
break;
}
case StopAction: {
stopMP3();
stopAction();
play_status_ = ReadyAction;
break;
}
default:
break;
}
}
void ActionDemo::startProcess(const std::string &set_name) {
parseActionScriptSetName(script_path_, set_name);
play_status_ = PlayAction;
}
void ActionDemo::resumeProcess() { play_status_ = PlayAction; }
void ActionDemo::pauseProcess() { play_status_ = PauseAction; }
void ActionDemo::stopProcess() {
play_index_ = 0;
play_status_ = StopAction;
}
void ActionDemo::processThread() {
// set node loop rate
ros::Rate loop_rate(SPIN_RATE);
// node loop
while (ros::ok()) {
if (enable_ == true)
process();
// relax to fit output rate
loop_rate.sleep();
}
}
void ActionDemo::callbackThread() {
ros::NodeHandle nh(ros::this_node::getName());
// subscriber & publisher
module_control_pub_ =
nh.advertise<std_msgs::String>("/humanoid_robot_intelligence_control_system/enable_ctrl_module", 0);
motion_index_pub_ =
nh.advertise<std_msgs::Int32>("/humanoid_robot_intelligence_control_system/action/page_num", 0);
play_sound_pub_ = nh.advertise<std_msgs::String>("/play_sound_file", 0);
buttuon_sub_ = nh.subscribe("/humanoid_robot_intelligence_control_system/open_cr/button", 1,
&ActionDemo::buttonHandlerCallback, this);
is_running_client_ =
nh.serviceClient<humanoid_robot_intelligence_control_system_action_module_msgs::IsRunning>(
"/humanoid_robot_intelligence_control_system/action/is_running");
set_joint_module_client_ =
nh.serviceClient<humanoid_robot_intelligence_control_system_controller_msgs::SetModule>(
"/humanoid_robot_intelligence_control_system/set_present_ctrl_modules");
while (nh.ok()) {
ros::spinOnce();
usleep(1000);
}
}
void ActionDemo::parseActionScript(const std::string &path) {
YAML::Node doc;
try {
// load yaml
doc = YAML::LoadFile(path.c_str());
} catch (const std::exception &e) {
ROS_ERROR_STREAM("Fail to load action script yaml. - " << e.what());
ROS_ERROR_STREAM("Script Path : " << path);
return;
}
// parse action_sound table
YAML::Node sub_node = doc["action_and_sound"];
for (YAML::iterator yaml_it = sub_node.begin(); yaml_it != sub_node.end();
++yaml_it) {
int action_index = yaml_it->first.as<int>();
std::string mp3_path = yaml_it->second.as<std::string>();
action_sound_table_[action_index] = mp3_path;
}
// default action set
if (doc["default"])
play_list_ = doc["default"].as<std::vector<int>>();
}
bool ActionDemo::parseActionScriptSetName(const std::string &path,
const std::string &set_name) {
YAML::Node doc;
try {
// load yaml
doc = YAML::LoadFile(path.c_str());
} catch (const std::exception &e) {
ROS_ERROR("Fail to load yaml.");
return false;
}
// parse action_sound table
if (doc[set_name]) {
play_list_ = doc[set_name].as<std::vector<int>>();
return true;
} else
return false;
}
bool ActionDemo::playActionWithSound(int motion_index) {
std::map<int, std::string>::iterator map_it =
action_sound_table_.find(motion_index);
if (map_it == action_sound_table_.end())
return false;
playAction(motion_index);
playMP3(map_it->second);
ROS_INFO_STREAM_COND(DEBUG_PRINT,
"action : " << motion_index
<< ", mp3 path : " << map_it->second);
return true;
}
void ActionDemo::playMP3(std::string &path) {
std_msgs::String sound_msg;
sound_msg.data = path;
play_sound_pub_.publish(sound_msg);
}
void ActionDemo::stopMP3() {
std_msgs::String sound_msg;
sound_msg.data = "";
play_sound_pub_.publish(sound_msg);
}
void ActionDemo::playAction(int motion_index) {
std_msgs::Int32 motion_msg;
motion_msg.data = motion_index;
motion_index_pub_.publish(motion_msg);
}
void ActionDemo::stopAction() {
std_msgs::Int32 motion_msg;
motion_msg.data = StopActionCommand;
motion_index_pub_.publish(motion_msg);
}
void ActionDemo::brakeAction() {
std_msgs::Int32 motion_msg;
motion_msg.data = BrakeActionCommand;
motion_index_pub_.publish(motion_msg);
}
// check running of action
bool ActionDemo::isActionRunning() {
humanoid_robot_intelligence_control_system_action_module_msgs::IsRunning is_running_srv;
if (is_running_client_.call(is_running_srv) == false) {
ROS_ERROR("Failed to get action status");
return true;
} else {
if (is_running_srv.response.is_running == true) {
return true;
}
}
return false;
}
void ActionDemo::buttonHandlerCallback(const std_msgs::String::ConstPtr &msg) {
if (enable_ == false)
return;
if (msg->data == "start") {
switch (play_status_) {
case PlayAction: {
pauseProcess();
break;
}
case PauseAction: {
resumeProcess();
break;
}
case StopAction: {
resumeProcess();
break;
}
default:
break;
}
} else if (msg->data == "mode") {
}
}
void ActionDemo::setModuleToDemo(const std::string &module_name) {
callServiceSettingModule(module_name);
ROS_INFO_STREAM("enable module : " << module_name);
}
void ActionDemo::callServiceSettingModule(const std::string &module_name) {
humanoid_robot_intelligence_control_system_controller_msgs::SetModule set_module_srv;
set_module_srv.request.module_name = module_name;
if (set_joint_module_client_.call(set_module_srv) == false) {
ROS_ERROR("Failed to set module");
return;
}
return;
}
void ActionDemo::demoCommandCallback(const std_msgs::String::ConstPtr &msg) {
if (enable_ == false)
return;
if (msg->data == "start") {
resumeProcess();
} else if (msg->data == "stop") {
pauseProcess();
}
}
} /* namespace humanoid_robot_intelligence_control_system_op */
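Although this commit removes the file, the control interface it implements is small: play state is driven by std_msgs/String messages on /humanoid_robot_intelligence_control_system/demo_command ("start" resumes, "stop" pauses) and by the OpenCR button topic, and commands only take effect while the demo is enabled. A minimal ROS 1 rospy sketch for exercising that command topic (the node name and timing are illustrative assumptions):

#!/usr/bin/env python3
# Minimal sketch: drive the action demo through its command topic.
import rospy
from std_msgs.msg import String

rospy.init_node("action_demo_commander")
pub = rospy.Publisher(
    "/humanoid_robot_intelligence_control_system/demo_command", String, queue_size=1)
rospy.sleep(1.0)                     # let the subscriber connect
pub.publish(String(data="start"))    # demoCommandCallback -> resumeProcess()
rospy.sleep(10.0)                    # let a few actions play
pub.publish(String(data="stop"))     # demoCommandCallback -> pauseProcess()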