Merge pull request #25 from RonaldsonBellande/main

latest pushes
Ronaldson Bellande 2024-07-22 16:58:37 -04:00 committed by GitHub
commit df86aaa7ee
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 766 additions and 1836 deletions

90
.github/workflows/clone.yml vendored Normal file
@@ -0,0 +1,90 @@
name: GitHub Clone Count Update Everyday

on:
  schedule:
    - cron: "0 */24 * * *"
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2

      - name: gh login
        run: echo "${{ secrets.SECRET_TOKEN }}" | gh auth login --with-token

      - name: parse latest clone count
        run: |
          curl --user "${{ github.actor }}:${{ secrets.SECRET_TOKEN }}" \
            -H "Accept: application/vnd.github.v3+json" \
            https://api.github.com/repos/${{ github.repository }}/traffic/clones \
            > clone.json

      - name: create gist and download previous count
        id: set_id
        run: |
          if gh secret list | grep -q "GIST_ID"
          then
            echo "GIST_ID found"
            echo "GIST=${{ secrets.GIST_ID }}" >> $GITHUB_OUTPUT
            curl https://gist.githubusercontent.com/${{ github.actor }}/${{ secrets.GIST_ID }}/raw/clone.json > clone_before.json
            if cat clone_before.json | grep '404: Not Found'; then
              echo "GIST_ID not valid anymore. Creating another gist..."
              gist_id=$(gh gist create clone.json | awk -F / '{print $NF}')
              echo $gist_id | gh secret set GIST_ID
              echo "GIST=$gist_id" >> $GITHUB_OUTPUT
              cp clone.json clone_before.json
              git rm --ignore-unmatch CLONE.md
            fi
          else
            echo "GIST_ID not found. Creating a gist..."
            gist_id=$(gh gist create clone.json | awk -F / '{print $NF}')
            echo $gist_id | gh secret set GIST_ID
            echo "GIST=$gist_id" >> $GITHUB_OUTPUT
            cp clone.json clone_before.json
          fi

      - name: update clone.json
        run: |
          curl https://raw.githubusercontent.com/MShawon/github-clone-count-badge/master/main.py > main.py
          python3 main.py

      - name: Update gist with latest count
        run: |
          content=$(sed -e 's/\\/\\\\/g' -e 's/\t/\\t/g' -e 's/\"/\\"/g' -e 's/\r//g' "clone.json" | sed -E ':a;N;$!ba;s/\r{0,1}\n/\\n/g')
          echo '{"description": "${{ github.repository }} clone statistics", "files": {"clone.json": {"content": "'"$content"'"}}}' > post_clone.json
          curl -s -X PATCH \
            --user "${{ github.actor }}:${{ secrets.SECRET_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d @post_clone.json https://api.github.com/gists/${{ steps.set_id.outputs.GIST }} > /dev/null 2>&1
          if [ ! -f CLONE.md ]; then
            shields="https://img.shields.io/badge/dynamic/json?color=success&label=Clone&query=count&url="
            url="https://gist.githubusercontent.com/${{ github.actor }}/${{ steps.set_id.outputs.GIST }}/raw/clone.json"
            repo="https://github.com/MShawon/github-clone-count-badge"
            echo ''> CLONE.md
            echo '
          **Markdown**
          ```markdown' >> CLONE.md
            echo "[![GitHub Clones]($shields$url&logo=github)]($repo)" >> CLONE.md
            echo '
          ```
          **HTML**
          ```html' >> CLONE.md
            echo "<a href='$repo'><img alt='GitHub Clones' src='$shields$url&logo=github'></a>" >> CLONE.md
            echo '```' >> CLONE.md
            git add CLONE.md
            git config --global user.name "GitHub Action"
            git config --global user.email "action@github.com"
            git commit -m "create clone count badge"
          fi

      - name: Push
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
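The "parse latest clone count" step above stores the raw response of GitHub's /traffic/clones API, which has the shape {"count": ..., "uniques": ..., "clones": [{"timestamp": ..., "count": ..., "uniques": ...}, ...]} and only covers the last 14 days; the downloaded main.py presumably merges it with the previous gist snapshot so the badge keeps a running total. A minimal Python sketch of that merge idea, under those assumptions (this is an illustration, not the actual MShawon main.py):

```python
# Illustrative sketch only: the workflow downloads the real main.py from
# MShawon/github-clone-count-badge. Field names follow the GitHub
# /traffic/clones API, which returns at most the last 14 days of data.
import json
import os

def merge_clone_counts(before_path="clone_before.json", latest_path="clone.json"):
    with open(before_path) as f:
        before = json.load(f)
    with open(latest_path) as f:
        latest = json.load(f)

    # Index previously stored per-day entries by timestamp, then overwrite
    # or append the fresh 14-day window from the API.
    merged = {entry["timestamp"]: entry for entry in before.get("clones", [])}
    for entry in latest.get("clones", []):
        merged[entry["timestamp"]] = entry

    days = sorted(merged.values(), key=lambda e: e["timestamp"])
    totals = {
        "count": sum(e["count"] for e in days),       # what the shields.io badge displays
        "uniques": sum(e["uniques"] for e in days),
        "clones": days,
    }
    with open(latest_path, "w") as f:
        json.dump(totals, f, indent=4)

if __name__ == "__main__":
    if os.path.exists("clone_before.json") and os.path.exists("clone.json"):
        merge_clone_counts()
```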

@@ -1,30 +0,0 @@
# This config file for Travis CI utilizes the ros-industrial/industrial_ci package.
# For more info on the package, see https://github.com/ros-industrial/industrial_ci/blob/master/README.rst
sudo: required
dist: trusty
services:
  - docker
language: generic
python:
  - "3.8"
compiler:
  - gcc
notifications:
  email:
    on_success: change
    on_failure: always
    recipients:
      - ronaldsonbellande@gmail.com
env:
  matrix:
    - ROS_DISTRO=noetic ROS_REPO=ros-shadow-fixed UPSTREAM_WORKSPACE=file OS_NAME=ubuntu OS_CODE_NAME=focal ROSINSTALL_FILENAME=".humanoid_robot_intelligence_control_system_demo.rosinstall"
branches:
  only:
    - master
    - noetic
install:
  - git clone https://github.com/ros-industrial/industrial_ci.git .ci_config
script:
  - source .ci_config/travis.sh

3
git_scripts/.gitignore vendored Normal file
@@ -0,0 +1,3 @@
fix_errors.sh
push.sh
repository_recal.sh

@@ -1,34 +0,0 @@
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Changelog for package humanoid_robot_intelligence_control_system_ball_detector
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
0.3.2 (2023-10-03)
------------------
* Make it compatible for ROS1/ROS2
* Fix bugs
* Update package.xml and CMakeLists.txt to the latest versions
* Contributors & Maintainer: Ronaldson Bellande
0.3.1 (2023-09-27)
------------------
* Starting from this point it is under a new license
* Fix errors and Issues
* Rename Repository for a completely different purpose
* Make it compatible with ROS/ROS2
* Upgrade version of all builds and make it more compatible
* Update package.xml and CMakeLists.txt to the latest versions
* Contributors & Maintainer: Ronaldson Bellande
0.3.0 (2021-05-05)
------------------
* Update package.xml and CMakeList.txt for noetic branch
* Contributors: Ronaldson Bellande
0.1.0 (2018-04-19)
------------------
* first release for ROS Kinetic
* added launch files in order to move the camera setting to humanoid_robot_intelligence_control_system_camera_setting package
* added missing package in find_package()
* refactoring to release
* split repository from ROBOTIS-HUMANOID_ROBOT
* Contributors: Kayman, Zerom, Pyo

@@ -1,133 +0,0 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
cmake_minimum_required(VERSION 3.8)
project(humanoid_robot_intelligence_control_system_ball_detector)
if($ENV{ROS_VERSION} EQUAL 1)
find_package(
catkin REQUIRED COMPONENTS
roscpp
roslib
std_msgs
sensor_msgs
geometry_msgs
dynamic_reconfigure
cv_bridge
image_transport
message_generation
)
find_package(Boost REQUIRED COMPONENTS thread)
find_package(OpenCV 4.2 REQUIRED)
find_package(PkgConfig REQUIRED)
else()
find_package(ament_cmake REQUIRED)
endif()
pkg_check_modules(YAML_CPP REQUIRED yaml-cpp)
find_path(
YAML_CPP_INCLUDE_DIR
NAMES yaml_cpp.h
PATHS ${YAML_CPP_INCLUDE_DIRS}
)
find_library(
YAML_CPP_LIBRARY
NAMES YAML_CPP
PATHS ${YAML_CPP_LIBRARY_DIRS}
)
link_directories(${YAML_CPP_LIBRARY_DIRS})
if(NOT ${YAML_CPP_VERSION} VERSION_LESS "0.5")
add_definitions(-DHAVE_NEW_YAMLCPP)
endif(NOT ${YAML_CPP_VERSION} VERSION_LESS "0.5")
add_message_files(
FILES
CircleSetStamped.msg
BallDetectorParams.msg
)
add_service_files(
FILES
GetParameters.srv
SetParameters.srv
)
generate_messages(
DEPENDENCIES
std_msgs
geometry_msgs
)
generate_dynamic_reconfigure_options(cfg/DetectorParams.cfg)
if($ENV{ROS_VERSION} EQUAL 1)
catkin_package(
INCLUDE_DIRS include
CATKIN_DEPENDS
roscpp
roslib
std_msgs
sensor_msgs
geometry_msgs
dynamic_reconfigure
cv_bridge
image_transport
message_runtime
DEPENDS Boost OpenCV
)
endif()
include_directories(
include
${catkin_INCLUDE_DIRS}
${Boost_INCLUDE_DIRS}
${OpenCV_INCLUDE_DIRS}
${YAML_CPP_INCLUDE_DIRS}
)
add_executable(
ball_detector_node
src/ball_detector.cpp
src/ball_detector_node.cpp
)
add_dependencies(ball_detector_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
target_link_libraries(
ball_detector_node
${catkin_LIBRARIES}
${Boost_LIBRARIES}
${OpenCV_LIBRARIES}
${YAML_CPP_LIBRARIES}
)
install(
TARGETS ball_detector_node
RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
install(
DIRECTORY include/${PROJECT_NAME}/
DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
)
install(
DIRECTORY config launch rviz
DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)

@@ -1,33 +0,0 @@
#!/usr/bin/env python
PACKAGE='humanoid_robot_intelligence_control_system_ball_detector'
from dynamic_reconfigure.parameter_generator_catkin import *
gen = ParameterGenerator()
# Name Type Reconfiguration level Description Default Min Max
gen.add("gaussian_blur_size",int_t , -1, "Size of Gaussian Blur Kernel (odd value!)", 7, 1, 11)
gen.add("gaussian_blur_sigma",double_t , -1, "Std deviation of Gaussian Blur Kernel", 2, 1, 5)
gen.add("canny_edge_th",double_t , -1, "Threshold of the edge detector", 50, 50, 200)
gen.add("hough_accum_resolution",double_t , -1, "Resolution of the Hough accumulator, in terms of inverse ratio of image resolution", 2, 1, 8)
gen.add("min_circle_dist",double_t , -1, "Minimum distance between circles", 40, 10, 200)
gen.add("hough_accum_th",double_t , -1, "Accumulator threshold to decide circle detection", 15, 10, 200)
gen.add("min_radius",int_t , -1, "Minimum circle radius allowed", 20, 10, 200)
gen.add("max_radius",int_t , -1, "Maximum circle radius allowed", 150, 100, 600)
gen.add("filter_h_min",int_t , -1, "Threshold of H filter", 180, 0, 359)
gen.add("filter_h_max",int_t , -1, "Threshold of H filter", 245, 0, 359)
gen.add("filter_s_min",int_t , -1, "Threshold of S filter", 200, 0, 255)
gen.add("filter_s_max",int_t , -1, "Threshold of S filter", 255, 0, 255)
gen.add("filter_v_min",int_t , -1, "Threshold of V filter", 50, 0, 255)
gen.add("filter_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("use_second_filter", bool_t, 0, "Use second filter", False)
gen.add("filter2_h_min",int_t , -1, "Threshold of H filter", 160, 0, 359)
gen.add("filter2_h_max",int_t , -1, "Threshold of H filter", 255, 0, 359)
gen.add("filter2_s_min",int_t , -1, "Threshold of S filter", 0, 0, 255)
gen.add("filter2_s_max",int_t , -1, "Threshold of S filter", 55, 0, 255)
gen.add("filter2_v_min",int_t , -1, "Threshold of V filter", 180, 0, 255)
gen.add("filter2_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("ellipse_size",int_t , -1, "Ellipse size", 2, 1, 9)
gen.add("debug_image", bool_t, 0, "Show filtered image to debug", False)
exit(gen.generate(PACKAGE, "ball_detector_node", "DetectorParams"))
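Since these parameters are generated for dynamic_reconfigure, they can be adjusted while the detector is running. A small sketch using the standard dynamic_reconfigure Python client; the node name ball_detector_node is taken from the generate() call above and may differ depending on how the node is launched:

```python
#!/usr/bin/env python
# Sketch: tune the ball detector at runtime via dynamic_reconfigure.
# Assumes the detector runs as "ball_detector_node" (see the generate() call above).
import rospy
from dynamic_reconfigure.client import Client

if __name__ == "__main__":
    rospy.init_node("detector_param_tuner")
    client = Client("ball_detector_node", timeout=5)

    # Request a tighter Hough radius window and a narrower hue band.
    updated = client.update_configuration({
        "min_radius": 30,
        "max_radius": 200,
        "filter_h_min": 200,
        "filter_h_max": 250,
    })
    rospy.loginfo("Detector now running with: %s", updated)
```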

@@ -1,33 -0,0 @@
#!/usr/bin/env python
PACKAGE='humanoid_robot_intelligence_control_system_ball_detector'
from dynamic_reconfigure.parameter_generator_catkin import *
gen = ParameterGenerator()
# Name Type Reconfiguration level Description Default Min Max
gen.add("gaussian_blur_size",int_t , -1, "Size of Gaussian Blur Kernel (odd value!)", 7, 1, 11)
gen.add("gaussian_blur_sigma",double_t , -1, "Std deviation of Gaussian Blur Kernel", 2, 1, 5)
gen.add("canny_edge_th",double_t , -1, "Threshold of the edge detector", 50, 50, 200)
gen.add("hough_accum_resolution",double_t , -1, "Resolution of the Hough accumulator, in terms of inverse ratio of image resolution", 2, 1, 8)
gen.add("min_circle_dist",double_t , -1, "Minimum distance between circles", 40, 10, 200)
gen.add("hough_accum_th",double_t , -1, "Accumulator threshold to decide circle detection", 15, 10, 200)
gen.add("min_radius",int_t , -1, "Minimum circle radius allowed", 20, 10, 200)
gen.add("max_radius",int_t , -1, "Maximum circle radius allowed", 150, 100, 600)
gen.add("filter_h_min",int_t , -1, "Threshold of H filter", 180, 0, 359)
gen.add("filter_h_max",int_t , -1, "Threshold of H filter", 245, 0, 359)
gen.add("filter_s_min",int_t , -1, "Threshold of S filter", 200, 0, 255)
gen.add("filter_s_max",int_t , -1, "Threshold of S filter", 255, 0, 255)
gen.add("filter_v_min",int_t , -1, "Threshold of V filter", 50, 0, 255)
gen.add("filter_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("use_second_filter", bool_t, 0, "Use second filter", False)
gen.add("filter2_h_min",int_t , -1, "Threshold of H filter", 160, 0, 359)
gen.add("filter2_h_max",int_t , -1, "Threshold of H filter", 255, 0, 359)
gen.add("filter2_s_min",int_t , -1, "Threshold of S filter", 0, 0, 255)
gen.add("filter2_s_max",int_t , -1, "Threshold of S filter", 55, 0, 255)
gen.add("filter2_v_min",int_t , -1, "Threshold of V filter", 180, 0, 255)
gen.add("filter2_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("ellipse_size",int_t , -1, "Ellipse size", 2, 1, 9)
gen.add("debug_image", bool_t, 0, "Show filtered image to debug", False)
exit(gen.generate(PACKAGE, "ball_detector_node", "DetectorParamsBlue"))

@@ -1,33 +0,0 @@
#!/usr/bin/env python
PACKAGE='humanoid_robot_intelligence_control_system_ball_detector'
from dynamic_reconfigure.parameter_generator_catkin import *
gen = ParameterGenerator()
# Name Type Reconfiguration level Description Default Min Max
gen.add("gaussian_blur_size",int_t , -1, "Size of Gaussian Blur Kernel (odd value!)", 7, 1, 11)
gen.add("gaussian_blur_sigma",double_t , -1, "Std deviation of Gaussian Blur Kernel", 2, 1, 5)
gen.add("canny_edge_th",double_t , -1, "Threshold of the edge detector", 50, 50, 200)
gen.add("hough_accum_resolution",double_t , -1, "Resolution of the Hough accumulator, in terms of inverse ratio of image resolution", 2, 1, 8)
gen.add("min_circle_dist",double_t , -1, "Minimum distance between circles", 40, 10, 200)
gen.add("hough_accum_th",double_t , -1, "Accumulator threshold to decide circle detection", 15, 10, 200)
gen.add("min_radius",int_t , -1, "Minimum circle radius allowed", 20, 10, 200)
gen.add("max_radius",int_t , -1, "Maximum circle radius allowed", 150, 100, 600)
gen.add("filter_h_min",int_t , -1, "Threshold of H filter", 330, 0, 359)
gen.add("filter_h_max",int_t , -1, "Threshold of H filter", 30, 0, 359)
gen.add("filter_s_min",int_t , -1, "Threshold of S filter", 128, 0, 255)
gen.add("filter_s_max",int_t , -1, "Threshold of S filter", 255, 0, 255)
gen.add("filter_v_min",int_t , -1, "Threshold of V filter", 128, 0, 255)
gen.add("filter_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("use_second_filter", bool_t, 0, "Use second filter", False)
gen.add("filter2_h_min",int_t , -1, "Threshold of H filter", 160, 0, 359)
gen.add("filter2_h_max",int_t , -1, "Threshold of H filter", 255, 0, 359)
gen.add("filter2_s_min",int_t , -1, "Threshold of S filter", 0, 0, 255)
gen.add("filter2_s_max",int_t , -1, "Threshold of S filter", 55, 0, 255)
gen.add("filter2_v_min",int_t , -1, "Threshold of V filter", 180, 0, 255)
gen.add("filter2_v_max",int_t , -1, "Threshold of V filter", 255, 0, 255)
gen.add("ellipse_size",int_t , -1, "Ellipse size", 5, 1, 9)
gen.add("debug_image", bool_t, 0, "Show filtered image to debug", False)
exit(gen.generate(PACKAGE, "ball_detector_node", "DetectorParamsRed"))

@@ -1,23 +0,0 @@
gaussian_blur_size: 7
gaussian_blur_sigma: 2
canny_edge_th: 100
hough_accum_resolution: 1
min_circle_dist: 100
hough_accum_th: 28
min_radius: 20
max_radius: 300
filter_h_min: 355
filter_h_max: 25
filter_s_min: 220
filter_s_max: 255
filter_v_min: 80
filter_v_max: 255
use_second_filter: false
filter2_h_min: 30
filter2_h_max: 355
filter2_s_min: 0
filter2_s_max: 40
filter2_v_min: 200
filter2_v_max: 255
ellipse_size: 2
filter_debug: false

@@ -1,23 +0,0 @@
gaussian_blur_size: 7
gaussian_blur_sigma: 2
canny_edge_th: 100
hough_accum_resolution: 1
min_circle_dist: 100
hough_accum_th: 28
min_radius: 20
max_radius: 300
filter_h_min: 350
filter_h_max: 15
filter_s_min: 200
filter_s_max: 255
filter_v_min: 60
filter_v_max: 255
use_second_filter: false
filter2_h_min: 30
filter2_h_max: 355
filter2_s_min: 0
filter2_s_max: 40
filter2_v_min: 200
filter2_v_max: 255
ellipse_size: 2
filter_debug: false

@@ -1,23 +0,0 @@
gaussian_blur_size: 7
gaussian_blur_sigma: 2.52
canny_edge_th: 100.5
hough_accum_resolution: 1
min_circle_dist: 28.5
hough_accum_th: 26.6
min_radius: 25
max_radius: 150
filter_h_min: 350
filter_h_max: 20
filter_s_min: 90
filter_s_max: 255
filter_v_min: 86
filter_v_max: 255
use_second_filter: true
filter2_h_min: 30
filter2_h_max: 355
filter2_s_min: 0
filter2_s_max: 40
filter2_v_min: 200
filter2_v_max: 255
ellipse_size: 1
filter_debug: false
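Note that filter_h_min (350) is larger than filter_h_max (20) here: red sits on the hue wrap-around, which the detector's inRangeHsv() (in the BallDetector source at the end of this diff) handles by halving hue into OpenCV's 0-179 range and OR-ing two cv::inRange masks. A standalone OpenCV sketch of the same idea, assuming an RGB input image:

```python
# Sketch of the wrap-around HSV threshold used by the detector, in Python.
# Hue is given in degrees (0-359) as in the YAML files above and halved to
# OpenCV's 0-179 hue scale, mirroring inRangeHsv() in the BallDetector source.
import cv2
import numpy as np

def in_range_hsv(rgb_img, h_min, h_max, s_min, s_max, v_min, v_max):
    hsv = cv2.cvtColor(rgb_img, cv2.COLOR_RGB2HSV)
    h_lo, h_hi = h_min // 2, h_max // 2
    if h_lo <= h_hi:
        return cv2.inRange(hsv, (h_lo, s_min, v_min), (h_hi, s_max, v_max))
    # Wrap-around (e.g. red: 350-20 degrees): combine [0, h_hi] and [h_lo, 179].
    lower = cv2.inRange(hsv, (0, s_min, v_min), (h_hi, s_max, v_max))
    upper = cv2.inRange(hsv, (h_lo, s_min, v_min), (179, s_max, v_max))
    return cv2.bitwise_or(lower, upper)

if __name__ == "__main__":
    img = np.zeros((120, 160, 3), dtype=np.uint8)
    img[:, :] = (200, 30, 30)  # reddish RGB patch
    mask = in_range_hsv(img, 350, 20, 90, 255, 86, 255)
    print("red pixels:", int(cv2.countNonZero(mask)))
```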

@@ -1,169 +0,0 @@
/*******************************************************************************
* Copyright 2017 ROBOTIS CO., LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* Author: Kayman Jung */
#ifndef _BALL_DETECTOR_H_
#define _BALL_DETECTOR_H_
#include <string>
#include <cv_bridge/cv_bridge.h>
#include <dynamic_reconfigure/server.h>
#include <image_transport/image_transport.h>
#include <ros/package.h>
#include <ros/ros.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/image_encodings.h>
#include <std_msgs/Bool.h>
#include <std_msgs/String.h>
#include <boost/thread.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <yaml-cpp/yaml.h>
#include "humanoid_robot_intelligence_control_system_ball_detector/ball_detector_config.h"
#include "humanoid_robot_intelligence_control_system_ball_detector/CircleSetStamped.h"
#include "humanoid_robot_intelligence_control_system_ball_detector/DetectorParamsConfig.h"
#include "humanoid_robot_intelligence_control_system_ball_detector/GetParameters.h"
#include "humanoid_robot_intelligence_control_system_ball_detector/SetParameters.h"
namespace humanoid_robot_intelligence_control_system_op {
class BallDetector {
public:
BallDetector();
~BallDetector();
// checks if a new image has been received
bool newImage();
// execute circle detection with the current image
void process();
// publish the output image (input image + marked circles)
void publishImage();
// publish the circle set data
void publishCircles();
protected:
const static int NOT_FOUND_TH = 30;
// callbacks to image subscription
void imageCallback(const sensor_msgs::ImageConstPtr &msg);
// callbacks to camera info subscription
void cameraInfoCallback(const sensor_msgs::CameraInfo &msg);
void dynParamCallback(humanoid_robot_intelligence_control_system_ball_detector::DetectorParamsConfig &config,
uint32_t level);
void enableCallback(const std_msgs::Bool::ConstPtr &msg);
void paramCommandCallback(const std_msgs::String::ConstPtr &msg);
bool setParamCallback(humanoid_robot_intelligence_control_system_ball_detector::SetParameters::Request &req,
humanoid_robot_intelligence_control_system_ball_detector::SetParameters::Response &res);
bool getParamCallback(humanoid_robot_intelligence_control_system_ball_detector::GetParameters::Request &req,
humanoid_robot_intelligence_control_system_ball_detector::GetParameters::Response &res);
void resetParameter();
void publishParam();
void printConfig();
void saveConfig();
void setInputImage(const cv::Mat &inIm);
void setInputImage(const cv::Mat &inIm, cv::Mat &in_filter_img);
void getOutputImage(cv::Mat &outIm);
void filterImage();
void filterImage(const cv::Mat &in_filter_img, cv::Mat &out_filter_img);
void makeFilterMask(const cv::Mat &source_img, cv::Mat &mask_img, int range);
void makeFilterMaskFromBall(const cv::Mat &source_img, cv::Mat &mask_img);
void inRangeHsv(const cv::Mat &input_img, const HsvFilter &filter_value,
cv::Mat &output_img);
void mophology(const cv::Mat &intput_img, cv::Mat &output_img,
int ellipse_size);
void houghDetection(const unsigned int imgEncoding);
void houghDetection2(const cv::Mat &input_hough);
void drawOutputImage();
// ros node handle
ros::NodeHandle nh_;
ros::Subscriber enable_sub_;
// image publisher/subscriber
image_transport::ImageTransport it_;
image_transport::Publisher image_pub_;
cv_bridge::CvImage cv_img_pub_;
image_transport::Subscriber image_sub_;
cv_bridge::CvImagePtr cv_img_ptr_sub_;
bool enable_;
bool init_param_;
int not_found_count_;
// circle set publisher
humanoid_robot_intelligence_control_system_ball_detector::CircleSetStamped circles_msg_;
ros::Publisher circles_pub_;
// camera info subscriber
sensor_msgs::CameraInfo camera_info_msg_;
ros::Subscriber camera_info_sub_;
ros::Publisher camera_info_pub_;
// dynamic reconfigure
DetectorConfig params_config_;
std::string param_path_;
bool has_path_;
// web setting
std::string default_setting_path_;
ros::Publisher param_pub_;
ros::Subscriber param_command_sub_;
ros::ServiceServer get_param_client_;
ros::ServiceServer set_param_client_;
// flag indicating a new image has been received
bool new_image_flag_;
// image time stamp and frame id
ros::Time sub_time_;
std::string image_frame_id_;
// img encoding id
unsigned int img_encoding_;
/** \brief Set of detected circles
*
* Detected circles. For a circle i:
* x_i: circles[i][0]
* y_i: circles[i][1]
* radius_i: circles[i][2]
*
**/
std::vector<cv::Vec3f> circles_;
cv::Mat in_image_;
cv::Mat out_image_;
dynamic_reconfigure::Server<humanoid_robot_intelligence_control_system_ball_detector::DetectorParamsConfig>
param_server_;
dynamic_reconfigure::Server<
humanoid_robot_intelligence_control_system_ball_detector::DetectorParamsConfig>::CallbackType callback_fnc_;
};
} // namespace humanoid_robot_intelligence_control_system_op
#endif // _BALL_DETECTOR_H_
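The header above also documents the node's runtime interface: a std_msgs/Bool "enable" topic gates processing, and a std_msgs/String "param_command" topic accepts "debug", "normal", and "reset" (see paramCommandCallback in the source further below). A short rospy sketch of driving that interface; the /ball_detector_node namespace is an assumption based on the default node name:

```python
#!/usr/bin/env python
# Sketch: toggle the detector and switch it into debug mode from another node.
# Topic names come from the header above; the /ball_detector_node namespace is an assumption.
import rospy
from std_msgs.msg import Bool, String

if __name__ == "__main__":
    rospy.init_node("ball_detector_commander")
    enable_pub = rospy.Publisher("/ball_detector_node/enable", Bool, queue_size=1)
    command_pub = rospy.Publisher("/ball_detector_node/param_command", String, queue_size=1)
    rospy.sleep(1.0)  # give the connections time to establish

    enable_pub.publish(Bool(data=True))        # start processing incoming images
    command_pub.publish(String(data="debug"))  # publish the filtered mask instead of the overlay
    # command_pub.publish(String(data="reset"))  # reload ball_detector_params_default.yaml
```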

@@ -1,90 +0,0 @@
/*******************************************************************************
* Copyright 2017 ROBOTIS CO., LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* Author: Kayman Jung */
#ifndef _DETECTOR_CONFIG_H_
#define _DETECTOR_CONFIG_H_
namespace humanoid_robot_intelligence_control_system_op {
// constants
const int GAUSSIAN_BLUR_SIZE_DEFAULT = 7;
const double GAUSSIAN_BLUR_SIGMA_DEFAULT = 2;
const double CANNY_EDGE_TH_DEFAULT = 130;
const double HOUGH_ACCUM_RESOLUTION_DEFAULT = 2;
const double MIN_CIRCLE_DIST_DEFAULT = 30;
const double HOUGH_ACCUM_TH_DEFAULT = 120;
const int MIN_RADIUS_DEFAULT = 30;
const int MAX_RADIUS_DEFAULT = 400;
const unsigned int IMG_MONO = 0;
const unsigned int IMG_RGB8 = 1;
const int FILTER_RANGE_DEFAULT_MIN = 160;
const int FILTER_RANGE_DEFAULT_MAX = 255;
const int FILTER_H_MIN_DEFAULT = 0;
const int FILTER_H_MAX_DEFAULT = 30;
const int FILTER_S_MIN_DEFAULT = 0;
const int FILTER_S_MAX_DEFAULT = 255;
const int FILTER_V_MIN_DEFAULT = 0;
const int FILTER_V_MAX_DEFAULT = 255;
const int ELLIPSE_SIZE = 5;
class HsvFilter {
public:
HsvFilter()
: h_min(FILTER_H_MIN_DEFAULT), h_max(FILTER_H_MAX_DEFAULT),
s_min(FILTER_S_MIN_DEFAULT), s_max(FILTER_S_MAX_DEFAULT),
v_min(FILTER_V_MIN_DEFAULT), v_max(FILTER_V_MAX_DEFAULT) {}
int h_min;
int h_max;
int s_min;
int s_max;
int v_min;
int v_max;
};
class DetectorConfig {
public:
int gaussian_blur_size; // size of gaussian blur kernel mask [pixel]
double gaussian_blur_sigma; // sigma of gaussian blur kernel mask [pixel]
double canny_edge_th; // threshold of the edge detector.
double hough_accum_resolution; // resolution of the Hough accumulator, in
// terms of inverse ratio of image resolution
double min_circle_dist; // Minimum distance between circles
double hough_accum_th; // accumulator threshold to decide circle detection
int min_radius; // minimum circle radius allowed
int max_radius; // maximum circle radius allowed
HsvFilter filter_threshold; // filter threshold
bool use_second_filter;
HsvFilter filter2_threshold; // filter threshold
int ellipse_size;
bool debug; // to debug log
DetectorConfig()
: canny_edge_th(CANNY_EDGE_TH_DEFAULT),
hough_accum_resolution(HOUGH_ACCUM_RESOLUTION_DEFAULT),
min_circle_dist(MIN_CIRCLE_DIST_DEFAULT),
hough_accum_th(HOUGH_ACCUM_TH_DEFAULT), min_radius(MIN_RADIUS_DEFAULT),
max_radius(MAX_RADIUS_DEFAULT), filter_threshold(),
use_second_filter(false), filter2_threshold(),
ellipse_size(ELLIPSE_SIZE), debug(false) {}
~DetectorConfig() {}
};
} // namespace humanoid_robot_intelligence_control_system_op
#endif // _DETECTOR_CONFIG_H_

@@ -1,27 +0,0 @@
<?xml version="1.0"?>
<!-- Launches a UVC camera, the ball detector and its visualization -->
<launch>
<!-- UVC camera -->
<node pkg="usb_cam" type="usb_cam_node" name="usb_cam_node" output="screen">
<param name="video_device" type="string" value="/dev/video0" />
<param name="image_width" type="int" value="1280" />
<param name="image_height" type="int" value="720" />
<param name="framerate " type="int" value="30" />
<param name="camera_frame_id" type="string" value="cam_link" />
<param name="camera_name" type="string" value="camera" />
<!-- <param name="autofocus" type="bool" value="False" /> -->
<!-- <param name="autoexposure" type="bool" value="False" /> -->
<!-- <param name="auto_white_balance" type="bool" value="False" /> -->
<!-- <param name="gain" value="255" /> -->
<!-- <param name="brightness" value="64" /> -->
<!-- <param name="exposure" value="80" /> -->
<!-- <param name="auto_exposure" type="bool" value="False" /> -->
<!-- <param name="exposure_absolute" value="1000" /> -->
<!-- <param name="auto_white_balance" type="bool" value="False" /> -->
<!-- <param name="white_balance_temperature" value="2800" /> -->
<!-- <param name="camera_info_url" type="string" value="file://$(find ar_pose)/data/camera_1280720.yaml" /> -->
</node>
<!-- ball detector -->
<include file="$(find humanoid_robot_intelligence_control_system_ball_detector)/launch/ball_detector.launch" />
</launch>

@@ -1,32 +0,0 @@
<?xml version="1.0"?>
<!-- Launches a UVC camera, the ball detector and its visualization -->
<launch>
<!-- UVC camera -->
<node pkg="uvc_camera" type="uvc_camera_node" name="uvc_camera_node" output="screen">
<param name="device" type="string" value="/dev/video0" />
<param name="width" type="int" value="800" />
<param name="height" type="int" value="600" />
<param name="fps" type="int" value="30" />
<param name="auto_gain" value="false" />
<param name="gain" type="int" value="120" />
<param name="exposure" value="100" />
</node>
<!-- <param name="gain" value="255" /> -->
<!-- <param name="auto_exposure" type="bool" value="False" /> -->
<!-- <param name="exposure_absolute" value="1000" /> -->
<!-- <param name="brightness" value="127" /> -->
<!-- <param name="auto_white_balance" type="bool" value="False" /> -->
<!-- <param name="white_balance_temperature" value="2800" /> -->
<!-- <param name="auto_exposure" type="bool" value="False" /> -->
<!-- <param name="exposure_absolute" value="1000" /> -->
<!-- <param name="brightness" value="64" /> -->
<!-- <param name="auto_white_balance" type="bool" value="False" /> -->
<!-- <param name="white_balance_temperature" value="2800" /> -->
<!-- ball detector -->
<include file="$(find humanoid_robot_intelligence_control_system_ball_detector)/launch/ball_detector.launch" />
</launch>

@@ -1,17 +0,0 @@
# This represents the parameters of ball_detector
uint32 gaussian_blur_size # only odd number, 1 - 11
float32 gaussian_blur_sigma # 1 - 5
float32 canny_edge_th # 50 - 200
float32 hough_accum_resolution # 1 - 8
float32 hough_accum_th # 10 - 200
float32 min_circle_dist # 10 - 200
uint32 min_radius # 10 - 200
uint32 max_radius # 100 - 600
uint32 filter_h_min # 0 - 359
uint32 filter_h_max
uint32 filter_s_min # 0 - 255
uint32 filter_s_max
uint32 filter_v_min # 0 - 255
uint32 filter_v_max
uint32 ellipse_size # 1 - 9
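These fields mirror the GetParameters/SetParameters services that the node advertises as get_param and set_param. A hedged rospy sketch of calling them; the service namespace and the request/response field names (params, returns) are inferred from the BallDetector source at the end of this diff:

```python
#!/usr/bin/env python
# Sketch: read and update detector parameters through its ROS services.
# Service names ("get_param"/"set_param") and field names (params, returns)
# are inferred from the BallDetector implementation; adjust the namespace
# if the node is not running as /ball_detector_node.
import rospy
from humanoid_robot_intelligence_control_system_ball_detector.srv import GetParameters, SetParameters

if __name__ == "__main__":
    rospy.init_node("ball_detector_param_client")
    rospy.wait_for_service("/ball_detector_node/get_param", timeout=5)

    get_param = rospy.ServiceProxy("/ball_detector_node/get_param", GetParameters)
    set_param = rospy.ServiceProxy("/ball_detector_node/set_param", SetParameters)

    current = get_param().returns         # BallDetectorParams message
    current.min_radius = 30               # tighten the radius search window
    current.max_radius = 200
    response = set_param(params=current)  # the node also saves the new config to YAML
    rospy.loginfo("Applied parameters: %s", response.returns)
```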

@@ -1,9 +0,0 @@
# This represents the set of detected circles
# timestamp and frame id of the image frame
std_msgs/Header header
# set of detected circles:
# (circles[i].x, circles[i].y) is the center point in image coordinates
# circles[i].z is the circle radius
geometry_msgs/Point[] circles
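As implemented in publishCircles() (in the BallDetector source at the end of this diff), x and y are published normalized to [-1, 1] across the image width and height, with z holding the radius in pixels. A small subscriber sketch that maps them back to pixel coordinates; the 1280x720 image size is an assumption taken from the usb_cam launch file above:

```python
#!/usr/bin/env python
# Sketch: consume CircleSetStamped and map normalized coordinates back to pixels.
# The image size is an assumption (1280x720, as in the usb_cam launch file above).
import rospy
from humanoid_robot_intelligence_control_system_ball_detector.msg import CircleSetStamped

IMG_WIDTH, IMG_HEIGHT = 1280, 720

def circles_callback(msg):
    for circle in msg.circles:
        # publishCircles() maps pixel x,y into [-1, 1]; invert that mapping here.
        px = (circle.x + 1.0) * 0.5 * IMG_WIDTH
        py = (circle.y + 1.0) * 0.5 * IMG_HEIGHT
        rospy.loginfo("ball at (%.1f, %.1f) px, radius %.1f px", px, py, circle.z)

if __name__ == "__main__":
    rospy.init_node("circle_set_listener")
    rospy.Subscriber("/ball_detector_node/circle_set", CircleSetStamped, circles_callback)
    rospy.spin()
```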

@@ -1,138 +0,0 @@
Panels:
- Class: rviz/Displays
Help Height: 0
Name: Displays
Property Tree Widget:
Expanded:
- /Global Options1
- /Status1
Splitter Ratio: 0.5
Tree Height: 355
- Class: rviz/Selection
Name: Selection
- Class: rviz/Tool Properties
Expanded:
- /2D Pose Estimate1
- /2D Nav Goal1
- /Publish Point1
Name: Tool Properties
Splitter Ratio: 0.588679
- Class: rviz/Views
Expanded:
- /Current View1
Name: Views
Splitter Ratio: 0.5
- Class: rviz/Time
Experimental: false
Name: Time
SyncMode: 0
SyncSource: Image
Visualization Manager:
Class: ""
Displays:
- Alpha: 0.5
Cell Size: 1
Class: rviz/Grid
Color: 160; 160; 164
Enabled: true
Line Style:
Line Width: 0.03
Value: Lines
Name: Grid
Normal Cell Count: 0
Offset:
X: 0
Y: 0
Z: 0
Plane: XY
Plane Cell Count: 10
Reference Frame: <Fixed Frame>
Value: true
- Alpha: 1
Class: rviz/RobotModel
Collision Enabled: false
Enabled: true
Links:
All Links Enabled: true
Expand Joint Details: false
Expand Link Details: false
Expand Tree: false
Link Tree Style: ""
Name: RobotModel
Robot Description: robot_description
TF Prefix: ""
Update Interval: 0
Value: true
Visual Enabled: true
- Class: rviz/Image
Enabled: true
Image Topic: /ball_detector_node/image_out
Max Value: 1
Median window: 5
Min Value: 0
Name: Image
Normalize Range: true
Queue Size: 2
Transport Hint: compressed
Value: true
Enabled: true
Global Options:
Background Color: 48; 48; 48
Fixed Frame: map
Frame Rate: 30
Name: root
Tools:
- Class: rviz/Interact
Hide Inactive Objects: true
- Class: rviz/MoveCamera
- Class: rviz/Select
- Class: rviz/FocusCamera
- Class: rviz/Measure
- Class: rviz/SetInitialPose
Topic: /initialpose
- Class: rviz/SetGoal
Topic: /move_base_simple/goal
- Class: rviz/PublishPoint
Single click: true
Topic: /clicked_point
Value: true
Views:
Current:
Class: rviz/Orbit
Distance: 10
Enable Stereo Rendering:
Stereo Eye Separation: 0.06
Stereo Focal Distance: 1
Swap Stereo Eyes: false
Value: false
Focal Point:
X: 0
Y: 0
Z: 0
Name: Current View
Near Clip Distance: 0.01
Pitch: 0.785398
Target Frame: <Fixed Frame>
Value: Orbit (rviz)
Yaw: 0.785398
Saved: ~
Window Geometry:
Displays:
collapsed: false
Height: 1056
Hide Left Dock: false
Hide Right Dock: false
Image:
collapsed: false
QMainWindow State: 000000ff00000000fd00000004000000000000027500000396fc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000001a4000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000001d2000001ec0000001600ffffff000000010000010f00000396fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000396000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f0000003efc0100000002fb0000000800540069006d006501000000000000073f000002f600fffffffb0000000800540069006d00650100000000000004500000000000000000000003af0000039600000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Time:
collapsed: false
Tool Properties:
collapsed: false
Views:
collapsed: false
Width: 1855
X: 1985
Y: 24

@@ -1,904 +0,0 @@
/*******************************************************************************
* Copyright 2017 ROBOTIS CO., LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* Author: Kayman Jung */
#include <fstream>
#include "humanoid_robot_intelligence_control_system_ball_detector/ball_detector.h"
namespace humanoid_robot_intelligence_control_system_op {
BallDetector::BallDetector()
: nh_(ros::this_node::getName()), it_(this->nh_), enable_(true),
params_config_(), init_param_(false), not_found_count_(0) {
has_path_ = nh_.getParam("yaml_path", param_path_);
if (has_path_)
std::cout << "Path : " << param_path_ << std::endl;
// detector config struct
DetectorConfig detect_config;
// get user parameters from dynamic reconfigure (yaml entries)
nh_.param<int>("gaussian_blur_size", detect_config.gaussian_blur_size,
params_config_.gaussian_blur_size);
if (detect_config.gaussian_blur_size % 2 == 0)
detect_config.gaussian_blur_size -= 1;
if (detect_config.gaussian_blur_size <= 0)
detect_config.gaussian_blur_size = 1;
nh_.param<double>("gaussian_blur_sigma", detect_config.gaussian_blur_sigma,
params_config_.gaussian_blur_sigma);
nh_.param<double>("canny_edge_th", detect_config.canny_edge_th,
params_config_.canny_edge_th);
nh_.param<double>("hough_accum_resolution",
detect_config.hough_accum_resolution,
params_config_.hough_accum_resolution);
nh_.param<double>("min_circle_dist", detect_config.min_circle_dist,
params_config_.min_circle_dist);
nh_.param<double>("hough_accum_th", detect_config.hough_accum_th,
params_config_.hough_accum_th);
nh_.param<int>("min_radius", detect_config.min_radius,
params_config_.min_radius);
nh_.param<int>("max_radius", detect_config.max_radius,
params_config_.max_radius);
nh_.param<int>("filter_h_min", detect_config.filter_threshold.h_min,
params_config_.filter_threshold.h_min);
nh_.param<int>("filter_h_max", detect_config.filter_threshold.h_max,
params_config_.filter_threshold.h_max);
nh_.param<int>("filter_s_min", detect_config.filter_threshold.s_min,
params_config_.filter_threshold.s_min);
nh_.param<int>("filter_s_max", detect_config.filter_threshold.s_max,
params_config_.filter_threshold.s_max);
nh_.param<int>("filter_v_min", detect_config.filter_threshold.v_min,
params_config_.filter_threshold.v_min);
nh_.param<int>("filter_v_max", detect_config.filter_threshold.v_max,
params_config_.filter_threshold.v_max);
nh_.param<bool>("use_second_filter", detect_config.use_second_filter,
params_config_.use_second_filter);
nh_.param<int>("filter2_h_min", detect_config.filter2_threshold.h_min,
params_config_.filter2_threshold.h_min);
nh_.param<int>("filter2_h_max", detect_config.filter2_threshold.h_max,
params_config_.filter2_threshold.h_max);
nh_.param<int>("filter2_s_min", detect_config.filter2_threshold.s_min,
params_config_.filter2_threshold.s_min);
nh_.param<int>("filter2_s_max", detect_config.filter2_threshold.s_max,
params_config_.filter2_threshold.s_max);
nh_.param<int>("filter2_v_min", detect_config.filter2_threshold.v_min,
params_config_.filter2_threshold.v_min);
nh_.param<int>("filter2_v_max", detect_config.filter2_threshold.v_max,
params_config_.filter2_threshold.v_max);
nh_.param<int>("ellipse_size", detect_config.ellipse_size,
params_config_.ellipse_size);
nh_.param<bool>("filter_debug", detect_config.debug, params_config_.debug);
// sets publishers
image_pub_ = it_.advertise("image_out", 100);
circles_pub_ =
nh_.advertise<humanoid_robot_intelligence_control_system_ball_detector::
CircleSetStamped>("circle_set", 100);
camera_info_pub_ = nh_.advertise<sensor_msgs::CameraInfo>("camera_info", 100);
// sets subscribers
enable_sub_ = nh_.subscribe("enable", 1, &BallDetector::enableCallback, this);
image_sub_ = it_.subscribe("image_in", 1, &BallDetector::imageCallback, this);
camera_info_sub_ = nh_.subscribe("cameraInfo_in", 100,
&BallDetector::cameraInfoCallback, this);
// initializes newImageFlag
new_image_flag_ = false;
// dynamic_reconfigure
callback_fnc_ = boost::bind(&BallDetector::dynParamCallback, this, _1, _2);
param_server_.setCallback(callback_fnc_);
// web setting
param_pub_ =
nh_.advertise<humanoid_robot_intelligence_control_system_ball_detector::
BallDetectorParams>("current_params", 1);
param_command_sub_ = nh_.subscribe("param_command", 1,
&BallDetector::paramCommandCallback, this);
set_param_client_ =
nh_.advertiseService("set_param", &BallDetector::setParamCallback, this);
get_param_client_ =
nh_.advertiseService("get_param", &BallDetector::getParamCallback, this);
default_setting_path_ = ros::package::getPath(ROS_PACKAGE_NAME) +
"/config/ball_detector_params_default.yaml";
// sets config and prints it
params_config_ = detect_config;
init_param_ = true;
printConfig();
}
BallDetector::~BallDetector() {}
bool BallDetector::newImage() {
if (new_image_flag_) {
new_image_flag_ = false;
return true;
} else {
return false;
}
}
void BallDetector::process() {
if (enable_ == false)
return;
if (cv_img_ptr_sub_ != NULL) {
cv::Mat img_hsv, img_filtered;
// set input image
setInputImage(cv_img_ptr_sub_->image, img_hsv);
// image filtering
filterImage(img_hsv, img_filtered);
// detect circles
houghDetection2(img_filtered);
// // set input image
// setInputImage(cv_img_ptr_sub_->image);
// // image filtering
// filterImage();
// //detect circles
// houghDetection(this->img_encoding_);
}
}
void BallDetector::publishImage() {
if (enable_ == false)
return;
// image_raw topic
cv_img_pub_.header.seq++;
cv_img_pub_.header.stamp = sub_time_;
cv_img_pub_.header.frame_id = image_frame_id_;
switch (img_encoding_) {
case IMG_RGB8:
cv_img_pub_.encoding = sensor_msgs::image_encodings::RGB8;
break;
case IMG_MONO:
cv_img_pub_.encoding = sensor_msgs::image_encodings::MONO8;
break;
default:
cv_img_pub_.encoding = sensor_msgs::image_encodings::MONO8;
break;
}
getOutputImage(cv_img_pub_.image);
image_pub_.publish(cv_img_pub_.toImageMsg());
camera_info_pub_.publish(camera_info_msg_);
}
void BallDetector::publishCircles() {
if (enable_ == false)
return;
if (circles_.size() == 0)
return;
// clears and resize the message
circles_msg_.circles.clear();
circles_msg_.circles.resize(circles_.size());
// fill header
circles_msg_.header.seq++;
circles_msg_.header.stamp = sub_time_;
circles_msg_.header.frame_id =
"detector"; // To do: get frame_id from input image
// fill circle data
// top(-1), bottom(+1)
// left(-1), right(+1)
for (int idx = 0; idx < circles_.size(); idx++) {
circles_msg_.circles[idx].x =
circles_[idx][0] / out_image_.cols * 2 - 1; // x (-1 ~ 1)
circles_msg_.circles[idx].y =
circles_[idx][1] / out_image_.rows * 2 - 1; // y (-1 ~ 1)
circles_msg_.circles[idx].z = circles_[idx][2]; // radius
}
// publish message
circles_pub_.publish(circles_msg_);
}
void BallDetector::enableCallback(const std_msgs::Bool::ConstPtr &msg) {
enable_ = msg->data;
}
void BallDetector::imageCallback(const sensor_msgs::ImageConstPtr &msg) {
if (enable_ == false)
return;
try {
if (msg->encoding.compare(sensor_msgs::image_encodings::MONO8) == 0)
this->img_encoding_ = IMG_MONO;
if (msg->encoding.compare(sensor_msgs::image_encodings::RGB8) == 0)
this->img_encoding_ = IMG_RGB8;
this->cv_img_ptr_sub_ = cv_bridge::toCvCopy(msg, msg->encoding);
} catch (cv_bridge::Exception &e) {
ROS_ERROR("cv_bridge exception: %s", e.what());
return;
}
// indicates a new image is available
this->sub_time_ = msg->header.stamp;
this->image_frame_id_ = msg->header.frame_id;
this->new_image_flag_ = true;
return;
}
void BallDetector::dynParamCallback(
humanoid_robot_intelligence_control_system_ball_detector::
DetectorParamsConfig &config,
uint32_t level) {
params_config_.gaussian_blur_size = config.gaussian_blur_size;
params_config_.gaussian_blur_sigma = config.gaussian_blur_sigma;
params_config_.canny_edge_th = config.canny_edge_th;
params_config_.hough_accum_resolution = config.hough_accum_resolution;
params_config_.min_circle_dist = config.min_circle_dist;
params_config_.hough_accum_th = config.hough_accum_th;
params_config_.min_radius = config.min_radius;
params_config_.max_radius = config.max_radius;
params_config_.filter_threshold.h_min = config.filter_h_min;
params_config_.filter_threshold.h_max = config.filter_h_max;
params_config_.filter_threshold.s_min = config.filter_s_min;
params_config_.filter_threshold.s_max = config.filter_s_max;
params_config_.filter_threshold.v_min = config.filter_v_min;
params_config_.filter_threshold.v_max = config.filter_v_max;
params_config_.use_second_filter = config.use_second_filter;
params_config_.filter2_threshold.h_min = config.filter2_h_min;
params_config_.filter2_threshold.h_max = config.filter2_h_max;
params_config_.filter2_threshold.s_min = config.filter2_s_min;
params_config_.filter2_threshold.s_max = config.filter2_s_max;
params_config_.filter2_threshold.v_min = config.filter2_v_min;
params_config_.filter2_threshold.v_max = config.filter2_v_max;
params_config_.ellipse_size = config.ellipse_size;
params_config_.debug = config.debug_image;
// gaussian_blur has to be odd number.
if (params_config_.gaussian_blur_size % 2 == 0)
params_config_.gaussian_blur_size -= 1;
if (params_config_.gaussian_blur_size <= 0)
params_config_.gaussian_blur_size = 1;
printConfig();
saveConfig();
}
void BallDetector::cameraInfoCallback(const sensor_msgs::CameraInfo &msg) {
if (enable_ == false)
return;
camera_info_msg_ = msg;
}
void BallDetector::paramCommandCallback(const std_msgs::String::ConstPtr &msg) {
if (msg->data == "debug") {
params_config_.debug = true;
saveConfig();
} else if (msg->data == "normal") {
params_config_.debug = false;
saveConfig();
} else if (msg->data == "reset") {
// load default parameters and apply
resetParameter();
}
}
bool BallDetector::setParamCallback(
humanoid_robot_intelligence_control_system_ball_detector::SetParameters::
Request &req,
humanoid_robot_intelligence_control_system_ball_detector::SetParameters::
Response &res) {
params_config_.gaussian_blur_size = req.params.gaussian_blur_size;
params_config_.gaussian_blur_sigma = req.params.gaussian_blur_sigma;
params_config_.canny_edge_th = req.params.canny_edge_th;
params_config_.hough_accum_resolution = req.params.hough_accum_resolution;
params_config_.min_circle_dist = req.params.min_circle_dist;
params_config_.hough_accum_th = req.params.hough_accum_th;
params_config_.min_radius = req.params.min_radius;
params_config_.max_radius = req.params.max_radius;
params_config_.filter_threshold.h_min = req.params.filter_h_min;
params_config_.filter_threshold.h_max = req.params.filter_h_max;
params_config_.filter_threshold.s_min = req.params.filter_s_min;
params_config_.filter_threshold.s_max = req.params.filter_s_max;
params_config_.filter_threshold.v_min = req.params.filter_v_min;
params_config_.filter_threshold.v_max = req.params.filter_v_max;
params_config_.ellipse_size = req.params.ellipse_size;
saveConfig();
res.returns = req.params;
return true;
}
bool BallDetector::getParamCallback(
humanoid_robot_intelligence_control_system_ball_detector::GetParameters::
Request &req,
humanoid_robot_intelligence_control_system_ball_detector::GetParameters::
Response &res) {
res.returns.gaussian_blur_size = params_config_.gaussian_blur_size;
res.returns.gaussian_blur_sigma = params_config_.gaussian_blur_sigma;
res.returns.canny_edge_th = params_config_.canny_edge_th;
res.returns.hough_accum_resolution = params_config_.hough_accum_resolution;
res.returns.min_circle_dist = params_config_.min_circle_dist;
res.returns.hough_accum_th = params_config_.hough_accum_th;
res.returns.min_radius = params_config_.min_radius;
res.returns.max_radius = params_config_.max_radius;
res.returns.filter_h_min = params_config_.filter_threshold.h_min;
res.returns.filter_h_max = params_config_.filter_threshold.h_max;
res.returns.filter_s_min = params_config_.filter_threshold.s_min;
res.returns.filter_s_max = params_config_.filter_threshold.s_max;
res.returns.filter_v_min = params_config_.filter_threshold.v_min;
res.returns.filter_v_max = params_config_.filter_threshold.v_max;
res.returns.ellipse_size = params_config_.ellipse_size;
return true;
}
void BallDetector::resetParameter() {
YAML::Node doc;
try {
// load yaml
doc = YAML::LoadFile(default_setting_path_.c_str());
// parse
params_config_.gaussian_blur_size = doc["gaussian_blur_size"].as<int>();
params_config_.gaussian_blur_sigma =
doc["gaussian_blur_sigma"].as<double>();
params_config_.canny_edge_th = doc["canny_edge_th"].as<double>();
params_config_.hough_accum_resolution =
doc["hough_accum_resolution"].as<double>();
params_config_.min_circle_dist = doc["min_circle_dist"].as<double>();
params_config_.hough_accum_th = doc["hough_accum_th"].as<double>();
params_config_.min_radius = doc["min_radius"].as<int>();
params_config_.max_radius = doc["max_radius"].as<int>();
params_config_.filter_threshold.h_min = doc["filter_h_min"].as<int>();
params_config_.filter_threshold.h_max = doc["filter_h_max"].as<int>();
params_config_.filter_threshold.s_min = doc["filter_s_min"].as<int>();
params_config_.filter_threshold.s_max = doc["filter_s_max"].as<int>();
params_config_.filter_threshold.v_min = doc["filter_v_min"].as<int>();
params_config_.filter_threshold.v_max = doc["filter_v_max"].as<int>();
params_config_.use_second_filter = doc["use_second_filter"].as<bool>();
params_config_.filter2_threshold.h_min = doc["filter2_h_min"].as<int>();
params_config_.filter2_threshold.h_max = doc["filter2_h_max"].as<int>();
params_config_.filter2_threshold.s_min = doc["filter2_s_min"].as<int>();
params_config_.filter2_threshold.s_max = doc["filter2_s_max"].as<int>();
params_config_.filter2_threshold.v_min = doc["filter2_v_min"].as<int>();
params_config_.filter2_threshold.v_max = doc["filter2_v_max"].as<int>();
params_config_.ellipse_size = doc["ellipse_size"].as<int>();
params_config_.debug = doc["filter_debug"].as<bool>();
// gaussian_blur has to be odd number.
if (params_config_.gaussian_blur_size % 2 == 0)
params_config_.gaussian_blur_size -= 1;
if (params_config_.gaussian_blur_size <= 0)
params_config_.gaussian_blur_size = 1;
printConfig();
saveConfig();
publishParam();
} catch (const std::exception &e) {
ROS_ERROR_STREAM(
"Failed to Get default parameters : " << default_setting_path_);
return;
}
}
void BallDetector::publishParam() {
humanoid_robot_intelligence_control_system_ball_detector::BallDetectorParams
params;
params.gaussian_blur_size = params_config_.gaussian_blur_size;
params.gaussian_blur_sigma = params_config_.gaussian_blur_sigma;
params.canny_edge_th = params_config_.canny_edge_th;
params.hough_accum_resolution = params_config_.hough_accum_resolution;
params.min_circle_dist = params_config_.min_circle_dist;
params.hough_accum_th = params_config_.hough_accum_th;
params.min_radius = params_config_.min_radius;
params.max_radius = params_config_.max_radius;
params.filter_h_min = params_config_.filter_threshold.h_min;
params.filter_h_max = params_config_.filter_threshold.h_max;
params.filter_s_min = params_config_.filter_threshold.s_min;
params.filter_s_max = params_config_.filter_threshold.s_max;
params.filter_v_min = params_config_.filter_threshold.v_min;
params.filter_v_max = params_config_.filter_threshold.v_max;
params.ellipse_size = params_config_.ellipse_size;
param_pub_.publish(params);
}
void BallDetector::printConfig() {
if (init_param_ == false)
return;
std::cout << "Detetctor Configuration:" << std::endl
<< " gaussian_blur_size: " << params_config_.gaussian_blur_size
<< std::endl
<< " gaussian_blur_sigma: " << params_config_.gaussian_blur_sigma
<< std::endl
<< " canny_edge_th: " << params_config_.canny_edge_th
<< std::endl
<< " hough_accum_resolution: "
<< params_config_.hough_accum_resolution << std::endl
<< " min_circle_dist: " << params_config_.min_circle_dist
<< std::endl
<< " hough_accum_th: " << params_config_.hough_accum_th
<< std::endl
<< " min_radius: " << params_config_.min_radius << std::endl
<< " max_radius: " << params_config_.max_radius << std::endl
<< " filter_h_min: " << params_config_.filter_threshold.h_min
<< std::endl
<< " filter_h_max: " << params_config_.filter_threshold.h_max
<< std::endl
<< " filter_s_min: " << params_config_.filter_threshold.s_min
<< std::endl
<< " filter_s_max: " << params_config_.filter_threshold.s_max
<< std::endl
<< " filter_v_min: " << params_config_.filter_threshold.v_min
<< std::endl
<< " filter_v_max: " << params_config_.filter_threshold.v_max
<< std::endl
<< " use_second_filter: " << params_config_.use_second_filter
<< std::endl
<< " filter2_h_min: " << params_config_.filter2_threshold.h_min
<< std::endl
<< " filter2_h_max: " << params_config_.filter2_threshold.h_max
<< std::endl
<< " filter2_s_min: " << params_config_.filter2_threshold.s_min
<< std::endl
<< " filter2_s_max: " << params_config_.filter2_threshold.s_max
<< std::endl
<< " filter2_v_min: " << params_config_.filter2_threshold.v_min
<< std::endl
<< " filter2_v_max: " << params_config_.filter2_threshold.v_max
<< std::endl
<< " ellipse_size: " << params_config_.ellipse_size << std::endl
<< " filter_image_to_debug: " << params_config_.debug
<< std::endl
<< std::endl;
}
void BallDetector::saveConfig() {
if (has_path_ == false)
return;
YAML::Emitter yaml_out;
yaml_out << YAML::BeginMap;
yaml_out << YAML::Key << "gaussian_blur_size" << YAML::Value
<< params_config_.gaussian_blur_size;
yaml_out << YAML::Key << "gaussian_blur_sigma" << YAML::Value
<< params_config_.gaussian_blur_sigma;
yaml_out << YAML::Key << "canny_edge_th" << YAML::Value
<< params_config_.canny_edge_th;
yaml_out << YAML::Key << "hough_accum_resolution" << YAML::Value
<< params_config_.hough_accum_resolution;
yaml_out << YAML::Key << "min_circle_dist" << YAML::Value
<< params_config_.min_circle_dist;
yaml_out << YAML::Key << "hough_accum_th" << YAML::Value
<< params_config_.hough_accum_th;
yaml_out << YAML::Key << "min_radius" << YAML::Value
<< params_config_.min_radius;
yaml_out << YAML::Key << "max_radius" << YAML::Value
<< params_config_.max_radius;
yaml_out << YAML::Key << "filter_h_min" << YAML::Value
<< params_config_.filter_threshold.h_min;
yaml_out << YAML::Key << "filter_h_max" << YAML::Value
<< params_config_.filter_threshold.h_max;
yaml_out << YAML::Key << "filter_s_min" << YAML::Value
<< params_config_.filter_threshold.s_min;
yaml_out << YAML::Key << "filter_s_max" << YAML::Value
<< params_config_.filter_threshold.s_max;
yaml_out << YAML::Key << "filter_v_min" << YAML::Value
<< params_config_.filter_threshold.v_min;
yaml_out << YAML::Key << "filter_v_max" << YAML::Value
<< params_config_.filter_threshold.v_max;
yaml_out << YAML::Key << "use_second_filter" << YAML::Value
<< params_config_.use_second_filter;
yaml_out << YAML::Key << "filter2_h_min" << YAML::Value
<< params_config_.filter2_threshold.h_min;
yaml_out << YAML::Key << "filter2_h_max" << YAML::Value
<< params_config_.filter2_threshold.h_max;
yaml_out << YAML::Key << "filter2_s_min" << YAML::Value
<< params_config_.filter2_threshold.s_min;
yaml_out << YAML::Key << "filter2_s_max" << YAML::Value
<< params_config_.filter2_threshold.s_max;
yaml_out << YAML::Key << "filter2_v_min" << YAML::Value
<< params_config_.filter2_threshold.v_min;
yaml_out << YAML::Key << "filter2_v_max" << YAML::Value
<< params_config_.filter2_threshold.v_max;
yaml_out << YAML::Key << "ellipse_size" << YAML::Value
<< params_config_.ellipse_size;
yaml_out << YAML::Key << "filter_debug" << YAML::Value
<< params_config_.debug;
yaml_out << YAML::EndMap;
// output to file
std::ofstream fout(param_path_.c_str());
fout << yaml_out.c_str();
}
void BallDetector::setInputImage(const cv::Mat &inIm) {
in_image_ = inIm.clone();
if (params_config_.debug == false)
out_image_ = in_image_.clone();
}
void BallDetector::setInputImage(const cv::Mat &inIm, cv::Mat &in_filter_img) {
cv::cvtColor(inIm, in_filter_img, cv::COLOR_RGB2HSV);
if (params_config_.debug == false)
out_image_ = inIm.clone();
}
void BallDetector::getOutputImage(cv::Mat &outIm) {
this->drawOutputImage();
outIm = out_image_.clone();
}
void BallDetector::filterImage() {
if (!in_image_.data)
return;
cv::Mat img_hsv, img_filtered;
cv::cvtColor(in_image_, img_hsv, cv::COLOR_RGB2HSV);
inRangeHsv(img_hsv, params_config_.filter_threshold, img_filtered);
// mophology : open and close
mophology(img_filtered, img_filtered, params_config_.ellipse_size);
if (params_config_.use_second_filter == true) {
// mask
cv::Mat img_mask;
// check hsv range
cv::Mat img_filtered2;
inRangeHsv(img_hsv, params_config_.filter2_threshold, img_filtered2);
makeFilterMaskFromBall(img_filtered, img_mask);
cv::bitwise_and(img_filtered2, img_mask, img_filtered2);
// or
cv::bitwise_or(img_filtered, img_filtered2, img_filtered);
}
mophology(img_filtered, img_filtered, params_config_.ellipse_size);
cv::cvtColor(img_filtered, in_image_, cv::COLOR_GRAY2RGB);
}
void BallDetector::filterImage(const cv::Mat &in_filter_img,
cv::Mat &out_filter_img) {
if (!in_filter_img.data)
return;
inRangeHsv(in_filter_img, params_config_.filter_threshold, out_filter_img);
// mophology : open and close
mophology(out_filter_img, out_filter_img, params_config_.ellipse_size);
if (params_config_.use_second_filter == true) {
// mask
cv::Mat img_mask;
// check hsv range
cv::Mat img_filtered2;
inRangeHsv(in_filter_img, params_config_.filter2_threshold, img_filtered2);
makeFilterMaskFromBall(out_filter_img, img_mask);
cv::bitwise_and(img_filtered2, img_mask, img_filtered2);
// or
cv::bitwise_or(out_filter_img, img_filtered2, out_filter_img);
}
mophology(out_filter_img, out_filter_img, params_config_.ellipse_size);
// cv::cvtColor(img_filtered, in_image_, cv::COLOR_GRAY2RGB);
// draws results to output Image
if (params_config_.debug == true)
cv::cvtColor(out_filter_img, out_image_, cv::COLOR_GRAY2RGB);
// out_image_ = in_image_.clone();
}
void BallDetector::makeFilterMask(const cv::Mat &source_img, cv::Mat &mask_img,
int range) {
// source_img.
mask_img = cv::Mat::zeros(source_img.rows, source_img.cols, CV_8UC1);
int source_height = source_img.rows;
int source_width = source_img.cols;
// channel : 1
if (source_img.channels() != 1)
return;
for (int i = 0; i < source_height; i++) {
for (int j = 0; j < source_width; j++) {
uint8_t pixel = source_img.at<uint8_t>(i, j);
if (pixel == 0)
continue;
for (int mask_i = i - range; mask_i <= i + range; mask_i++) {
if (mask_i < 0 || mask_i >= source_height)
continue;
for (int mask_j = j - range; mask_j <= j + range; mask_j++) {
if (mask_j < 0 || mask_j >= source_width)
continue;
mask_img.at<uchar>(mask_i, mask_j, 0) = 255;
}
}
}
}
}
void BallDetector::makeFilterMaskFromBall(const cv::Mat &source_img,
cv::Mat &mask_img) {
// source_img.
mask_img = cv::Mat::zeros(source_img.rows, source_img.cols, CV_8UC1);
if (circles_.size() == 0)
return;
// channel : 1
if (source_img.channels() != 1)
return;
cv::Mat img_labels, stats, centroids;
int numOfLables = cv::connectedComponentsWithStats(
source_img, img_labels, stats, centroids, 8, CV_32S);
for (int j = 1; j < numOfLables; j++) {
int area = stats.at<int>(j, cv::CC_STAT_AREA);
int left = stats.at<int>(j, cv::CC_STAT_LEFT);
int top = stats.at<int>(j, cv::CC_STAT_TOP);
int width = stats.at<int>(j, cv::CC_STAT_WIDTH);
int height = stats.at<int>(j, cv::CC_STAT_HEIGHT);
int center_x = left + width * 0.5;
int center_y = top + height * 0.5;
int radius = (width + height) * 0.5;
for (int mask_i = center_y - radius; mask_i <= center_y + radius;
mask_i++) {
if (mask_i < 0 || mask_i >= source_img.rows)
continue;
int mask_offset = abs(mask_i - center_y) * 0.5;
for (int mask_j = center_x - radius + mask_offset;
mask_j <= center_x + radius - mask_offset; mask_j++) {
if (mask_j < 0 || mask_j >= source_img.cols)
continue;
mask_img.at<uchar>(mask_i, mask_j, 0) = 255;
}
}
}
}
void BallDetector::inRangeHsv(const cv::Mat &input_img,
const HsvFilter &filter_value,
cv::Mat &output_img) {
// 0-360 -> 0-180
int scaled_hue_min = static_cast<int>(filter_value.h_min * 0.5);
int scaled_hue_max = static_cast<int>(filter_value.h_max * 0.5);
if (scaled_hue_min <= scaled_hue_max) {
cv::Scalar min_value =
cv::Scalar(scaled_hue_min, filter_value.s_min, filter_value.v_min, 0);
cv::Scalar max_value =
cv::Scalar(scaled_hue_max, filter_value.s_max, filter_value.v_max, 0);
cv::inRange(input_img, min_value, max_value, output_img);
} else {
cv::Mat lower_hue_range, upper_hue_range;
cv::Scalar min_value, max_value;
min_value = cv::Scalar(0, filter_value.s_min, filter_value.v_min, 0);
max_value =
cv::Scalar(scaled_hue_max, filter_value.s_max, filter_value.v_max, 0);
cv::inRange(input_img, min_value, max_value, lower_hue_range);
min_value =
cv::Scalar(scaled_hue_min, filter_value.s_min, filter_value.v_min, 0);
max_value = cv::Scalar(179, filter_value.s_max, filter_value.v_max, 0);
cv::inRange(input_img, min_value, max_value, upper_hue_range);
cv::bitwise_or(lower_hue_range, upper_hue_range, output_img);
}
}
void BallDetector::mophology(const cv::Mat &input_img, cv::Mat &output_img,
int ellipse_size) {
// erode then dilate (opening-like): removes small speckle noise
cv::erode(input_img, output_img,
cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(ellipse_size, ellipse_size)));
cv::dilate(
output_img, output_img,
cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(ellipse_size * 2, ellipse_size * 2)));
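// dilate then erode (closing-like): fills small holes in the remaining blobs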
cv::dilate(output_img, output_img,
cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(ellipse_size, ellipse_size)));
cv::erode(output_img, output_img,
cv::getStructuringElement(cv::MORPH_ELLIPSE,
cv::Size(ellipse_size, ellipse_size)));
}
void BallDetector::houghDetection(const unsigned int imgEncoding) {
cv::Mat gray_image;
std::vector<cv::Vec3f> circles_current;
std::vector<cv::Vec3f> prev_circles = circles_;
// clear previous circles
circles_.clear();
// If input image is RGB, convert it to gray
if (imgEncoding == IMG_RGB8)
cv::cvtColor(in_image_, gray_image, cv::COLOR_RGB2GRAY);
// Reduce the noise so we avoid false circle detection
cv::GaussianBlur(gray_image, gray_image,
cv::Size(params_config_.gaussian_blur_size,
params_config_.gaussian_blur_size),
params_config_.gaussian_blur_sigma);
double hough_accum_th = params_config_.hough_accum_th;
// Apply the Hough Transform to find the circles
cv::HoughCircles(gray_image, circles_current, cv::HOUGH_GRADIENT,
params_config_.hough_accum_resolution,
params_config_.min_circle_dist, params_config_.canny_edge_th,
hough_accum_th, params_config_.min_radius,
params_config_.max_radius);
if (circles_current.size() == 0)
not_found_count_ += 1;
else
not_found_count_ = 0;
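// blend each new detection with the closest previous circle (exponential
// smoothing with alpha = 0.2) to reduce frame-to-frame jitter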
double alpha = 0.2;
for (int ix = 0; ix < circles_current.size(); ix++) {
cv::Point2d center =
cv::Point(circles_current[ix][0], circles_current[ix][1]);
double radius = circles_current[ix][2];
for (int prev_ix = 0; prev_ix < prev_circles.size(); prev_ix++) {
cv::Point2d prev_center =
cv::Point(prev_circles[prev_ix][0], prev_circles[prev_ix][1]);
double prev_radius = prev_circles[prev_ix][2];
cv::Point2d diff = center - prev_center;
double radius_th = std::max(radius, prev_radius) * 0.75;
if (std::sqrt(diff.dot(diff)) < radius_th) {
if (std::abs(radius - prev_radius) < radius_th) {
circles_current[ix] =
circles_current[ix] * alpha + prev_circles[prev_ix] * (1 - alpha);
}
break;
}
}
circles_.push_back(circles_current[ix]);
}
}
void BallDetector::houghDetection2(const cv::Mat &input_hough) {
// cv::Mat gray_image;
std::vector<cv::Vec3f> circles_current;
std::vector<cv::Vec3f> prev_circles = circles_;
// clear previous circles
circles_.clear();
// If input image is RGB, convert it to gray
// if (imgEncoding == IMG_RGB8)
// cv::cvtColor(input_hough, gray_image, CV_RGB2GRAY);
// Reduce the noise so we avoid false circle detection
// blur a local copy so the const input image is not modified in place
cv::Mat blurred;
cv::GaussianBlur(input_hough, blurred,
cv::Size(params_config_.gaussian_blur_size,
params_config_.gaussian_blur_size),
params_config_.gaussian_blur_sigma);
double hough_accum_th = params_config_.hough_accum_th;
// Apply the Hough Transform to find the circles
cv::HoughCircles(blurred, circles_current, cv::HOUGH_GRADIENT,
params_config_.hough_accum_resolution,
params_config_.min_circle_dist, params_config_.canny_edge_th,
hough_accum_th, params_config_.min_radius,
params_config_.max_radius);
if (circles_current.size() == 0)
not_found_count_ += 1;
else
not_found_count_ = 0;
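// same temporal smoothing against the previous circles as in houghDetection()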
double alpha = 0.2;
for (int ix = 0; ix < circles_current.size(); ix++) {
cv::Point2d center =
cv::Point(circles_current[ix][0], circles_current[ix][1]);
double radius = circles_current[ix][2];
for (int prev_ix = 0; prev_ix < prev_circles.size(); prev_ix++) {
cv::Point2d prev_center =
cv::Point(prev_circles[prev_ix][0], prev_circles[prev_ix][1]);
double prev_radius = prev_circles[prev_ix][2];
cv::Point2d diff = center - prev_center;
double radius_th = std::max(radius, prev_radius) * 0.75;
if (std::sqrt(diff.dot(diff)) < radius_th) {
if (std::abs(radius - prev_radius) < radius_th) {
circles_current[ix] =
circles_current[ix] * alpha + prev_circles[prev_ix] * (1 - alpha);
}
break;
}
}
circles_.push_back(circles_current[ix]);
}
}
void BallDetector::drawOutputImage() {
cv::Point center_position;
int radius = 0;
// draws results to output Image
// if (params_config_.debug == true)
// out_image_ = in_image_.clone();
// nothing to draw if no circle was detected
if (circles_.empty())
return;
// keep only the largest detected circle
for (size_t ii = 0; ii < circles_.size(); ii++) {
int this_radius = cvRound(circles_[ii][2]);
if (this_radius > radius) {
radius = this_radius;
center_position =
cv::Point(cvRound(circles_[ii][0]), cvRound(circles_[ii][1]));
}
}
cv::circle(out_image_, center_position, 5, cv::Scalar(0, 0, 255), -1, 8,
0); // circle center in blue
cv::circle(out_image_, center_position, radius, cv::Scalar(0, 0, 255), 3, 8,
0); // circle outline in blue
}
} // namespace humanoid_robot_intelligence_control_system_op

View File

@ -1,50 +0,0 @@
/*******************************************************************************
* Copyright 2017 ROBOTIS CO., LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
/* Author: Kayman Jung */
#include "humanoid_robot_intelligence_control_system_ball_detector/ball_detector.h"
// node main
int main(int argc, char **argv) {
// init ros
ros::init(argc, argv, "ball_detector_node");
// create ros wrapper object
humanoid_robot_intelligence_control_system_op::BallDetector detector;
// set node loop rate
ros::Rate loop_rate(30);
// node loop
while (ros::ok()) {
// if a new image has arrived, process it
if (detector.newImage()) {
detector.process();
detector.publishImage();
detector.publishCircles();
}
// execute pending callbacks
ros::spinOnce();
// relax to fit output rate
loop_rate.sleep();
}
// exit program
return 0;
}

View File

@ -1,3 +0,0 @@
---
BallDetectorParams returns

View File

@ -1,3 +0,0 @@
BallDetectorParams params
---
BallDetectorParams returns

View File

@ -0,0 +1,76 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import IncludeLaunchDescription
from launch.launch_description_sources import PythonLaunchDescriptionSource
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_bringup", "humanoid_robot_intelligence_control_system_bringup.launch"] + args
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
# Create a list to hold all nodes to be launched
nodes_to_launch = []
# Add the HUMANOID_ROBOT Manager launch file
nodes_to_launch.append(IncludeLaunchDescription(
PythonLaunchDescriptionSource([
'$(find humanoid_robot_intelligence_control_system_manager)/launch/',
'humanoid_robot_intelligence_control_system_manager.launch.py'
])
))
# Add the UVC camera node
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='usb_cam_node',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 1280,
'image_height': 720,
'framerate': 30,
'camera_frame_id': 'cam_link',
'camera_name': 'camera'
}]
))
# Return the LaunchDescription containing all nodes
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)
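# Note (assumption, not part of the original launch file): the ROS 2 branch
# above only constructs and returns a LaunchDescription, so running this
# script directly does not start any nodes, and '$(find pkg)' is a roslaunch
# substitution that ROS 2 launch does not resolve. A minimal sketch of the
# usual ROS 2 pattern, reusing the package and file names from the code above:
#
#   import os
#   from ament_index_python.packages import get_package_share_directory
#   from launch import LaunchService
#
#   manager_launch_file = os.path.join(
#       get_package_share_directory(
#           'humanoid_robot_intelligence_control_system_manager'),
#       'launch',
#       'humanoid_robot_intelligence_control_system_manager.launch.py')
#   # ...pass manager_launch_file to PythonLaunchDescriptionSource(...) above,
#   # then actually execute the description:
#   launch_service = LaunchService()
#   launch_service.include_launch_description(ros2_launch_description())
#   sys.exit(launch_service.run())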

View File

@ -0,0 +1,85 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import IncludeLaunchDescription, ExecuteProcess
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch.substitutions import FindExecutable, Command
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_bringup", "humanoid_robot_intelligence_control_system_bringup_visualization.launch"] + args
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
# Create a list to hold all nodes to be launched
nodes_to_launch = []
# Add robot description
nodes_to_launch.append(ExecuteProcess(
cmd=[FindExecutable(name='xacro'), '$(find humanoid_robot_intelligence_control_system_description)/urdf/humanoid_robot_intelligence_control_system.urdf.xacro'],
output='screen'
))
# Add joint state publisher
nodes_to_launch.append(Node(
package='joint_state_publisher',
executable='joint_state_publisher',
name='joint_state_publisher',
parameters=[{'use_gui': True}],
remappings=[('/robot/joint_states', '/humanoid_robot_intelligence_control_system/present_joint_states')]
))
# Add robot state publisher
nodes_to_launch.append(Node(
package='robot_state_publisher',
executable='robot_state_publisher',
name='robot_state_publisher',
remappings=[('/joint_states', '/humanoid_robot_intelligence_control_system/present_joint_states')]
))
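# Note (assumption, not part of the original launch file): robot_state_publisher
# normally receives the processed xacro output through its 'robot_description'
# parameter rather than from a separate ExecuteProcess. A minimal sketch using
# the Command/FindExecutable substitutions already imported above (plus
# get_package_share_directory from ament_index_python.packages):
#
#   robot_description = Command([
#       FindExecutable(name='xacro'), ' ',
#       os.path.join(
#           get_package_share_directory(
#               'humanoid_robot_intelligence_control_system_description'),
#           'urdf', 'humanoid_robot_intelligence_control_system.urdf.xacro')])
#   # then on the robot_state_publisher Node:
#   # parameters=[{'robot_description': robot_description}]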
# Add rviz
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz2',
arguments=['-d', '$(find humanoid_robot_intelligence_control_system_bringup)/rviz/humanoid_robot_intelligence_control_system_bringup.rviz']
))
# Return the LaunchDescription containing all nodes
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)

View File

@ -0,0 +1,79 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
cmake_minimum_required(VERSION 3.0.2)
project(humanoid_robot_intelligence_control_system_object_detector)
if($ENV{ROS_VERSION} EQUAL 1)
find_package(catkin REQUIRED COMPONENTS
rospy
std_msgs
sensor_msgs
geometry_msgs
cv_bridge
message_generation
)
generate_messages(
DEPENDENCIES
std_msgs
geometry_msgs
)
catkin_package(
CATKIN_DEPENDS
rospy
std_msgs
sensor_msgs
geometry_msgs
cv_bridge
message_runtime
)
else()
find_package(ament_cmake REQUIRED)
find_package(ament_cmake_python REQUIRED)
find_package(rclpy REQUIRED)
find_package(std_msgs REQUIRED)
find_package(sensor_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)
find_package(cv_bridge REQUIRED)
endif()
if($ENV{ROS_VERSION} EQUAL 1)
catkin_python_setup()
catkin_install_python(PROGRAMS
scripts/object_detection_processor.py
DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
)
install(DIRECTORY config launch rviz
DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
)
else()
ament_python_install_package(${PROJECT_NAME})
install(PROGRAMS
scripts/object_detection_processor.py
DESTINATION lib/${PROJECT_NAME}
)
install(DIRECTORY config launch rviz
DESTINATION share/${PROJECT_NAME}
)
ament_package()
endif()

View File

@ -0,0 +1,108 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_object_detector", "object_detector_processor.launch"] + args
roslaunch_command.extend([
"usb_cam", "usb_cam_node", "name:=camera",
"video_device:=/dev/video0",
"image_width:=640",
"image_height:=480",
"pixel_format:=yuyv",
"camera_frame_id:=usb_cam",
"io_method:=mmap"
])
roslaunch_command.extend([
"ros_web_api_bellande_2d_computer_vision", "bellande_2d_computer_vision_object_detection.py", "name:=object_detection_node"
])
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_ball_detector", "object_detection_processor.py", "name:=object_detection_processor_node"
])
roslaunch_command.extend([
"rviz", "rviz", "name:=rviz",
"args:=-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz"
])
# Execute the launch command
subprocess.call(roslaunch_command)
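# Note (assumption): roslaunch only accepts a package name, a launch file, and
# arg:=value pairs, so the package/node pairs appended above are not
# interpreted as extra node definitions; the corresponding nodes are declared
# in the object_detector_processor.launch XML added alongside this script.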
def ros2_launch_description():
nodes_to_launch = []
nodes_to_launch.append(Node(
package='usb_cam',
executable='usb_cam_node',
name='camera',
output='screen',
parameters=[{
'video_device': '/dev/video0',
'image_width': 640,
'image_height': 480,
'pixel_format': 'yuyv',
'camera_frame_id': 'usb_cam',
'io_method': 'mmap'
}]
))
nodes_to_launch.append(Node(
package='ros_web_api_bellande_2d_computer_vision',
executable='bellande_2d_computer_vision_object_detection.py',
name='object_detection_node',
output='screen',
remappings=[('camera/image_raw', '/usb_cam/image_raw')]
))
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_object_detector',
executable='object_detection_processor.py',
name='object_detection_processor_node',
output='screen',
parameters=[{'yaml_path': '$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml'}]
))
nodes_to_launch.append(Node(
package='rviz2',
executable='rviz2',
name='rviz',
arguments=['-d', '$(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz']
))
return LaunchDescription(nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)

View File

@ -0,0 +1,41 @@
<?xml version="1.0"?>
<!--
Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-->
<launch>
<!-- Launch USB camera -->
<node name="usb_cam" pkg="usb_cam" type="usb_cam_node" output="screen">
<param name="video_device" value="/dev/video0" />
<param name="image_width" value="640" />
<param name="image_height" value="480" />
<param name="pixel_format" value="yuyv" />
<param name="camera_frame_id" value="usb_cam" />
<param name="io_method" value="mmap"/>
</node>
<!-- Launch object detection node -->
<node name="object_detection_node" pkg="ros_web_api_bellande_2d_computer_vision" type="bellande_2d_computer_vision_object_detection.py" output="screen">
<remap from="camera/image_raw" to="/usb_cam/image_raw"/>
</node>
<!-- Launch object detection processor node -->
<node name="object_detection_processor_node" pkg="humanoid_robot_intelligence_control_system_ball_detector" type="object_detection_processor.py" output="screen">
<param name="yaml_path" value="$(find ros_web_api_bellande_2d_computer_vision)/yaml/object_detection_params.yaml"/>
</node>
<!-- Launch RViz -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find ros_web_api_bellande_2d_computer_vision)/rviz/visualization.rviz" />
</launch>

View File

@ -16,12 +16,10 @@ the License.
-->
<package format="3">
<name>humanoid_robot_intelligence_control_system_ball_detector</name>
<version>0.3.2</version>
<name>humanoid_robot_intelligence_control_system_object_detector</name>
<version>0.0.1</version>
<description>
This package implements a circle-like shape detector for the input image.
It requires an input image and publishes, at frame rate, a marked image
and a stamped array of circle centers and radii.
This package is for object detection, detecting objects such as tools or utilities.
</description>
<license>Apache 2.0</license>
<maintainer email="ronaldsonbellande@gmail.com">Ronaldson Bellande</maintainer>

View File

@ -0,0 +1,111 @@
#!/usr/bin/env python3
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import rospy
import cv2
import numpy as np
from sensor_msgs.msg import Image, CameraInfo
from std_msgs.msg import Bool, String
from cv_bridge import CvBridge
import ast
import yaml
class ObjectDetectionProcessor:
def __init__(self):
rospy.init_node('object_detection_processor')
self.bridge = CvBridge()
self.enable = True
self.new_image_flag = False
self.load_params()
self.setup_ros()
def load_params(self):
param_path = rospy.get_param('~yaml_path', '')
if param_path:
with open(param_path, 'r') as file:
self.params = yaml.safe_load(file)
else:
self.set_default_params()
def set_default_params(self):
self.params = {
'debug': False,
'ellipse_size': 2,
# Add other default parameters as needed
}
def setup_ros(self):
self.image_pub = rospy.Publisher('image_out', Image, queue_size=10)
self.camera_info_pub = rospy.Publisher('camera_info', CameraInfo, queue_size=10)
rospy.Subscriber('enable', Bool, self.enable_callback)
rospy.Subscriber('image_in', Image, self.image_callback)
rospy.Subscriber('cameraInfo_in', CameraInfo, self.camera_info_callback)
rospy.Subscriber('object_detection_result', String, self.object_detection_callback)
def enable_callback(self, msg):
self.enable = msg.data
def image_callback(self, msg):
if not self.enable:
return
self.cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8")
self.new_image_flag = True
self.image_header = msg.header
def camera_info_callback(self, msg):
if not self.enable:
return
self.camera_info_msg = msg
def object_detection_callback(self, msg):
if not self.enable or not hasattr(self, 'cv_image'):
return
# msg.data is assumed to be a string representation of a list;
# ast.literal_eval parses it without executing arbitrary code (unlike eval)
objects = ast.literal_eval(msg.data)
self.process_detected_objects(objects)
def process_detected_objects(self, objects):
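# each detection is expected to look like
# {'label': str, 'confidence': float, 'bbox': (x, y, w, h)}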
output_image = self.cv_image.copy()
for obj in objects:
x, y, w, h = obj['bbox']
cv2.rectangle(output_image, (int(x), int(y)), (int(x+w), int(y+h)), (0, 255, 0), 2)
cv2.putText(output_image, f"{obj['label']}: {obj['confidence']:.2f}",
(int(x), int(y-10)), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (0, 255, 0), 2)
self.publish_image(output_image)
def publish_image(self, image):
img_msg = self.bridge.cv2_to_imgmsg(image, encoding="bgr8")
img_msg.header = self.image_header
self.image_pub.publish(img_msg)
if hasattr(self, 'camera_info_msg'):
self.camera_info_pub.publish(self.camera_info_msg)
def run(self):
rate = rospy.Rate(30) # 30 Hz
while not rospy.is_shutdown():
if self.new_image_flag:
# The processing is done in object_detection_callback
self.new_image_flag = False
rate.sleep()
if __name__ == '__main__':
try:
processor = ObjectDetectionProcessor()
processor.run()
except rospy.ROSInterruptException:
pass

View File

@ -1,24 +0,0 @@
<?xml version="1.0"?>
<launch>
<param name="gazebo" value="false" type="bool" />
<param name="gazebo_robot_name" value="humanoid_robot_intelligence_control_system" />
<param name="offset_file_path" value="$(find humanoid_robot_intelligence_control_system_manager)/config/offset.yaml" />
<param name="robot_file_path" value="$(find humanoid_robot_intelligence_control_system_manager)/config/HUMANOID_ROBOT.robot" />
<param name="init_file_path" value="$(find humanoid_robot_intelligence_control_system_manager)/config/dxl_init_HUMANOID_ROBOT.yaml" />
<param name="device_name" value="/dev/ttyUSB0" />
<param name="/humanoid_robot_intelligence_control_system/direct_control/default_moving_time" value="0.04" />
<param name="/humanoid_robot_intelligence_control_system/direct_control/default_moving_angle" value="90" />
<!-- HUMANOID_ROBOT Manager -->
<node pkg="humanoid_robot_intelligence_control_system_manager" type="humanoid_robot_intelligence_control_system_manager" name="humanoid_robot_intelligence_control_system_manager" output="screen">
<param name="angle_unit" value="30" />
</node>
<!-- HUMANOID_ROBOT Localization -->
<node pkg="humanoid_robot_intelligence_control_system_localization" type="humanoid_robot_intelligence_control_system_localization" name="humanoid_robot_intelligence_control_system_localization" output="screen" />
<!-- HUMANOID_ROBOT Read-Write demo -->
<node pkg="humanoid_robot_intelligence_control_system_read_write_demo" type="read_write" name="humanoid_robot_intelligence_control_system_read_write" output="screen" />
</launch>

View File

@ -0,0 +1,121 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import subprocess
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
def ros1_launch_description():
# Get command-line arguments
args = sys.argv[1:]
# Construct the ROS 1 launch command
roslaunch_command = ["roslaunch", "humanoid_robot_intelligence_control_system_read_write_demo", "humanoid_robot_intelligence_control_system_read_write.launch"] + args
# Add parameters
roslaunch_command.extend([
"gazebo:=false",
"gazebo_robot_name:=humanoid_robot_intelligence_control_system",
"offset_file_path:=$(find humanoid_robot_intelligence_control_system_manager)/config/offset.yaml",
"robot_file_path:=$(find humanoid_robot_intelligence_control_system_manager)/config/HUMANOID_ROBOT.robot",
"init_file_path:=$(find humanoid_robot_intelligence_control_system_manager)/config/dxl_init_HUMANOID_ROBOT.yaml",
"device_name:=/dev/ttyUSB0",
"/humanoid_robot_intelligence_control_system/direct_control/default_moving_time:=0.04",
"/humanoid_robot_intelligence_control_system/direct_control/default_moving_angle:=90"
])
# Add nodes
roslaunch_command.extend([
"humanoid_robot_intelligence_control_system_manager", "humanoid_robot_intelligence_control_system_manager", "angle_unit:=30",
"humanoid_robot_intelligence_control_system_localization", "humanoid_robot_intelligence_control_system_localization",
"humanoid_robot_intelligence_control_system_read_write_demo", "read_write"
])
# Execute the launch command
subprocess.call(roslaunch_command)
def ros2_launch_description():
# Declare launch arguments
gazebo_arg = DeclareLaunchArgument('gazebo', default_value='false')
gazebo_robot_name_arg = DeclareLaunchArgument('gazebo_robot_name', default_value='humanoid_robot_intelligence_control_system')
offset_file_path_arg = DeclareLaunchArgument('offset_file_path', default_value='$(find humanoid_robot_intelligence_control_system_manager)/config/offset.yaml')
robot_file_path_arg = DeclareLaunchArgument('robot_file_path', default_value='$(find humanoid_robot_intelligence_control_system_manager)/config/HUMANOID_ROBOT.robot')
init_file_path_arg = DeclareLaunchArgument('init_file_path', default_value='$(find humanoid_robot_intelligence_control_system_manager)/config/dxl_init_HUMANOID_ROBOT.yaml')
device_name_arg = DeclareLaunchArgument('device_name', default_value='/dev/ttyUSB0')
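# Note (assumption): the '$(find pkg)' default values above are roslaunch
# substitutions; ROS 2 launch would normally build these paths with
# get_package_share_directory() from ament_index_python.packages or the
# FindPackageShare substitution instead.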
# Create a list to hold all nodes to be launched
nodes_to_launch = []
# Add HUMANOID_ROBOT Manager node
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_manager',
executable='humanoid_robot_intelligence_control_system_manager',
name='humanoid_robot_intelligence_control_system_manager',
output='screen',
parameters=[
{'angle_unit': 30},
{'gazebo': LaunchConfiguration('gazebo')},
{'gazebo_robot_name': LaunchConfiguration('gazebo_robot_name')},
{'offset_file_path': LaunchConfiguration('offset_file_path')},
{'robot_file_path': LaunchConfiguration('robot_file_path')},
{'init_file_path': LaunchConfiguration('init_file_path')},
{'device_name': LaunchConfiguration('device_name')},
{'/humanoid_robot_intelligence_control_system/direct_control/default_moving_time': 0.04},
{'/humanoid_robot_intelligence_control_system/direct_control/default_moving_angle': 90}
]
))
# Add HUMANOID_ROBOT Localization node
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_localization',
executable='humanoid_robot_intelligence_control_system_localization',
name='humanoid_robot_intelligence_control_system_localization',
output='screen'
))
# Add HUMANOID_ROBOT Read-Write demo node
nodes_to_launch.append(Node(
package='humanoid_robot_intelligence_control_system_read_write_demo',
executable='read_write',
name='humanoid_robot_intelligence_control_system_read_write',
output='screen'
))
# Return the LaunchDescription containing all nodes and arguments
return LaunchDescription([
gazebo_arg,
gazebo_robot_name_arg,
offset_file_path_arg,
robot_file_path_arg,
init_file_path_arg,
device_name_arg
] + nodes_to_launch)
if __name__ == "__main__":
ros_version = os.getenv("ROS_VERSION")
if ros_version == "1":
ros1_launch_description()
elif ros_version == "2":
ros2_launch_description()
else:
print("Unsupported ROS version. Please set the ROS_VERSION environment variable to '1' for ROS 1 or '2' for ROS 2.")
sys.exit(1)

View File

@ -0,0 +1,34 @@
<?xml version="1.0"?>
<launch>
<param name="gazebo" value="false" type="bool" />
<param name="gazebo_robot_name" value="humanoid_robot_intelligence_control_system" />
<param name="offset_file_path"
value="$(find humanoid_robot_intelligence_control_system_manager)/config/offset.yaml" />
<param name="robot_file_path"
value="$(find humanoid_robot_intelligence_control_system_manager)/config/HUMANOID_ROBOT.robot" />
<param name="init_file_path"
value="$(find humanoid_robot_intelligence_control_system_manager)/config/dxl_init_HUMANOID_ROBOT.yaml" />
<param name="device_name" value="/dev/ttyUSB0" />
<param name="/humanoid_robot_intelligence_control_system/direct_control/default_moving_time"
value="0.04" />
<param name="/humanoid_robot_intelligence_control_system/direct_control/default_moving_angle"
value="90" />
<!-- HUMANOID_ROBOT Manager -->
<node pkg="humanoid_robot_intelligence_control_system_manager"
type="humanoid_robot_intelligence_control_system_manager"
name="humanoid_robot_intelligence_control_system_manager" output="screen">
<param name="angle_unit" value="30" />
</node>
<!-- HUMANOID_ROBOT Localization -->
<node pkg="humanoid_robot_intelligence_control_system_localization"
type="humanoid_robot_intelligence_control_system_localization"
name="humanoid_robot_intelligence_control_system_localization" output="screen" />
<!-- HUMANOID_ROBOT Read-Write demo -->
<node pkg="humanoid_robot_intelligence_control_system_read_write_demo" type="read_write"
name="humanoid_robot_intelligence_control_system_read_write" output="screen" />
</launch>

View File

@ -1,3 +1,18 @@
# Copyright (C) 2024 Bellande Robotics Sensors Research Innovation Center, Ronaldson Bellande
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup