
Commit

cleanup and slight fixes for ros node and ecto cell
tcavallari committed Dec 17, 2012
1 parent 9c7c314 commit 6aaa95c
Showing 10 changed files with 111 additions and 101 deletions.
19 changes: 10 additions & 9 deletions CMakeLists.txt
@@ -18,21 +18,22 @@ if (catkin_FOUND)
set(WITH_CATKIN ON)
set(BUILD_EXECUTABLE OFF)
project(object_recognition_by_parts)
if (${catkin_VERSION} VERSION_GREATER "0.5.28")
catkin_package(INCLUDE_DIRS include)
else()
catkin_stack()
catkin_project(${PROJECT_NAME}
INCLUDE_DIRS include
)
endif()
# install targets for all things python
if (WITH_ECTO)
catkin_python_setup()
find_package(ecto REQUIRED)
find_package(ecto_pcl REQUIRED)
find_package(object_recognition_core REQUIRED)
endif()

if (${catkin_VERSION} VERSION_GREATER "0.5.28")
catkin_package(INCLUDE_DIRS include)
else()
catkin_stack()
catkin_project(${PROJECT_NAME}
INCLUDE_DIRS include
)
endif()
# deal with cvmatio
find_package(cvmatio QUIET)
include_directories(SYSTEM ${cvmatio_INCLUDE_DIRS})
@@ -124,7 +125,7 @@ if(WITH_ECTO AND BUILD_TEST)
endif()

# add ecto cells
if (WITH_ECTO AND ${ecto_FOUND} AND ${object_recognition_core_FOUND})
add_subdirectory(cells)
add_subdirectory(python)
endif()
13 changes: 7 additions & 6 deletions cells/detect.cpp
@@ -39,6 +39,7 @@
#include <boost/scoped_ptr.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <Eigen/Core>

#include <object_recognition_core/common/pose_result.h>
#include <object_recognition_core/db/ModelReader.h>
@@ -84,7 +85,6 @@ struct PartsBasedDetectorCell: public object_recognition_core::db::bases::ModelR
spore<PointCloud::ConstPtr> input_cloud_;
spore<cv::Mat> color_, depth_, camera_intrinsics_, output_;
spore<std::vector<PoseResult> > pose_results_;
spore<std::vector<std::vector<PointCloud, Eigen::aligned_allocator<PointCloud> > > > object_clusters_;

// the detector classes
boost::scoped_ptr<Visualize> visualizer_;
@@ -150,9 +150,6 @@ struct PartsBasedDetectorCell: public object_recognition_core::db::bases::ModelR
"The results of object recognition");
outputs.declare(&PartsBasedDetectorCell::output_, "image",
"The results of object recognition");
outputs.declare(&PartsBasedDetectorCell::object_clusters_,
"point3d_clusters",
"A vector containing a PointCloud for each recognized object");
}

/*! @brief configure the detector state
@@ -217,7 +214,6 @@ struct PartsBasedDetectorCell: public object_recognition_core::db::bases::ModelR
std::cout << "detector: process" << std::endl;

pose_results_->clear();
object_clusters_->clear();

image_pipeline::PinholeCameraModel camera_model;
camera_model.setParams(color_->size(), *camera_intrinsics_, cv::Mat(),
@@ -328,11 +324,16 @@ struct PartsBasedDetectorCell: public object_recognition_core::db::bases::ModelR
}

// Only one point of view for this object...
sensor_msgs::PointCloud2Ptr cluster_cloud (new sensor_msgs::PointCloud2());
std::vector<sensor_msgs::PointCloud2ConstPtr> ros_clouds (1);
pcl::toROSMsg(clusters[object_it], *(cluster_cloud));
ros_clouds[0] = cluster_cloud;
result.set_clouds(ros_clouds);

std::vector<PointCloud, Eigen::aligned_allocator<PointCloud> > object_cluster (1);
object_cluster[0] = clusters[object_it];

pose_results_->push_back(result);
object_clusters_->push_back(object_cluster);
}

return ecto::OK;
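
For context, the detect.cpp change above drops the separate point3d_clusters output and instead attaches each recognized object's cluster to its PoseResult as a single-view sensor_msgs::PointCloud2. A minimal standalone sketch of that conversion step follows; it assumes the 2012-era PCL/ROS API where pcl::toROSMsg lives in pcl/ros/conversions.h (newer setups use pcl_conversions instead), the PointCloud typedef stands in for the cell's own point type, and make_cluster_msg is a hypothetical helper name used only for illustration.

    #include <vector>
    #include <pcl/point_cloud.h>
    #include <pcl/point_types.h>
    #include <pcl/ros/conversions.h>      // pcl::toROSMsg (pre-pcl_conversions API)
    #include <sensor_msgs/PointCloud2.h>

    typedef pcl::PointCloud<pcl::PointXYZ> PointCloud;

    // Hypothetical helper: wrap one PCL cluster as the single "point of view"
    // cloud vector that PoseResult::set_clouds() receives in the cell above.
    std::vector<sensor_msgs::PointCloud2ConstPtr> make_cluster_msg(const PointCloud& cluster)
    {
      sensor_msgs::PointCloud2Ptr cloud_msg(new sensor_msgs::PointCloud2());
      pcl::toROSMsg(cluster, *cloud_msg);           // copy points and fields into the ROS message
      std::vector<sensor_msgs::PointCloud2ConstPtr> ros_clouds(1);
      ros_clouds[0] = cloud_msg;                    // only one point of view for this object
      return ros_clouds;
    }
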
36 changes: 24 additions & 12 deletions include/PointCloudClusterer.hpp
@@ -163,23 +163,33 @@ void PointCloudClusterer<PointType>::clusterObjects(

// expand a bit the box
Rect3d bbox = bounding_boxes[i];
bbox -= cv::Point3d(bbox.width * 0.1, bbox.height * 0.1,
bbox.depth * 0.1);
bbox.width *= 1.2;
bbox.height *= 1.2;
bbox.depth *= 1.2;

crop_box.setMin(
Eigen::Vector4f(bbox.tl().x, bbox.tl().y, bbox.tl().z, 0));
crop_box.setMax(
Eigen::Vector4f(bbox.br().x, bbox.br().y, bbox.br().z, 0));

crop_box.filter(indices->indices);

if(bbox.volume() >= 1E-6) // 1 cubic centimeter
{
bbox -= cv::Point3d(bbox.width * 0.1, bbox.height * 0.1,
bbox.depth * 0.1);
bbox.width *= 1.2;
bbox.height *= 1.2;
bbox.depth *= 1.2;

crop_box.setMin(
Eigen::Vector4f(bbox.tl().x, bbox.tl().y, bbox.tl().z, 0));
crop_box.setMax(
Eigen::Vector4f(bbox.br().x, bbox.br().y, bbox.br().z, 0));

crop_box.filter(indices->indices);
// std::cout << "Cropped obj " << i << ", remaining indices: " << indices->indices.size() << std::endl;
}
else
{
// std::cout << "Bounding box for obj " << i << " empty." << std::endl;
}
// add the points belonging to the bbox
boxed_indices[i] = indices;
}

std::cout << "Cropping ok..." << std::endl;

// Extract different clusters to remove parts of other objects appearing into the bounding boxes
pcl::EuclideanClusterExtraction<PointType> boxes_clusterer;
boxes_clusterer.setInputCloud(cloud);
@@ -198,9 +208,11 @@ void PointCloudClusterer<PointType>::clusterObjects(
continue;
}

std::cout << "Start clustering " << i << std::endl;
// keep the biggest one (should be the object)
boxes_clusterer.setIndices(boxed_indices[i]);
boxes_clusterer.extract(clusters);
// std::cout << "Clusterer extraction for object " << i << " succeeded with " << clusters.size() << " results" << std::endl;

// ROS_INFO("Extracted %zu clusters", clusters.size());

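
For context, the guard added to clusterObjects above only crops when the detection's 3D bounding box has a non-negligible volume (>= 1E-6 m^3, i.e. one cubic centimetre), and then grows the box by 10% on each side before filtering the cloud with pcl::CropBox. A rough standalone sketch of that crop step, using plain Eigen min/max corners instead of the project's Rect3d type and an illustrative function name, might look like this:

    #include <Eigen/Core>
    #include <pcl/point_cloud.h>
    #include <pcl/point_types.h>
    #include <pcl/PointIndices.h>
    #include <pcl/filters/crop_box.h>

    typedef pcl::PointXYZ PointType;

    // Return the indices of 'cloud' that fall inside an axis-aligned box,
    // after growing the box by 10% per side; degenerate boxes yield no indices.
    pcl::PointIndices::Ptr cropExpandedBox(
        const pcl::PointCloud<PointType>::ConstPtr& cloud,
        Eigen::Vector3f min_corner, Eigen::Vector3f max_corner)
    {
      pcl::PointIndices::Ptr indices(new pcl::PointIndices());

      const Eigen::Vector3f size = max_corner - min_corner;
      if (size.prod() < 1e-6f)                  // skip boxes smaller than ~1 cm^3
        return indices;                         // empty -> caller skips this object

      min_corner -= 0.1f * size;                // expand by 10% on every side
      max_corner += 0.1f * size;

      pcl::CropBox<PointType> crop_box;
      crop_box.setInputCloud(cloud);
      crop_box.setMin(Eigen::Vector4f(min_corner.x(), min_corner.y(), min_corner.z(), 0));
      crop_box.setMax(Eigen::Vector4f(max_corner.x(), max_corner.y(), max_corner.z(), 0));
      crop_box.filter(indices->indices);        // indices of points inside the expanded box
      return indices;
    }

In the real code the per-object indices are kept in boxed_indices and handed to the Euclidean cluster extraction shown above.
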
1 change: 0 additions & 1 deletion python/CMakeLists.txt
@@ -4,4 +4,3 @@ install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/object_recognition_by_parts
COMPONENT main
FILES_MATCHING PATTERN "*.py"
)

1 change: 0 additions & 1 deletion python/object_recognition_by_parts/detector.py
@@ -41,7 +41,6 @@ def declare_io(self, _p, i, o):

o.forward("pose_results", cell_name="detector")
o.forward("image", cell_name="detector")
o.forward("point3d_clusters", cell_name="detector")
o.forward("cloud_out", cell_name="mat_to_cloud", cell_key="point_cloud")

def configure(self, p, _i, _o):
Binary file removed python/object_recognition_by_parts/detector.pyc
Binary file removed python/object_recognition_by_parts/publisher.pyc
64 changes: 60 additions & 4 deletions ros/Messages.cpp
@@ -36,13 +36,11 @@
* Created: Sep 10, 2012
*/

#ifndef BOUNDINGBOX_HPP_
#define BOUNDINGBOX_HPP_

#include <boost/functional/hash.hpp>
#include <opencv2/core/core.hpp>
#include <ros/ros.h>
#include <visualization_msgs/MarkerArray.h>
#include <pcl/common/centroid.h>
#include "Candidate.hpp"
#include "types.hpp"
#include "Visualize.hpp"
@@ -172,5 +170,63 @@ void PartsBasedDetectorNode::messageMask(const vectorCandidate& candidates, Mat&
mask_pub_.publish(msg_out);
}

void PartsBasedDetectorNode::messageClusters(const std::vector<PointCloud>& clusters)
{
PointCloud all_clusters = clusters[0]; //there is at least one candidate
for(size_t i = 1; i < clusters.size(); ++i)
{
all_clusters += clusters[i];
}

cloud_pub_.publish(all_clusters);
}

void PartsBasedDetectorNode::messagePoses(const std_msgs::Header& header, const std::vector<PointCloud>& parts_centers)
{
// Pose from part centers
PoseArray poses;
poses.header = header;
poses.poses.clear();

#endif /* BOUNDINGBOX_HPP_ */
// for each object
for (int object_it = 0; object_it < parts_centers.size(); ++object_it)
{
const PointCloud& part_centers_cloud = parts_centers[object_it];

//compute centroid
Eigen::Vector4f centroid;
Eigen::Matrix3f covMat;

int point_count = 0;
if ((point_count = computeMeanAndCovarianceMatrix(
part_centers_cloud, covMat, centroid)) == 0)
{
ROS_WARN("Centroid not found...");
continue;
}

// normalize matrix
covMat /= point_count;

//eigen33 -> RF
Eigen::Matrix3f evecs;
Eigen::Vector3f evals;
pcl::eigen33(covMat, evecs, evals);
Eigen::Quaternion<float> quat(evecs);
quat.normalize();

Pose pose;
pose.position.x = centroid.x();
pose.position.y = centroid.y();
pose.position.z = centroid.z();

pose.orientation.w = quat.w();
pose.orientation.x = quat.x();
pose.orientation.y = quat.y();
pose.orientation.z = quat.z();

poses.poses.push_back(pose);
}

object_pose_pub_.publish(poses);
}
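
The new messagePoses helper above derives each object's pose from its part centres: the centroid gives the position, and the eigenvectors of the part-centre covariance (the principal axes of the part layout) give the orientation. A hedged standalone sketch of that reduction is below; poseFromPartCenters is only an illustrative name, and note that the eigenvector matrix from pcl::eigen33 is orthonormal but not guaranteed to be a proper rotation (its determinant may be -1), so a handedness check may be wanted before using the quaternion downstream.

    #include <Eigen/Geometry>
    #include <pcl/point_cloud.h>
    #include <pcl/point_types.h>
    #include <pcl/common/centroid.h>   // pcl::computeMeanAndCovarianceMatrix
    #include <pcl/common/eigen.h>      // pcl::eigen33

    // Fit a centroid plus principal-axis frame to a small cloud of part centres.
    // Returns false when the cloud contains no valid points.
    bool poseFromPartCenters(const pcl::PointCloud<pcl::PointXYZ>& part_centers,
                             Eigen::Vector4f& centroid, Eigen::Quaternionf& orientation)
    {
      Eigen::Matrix3f covariance;
      if (pcl::computeMeanAndCovarianceMatrix(part_centers, covariance, centroid) == 0)
        return false;                                         // no points -> no pose

      Eigen::Matrix3f eigenvectors;
      Eigen::Vector3f eigenvalues;
      pcl::eigen33(covariance, eigenvectors, eigenvalues);    // principal axes of the part layout

      // Rescaling the covariance (as the node does with covMat /= point_count)
      // does not change its eigenvectors, so the orientation is unaffected.
      orientation = Eigen::Quaternionf(eigenvectors).normalized();
      return true;
    }
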
72 changes: 8 additions & 64 deletions ros/Node.cpp
@@ -33,7 +33,6 @@
*
*/
#include <cstdio>
#include <pcl/common/centroid.h>
#include "Node.hpp"
#include "PointCloudClusterer.h"

@@ -79,7 +78,7 @@ bool PartsBasedDetectorNode::init(void)
bool ok = model.deserialize(modelfile);
if (!ok)
{
fprintf(stderr, "Error deserializing file\n");
ROS_ERROR("Error deserializing file\n");
return false;
}
pbd_.distributeModel(model);
@@ -93,7 +92,7 @@ bool PartsBasedDetectorNode::init(void)
bool ok = model.deserialize(modelfile);
if (!ok)
{
fprintf(stderr, "Error deserializing file\n");
ROS_ERROR("Error deserializing file\n");
return false;
}
pbd_.distributeModel(model);
@@ -102,7 +101,7 @@ bool PartsBasedDetectorNode::init(void)
#endif
else
{
fprintf(stderr, "Unsupported model format: %s\n", ext.c_str());
ROS_ERROR("Unsupported model format: %s\n", ext.c_str());
return false;
}

Expand All @@ -126,6 +125,7 @@ bool PartsBasedDetectorNode::init(void)
object_pose_pub_ = nh_.advertise<PoseArray>(ns_ + name_ + "/object_poses",
1);

ROS_INFO("Initialization successful");
// if we got here, everything is okay
return true;
}
@@ -140,7 +140,6 @@ void PartsBasedDetectorNode::depthCameraCallback(
void PartsBasedDetectorNode::detectorCallback(const ImageConstPtr& msg_d,
const ImageConstPtr& msg_rgb, const PointCloud::ConstPtr& msg_cloud)
{

typedef PointCloudClusterer<PointType> PointCloudClusterer;

// UNPACK PREAMBLE
@@ -170,6 +169,8 @@ void PartsBasedDetectorNode::detectorCallback(const ImageConstPtr& msg_d,
vectorCandidate candidates;
pbd_.detect(image_rgb, image_d, candidates);

ROS_DEBUG("Found %zu candidates.", candidates.size());

if (candidates.size() == 0)
return;

@@ -181,8 +182,6 @@ void PartsBasedDetectorNode::detectorCallback(const ImageConstPtr& msg_d,
Candidate::nonMaximaSuppression(image_rgb, candidates, 0.1); // 10% overlap allowed
}

ROS_INFO("Found %zu candidates.", candidates.size());

//pose calculation members
std::vector<Rect3d> bounding_boxes;
std::vector<float> scores;
@@ -227,62 +226,7 @@ void PartsBasedDetectorNode::detectorCallback(const ImageConstPtr& msg_d,
if (mask_pub_.getNumSubscribers() > 0)
messageMask(candidates, image_rgb, msg_rgb);
if (cloud_pub_.getNumSubscribers() > 0)
{
PointCloud all_clusters = clusters[0]; //there is at least one candidate
for(size_t i = 1; i < clusters.size(); ++i)
{
all_clusters += clusters[i];
}

cloud_pub_.publish(all_clusters);
}
messageClusters(clusters);
if (object_pose_pub_.getNumSubscribers() > 0)
{
// Pose from part centers
PoseArray poses;
poses.header = msg_cloud->header;
poses.poses.clear();

// for each object
for (int object_it = 0; object_it < part_centers.size(); ++object_it)
{
const PointCloud& part_centers_cloud = part_centers[object_it];

//compute centroid
Eigen::Vector4f centroid;
Eigen::Matrix3f covMat;

int point_count = 0;
if ((point_count = computeMeanAndCovarianceMatrix(
part_centers_cloud, covMat, centroid)) == 0)
{
ROS_WARN("Centroid not found...");
continue;
}

// normalize matrix
covMat /= point_count;

//eigen33 -> RF
Eigen::Matrix3f evecs;
Eigen::Vector3f evals;
pcl::eigen33(covMat, evecs, evals);
Eigen::Quaternion<float> quat(evecs);
quat.normalize();

Pose pose;
pose.position.x = centroid.x();
pose.position.y = centroid.y();
pose.position.z = centroid.z();

pose.orientation.w = quat.w();
pose.orientation.x = quat.x();
pose.orientation.y = quat.y();
pose.orientation.z = quat.z();

poses.poses.push_back(pose);
}

object_pose_pub_.publish(poses);
}
messagePoses(msg_cloud->header, part_centers);
}
6 changes: 2 additions & 4 deletions ros/Node.hpp
@@ -128,10 +128,6 @@ class PartsBasedDetectorNode {
PartsBasedDetectorNode() :
nh_(),
it_(nh_),
// image_sub_d_( it_, "kinect_head/depth_registered/image_rect", 1),
// image_sub_rgb_( it_, "kinect_head/rgb/image_rect_color", 1),
// info_sub_d_( nh_, "kinect_head/depth_registered/camera_info", 1),
// pointcloud_sub_(nh_, "kinect_head/depth_registered/points", 1),
image_sub_d_(it_, "image_depth_in", 1),
image_sub_rgb_(it_, "image_rgb_in", 1),
info_sub_d_(nh_, "depth_camera_info_in", 1),
Expand All @@ -154,6 +150,8 @@ class PartsBasedDetectorNode {
void messageImageDepth(cv::Mat& depth, const ImageConstPtr& msg_in);
void messageMask(const vectorCandidate& candidates, cv::Mat& rgb,
const ImageConstPtr& msg_in);
void messageClusters(const std::vector<PointCloud>& clusters);
void messagePoses(const std_msgs::Header& header, const std::vector<PointCloud>& parts_centers);

// callbacks
void depthCameraCallback(const CameraInfoConstPtr& info_msg);
