#include "ros/ros.h"
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>

#include <papillon/BoundingBox.h>

#include <opencv/cv.h>

#include <sstream>
#include <string>
#include <vector>
|
|
|
|
|
|
|
|
// NOTE(review): file-scope using-directive — tolerable in a .cpp translation
// unit like this one, but it must never migrate into a header.
using namespace std;
|
|
|
|
|
|
|
|
class Traite_image {
|
|
|
|
public:
|
2016-06-15 15:29:51 +00:00
|
|
|
const static int SENSITIVITY_VALUE = 35;
|
2016-06-14 14:17:48 +00:00
|
|
|
const static int BLUR_Size = 9;
|
|
|
|
const static int CLOSE_SIZE = 20;
|
2016-06-15 15:29:51 +00:00
|
|
|
const static int ERODE_SIZE = 5;
|
|
|
|
const static int NB_FRAME_DROP = 3;
|
2016-05-30 14:49:44 +00:00
|
|
|
|
|
|
|
|
2016-06-14 14:56:12 +00:00
|
|
|
vector<cv::Mat> prevs;
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat last_T;
|
2016-05-03 22:23:18 +00:00
|
|
|
bool first = true;
|
2016-06-14 10:53:22 +00:00
|
|
|
int resize_f = 1;
|
2016-05-15 22:27:55 +00:00
|
|
|
|
|
|
|
int theObject[2] = {0,0};
|
2016-05-03 22:23:18 +00:00
|
|
|
|
|
|
|
ros::NodeHandle n;
|
|
|
|
|
|
|
|
image_transport::ImageTransport it;
|
2016-05-30 13:41:34 +00:00
|
|
|
image_transport::Publisher pub_img;
|
2016-06-14 14:17:48 +00:00
|
|
|
image_transport::Publisher pub_thres;
|
2016-06-14 09:56:34 +00:00
|
|
|
ros::Publisher pub_cmd;
|
2016-05-03 22:23:18 +00:00
|
|
|
image_transport::Subscriber sub;
|
|
|
|
|
|
|
|
|
|
|
|
Traite_image() : n("~"),it(n) {
|
2016-06-14 15:44:37 +00:00
|
|
|
pub_img = it.advertise("/papillon/image_out", 1);
|
|
|
|
pub_thres = it.advertise("/papillon/thres_out", 1);
|
|
|
|
pub_cmd = n.advertise<papillon::BoundingBox>("/papillon/bbox", 1);
|
2016-06-14 14:17:48 +00:00
|
|
|
sub = it.subscribe("/bebop/image_raw", 1, [this](const sensor_msgs::ImageConstPtr& img) -> void { this->on_image(img);},ros::VoidPtr(),image_transport::TransportHints("compressed"));
|
2016-05-03 22:23:18 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// This processes an image and publishes the result.
|
|
|
|
void on_image(const sensor_msgs::ImageConstPtr& msg) {
|
|
|
|
|
|
|
|
cv_bridge::CvImageConstPtr bridge_input;
|
|
|
|
try {
|
|
|
|
bridge_input = cv_bridge::toCvShare(msg,sensor_msgs::image_encodings::RGB8);
|
|
|
|
}
|
2016-06-14 09:56:34 +00:00
|
|
|
catch (cv::Exception& e) {
|
2016-05-03 22:23:18 +00:00
|
|
|
std::ostringstream errstr;
|
|
|
|
errstr << "cv_bridge exception caught: " << e.what();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
//cv::Mat& input = const_cast<cv::Mat&>(bridge_input->image);
|
|
|
|
const cv::Mat& input = bridge_input->image;
|
|
|
|
cv::Mat next;
|
|
|
|
resize(input, next, cv::Size(input.size().width/resize_f, input.size().height/resize_f));
|
|
|
|
cv::Mat output;// = input.clone(); // (input.rows, input.cols, CV_32FC2);
|
2016-05-03 22:23:18 +00:00
|
|
|
if (first) {
|
2016-06-14 14:56:12 +00:00
|
|
|
for (int i = 0; i < NB_FRAME_DROP; ++i) {
|
|
|
|
prevs.push_back(next.clone());
|
|
|
|
}
|
2016-05-03 22:23:18 +00:00
|
|
|
first = false;
|
|
|
|
ROS_INFO("first done");
|
|
|
|
}
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat next_stab;
|
2016-06-14 14:56:12 +00:00
|
|
|
stabiliseImg(prevs.back(), next, next_stab);
|
2016-05-30 14:49:44 +00:00
|
|
|
int crop_ratio = 6;
|
|
|
|
float crop_x = next_stab.size().width/crop_ratio;
|
|
|
|
float crop_y = next_stab.size().height/crop_ratio;
|
|
|
|
float crop_w = next_stab.size().width*(1-2.0/crop_ratio);
|
|
|
|
float crop_h = next_stab.size().height*(1-2.0/crop_ratio);
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Rect myROI(crop_x, crop_y, crop_w, crop_h);
|
|
|
|
cv::Mat next_stab_cropped = next_stab(myROI);
|
2016-06-14 14:56:12 +00:00
|
|
|
cv::Mat prev_cropped = prevs.back()(myROI);
|
2016-06-14 14:17:48 +00:00
|
|
|
cv::Mat closed_thres;
|
|
|
|
searchForMovement(prev_cropped, next_stab_cropped, output, closed_thres);
|
2016-05-17 22:46:44 +00:00
|
|
|
|
2016-05-03 22:23:18 +00:00
|
|
|
|
2016-06-14 09:36:58 +00:00
|
|
|
pub_img.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
|
2016-06-14 14:17:48 +00:00
|
|
|
pub_thres.publish(cv_bridge::CvImage(msg->header, "mono8", closed_thres).toImageMsg());
|
2016-05-03 22:23:18 +00:00
|
|
|
// bridge_input is handled by a smart-pointer. No explicit delete needed.
|
2016-05-30 14:49:44 +00:00
|
|
|
|
|
|
|
//droneTracking(Rect(Point(0,0), output.size()));
|
2016-05-03 22:23:18 +00:00
|
|
|
|
2016-06-14 14:56:12 +00:00
|
|
|
prevs.pop_back();
|
|
|
|
prevs.insert(prevs.begin(), next.clone());
|
2016-05-03 22:23:18 +00:00
|
|
|
}
|
|
|
|
|
2016-05-15 22:27:55 +00:00
|
|
|
//int to string helper function
|
|
|
|
string intToString(int number){
|
|
|
|
|
|
|
|
//this function has a number input and string output
|
|
|
|
std::stringstream ss;
|
|
|
|
ss << number;
|
|
|
|
return ss.str();
|
|
|
|
}
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
void stabiliseImg(cv::Mat prev, cv::Mat cur, cv::Mat &output){
|
|
|
|
cv::Mat cur_grey, prev_grey;
|
|
|
|
cv::cvtColor(cur, cur_grey, cv::COLOR_BGR2GRAY);
|
|
|
|
cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY);
|
2016-05-17 22:46:44 +00:00
|
|
|
|
|
|
|
// vector from prev to cur
|
2016-06-14 09:56:34 +00:00
|
|
|
vector <cv::Point2f> prev_corner, cur_corner;
|
|
|
|
vector <cv::Point2f> prev_corner2, cur_corner2;
|
2016-05-17 22:46:44 +00:00
|
|
|
vector <uchar> status;
|
|
|
|
vector <float> err;
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::goodFeaturesToTrack(prev_grey, prev_corner, 200, 0.01, 30);
|
|
|
|
cv::calcOpticalFlowPyrLK(prev_grey, cur_grey, prev_corner, cur_corner, status, err);
|
2016-05-17 22:46:44 +00:00
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
// weed out bad cv::Matches
|
2016-05-17 22:46:44 +00:00
|
|
|
for(size_t i=0; i < status.size(); i++) {
|
|
|
|
if(status[i]) {
|
|
|
|
prev_corner2.push_back(prev_corner[i]);
|
|
|
|
cur_corner2.push_back(cur_corner[i]);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat T = estimateRigidTransform(prev_corner2, cur_corner2, true); // false = rigid transform, no scaling/shearing
|
2016-05-24 09:57:41 +00:00
|
|
|
|
|
|
|
if(T.data == NULL) {
|
|
|
|
last_T.copyTo(T);
|
|
|
|
}
|
|
|
|
T.copyTo(last_T);
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat cur2;
|
2016-05-24 09:57:41 +00:00
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::warpAffine(cur, cur2, T, cur.size(),cv::INTER_CUBIC|cv::WARP_INVERSE_MAP);
|
2016-05-24 09:57:41 +00:00
|
|
|
|
|
|
|
cur2.copyTo(output);
|
2016-05-17 22:46:44 +00:00
|
|
|
}
|
|
|
|
|
2016-06-14 14:17:48 +00:00
|
|
|
void searchForMovement(cv::Mat prev, cv::Mat cur, cv::Mat &output, cv::Mat &out2){
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat cur_grey, prev_grey;
|
2016-05-24 09:57:41 +00:00
|
|
|
cur.copyTo(output);
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY);
|
|
|
|
cv::cvtColor(cur, cur_grey, cv::COLOR_BGR2GRAY);
|
|
|
|
cv::GaussianBlur(prev_grey, prev_grey, cv::Size(BLUR_Size,BLUR_Size), 3.0);
|
|
|
|
cv::GaussianBlur(cur_grey, cur_grey, cv::Size(BLUR_Size,BLUR_Size), 3.0);
|
|
|
|
//blur(prev_grey, prev_grey, cv::Size(BLUR_Size, BLUR_Size));
|
|
|
|
//blur(cur_grey, cur_grey, cv::Size(BLUR_Size, BLUR_Size));
|
2016-05-24 09:57:41 +00:00
|
|
|
|
|
|
|
// Subtract the 2 last frames and threshold them
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat thres;
|
|
|
|
cv::absdiff(prev_grey,cur_grey,thres);
|
2016-06-07 12:55:41 +00:00
|
|
|
// threshold(thres, thres, SENSITIVITY_VALUE, 255, THRESH_BINARY);
|
|
|
|
// // Blur to eliminate noise
|
2016-06-14 09:56:34 +00:00
|
|
|
// blur(thres, thres, cv::Size(BLUR_Size, BLUR_Size));
|
2016-06-14 14:17:48 +00:00
|
|
|
|
|
|
|
cv::Mat element = getStructuringElement( cv::MORPH_ELLIPSE,
|
|
|
|
cv::Size( 2*ERODE_SIZE + 1, 2*ERODE_SIZE+1 ),
|
|
|
|
cv::Point( ERODE_SIZE, ERODE_SIZE ) );
|
|
|
|
// Apply the dilation operation
|
|
|
|
cv::erode(thres, thres, element );
|
|
|
|
|
2016-06-15 15:29:51 +00:00
|
|
|
thres.copyTo(out2);
|
2016-06-14 14:17:48 +00:00
|
|
|
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::threshold(thres, thres, SENSITIVITY_VALUE, 255, cv::THRESH_BINARY);
|
2016-06-14 14:17:48 +00:00
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat closed_thres;
|
2016-06-14 12:45:44 +00:00
|
|
|
cv::Mat structuringElement = getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(CLOSE_SIZE, CLOSE_SIZE));
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::morphologyEx( thres, closed_thres, cv::MORPH_CLOSE, structuringElement );
|
2016-06-14 14:17:48 +00:00
|
|
|
// dilated_thres.copyTo(output);
|
|
|
|
|
|
|
|
|
2016-06-14 09:36:58 +00:00
|
|
|
//closed_thres.copyTo(output);
|
2016-05-24 09:57:41 +00:00
|
|
|
|
|
|
|
//notice how we use the '&' operator for objectDetected and output. This is because we wish
|
2016-05-15 22:27:55 +00:00
|
|
|
//to take the values passed into the function and manipulate them, rather than just working with a copy.
|
2016-05-24 09:57:41 +00:00
|
|
|
//eg. we draw to the output to be displayed in the main() function.
|
2016-05-15 22:27:55 +00:00
|
|
|
bool objectDetected = false;
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::Mat temp;
|
2016-06-14 09:36:58 +00:00
|
|
|
closed_thres.copyTo(temp);
|
2016-05-15 22:27:55 +00:00
|
|
|
//these two vectors needed for output of findContours
|
2016-06-14 09:56:34 +00:00
|
|
|
vector< vector<cv::Point> > contours;
|
|
|
|
vector<cv::Vec4i> hierarchy;
|
2016-05-15 22:27:55 +00:00
|
|
|
//find contours of filtered image using openCV findContours function
|
|
|
|
//findContours(temp,contours,hierarchy,CV_RETR_CCOMP,CV_CHAIN_APPROX_SIMPLE );// retrieves all contours
|
2016-06-14 09:56:34 +00:00
|
|
|
cv::findContours(temp,contours,hierarchy,CV_RETR_EXTERNAL,CV_CHAIN_APPROX_SIMPLE );// retrieves external contours
|
2016-05-15 22:27:55 +00:00
|
|
|
|
|
|
|
//if contours vector is not empty, we have found some objects
|
|
|
|
if(contours.size()>0)objectDetected=true;
|
|
|
|
else objectDetected = false;
|
|
|
|
|
|
|
|
if(objectDetected){
|
|
|
|
//the largest contour is found at the end of the contours vector
|
|
|
|
//we will simply assume that the biggest contour is the object we are looking for.
|
2016-06-14 09:36:58 +00:00
|
|
|
//vector< vector<Point> > largestContourVec;
|
|
|
|
//largestContourVec.push_back(contours.at(contours.size()-1));
|
2016-05-15 22:27:55 +00:00
|
|
|
//make a bounding rectangle around the largest contour then find its centroid
|
2016-06-14 09:56:34 +00:00
|
|
|
//this will be the object's final esticv::Mated position.
|
2016-06-15 15:29:51 +00:00
|
|
|
cv::Rect objectBoundingRectangle;
|
|
|
|
for(int i=0; i<contours.size();i++)
|
|
|
|
{
|
|
|
|
objectBoundingRectangle = cv::boundingRect(contours[i]);
|
|
|
|
cv::rectangle(output, objectBoundingRectangle, cv::Scalar(0, 255, 0), 2);
|
|
|
|
}
|
|
|
|
cv::Rect objBRect = cv::boundingRect(contours.at(contours.size()-1));
|
|
|
|
//cv::rectangle(output, objBRect, cv::Scalar(0, 255, 0), 2);
|
|
|
|
papillon::BoundingBox bbox = papillon::BoundingBox();
|
|
|
|
bbox.x = objBRect.x / (float)cur.size().width;
|
|
|
|
bbox.y = objBRect.y / (float)cur.size().height;
|
|
|
|
bbox.width = objBRect.width / (float)cur.size().width;
|
|
|
|
bbox.height = objBRect.height / (float)cur.size().height;
|
|
|
|
pub_cmd.publish(bbox);
|
2016-05-15 22:27:55 +00:00
|
|
|
}
|
|
|
|
//make some temp x and y variables so we dont have to type out so much
|
2016-06-14 09:36:58 +00:00
|
|
|
//~ int x = objectBoundingRectangle.x;
|
|
|
|
//~ int y = objectBoundingRectangle.y;
|
|
|
|
//~ int width = objectBoundingRectangle.width;
|
|
|
|
//~ int height = objectBoundingRectangle.height;
|
2016-05-27 17:12:23 +00:00
|
|
|
|
|
|
|
//draw a rectangle around the object
|
2016-06-14 09:56:34 +00:00
|
|
|
//rectangle(output, Point(x,y), Point(x+width, y+height), cv::Scalar(0, 255, 0), 2);
|
2016-05-15 22:27:55 +00:00
|
|
|
|
|
|
|
//write the position of the object to the screen
|
2016-06-14 09:56:34 +00:00
|
|
|
//putText(output,"Tracking object at (" + intToString(x)+","+intToString(y)+")",Point(x,y),1,1,cv::Scalar(255,0,0),2);
|
2016-05-15 22:27:55 +00:00
|
|
|
}
|
|
|
|
|
2016-06-14 09:56:34 +00:00
|
|
|
inline bool isFlowCorrect(cv::Point2f u)
|
2016-05-03 22:23:18 +00:00
|
|
|
{
|
|
|
|
return !cvIsNaN(u.x) && !cvIsNaN(u.y) && fabs(u.x) < 1e9 && fabs(u.y) < 1e9;
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
|
int main(int argc, char **argv)
|
|
|
|
{
|
|
|
|
ros::init(argc, argv, "test_opencv");
|
|
|
|
Traite_image dataset=Traite_image();
|
|
|
|
ros::spin();
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|