lhark 2016-06-02 01:20:29 +02:00
parent e4ac1b70af
commit a259a853e4
2 changed files with 72 additions and 16 deletions


@@ -23,6 +23,7 @@ target_link_libraries(papillon ${catkin_LIBRARIES})
set_property (TARGET papillon APPEND PROPERTY INCLUDE_DIRECTORIES ${OpenCV_INCLUDE_DIRS})
set_property (TARGET papillon APPEND PROPERTY INCLUDE_DIRECTORIES ${catkin_INCLUDE_DIRS})
set_property (TARGET papillon APPEND PROPERTY LINK_LIBRARIES ${OpenCV_LIBRARIES})
set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
install(TARGETS papillon DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})


@@ -16,9 +16,9 @@ class Traite_image {
public:
const static int THRESHOLD_DETECT_SENSITIVITY = 10;
const static int BLUR_SIZE = 5;
const static int THRESHOLD_MOV = 5;
Mat prev;
Mat last_T;
bool first = true;
@@ -77,7 +77,7 @@ class Traite_image {
pub_img.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
// bridge_input is handled by a smart-pointer. No explicit delete needed.
droneTracking(Rect(Point(0,0), output.size()));
//ROS_INFO("pub");
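
Note: apart from the added droneTracking() call, the hunk above leaves the cv_bridge publish path unchanged. As a point of reference, a minimal sketch of that convert-and-republish pattern is given below; the callback name and signature are assumptions, while `pub_img`, the "rgb8" encoding and the smart-pointer remark are taken from the diff.

    // Hypothetical sketch: convert the incoming ROS image, process it, republish it.
    #include <ros/ros.h>
    #include <cv_bridge/cv_bridge.h>
    #include <sensor_msgs/Image.h>
    #include <opencv2/opencv.hpp>

    void on_image(const sensor_msgs::ImageConstPtr& msg, ros::Publisher& pub_img) {
        // Shared conversion: no copy is made when the encoding already matches.
        cv_bridge::CvImageConstPtr bridge_input = cv_bridge::toCvShare(msg, "rgb8");
        cv::Mat output = bridge_input->image.clone();   // work on a private copy
        // ... detection / tracking would run on `output` here ...
        pub_img.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
        // bridge_input is a smart pointer, so no explicit delete is needed.
    }
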
@@ -182,26 +182,26 @@ class Traite_image {
//write the position of the object to the screen
putText(output,"Tracking object at (" + intToString(x)+","+intToString(y)+")",Point(x,y),1,1,Scalar(255,0,0),2);
}
void searchForMovementOptFlow(Mat prev, Mat cur, Mat &output){
Mat cur_grey, prev_grey;
cur.copyTo(output);
cvtColor(prev, prev_grey, COLOR_BGR2GRAY);
cvtColor(cur, cur_grey, COLOR_BGR2GRAY);
Mat flow;
calcOpticalFlowFarneback(prev_grey, cur_grey, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
vector<Mat> flow_coord(2);
Mat flow_norm, angle;
split(flow, flow_coord);
cartToPolar(flow_coord[0], flow_coord[1], flow_norm, angle);
//threshold(flow_norm, flow_norm, THRESHOLD_DETECT_SENSITIVITY, 255, THRESH_BINARY);
// Blur to eliminate noise
blur(flow_norm, flow_norm, Size(BLUR_SIZE, BLUR_SIZE));
threshold(flow_norm, flow_norm, THRESHOLD_DETECT_SENSITIVITY, 255, THRESH_BINARY);
flow_norm.convertTo(flow_norm, CV_8U);
bool objectDetected = false;
Mat temp;
flow_norm.copyTo(temp);
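
For readers unfamiliar with the dense optical-flow step used above, here is a self-contained sketch of the same magnitude-mask pipeline (Farneback flow, split into x/y components, cartToPolar for the magnitude, blur, threshold, 8-bit conversion). The function name is invented for illustration, and the default arguments merely mirror THRESHOLD_DETECT_SENSITIVITY and BLUR_SIZE from this diff.

    #include <opencv2/opencv.hpp>
    #include <vector>

    // Hypothetical standalone version of the masking done in searchForMovementOptFlow().
    cv::Mat motionMask(const cv::Mat& prev, const cv::Mat& cur,
                       double sensitivity = 10, int blur_size = 5) {
        cv::Mat prev_grey, cur_grey, flow;
        cv::cvtColor(prev, prev_grey, cv::COLOR_BGR2GRAY);
        cv::cvtColor(cur, cur_grey, cv::COLOR_BGR2GRAY);
        // Dense Farneback optical flow between the two frames.
        cv::calcOpticalFlowFarneback(prev_grey, cur_grey, flow,
                                     0.5, 3, 15, 3, 5, 1.2, 0);
        std::vector<cv::Mat> flow_coord(2);
        cv::Mat flow_norm, angle;
        cv::split(flow, flow_coord);                    // x and y flow components
        cv::cartToPolar(flow_coord[0], flow_coord[1], flow_norm, angle);
        cv::blur(flow_norm, flow_norm, cv::Size(blur_size, blur_size));  // denoise
        cv::threshold(flow_norm, flow_norm, sensitivity, 255, cv::THRESH_BINARY);
        flow_norm.convertTo(flow_norm, CV_8U);          // 8-bit mask for findContours
        return flow_norm;
    }
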
@@ -236,22 +236,77 @@ class Traite_image {
//write the position of the object to the screen
putText(output,"Tracking object at (" + intToString(x)+","+intToString(y)+")",Point(x,y),1,1,Scalar(255,0,0),2);
}
}
void trackingOptFlow(Mat prev, Mat cur, Mat &output) {
Mat cur_grey, prev_grey;
cur.copyTo(output);
cvtColor(prev, prev_grey, COLOR_BGR2GRAY);
cvtColor(cur, cur_grey, COLOR_BGR2GRAY);
Mat flow;
calcOpticalFlowFarneback(prev_grey, cur_grey, flow, 0.5, 3, 15, 3, 5, 1.2, 0);
vector<Mat> flow_coord(2);
Mat flow_norm, angle;
split(flow, flow_coord);
cartToPolar(flow_coord[0], flow_coord[1], flow_norm, angle);
//threshold(flow_norm, flow_norm, THRESHOLD_DETECT_SENSITIVITY, 255, THRESH_BINARY);
// Blur to eliminate noise
blur(flow_norm, flow_norm, Size(BLUR_SIZE, BLUR_SIZE));
threshold(flow_norm, flow_norm, THRESHOLD_DETECT_SENSITIVITY, 255, THRESH_BINARY);
flow_norm.convertTo(flow_norm, CV_8U);
bool objectDetected = false;
Mat temp;
flow_norm.copyTo(temp);
//these two vectors needed for output of findContours
vector< vector<Point> > contours;
vector<Vec4i> hierarchy;
//find contours of filtered image using openCV findContours function
//findContours(temp,contours,hierarchy,CV_RETR_CCOMP,CV_CHAIN_APPROX_SIMPLE );// retrieves all contours
findContours(temp,contours,hierarchy,CV_RETR_EXTERNAL,CV_CHAIN_APPROX_SIMPLE );// retrieves external contours
//if contours vector is not empty, we have found some objects
if(contours.size()>0)objectDetected=true;
else objectDetected = false;
if(objectDetected){
//the largest contour is found at the end of the contours vector
//we will simply assume that the biggest contour is the object we are looking for.
vector< vector<Point> > largestContourVec;
largestContourVec.push_back(contours.at(contours.size()-1));
//make a bounding rectangle around the largest contour then find its centroid
//this will be the object's final estimated position.
objectBoundingRectangle = boundingRect(largestContourVec.at(0));
}
//make some temp x and y variables so we don't have to type out so much
int x = objectBoundingRectangle.x;
int y = objectBoundingRectangle.y;
int width = objectBoundingRectangle.width;
int height = objectBoundingRectangle.height;
//draw a rectangle around the object
rectangle(output, Point(x,y), Point(x+width, y+height), Scalar(0, 255, 0), 2);
//write the position of the object to the screen
putText(output,"Tracking object at (" + intToString(x)+","+intToString(y)+")",Point(x,y),1,1,Scalar(255,0,0),2);
}
inline bool isFlowCorrect(Point2f u)
{
return !cvIsNaN(u.x) && !cvIsNaN(u.y) && fabs(u.x) < 1e9 && fabs(u.y) < 1e9;
}
void droneTracking(Rect img_size)
{
Point2f centre_image = Point2f(img_size.width/2, img_size.height/2);
Point2f centre_rect = Point2f(objectBoundingRectangle.x + objectBoundingRectangle.width/2, objectBoundingRectangle.y + objectBoundingRectangle.height/2);
geometry_msgs::Twist twist = geometry_msgs::Twist();
if(centre_rect.x < centre_image.x-THRESHOLD_MOV)
{
twist.angular.z = 0.2;
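
The contour step in trackingOptFlow() above keeps the last contour returned by findContours() on the assumption that it is the largest one. As an alternative, a sketch that explicitly selects the largest contour by area is shown below; the helper name and the area-based selection are choices made here for illustration, not part of the commit.

    #include <opencv2/opencv.hpp>
    #include <vector>

    // Hypothetical helper: pick a bounding box from a binary motion mask.
    cv::Rect largestMotionBox(const cv::Mat& mask) {
        std::vector<std::vector<cv::Point> > contours;
        std::vector<cv::Vec4i> hierarchy;
        cv::Mat temp = mask.clone();          // findContours may modify its input
        cv::findContours(temp, contours, hierarchy,
                         cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
        double best_area = 0.0;
        cv::Rect box;                         // stays empty if nothing is found
        for (size_t i = 0; i < contours.size(); ++i) {
            double area = cv::contourArea(contours[i]);
            if (area > best_area) {
                best_area = area;
                box = cv::boundingRect(contours[i]);
            }
        }
        return box;
    }
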
@@ -260,7 +315,7 @@ class Traite_image {
{
twist.angular.z = -0.2;
}
pub_cmd.publish(twist);
}
};
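
The middle of droneTracking() falls between the two hunks and is not shown here, so the following is only a sketch of the visible yaw behaviour: a zero Twist is published unless the centre of the tracked bounding box drifts more than THRESHOLD_MOV pixels from the image centre, in which case the drone yaws toward it. The free-function signature and the else-if condition are assumptions.

    #include <ros/ros.h>
    #include <geometry_msgs/Twist.h>
    #include <opencv2/core/core.hpp>

    // Hypothetical sketch of the yaw command derived from the tracked box.
    void droneTrackingSketch(const cv::Rect& img_size, const cv::Rect& box,
                             ros::Publisher& pub_cmd, int threshold_mov = 5) {
        cv::Point2f centre_image(img_size.width / 2.f, img_size.height / 2.f);
        cv::Point2f centre_rect(box.x + box.width / 2.f, box.y + box.height / 2.f);
        geometry_msgs::Twist twist;           // all fields zero-initialised
        if (centre_rect.x < centre_image.x - threshold_mov)
            twist.angular.z = 0.2;            // target is to the left: yaw left
        else if (centre_rect.x > centre_image.x + threshold_mov)
            twist.angular.z = -0.2;           // target is to the right: yaw right
        pub_cmd.publish(twist);
    }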