drone angular command

samilyjcc 2016-05-30 15:41:34 +02:00
parent c3a9f8a8ea
commit d1687fb7cb


@@ -2,6 +2,7 @@
#include <image_transport/image_transport.h>
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
+#include <geometry_msgs/Twist.h>
#include <opencv/cv.h>
@@ -14,7 +15,6 @@ class Traite_image {
public:
const static int SENSITIVITY_VALUE = 30;
const static int BLUR_SIZE = 10;
const int HORIZONTAL_BORDER_CROP = 20; // In pixels. Crops the border to reduce the black borders from stabilisation being too noticeable.
Mat prev;
@@ -28,12 +28,14 @@ class Traite_image {
   ros::NodeHandle n;
   image_transport::ImageTransport it;
-  image_transport::Publisher pub;
+  image_transport::Publisher pub_img;
+  ros::Publisher pub_cmd;
   image_transport::Subscriber sub;
   Traite_image() : n("~"),it(n) {
-    pub = it.advertise("/image_out", 1);
+    pub_img = it.advertise("/image_out", 1);
+    pub_cmd = n.advertise<geometry_msgs::Twist>("/vrep/drone/cmd_vel", 1);
     sub = it.subscribe("/usb_cam/image_raw", 1, [this](const sensor_msgs::ImageConstPtr& img) -> void { this->on_image(img);},ros::VoidPtr(),image_transport::TransportHints("compressed"));
   }
@@ -71,9 +73,11 @@ class Traite_image {
     searchForMovement(prev_cropped, next_stab_cropped, output);
-    pub.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
+    pub_img.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
     // bridge_input is handled by a smart-pointer. No explicit delete needed.
+    droneTracking(Rect(Point(0,0), output.size()));
     //ROS_INFO("pub");
     prev = next.clone();
@@ -182,113 +186,25 @@ class Traite_image {
     return !cvIsNaN(u.x) && !cvIsNaN(u.y) && fabs(u.x) < 1e9 && fabs(u.y) < 1e9;
   }
-  Vec3b computeColor(float fx, float fy)
-  {
-    static bool first = true;
-    // relative lengths of color transitions:
-    // these are chosen based on perceptual similarity
-    // (e.g. one can distinguish more shades between red and yellow
-    //  than between yellow and green)
-    const int RY = 15;
-    const int YG = 6;
-    const int GC = 4;
-    const int CB = 11;
-    const int BM = 13;
-    const int MR = 6;
-    const int NCOLS = RY + YG + GC + CB + BM + MR;
-    static Vec3i colorWheel[NCOLS];
-    if (first)
-    {
-      int k = 0;
-      for (int i = 0; i < RY; ++i, ++k)
-        colorWheel[k] = Vec3i(255, 255 * i / RY, 0);
-      for (int i = 0; i < YG; ++i, ++k)
-        colorWheel[k] = Vec3i(255 - 255 * i / YG, 255, 0);
-      for (int i = 0; i < GC; ++i, ++k)
-        colorWheel[k] = Vec3i(0, 255, 255 * i / GC);
-      for (int i = 0; i < CB; ++i, ++k)
-        colorWheel[k] = Vec3i(0, 255 - 255 * i / CB, 255);
-      for (int i = 0; i < BM; ++i, ++k)
-        colorWheel[k] = Vec3i(255 * i / BM, 0, 255);
-      for (int i = 0; i < MR; ++i, ++k)
-        colorWheel[k] = Vec3i(255, 0, 255 - 255 * i / MR);
-      first = false;
-    }
-    const float rad = sqrt(fx * fx + fy * fy);
-    const float a = atan2(-fy, -fx) / (float)CV_PI;
-    const float fk = (a + 1.0f) / 2.0f * (NCOLS - 1);
-    const int k0 = static_cast<int>(fk);
-    const int k1 = (k0 + 1) % NCOLS;
-    const float f = fk - k0;
-    Vec3b pix;
-    for (int b = 0; b < 3; b++)
-    {
-      const float col0 = colorWheel[k0][b] / 255.f;
-      const float col1 = colorWheel[k1][b] / 255.f;
-      float col = (1 - f) * col0 + f * col1;
-      if (rad <= 1)
-        col = 1 - rad * (1 - col); // increase saturation with radius
-      else
-        col *= .75; // out of range
-      pix[2 - b] = static_cast<uchar>(255.f * col);
-    }
-    return pix;
-  }
-  void drawOpticalFlow(const Mat_<Point2f>& flow, Mat& dst, float maxmotion = -1)
-  {
-    dst.create(flow.size(), CV_8UC3);
-    dst.setTo(Scalar::all(0));
-    // determine motion range:
-    float maxrad = maxmotion;
-    if (maxmotion <= 0)
-    {
-      maxrad = 1;
-      for (int y = 0; y < flow.rows; ++y)
-      {
-        for (int x = 0; x < flow.cols; ++x)
-        {
-          Point2f u = flow(y, x);
-          if (!isFlowCorrect(u))
-            continue;
-          maxrad = max(maxrad, sqrt(u.x * u.x + u.y * u.y));
-        }
-      }
-    }
-    for (int y = 0; y < flow.rows; ++y)
-    {
-      for (int x = 0; x < flow.cols; ++x)
-      {
-        Point2f u = flow(y, x);
-        if (isFlowCorrect(u))
-          dst.at<Vec3b>(y, x) = computeColor(u.x / maxrad, u.y / maxrad);
-      }
-    }
-  }
+  void droneTracking(Rect img_size)
+  {
+    Point2f centre_image = Point2f(img_size.width/2, img_size.height/2);
+    Point2f centre_rect = Point2f(objectBoundingRectangle.x + objectBoundingRectangle.width/2, objectBoundingRectangle.y + objectBoundingRectangle.height/2);
+    geometry_msgs::Twist twist = geometry_msgs::Twist();
+    if(centre_rect.x < centre_image.x)
+    {
+      twist.angular.z = 0.2;
+    }
+    else if(centre_rect.x > centre_image.x)
+    {
+      twist.angular.z = -0.2;
+    }
+    pub_cmd.publish(twist);
+  }
};
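
The new droneTracking() is a bang-bang yaw controller: it compares the horizontal centre of the tracked bounding rectangle with the image centre and publishes a fixed angular.z of +0.2 or -0.2 on /vrep/drone/cmd_vel. Below is a minimal, self-contained sketch of that same command logic as a standalone ROS node, so it can be exercised without the image pipeline. Only the topic name and the +/-0.2 gain come from the diff above; the node name, the chooseYawCommand() helper, and the fixed test rectangles are hypothetical.

// Minimal sketch of the commit's bang-bang yaw command (assumptions flagged):
// the topic "/vrep/drone/cmd_vel" and the +/-0.2 angular.z gain come from the
// diff; the node name, chooseYawCommand() and the fixed rectangles do not.
#include <ros/ros.h>
#include <geometry_msgs/Twist.h>
#include <opencv2/core/core.hpp>

// Build a Twist whose angular.z turns the drone toward the target's centre:
// positive z if the target sits left of the image centre, negative if right.
geometry_msgs::Twist chooseYawCommand(const cv::Rect& img_size, const cv::Rect& target)
{
  cv::Point2f centre_image(img_size.width / 2.0f, img_size.height / 2.0f);
  cv::Point2f centre_rect(target.x + target.width / 2.0f,
                          target.y + target.height / 2.0f);

  geometry_msgs::Twist twist;               // all fields default to zero
  if (centre_rect.x < centre_image.x)
    twist.angular.z = 0.2;                  // target on the left: yaw one way
  else if (centre_rect.x > centre_image.x)
    twist.angular.z = -0.2;                 // target on the right: yaw the other
  return twist;                             // already centred: zero command
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "yaw_command_sketch");
  ros::NodeHandle n("~");
  ros::Publisher pub_cmd = n.advertise<geometry_msgs::Twist>("/vrep/drone/cmd_vel", 1);

  // Hypothetical fixed inputs, standing in for the tracker's bounding box.
  cv::Rect img(0, 0, 640, 480);             // image centre at x = 320
  cv::Rect target(400, 200, 50, 50);        // rectangle centre at x = 425

  ros::Rate rate(10);                       // publish at 10 Hz
  while (ros::ok())
  {
    pub_cmd.publish(chooseYawCommand(img, target));
    rate.sleep();
  }
  return 0;
}

With the commit applied, the commands from the real pipeline can be inspected with rostopic echo /vrep/drone/cmd_vel. Since the gain is a fixed +/-0.2 regardless of how far off-centre the target is, the drone will tend to oscillate around the centre line; a proportional term on the pixel offset would be the usual next refinement.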