From d1687fb7cbec3c56713aecfb0bff6bb8ebd98118 Mon Sep 17 00:00:00 2001
From: samilyjcc
Date: Mon, 30 May 2016 15:41:34 +0200
Subject: [PATCH] drone angular command

---
 src/papillon.cpp | 142 ++++++++++-------------------------------------
 1 file changed, 29 insertions(+), 113 deletions(-)

diff --git a/src/papillon.cpp b/src/papillon.cpp
index 1bc07bb..34dce88 100644
--- a/src/papillon.cpp
+++ b/src/papillon.cpp
@@ -2,6 +2,7 @@
 #include
 #include
 #include
+#include <geometry_msgs/Twist.h>
 
 #include
@@ -14,9 +15,8 @@ class Traite_image {
   public:
     const static int SENSITIVITY_VALUE = 30;
     const static int BLUR_SIZE = 10;
-    const int HORIZONTAL_BORDER_CROP = 20; // In pixels. Crops the border to reduce the black borders from stabilisation being too noticeable.
-
-
+
+
     Mat prev;
     Mat last_T;
     bool first = true;
@@ -28,12 +28,14 @@ class Traite_image {
     ros::NodeHandle n;
     image_transport::ImageTransport it;
-    image_transport::Publisher pub;
+    image_transport::Publisher pub_img;
+    ros::Publisher pub_cmd;
     image_transport::Subscriber sub;
 
     Traite_image() : n("~"),it(n) {
-        pub = it.advertise("/image_out", 1);
+        pub_img = it.advertise("/image_out", 1);
+        pub_cmd = n.advertise<geometry_msgs::Twist>("/vrep/drone/cmd_vel", 1);
         sub = it.subscribe("/usb_cam/image_raw", 1, [this](const sensor_msgs::ImageConstPtr& img) -> void { this->on_image(img);},ros::VoidPtr(),image_transport::TransportHints("compressed"));
     }
 
@@ -71,8 +73,10 @@ class Traite_image {
 
         searchForMovement(prev_cropped, next_stab_cropped, output);
 
-        pub.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
+        pub_img.publish(cv_bridge::CvImage(msg->header, "rgb8", output).toImageMsg());
         // bridge_input is handled by a smart-pointer. No explicit delete needed.
+
+        droneTracking(Rect(Point(0,0), output.size()));
 
         //ROS_INFO("pub");
@@ -182,113 +186,25 @@ class Traite_image {
         return !cvIsNaN(u.x) && !cvIsNaN(u.y) && fabs(u.x) < 1e9 && fabs(u.y) < 1e9;
     }
 
-    Vec3b computeColor(float fx, float fy)
-    {
-        static bool first = true;
-
-        // relative lengths of color transitions:
-        // these are chosen based on perceptual similarity
-        // (e.g. one can distinguish more shades between red and yellow
-        //       than between yellow and green)
-        const int RY = 15;
-        const int YG = 6;
-        const int GC = 4;
-        const int CB = 11;
-        const int BM = 13;
-        const int MR = 6;
-        const int NCOLS = RY + YG + GC + CB + BM + MR;
-        static Vec3i colorWheel[NCOLS];
-
-        if (first)
-        {
-            int k = 0;
-
-            for (int i = 0; i < RY; ++i, ++k)
-                colorWheel[k] = Vec3i(255, 255 * i / RY, 0);
-
-            for (int i = 0; i < YG; ++i, ++k)
-                colorWheel[k] = Vec3i(255 - 255 * i / YG, 255, 0);
-
-            for (int i = 0; i < GC; ++i, ++k)
-                colorWheel[k] = Vec3i(0, 255, 255 * i / GC);
-
-            for (int i = 0; i < CB; ++i, ++k)
-                colorWheel[k] = Vec3i(0, 255 - 255 * i / CB, 255);
-
-            for (int i = 0; i < BM; ++i, ++k)
-                colorWheel[k] = Vec3i(255 * i / BM, 0, 255);
-
-            for (int i = 0; i < MR; ++i, ++k)
-                colorWheel[k] = Vec3i(255, 0, 255 - 255 * i / MR);
-
-            first = false;
-        }
-
-        const float rad = sqrt(fx * fx + fy * fy);
-        const float a = atan2(-fy, -fx) / (float)CV_PI;
-
-        const float fk = (a + 1.0f) / 2.0f * (NCOLS - 1);
-        const int k0 = static_cast<int>(fk);
-        const int k1 = (k0 + 1) % NCOLS;
-        const float f = fk - k0;
-
-        Vec3b pix;
-
-        for (int b = 0; b < 3; b++)
-        {
-            const float col0 = colorWheel[k0][b] / 255.f;
-            const float col1 = colorWheel[k1][b] / 255.f;
-
-            float col = (1 - f) * col0 + f * col1;
-
-            if (rad <= 1)
-                col = 1 - rad * (1 - col); // increase saturation with radius
-            else
-                col *= .75; // out of range
-
-            pix[2 - b] = static_cast<uchar>(255.f * col);
-        }
-
-        return pix;
-    }
-
-    void drawOpticalFlow(const Mat_<Point2f>& flow, Mat& dst, float maxmotion = -1)
-    {
-        dst.create(flow.size(), CV_8UC3);
-        dst.setTo(Scalar::all(0));
-
-        // determine motion range:
-        float maxrad = maxmotion;
-
-        if (maxmotion <= 0)
-        {
-            maxrad = 1;
-            for (int y = 0; y < flow.rows; ++y)
-            {
-                for (int x = 0; x < flow.cols; ++x)
-                {
-                    Point2f u = flow(y, x);
-
-                    if (!isFlowCorrect(u))
-                        continue;
-
-                    maxrad = max(maxrad, sqrt(u.x * u.x + u.y * u.y));
-                }
-            }
-        }
-
-        for (int y = 0; y < flow.rows; ++y)
-        {
-            for (int x = 0; x < flow.cols; ++x)
-            {
-                Point2f u = flow(y, x);
-
-                if (isFlowCorrect(u))
-                    dst.at<Vec3b>(y, x) = computeColor(u.x / maxrad, u.y / maxrad);
-            }
-        }
-    }
-
+
+    void droneTracking(Rect img_size)
+    {
+        Point2f centre_image = Point2f(img_size.width/2, img_size.height/2);
+        Point2f centre_rect = Point2f(objectBoundingRectangle.x + objectBoundingRectangle.width/2, objectBoundingRectangle.y + objectBoundingRectangle.height/2);
+
+        geometry_msgs::Twist twist = geometry_msgs::Twist();
+
+        if(centre_rect.x < centre_image.x)
+        {
+            twist.angular.z = 0.2;
+        }
+        else if(centre_rect.x > centre_image.x)
+        {
+            twist.angular.z = -0.2;
+        }
+
+        pub_cmd.publish(twist);
+    }
 };
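
Note for reviewers: the new droneTracking() amounts to a bang-bang yaw command. It publishes a fixed angular.z of +0.2 when the tracked bounding box sits left of the image centre, -0.2 when it sits right of it, and 0 when it is exactly centred. The sketch below isolates that decision in plain C++ so it can be sanity-checked without ROS or V-REP; the helper name yawCommand and the example values are illustrative only and are not part of this patch.

    // Standalone sketch of the yaw decision made by droneTracking().
    // Hypothetical helper, not part of the patch; builds with plain C++.
    #include <iostream>

    // Returns the angular.z value for a target centred at target_x in an
    // image image_width pixels wide: +0.2 when the target is left of the
    // image centre, -0.2 when it is right of it, 0.0 when exactly centred.
    double yawCommand(double target_x, double image_width)
    {
        const double centre_x = image_width / 2.0;
        if (target_x < centre_x) return  0.2;
        if (target_x > centre_x) return -0.2;
        return 0.0;
    }

    int main()
    {
        std::cout << yawCommand(100.0, 640.0) << "\n";  // target on the left  -> 0.2
        std::cout << yawCommand(500.0, 640.0) << "\n";  // target on the right -> -0.2
    }

A proportional command (a gain times the pixel offset from centre) would give smoother tracking than the fixed ±0.2 step, but that is outside the scope of this patch.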