wedgebug.c
1 /*
2  * Copyright (C) Ralph Rudi schmidt <ralph.r.schmidt@outlook.com>
3 
4  *
5  * This file is part of paparazzi
6  *
7  * paparazzi is free software; you can redistribute it and/or modify
8  * it under the terms of the GNU General Public License as published by
9  * the Free Software Foundation; either version 2, or (at your option)
10  * any later version.
11  *
12  * paparazzi is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15  * GNU General Public License for more details.
16  *
17  * You should have received a copy of the GNU General Public License
18  * along with paparazzi; see the file COPYING. If not, see
19  * <http://www.gnu.org/licenses/>.
20 
21  */
33 /*
34  *Note. Most variables are initialized in the initialization function. Variables
35  *marked with an "!" were taken into account when calculating the total memory consumption
36  */
37 
38 
39 // New section: Importing headers ----------------------------------------------------------------------------------------------------------------
40 
41 #include <stdio.h>
44 #include "modules/computer_vision/cv.h" // Required for the "cv_add_to_device" function
45 #include "modules/computer_vision/lib/vision/image.h"// For image-related structures
46 #include "pthread.h"
47 #include <stdint.h> // Needed for common types like uint8_t
48 #include "state.h"
49 #include "math/pprz_algebra_float.h"// Needed for vector operations, Euclidean distance, FloatVect3 and FloatRMat
50 #include "math/pprz_algebra.h"// Needed for vector operations (simple subtraction)
51 #include "math/pprz_geodetic_float.h"// Needed for NedCoor_f
52 #include "generated/flight_plan.h" // Needed for WP (waypoint) functions and WP IDs (as defined in "ralphthesis2020_stereo_cyberzoo.xml")
53 #include "firmwares/rotorcraft/autopilot_guided.h" // Needed for guidance functions such as "autopilot_guided_goto_ned" and "guidance_h_set_guided_heading"
54 #include <math.h> // needed for basic math functions
55 #include "autopilot.h" // Needed to set states (GUIDED vs NAV)
56 #include <time.h> // Needed to measure time
57 
58 
59 // New section --------------------------------------------------------------------------------------------------------------------------------------
60 // Defines
61 #ifndef WEDGEBUG_CAMERA_RIGHT_FPS
62 #define WEDGEBUG_CAMERA_RIGHT_FPS 0 //< Default FPS (zero means run at camera fps)
63 #endif
64 #ifndef WEDGEBUG_CAMERA_LEFT_FPS
65 #define WEDGEBUG_CAMERA_LEFT_FPS 0 //< Default FPS (zero means run at camera fps)
66 #endif
67 
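// Hedged usage sketch (illustrative, not part of the original module): the two FPS defines above
// are normally handed to cv_add_to_device() when the left and right camera callbacks are
// subscribed in the module init function. The device macros WEDGEBUG_CAMERA_LEFT and
// WEDGEBUG_CAMERA_RIGHT below are assumptions, and the exact cv_add_to_device() signature should
// be checked against modules/computer_vision/cv.h for this Paparazzi version.
/*
void wedgebug_camera_subscription_example(void)
{
  // An FPS of 0 means "run at the camera's own frame rate"
  cv_add_to_device(&WEDGEBUG_CAMERA_LEFT, copy_left_img_func, WEDGEBUG_CAMERA_LEFT_FPS, 0);
  cv_add_to_device(&WEDGEBUG_CAMERA_RIGHT, copy_right_img_func, WEDGEBUG_CAMERA_RIGHT_FPS, 1);
}
*/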
68 
69 
70 // New section ----------------------------------------------------------------------------------------------------------------
71 // Define global variables
72 
73 // Declaring images
74 struct image_t img_left;
77 struct image_t img_left_int8_cropped; // Image obtained from left camera, converted into 8bit gray image
81 struct image_t img_middle_int8_cropped; // Image obtained after processing (morphological operations) of previous image
83 
86 
87 // New section: Global variables - Declarations ----------------------------------------------------------------------------------------------------------------
88 
89 
90 
91 // Declaring crop_t structure for information about the cropped image (after BM)
93 
94 // Declaring dimensions of images and kernels used
96 struct img_size_t
98 struct img_size_t
100 
101 // Declaring empty kernel for obtaining median
104 
105 // Declaring structuring element sizes
114 
115 
116 // Declaring vectors to hold global 3d points
118 struct FloatVect3 VSTARTwenu;
120 struct FloatVect3 VSTARTwned;
139 
140 
141 // Declaring thresholds
148 
151 
152 // Declaring confidence parameters
157 
158 //int16_t edge_found_micro_confidence; //! This is the confidence that an edge was found - inside of the find_best_edge_coordinates function
168 
169 // Declaring boolean flags
170 uint8_t is_start_reached_flag; // Set to 1 if start position is reached, 0 otherwise.
180 uint8_t save_images_flag; // For report: Flag to indicate if images should be saved
181 
182 // Declaring principal points
183 struct point_t c_img;
185 
186 // Declaring edge search area
188 
189 
190 // Declaring rotation matrices and transition vectors for frame to frame transformations
191 // 1) Rotation matrix and transition vector to transform from world ENU frame to world NED frame
194 // 2) Rotation matrix and transition vector to transform from world NED frame to robot frame
197 // 3) Rotation matrix and transition vector to transform from robot frame to camera frame
198 struct FloatRMat Rcr;
199 struct FloatVect3 VCr;
200 
201 // Declaration and initialization of camera parameters
202 float b = WEDGEBUG_CAMERA_BASELINE / 1000.00;
204 uint16_t f = WEDGEBUG_CAMERA_FOCAL_LENGTH;
205 
206 
207 
208 // Define new structures + enums
216 };
217 enum navigation_state current_state; // Default state is 0, i.e. nothing
218 
219 
224 };
225 
226 enum control_mode_state current_mode; // Default state is 0, i.e. nothing
227 
228 
229 
230 // This is a structure to save angles that are needed for the "Edge Scan" state
231 struct ES_angles {
232  float heading_initial; // This is the initial heading of the robot when the "Edge Scan" state is entered
233  float heading_max_left; // This is the maximum left turn angle (determines how far to the left the robot may look when searching for edges)
234  float heading_max_right; // This is the maximum right turn angle (determines how far to the right the robot may look when searching for edges)
235  uint8_t initiated; // This is a flag that can be set to check whether the structure is allowed to be overwritten (0=allowed, 1=forbidden)
236  uint8_t is_left_reached_flag; // This is a flag to check whether the left was scanned for an edge already
237  uint8_t is_right_reached_flag; // This is a flag to check whether the right was scanned for an edge already
238 };
240 
241 
242 // Declaring variables for time measurement
243 double time_state[NUMBER_OF_STATES]; // Double array for saving total time (clock cycles) spent in the states (position 0 = state 0 and so on)
244 double counter_state[NUMBER_OF_STATES]; // A counter to measure the total cycles that each state in the FSM (within the periodic function) went through
245 double counter_cycles; // A counter to measure the total cycles that the periodic function went through
246 clock_t clock_total_time; // Clock to measure total time (clock cycles) it took for the robot to fly from start to goal
247 clock_t clock_total_time_current; // Clock to hold time measured at start of the current cycle of the periodic function
248 clock_t clock_total_time_previous; // Clock to hold time measured at start of the previous cycle of the periodic function
250 clock_t clock_FSM;
251 
252 // Other declarations
255 int N_disparities = 64;
257 int min_disparity = 0;
258 float heading;
259 float max_edge_search_angle = M_PI / 2;
263 float distance_traveled; // Variable to hold the distance traveled of the robot (since start and up to the goal)
264 uint8_t number_of_states; // Variable to save the total number of states used in the finite state machine
267 
268 
269 // For debugging purposes. If set to 1, state changes are allowed in simulation; if set to 0, state changes are blocked. Useful if you want to execute only one state repeatedly
270 
271 uint8_t allow_state_change_MOVE_TO_GOAL; // From within state "MOVE_TO_GOAL"
272 uint8_t allow_state_change_POSITION_GOAL; // From within state "POSITION_GOAL"
273 uint8_t allow_state_change_WEDGEBUG_START; // From within state "WEDGEBUG_START"
274 uint8_t allow_state_change_MOVE_TO_EDGE; // From within state "MOVE_TO_EDGE"
275 uint8_t allow_state_change_POSITION_EDGE; // From within state "POSITION_EDGE"
276 uint8_t allow_state_change_EDGE_SCAN; // From within state "EDGE_SCAN"
277 
278 
281 
282 
283 // New section: Functions - Declaration ----------------------------------------------------------------------------------------------------------------
284 
285 // Supporting
286 const char *get_img_type(enum image_type img_type); // Function 1: Displays image type
287 void show_image_data(struct image_t *img); // Function 2: Displays image data
288 void show_image_entry(struct image_t *img, int entry_position,
289  const char *img_name); // Function 3: Displays pixel value of image
290 void show_rotation_matrix(struct FloatRMat *R);
291 
292 //Core
293 static struct image_t *copy_left_img_func(struct image_t
294  *img, uint8_t camera_id); // Function 1: Copies left image into a buffer (buf_left)
295 static struct image_t *copy_right_img_func(struct image_t
296  *img, uint8_t camera_id); // Function 2: Copies right image into a buffer (buf_right)
297 void UYVYs_interlacing_V(struct image_t *YY, struct image_t *left,
298  struct image_t *right); // Function 3: Copies gray pixel values of left and right UYVY images into merged YY image
299 void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right);
300 
301 uint32_t maximum_intensity(struct image_t *img);
302 void thresholding_img(struct image_t *img, uint8_t threshold);
303 void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *img_cropped_info);
304 float disp_to_depth(const uint8_t d, const float b, const uint16_t f);
305 uint8_t depth_to_disp(const float depth, const float b, const uint16_t f);
306 void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t d,
307  const float b, const uint16_t f);
308 int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img);
309 int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *img_dims);
310 int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width);
311 
312 void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa);
313 void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa);
314 void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
315  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose);
316 void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
317  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose);
318 
319 float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2);
321 float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned);
322 uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median);
323 
325  struct image_t *img_edges, struct image_t *img_disparity, struct crop_t *edge_search_area, uint8_t threshold,
326  int16_t max_confidence);
327 uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold);
328 
329 float float_norm_two_angles(float target_angle, float current_angle);
330 uint8_t is_heading_reached(float target_angle, float current_angle, float threshold);
331 uint8_t are_setpoint_and_angle_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION,
332  float threshold_setpoint, float target_angle, float current_angle, float threshold_angle);
333 void disp_to_depth_img(struct image_t *img8bit_input, struct image_t *img16bit_output);
335 
337 float dispfixed_to_disp(const int16_t d);
338 float disp_to_depth_16bit(const int16_t d, const float b, const uint16_t f);
339 void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x,
340  const float depth /*depth in m*/, const uint16_t f);
341 void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t d,
342  const float b, const uint16_t f);
343 uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median);
345  struct image_t *img_edges, struct image_t *img_depth, struct crop_t *edge_search_area, uint16_t threshold,
346  int16_t max_confidence);
348 
349 
350 
351 
352 // New section: Functions - Definition ----------------------------------------------------------------------------------------------------------------
353 
354 // Supporting:
355 
356 // Function 1
357 const char *get_img_type(enum image_type img_type)
358 {
359  switch (img_type) {
360  case IMAGE_YUV422: return "IMAGE_YUV422";
361  case IMAGE_GRAYSCALE: return "IMAGE_GRAYSCALE";
362  case IMAGE_JPEG: return "IMAGE_JPEG";
363  case IMAGE_GRADIENT: return "IMAGE_GRADIENT";
364  default: return "Image type not found";
365  }
366 }
367 
368 
369 // Function 2
370 void show_image_data(struct image_t *img)
371 {
372  printf("Image-Type: %s\n", get_img_type(img->type));
373  printf("Image-Width: %d\n", img->w);
374  printf("Image-Height: %d\n", img->h);
375  printf("Image-Buf_Size: %d\n", img->buf_size);
376  printf("Image-Buf_Memory_Occupied: %lu\n", sizeof(img->buf));
377 }
378 
379 
380 // Function 3
381 void show_image_entry(struct image_t *img, int entry_position, const char *img_name)
382 {
383  printf("Pixel %d value - %s: %d\n", entry_position, img_name, ((uint8_t *)img->buf)[entry_position]);
384 }
385 
386 // Function 4
388 {
389  printf("[[%f, %f, %f]\n", R->m[0], R->m[1], R->m[2]);
390  printf(" [%f, %f, %f]\n", R->m[3], R->m[4], R->m[5]);
391  printf(" [%f, %f, %f]]\n", R->m[6], R->m[7], R->m[8]);
392 
393 }
394 
395 
396 
397 // External:
398 
399 // Function 1 - Returns the upper left coordinates of a square (x and y coordinates) and the offset in terms of width and height,
400 // given the number of disparity levels and the block size used by the block matching algorithm. This is needed to crop an image
401 void post_disparity_crop_rect(struct crop_t *_img_cropped_info, struct img_size_t *_original_img_dims, const int disp_n,
402  const int block_size)
403 {
404 
405  uint16_t block_size_black = block_size / 2;
406  uint16_t left_black = disp_n + block_size_black;
407 
408 
409  _img_cropped_info->y = block_size_black;
410  _img_cropped_info->h = _original_img_dims->h - block_size_black;
411  _img_cropped_info->h = _img_cropped_info->h - _img_cropped_info->y;
412 
413  _img_cropped_info->x = left_black - 1;
414  _img_cropped_info->w = _original_img_dims->w - block_size_black;
415  _img_cropped_info->w = _img_cropped_info->w - _img_cropped_info->x;
416 }
417 
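// Worked example (illustrative, with assumed numbers): for a hypothetical 240 x 240 input image,
// disp_n = 64 disparity levels and block_size = 25, the function above gives
// block_size_black = 12 and left_black = 76, so the resulting crop is
// y = 12, h = 240 - 12 - 12 = 216, x = 75, w = 240 - 12 - 75 = 153.
/*
struct img_size_t dims_example = { .w = 240, .h = 240 };  // assumed original image dimensions
struct crop_t crop_example;
post_disparity_crop_rect(&crop_example, &dims_example, 64, 25);
// crop_example now holds {x = 75, y = 12, w = 153, h = 216}
*/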
418 
419 // Function 2 - Sets finite state machine state (useful for the flight path blocks)
420 void set_state(uint8_t _state, uint8_t change_allowed)
421 {
422  if (change_allowed == 1) {current_state = _state;}
423 }
424 
425 
426 // Function 3 - Creates an empty kernel (the buffer size depends on the image type)
427 void kernel_create(struct kernel_C1 *kernel, uint16_t width, uint16_t height, enum image_type type)
428 {
429 
430  // Set the variables
431  kernel->type = type;
432  kernel->h = height;
433  kernel->w = width;
434 
435 
436  // Depending on the type the size differs
437  if (type == IMAGE_YUV422) {
438  kernel->buf_size = sizeof(uint8_t) * 2 * width * height;
439  } else if (type == IMAGE_JPEG) {
440  kernel->buf_size = sizeof(uint8_t) * 2 * width * height; // At maximum quality this is enough
441  } else if (type == IMAGE_GRADIENT) {
442  kernel->buf_size = sizeof(int16_t) * width * height;
443  } else if (type == IMAGE_INT16) {
444  kernel->buf_size = sizeof(int16_t) * width * height;
445  } else {
446  kernel->buf_size = sizeof(uint8_t) * width * height;
447  }
448 
449  kernel->buf_weights = malloc(kernel->buf_size);
450  kernel->buf_values = malloc(kernel->buf_size);
451 }
452 
453 
454 // Function 4 - Frees the kernel buffers
455 void kernel_free(struct kernel_C1 *kernel)
456 {
457  if (kernel->buf_weights != NULL) {
458  free(kernel->buf_weights);
459  kernel->buf_weights = NULL;
460  }
461  if (kernel->buf_values != NULL) {
462  free(kernel->buf_values);
463  kernel->buf_values = NULL;
464  }
465 }
466 
467 
468 // Function 5 - Calculates median of an 8bit array
470 {
471  // Allocate an array of the same size and sort it.
472  uint32_t i, j;
473 
474  uint8_t dpSorted[n];
475  for (i = 0; i < n; ++i) {
476  dpSorted[i] = a[i];
477  }
478  for (i = n - 1; i > 0; --i) {
479  for (j = 0; j < i; ++j) {
480  if (dpSorted[j] > dpSorted[j + 1]) {
481  uint8_t dTemp = dpSorted[j];
482  dpSorted[j] = dpSorted[j + 1];
483  dpSorted[j + 1] = dTemp;
484  }
485  }
486  }
487 
488  // Middle or average of middle values in the sorted array.
489  uint8_t dMedian = 0;
490  if ((n % 2) == 0) {
491  dMedian = (dpSorted[n / 2] + dpSorted[(n / 2) - 1]) / 2.0;
492  } else {
493  dMedian = dpSorted[n / 2];
494  }
495  return dMedian;
496 }
497 
498 
499 // Function 6 - Calculates median of a 16bit image
501 {
502  // Allocate an array of the same size and sort it.
503  uint32_t i, j;
504 
505  uint16_t dpSorted[n];
506  for (i = 0; i < n; ++i) {
507  dpSorted[i] = a[i];
508  }
509  for (i = n - 1; i > 0; --i) {
510  for (j = 0; j < i; ++j) {
511  if (dpSorted[j] > dpSorted[j + 1]) {
512  uint16_t dTemp = dpSorted[j];
513  dpSorted[j] = dpSorted[j + 1];
514  dpSorted[j + 1] = dTemp;
515  }
516  }
517  }
518 
519  // Middle or average of middle values in the sorted array.
520  uint16_t dMedian = 0;
521  if ((n % 2) == 0) {
522  dMedian = (dpSorted[n / 2] + dpSorted[(n / 2) - 1]) / 2.0;
523  } else {
524  dMedian = dpSorted[n / 2];
525  }
526  return dMedian;
527 }
528 
529 
530 
531 
532 // Core:
533 
534 // Function 1
535 static struct image_t *copy_left_img_func(struct image_t *img, uint8_t camera_id __attribute__((unused)))
536 {
537  image_copy(img, &img_left);
538  //show_image_data(img);
539  //show_image_entry(&img_left, 10, "img_left");
540  return img;
541 }
542 
543 
544 // Function 2
545 static struct image_t *copy_right_img_func(struct image_t *img, uint8_t camera_id __attribute__((unused)))
546 {
547  image_copy(img, &img_right);
548  //show_image_data(img);
549  //show_image_entry(&img_right, 10, "img_right");
550  return img;
551 }
552 
553 
554 // Function 3
555 void UYVYs_interlacing_V(struct image_t *merged, struct image_t *left, struct image_t *right)
556 {
557  // Error messages
558  if (left->w != right->w || left->h != right->h) {
559  printf("The dimensions of the left and right image do not match!");
560  return;
561  }
562  if ((merged->w * merged->h) != (2 * right->w) * right->h) {
563  printf("The dimensions of the empty image template for merger are not sufficient to merge gray left and right pixel values.");
564  return;
565  }
566 
567  uint8_t *UYVY_left = left->buf;
568  uint8_t *UYVY_right = right->buf;
569  uint8_t *YY = merged->buf;
570  uint32_t loop_length = left->w * right->h;
571 
572  // Incrementing pointers to get to first gray value of the UYVY images
573  UYVY_left++;
574  UYVY_right++;
575 
576 
577  for (uint32_t i = 0; i < loop_length; i++) {
578  *YY = *UYVY_left; // Copies left gray pixel (Y) to the merged image YY, in first position
579  YY++; // Moving to second position of merged image YY
580  *YY = *UYVY_right; // Copies right gray pixel (Y) to the merged image YY, in second position
581  YY++; // Moving to the next position, in preparation to copy left gray pixel (Y) to the merged image YY
582  UYVY_left += 2; // Moving pointer to next gray pixel (Y), in the left image
583  UYVY_right += 2; // Moving pointer to next gray pixel (Y), in the right image
584  /*
585  * Note: Since the loop length is based on the UYVY image, the size of the data should be (2 x w) x h.
586  * This is also the same size as for the new merged YY image.
587  * Thus incrementing the pointer for UYVY_left and right, in each iteration, does not lead to problems (same for YY image)
588  */
589  }
590 }
591 
592 
593 // Function 4
594 void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right)
595 {
596  // Error messages
597  if (left->w != right->w || left->h != right->h) {
598  printf("The dimensions of the left and right image do not match!");
599  return;
600  }
601  if ((merged->w * merged->h) != (2 * right->w) * right->h) {
602  printf("The dimensions of the empty image template for merger are not sufficient to merge gray left and right pixel values.");
603  return;
604  }
605 
606  uint8_t *UYVY_left = left->buf;
607  uint8_t *UYVY_right = right->buf;
608  uint8_t *YY1 = merged->buf; // points to first row for pixels of left image
609  uint8_t *YY2 = YY1 + merged->w; // points to second row for pixels of right image
610 
611  // Incrementing pointers to get to first gray value of the UYVY images
612  UYVY_left++;
613  UYVY_right++;
614 
615  for (uint32_t i = 0; i < left->h; i++) {
616  //printf("Loop 1: %d\n", i);
617  for (uint32_t j = 0; j < left->w; j++) {
618  //printf("Loop 1: %d\n", j);
619  *YY1 = *UYVY_left;
620  *YY2 = *UYVY_right;
621  YY1++;
622  YY2++;
623  UYVY_left += 2;
624  UYVY_right += 2;
625  }
626  YY1 += merged->w; // Jumping pointer to second next row (i.e. over row with pixels from right image)
627  YY2 += merged->w; // Jumping pointer to second next row (i.e. over row with pixels from left image)
628  }
629 }
630 
631 
632 // Function 5 - Returns the maximum intensity value in an 8bit (grayscale) or 16bit image
634 {
635  if (img->type == IMAGE_GRAYSCALE) {
636  uint32_t max = 0;
637  for (uint32_t i = 0; i < img->buf_size; i++) {
638  uint8_t *intensity = &((uint8_t *)img->buf)[i];
639 
640  if (*intensity > max) {
641  max = *intensity;
642  }
643  }
644  return max;
645  } else if (img->type == IMAGE_INT16) {
646  uint32_t max = 0;
647  for (uint32_t i = 0; i < (img->w * img->h); i++) {
648  uint16_t *intensity = &((uint16_t *)img->buf)[i];
649 
650  if (*intensity > max) {
651  max = *intensity;
652  printf("%d\n", *intensity);
653  printf("location = %d\n", i);
654  }
655  }
656  return max;
657  } else {
658  printf("ERROR: function does not support image type %d. Breaking out of function.", img->type);
659  return 1;
660  }
661 
662 
663 }
664 
665 
666 // Function 6 - Thresholds a given 8bit image, setting all values >= threshold to 127 and all other values to 0
667 void thresholding_img(struct image_t *img, uint8_t threshold)
668 {
669  for (uint32_t i = 0; i < img->buf_size; i++) {
670  uint8_t *intensity = &((uint8_t *)img->buf)[i];
671 
672  if (*intensity >= threshold) {
673  *intensity = 127;
674  } else {
675  *intensity = 0;
676  }
677  }
678 }
679 
680 
681 // Function 7 - Calculates principal point coordinates for a cropped image, based on the x
682 // and y coordinates of the cropped area (upper left-hand side: crop_y and crop_x).
683 void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *_img_cropped_info)
684 {
685  c_output->y = c_old_input->y - _img_cropped_info->y;
686  c_output->x = c_old_input->x - _img_cropped_info->x;
687 }
688 
689 
690 // Function 8a - Converts disparity to depth using focal length (in pixels) and baseline distance (in meters)
691 float disp_to_depth(const uint8_t _d, const float _b, const uint16_t _f)
692 {
693  return _b * _f / _d;
694 }
695 
696 
697 // Function 8c - Converts 16bit fixed-point disparity (pixels * 16) to float disparity (pixels)
698 float dispfixed_to_disp(const int16_t _d)
699 {
700  return (_d / 16.00);
701 }
702 
703 
704 // Function 8d - Converts 16bit fixed-point disparity (pixels * 16) to depth (meters) using focal length (in pixels) and baseline distance (in meters)
705 float disp_to_depth_16bit(const int16_t _d, const float _b, const uint16_t _f)
706 {
707  return _b * _f / dispfixed_to_disp(_d);
708 }
709 
710 
711 // Function 8b - Converts depth to disparity using focal length (in pixels) and baseline distance (in meters)
712 uint8_t depth_to_disp(const float _depth, const float _b, const uint16_t _f)
713 {
714  return round(_b * _f / _depth);
715 }
716 
717 
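// Worked example (illustrative, with assumed camera parameters b = 0.06 m and f = 300 px):
// a disparity of 18 px gives depth = b * f / d = 0.06 * 300 / 18 = 1.0 m, and
// depth_to_disp(1.0, 0.06, 300) rounds back to 18 px. The fixed-point variant expects the
// OpenCV-style value d * 16, so dispfixed_to_disp(288) = 18.0 px as well.
/*
float depth_example = disp_to_depth(18, 0.06, 300);             // = 1.0 m
uint8_t disp_example = depth_to_disp(depth_example, 0.06, 300); // = 18 px
float depth16_example = disp_to_depth_16bit(288, 0.06, 300);    // = 1.0 m (288 = 18 * 16)
*/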
718 
719 
720 // Function 9a - Calculates 3d points in a scene based on the 2d coordinates of the point in the
721 // image plane and the disparity. d is in pixels, b is in meters and f is in pixels
722 void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t _d,
723  const float _b, const uint16_t _f)
724 {
725  // Calculating Z
726  // In case the disparity is 0, Z is set to a very small value so that the point is not picked up by the
727  // algorithm that calculates the closest edge point to the target goal
728  //printf("y=%d\n", image_point_y);
729  //printf("x=%d\n", image_point_x);
730  //printf("d=%d\n", d);
731 
732 
733  if (_d == 0) {
734  scene_point->z = 0.0001;
735  } else {
736  scene_point->z = disp_to_depth(_d, _b, _f);
737  }
738 
739 
740  //printf("Z=%f\n", scene_point->Z);
741 
742  // Calculating Y
743  scene_point->y = image_point_y * scene_point -> z / _f;
744 
745  // Calculating X
746  scene_point->x = image_point_x * scene_point -> z / _f;
747  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
748  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
749  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
750 }
751 
752 
753 // Function 9b - Calculates 3d points in a scene based on the 2d coordinates of the point in the
754 // image plane and the depth. depth is in meters and f is in pixels
755 void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x,
756  const float depth /*depth in m*/, const uint16_t _f)
757 {
758  // Calculating Z
759  scene_point->z = depth;
760 
761  // Calculating Y
762  scene_point->y = image_point_y * scene_point -> z / _f;
763 
764  // Calculating X
765  scene_point->x = image_point_x * scene_point -> z / _f;
766  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
767  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
768  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
769 }
770 
771 
772 // Function 9c - Calculates 3d points in a scene based on the 2d coordinates of the point in the
773 // image plane and the disparity. d is in pixels, b is in meters and f is in pixels
774 void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t _d,
775  const float _b, const uint16_t _f)
776 {
777  // Calculating Z
778  // In case the disparity is 0, Z is set to a very small value so that the point is not picked up by the
779  // algorithm that calculates the closest edge point to the target goal
780  //printf("y=%d\n", image_point_y);
781  //printf("x=%d\n", image_point_x);
782  //printf("d=%d\n", d);
783 
784 
785  if (_d == 0) {
786  scene_point->z = 0.0001;
787  } else {
788  scene_point->z = disp_to_depth_16bit(_d, _b, _f);
789  }
790 
791 
792  //printf("Z=%f\n", scene_point->Z);
793 
794  // Calculating Y
795  scene_point->y = image_point_y * scene_point -> z / _f;
796 
797  // Calculating X
798  scene_point->x = image_point_x * scene_point -> z / _f;
799  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
800  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
801  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
802 
803 }
804 
805 
806 // Function 10a - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using struct image_t for dimensions
807 int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img)
808 {
809  if (x >= (img->w) || x < 0) {
810  printf("Error: index x=%d is out of bounds for axis 0 with size %d. Returning -1\n", x, img->w);
811  return -1;
812  } else if (y >= (img->h) || y < 0) {
813  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, img->h);
814  return -1;
815  } else {
816  return x + img->w * y;
817  }
818 }
819 
820 
821 // Function 10b - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using struct img_size_t for dimensions
822 int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *_img_dims)
823 {
824  if (x >= (_img_dims->w) || x < 0) {
825  printf("Error: index %d is out of bounds for axis 0 with size %d. Returning -1\n", x, _img_dims->w);
826  return -1;
827  } else if (y >= (_img_dims->h) || y < 0) {
828  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, _img_dims->h);
829  return -1;
830  } else {
831  return x + _img_dims->w * y;
832  }
833 }
834 
835 // Function 10c - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using two uint16_t values for dimensions
836 int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width)
837 {
838  if (x >= (img_width) || x < 0) {
839  printf("Error: index x=%d is out of bounds for axis 0 with size %d. Returning -1\n", x, img_width);
840  return -1;
841  } else if (y >= (img_height) || y < 0) {
842  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, img_height);
843  return -1;
844  } else {
845  return x + img_width * y;
846  }
847 }
848 
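// Worked example (illustrative): for a row-major image of width 240, the pixel at
// (y = 2, x = 5) maps to the 1d buffer index 5 + 240 * 2 = 485. All three helpers above
// return the same index; they only differ in how the image dimensions are supplied.
/*
int32_t idx_example = indx1d_c(2, 5, 240, 240);  // = 485
*/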
849 
850 // Function 11 - Function to convert point in coordinate system a to a point in the coordinate system b
851 void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa)
852 {
853 
854  struct FloatVect3 Va_temp;
855  Va_temp.x = Va->x;
856  Va_temp.y = Va->y;
857  Va_temp.z = Va->z;
858 
859 
860  // The following translates world vector coordinates into the agent coordinate system
861  Va_temp.x = Va->x - VOa->x;
862  Va_temp.y = Va->y - VOa->y;
863  Va_temp.z = Va->z - VOa->z;
864 
865  // In case the axes of the world coordinate system (w) and the agent coordinate system (a) do not
866  // coincide, they are adjusted with the rotation matrix R
867  float_rmat_vmult(Vb, Rba, &Va_temp);
868 }
869 
870 
871 // Function 12 - Function to convert point in coordinate system b back to a point in the coordinate system a
872 void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa)
873 {
874  // In case the axes of the agent coordinate system (a) and the world coordinate system (w) do not
875  // coincide, they are adjusted with the inverse rotation matrix R
876  float_rmat_transp_vmult(Va, Rba, Vb);
877 
878  // The following translates agent vector coordinates into the world coordinate system
879  Va->x = Va->x + VOa->x;
880  Va->y = Va->y + VOa->y;
881  Va->z = Va->z + VOa->z;
882 }
883 
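// Hedged usage sketch (illustrative): Va_to_Vb() first subtracts the frame-origin offset VOa and
// then rotates with Rba, while Vb_to_Va() applies the transposed rotation and adds the offset
// back, so chaining the two returns the original point. The identity rotation and the numbers
// below are example values only.
/*
struct FloatRMat R_example;
struct FloatVect3 VO_example = {1.0f, 2.0f, 0.5f};  // assumed origin of frame b expressed in frame a
struct FloatVect3 Va_example = {3.0f, 4.0f, 1.0f};
struct FloatVect3 Vb_example, Va_back_example;
float_rmat_identity(&R_example);
Va_to_Vb(&Vb_example, &Va_example, &R_example, &VO_example);      // Vb = (2, 2, 0.5)
Vb_to_Va(&Va_back_example, &Vb_example, &R_example, &VO_example); // recovers (3, 4, 1)
*/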
884 
885 // Function 13 - Function wrapper to convert a point in the world coordinate system to a point in the camera coordinate system
886 void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
887  struct FloatRMat *_Rcr, struct FloatVect3 *_VCr, const uint8_t verbose)
888 {
889  struct FloatVect3 Vr;
890 
891  // Print log only if enabled
892  if (verbose != 0) {
893  // Rotation matrix - World coordinate system expressed in the robot coordinate system
894  printf("Rrw\n");
895  printf("%f, %f, %f,\n", Rrw->m[0], Rrw->m[1], Rrw->m[2]);
896  printf("%f, %f, %f,\n", Rrw->m[3], Rrw->m[4], Rrw->m[5]);
897  printf("%f, %f, %f\n\n", Rrw->m[6], Rrw->m[7], Rrw->m[8]);
898 
899  // Vector coordinates - Robot in the world frame system
900  printf("VRw (drone location)\n");
901  printf(" %f\n %f\n %f\n\n", VRw->x, VRw->y, VRw->z);
902 
903  // World coordinates
904  printf("Vw\n");
905  printf(" %f\n %f\n %f\n\n", Vw->x, Vw->y, Vw->z);
906  }
907 
908 
909  // Robot coordinates from world coordinates
910  Va_to_Vb(&Vr, Vw, Rrw, VRw);
911 
912  // Print log only if enabled
913  if (verbose != 0) {
914  // Robot coordinates
915  printf("Vr\n");
916  printf(" %f\n %f\n %f\n\n", Vr.x, Vr.y, Vr.z);
917 
918  // Rotation matrix - Robot coordinate system expressed in the camera coordinate system
919  printf("Rcr\n");
920  printf("%f, %f, %f,\n", _Rcr->m[0], _Rcr->m[1], _Rcr->m[2]);
921  printf("%f, %f, %f,\n", _Rcr->m[3], _Rcr->m[4], _Rcr->m[5]);
922  printf("%f, %f, %f\n\n", _Rcr->m[6], _Rcr->m[7], _Rcr->m[8]);
923 
924  // Vector coordinates - Camera in the robot frame system
925  printf("VCa (camera location)\n");
926  printf(" %f\n %f\n %f\n\n", _VCr->x, _VCr->y, _VCr->z);
927  }
928 
929  // Camera coordinates from robot coordinates
930  Va_to_Vb(Vc, &Vr, _Rcr, _VCr);
931 
932  // Print log only if enabled
933  if (verbose != 0) {
934  // Camera coordinates
935  printf("Vc\n");
936  printf(" %f\n %f\n %f\n\n", Vc->x, Vc->y, Vc->z);
937  }
938 }
939 
940 
941 // Function 14 - Function wrapper to convert a point in the camera coordinate system back to a point in the world coordinate system
942 void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
943  struct FloatRMat *_Rcr, struct FloatVect3 *_VCr, const uint8_t verbose)
944 {
945  struct FloatVect3 Vr;
946 
947  // Agent coordinates from camera coordinates
948  Vb_to_Va(&Vr, Vc, _Rcr, _VCr);
949 
950  // Print log only if enabled
951  if (verbose != 0) {
952  // Back to robot coordinates
953  printf("Vr - back calculated\n");
954  printf(" %f\n %f\n %f\n\n", Vr.x, Vr.y, Vr.z);
955  }
956 
957 
958  // World coordinates from robot coordinates
959  Vb_to_Va(Vw, &Vr, Rrw, VRw);
960 
961  // Print log only if enabled
962  if (verbose != 0) {
963  // Back to world coordinates
964  printf("Vw - back calculated\n");
965  printf(" %f\n %f\n %f\n\n", Vw->x, Vw->y, Vw->z);
966  }
967 }
968 
969 // Function 15 - Function to obtain the Euclidean distance (in meters) between two 3d points
970 float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2)
971 {
972  struct FloatVect3 V_V1V2_diff;
973  VECT3_DIFF(V_V1V2_diff, *V1, *V2);
974  return float_vect3_norm(&V_V1V2_diff);
975 }
976 
977 // Function 16a - function to calculate angle between robot and specific waypoint
979 {
980  struct FloatVect2 VWPwt = {WaypointX(wp), WaypointY(wp)};
981  struct FloatVect2 difference;
982  float angle;
983 
984  VECT2_DIFF(difference, VWPwt, *stateGetPositionEnu_f());
985  angle = atan2f(difference.x, difference.y);
986  return angle;
987 }
988 
989 
990 // Function 16b - function to calculate angle between robot and specific setpoint
991 float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned)
992 {
993  struct FloatVect2 difference;
994  float angle;
995 
996  VECT2_DIFF(difference, *VSETPOINTwned, *stateGetPositionNed_f());
997  angle = atan2f(difference.y, difference.x);
998  return angle;
999 }
1000 
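// Worked example (illustrative): in the world NED frame, a setpoint lying 3 m to the north and
// 3 m to the east of the drone gives atan2f(3, 3) = pi/4, i.e. a heading of roughly 0.785 rad
// (45 degrees) measured clockwise from north, assuming the drone sits at the origin.
/*
struct FloatVect3 VSETPOINT_example = {3.0f, 3.0f, -1.0f};  // 3 m north, 3 m east, 1 m up (NED)
float heading_example = heading_towards_setpoint_WNED(&VSETPOINT_example); // ~0.785 rad
*/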
1001 
1002 // Function 17a - Function to calculate median disparity to a point (Vi) in an image (img), using a kernel structure (kernel_median)
1003 uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
1004 {
1005  // Creating start and stop coordinates in the image coordinate system, based on the kernel size
1006  uint8_t VSTARTi_y = Vi->y - (kernel_median->h / 2);
1007  uint8_t VSTARTi_x = Vi->x - (kernel_median->w / 2);
1008  uint8_t VSTOPi_y = Vi->y + (kernel_median->h / 2);
1009  uint8_t VSTOPi_x = Vi->x + (kernel_median->w / 2);
1010 
1011  // In case the upper bounds of the kernel are outside of the image area
1012  // (lower bound not important because of uint8_t type converting everything below 0 to 0):
1013  if (VSTOPi_y > img->h) {
1014  VSTOPi_y = (img->h - 1);
1015  }
1016  if (VSTOPi_x > img->w) {
1017  VSTOPi_x = (img->w - 1);
1018  }
1019 
1020 
1021 
1022  // Declaring kernel coordinates
1023  uint16_t Vk_y;
1024  uint16_t Vk_x;
1025 
1026  // Declaring 1d indices (for result of transforming 2d coordinates into 1d coordinate)
1027  int32_t index_img;
1028  int32_t index_kernel;
1029 
1030  // Declaring variable to store median in
1031  uint8_t median;
1032 
1033 
1034  // Here we get the median value of a block in the middle of an image using a kernel structure
1035  for (uint8_t Vi_y = VSTARTi_y; Vi_y < (VSTOPi_y + 1); Vi_y++) {
1036  for (uint8_t Vi_x = VSTARTi_x; Vi_x < (VSTOPi_x + 1); Vi_x++) {
1037  // Calculating kernel coordinates
1038  Vk_y = Vi_y - VSTARTi_y;
1039  Vk_x = Vi_x - VSTARTi_x;
1040 
1041  // Converting 2d indices to 1d indices
1042  index_img = indx1d_a(Vi_y, Vi_x, img);
1043  index_kernel = indx1d_c(Vk_y, Vk_x, median_kernel.h, median_kernel.w);
1044 
1045 
1046  // Saving disparity values of image underneath the kernel, into the kernel buffer
1047  ((uint8_t *) median_kernel.buf_values)[index_kernel] = ((uint8_t *) img->buf)[index_img];
1048  }
1049  }
1050 
1051  // Calculating median disparity value of values recorded by the kernel
1053 
1054  return median;
1055 }
1056 
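// Hedged usage sketch (illustrative): a typical call creates a small odd-sized kernel once,
// queries the median disparity around a pixel of interest and frees the kernel afterwards.
// The 5 x 5 kernel size and the pixel coordinates are assumptions for illustration only; note
// that the function body above indexes the global median_kernel buffers, so that is the kernel
// that must be created before the call.
/*
struct point_t point_example = { .x = 120, .y = 120 };  // assumed pixel of interest
kernel_create(&median_kernel, 5, 5, IMAGE_GRAYSCALE);
uint8_t median_disp_example = median_disparity_to_point(&point_example, &img_middle_int8_cropped, &median_kernel);
kernel_free(&median_kernel);
*/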
1057 
1058 // Function 17b - Function to calculate median depth (cm) to a point (Vi) in a 16bit image (img), using a kernel structure (kernel_median)
1059 uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
1060 {
1061  // Creating start and stop coordinates in the image coordinate system, based on the kernel size
1062  uint16_t VSTARTi_y = Vi->y - (kernel_median->h / 2);
1063  uint16_t VSTARTi_x = Vi->x - (kernel_median->w / 2);
1064  uint16_t VSTOPi_y = Vi->y + (kernel_median->h / 2);
1065  uint16_t VSTOPi_x = Vi->x + (kernel_median->w / 2);
1066 
1067  // In case the upper bounds of the kernel are outside of the image area
1068  // (lower bound not important because of uint16_t type converting everything below 0 to 0):
1069  if (VSTOPi_y > img->h) {
1070  VSTOPi_y = (img->h - 1);
1071  }
1072  if (VSTOPi_x > img->w) {
1073  VSTOPi_x = (img->w - 1);
1074  }
1075 
1076 
1077 
1078  // Declaring kernel coordinates
1079  uint16_t Vk_y;
1080  uint16_t Vk_x;
1081 
1082  // Declaring 1d indices (for result of transforming 2d coordinates into 1d coordinate)
1083  int32_t index_img;
1084  int32_t index_kernel;
1085 
1086  // Declaring variable to store median in
1087  uint16_t median;
1088 
1089 
1090  // Here we get the median value of a block in the middle of an image using a kernel structure
1091  for (uint16_t Vi_y = VSTARTi_y; Vi_y < (VSTOPi_y + 1); Vi_y++) {
1092  for (uint16_t Vi_x = VSTARTi_x; Vi_x < (VSTOPi_x + 1); Vi_x++) {
1093  // Calculating kernel coordinates
1094  Vk_y = Vi_y - VSTARTi_y;
1095  Vk_x = Vi_x - VSTARTi_x;
1096 
1097  // Converting 2d indices to 1d indices
1098  index_img = indx1d_a(Vi_y, Vi_x, img);
1099  index_kernel = indx1d_c(Vk_y, Vk_x, median_kernel.h, median_kernel.w);
1100 
1101 
1102  // Saving depth values of the image underneath the kernel, into the kernel buffer
1103  ((uint16_t *) median_kernel.buf_values)[index_kernel] = ((uint16_t *) img->buf)[index_img];
1104 
1105  }
1106  }
1107 
1108  // Calculating median depth value of values recorded by the kernel
1110 
1111  return median;
1112 }
1113 
1114 
1115 
1116 
1117 // Function 18a - Function to find the "best" edge (the edge giving the closest ideal pathway from robot to edge to goal) - Using disparity image
1118 // Returns a 3d vector to the best "edge", and 1 if any edge is found or 0 if no edge is found.
1120  struct FloatVect3
1121  *_VEDGECOORDINATESc, // Declared vector of coordinates of "best" edge detected in camera coordinate system
1122  struct FloatVect3 *VTARGETc, // Declared vector of coordinates of goal in camera coordinate system
1123  struct image_t *img_edges, // Image obtained from the external Sobel edge detection function = sobel_OCV
1124  struct image_t *img_disparity, // Image obtained after simple block matching
1125  struct crop_t *_edge_search_area, // This structure holds information about the window in which edges are searched in
1126  uint8_t threshold, // Above this disparity (pixels) edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away)
1127  int16_t max_confidence // This is the max confidence that edges were found - Works like a threshold
1128 )
1129 {
1130  // Declarations
1131  struct FloatVect3
1132  VEDGEc; // A vector to save the point of an eligible detected edge point in the camera coordinate system.
1133  struct FloatVect3 VROBOTCENTERc; // A vector to hold camera center coordinates in the camera coordinate system.
1134  struct point_t
1135  VCLOSESTEDGEi; // A vector to save the point of the "best" eligible detected edge point in the image coordinate system.
1136  float f_distance_edge_to_goal; // Saves distance from edge to goal
1137  float f_distance_robot_to_edge; // Saves distance from robot to edge
1138  float distance; // This stores the total distance (robot to edge plus edge to goal). It is initialized with 255, as basically any edge found will be closer than that and will replace the 255 meters
1139  uint8_t edge_value; // Variable to store the intensity value of a pixel in the img_edge
1140  uint8_t disparity; // variable to store the disparity level of a pixel in the img_disparity
1141  uint8_t disparity_best; // Variable to store disparity level of associated best edge found (for debugging)
1142  int16_t confidence; // Confidence = number of edge pixels found
1143  int32_t indx; // Variable to store 1d index calculated from 2d index
1144 
1145  // Initializations
1146  VROBOTCENTERc.x = 0.0; VROBOTCENTERc.y = 0.0; VROBOTCENTERc.z = 0.0;
1147  VCLOSESTEDGEi.x = 0; VCLOSESTEDGEi.y = 0;
1148  distance = 255;
1149  confidence = 0;
1150  disparity_best = 0;
1151 
1152 
1153  // Loop to calculate position (in image) of point closest to hypothetical target - Start
1154  for (uint16_t y = _edge_search_area->y; y < (_edge_search_area->y + _edge_search_area->h);
1155  y++) { //for (uint32_t y = _edge_search_area.y; y < (_edge_search_area.y + _edge_search_area.h); y++)
1156  for (uint16_t x = _edge_search_area->x; x < (_edge_search_area->x + _edge_search_area->w); x++) {
1157  indx = indx1d_a(y, x, img_edges); // We convert the 2d index [x,y] into a 1d index
1158  edge_value = ((uint8_t *) img_edges->buf)[indx]; // We save the intensity of the current point
1159  disparity = ((uint8_t *) img_disparity->buf)[indx]; // We save the disparity of the current point
1160 
1161  // Two conditions must be met for an edge to be considered a viable route for the drone:
1162  // 1) The current coordinate (x, y) must coincide with an edge pixel
1163  // (as all non-edge pixels have been set to 0) - (edge_value != 0)
1164  // 2) The disparity of the current coordinate (x, y) must be above a certain threshold. This simulates the vision cone - (disparity > threshold)
1165  if ((edge_value != 0) && (disparity > threshold)) {
1166  // We increase the confidence for every edge found
1167  confidence++;
1168  Bound(confidence, 0, max_confidence);
1169 
1170  // We determine the offset from the principal point
1171  int32_t y_from_c = y - c_img_cropped.y; // NOTE. The variable "c_img_cropped" is a global variable
1172  int32_t x_from_c = x - c_img_cropped.x; // NOTE. The variable "c_img_cropped" is a global variable
1173  // We derive the 3d scene point using the disparity saved earlier
1174  Vi_to_Vc(&VEDGEc, y_from_c, x_from_c, disparity, b, f); // NOTE. The variables "b" and "f" are global variables
1175  // Calculating Euclidean distance (N2) - Edge to goal
1176  f_distance_edge_to_goal = float_vect3_norm_two_points(VTARGETc, &VEDGEc);
1177  // Calculating Euclidean distance (N2) - robot to edge
1178  f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGEc, &VROBOTCENTERc);
1179 
1180 
1181  // If current distance (using distance vector) is smaller than the previous minimum distance
1182  // measure then save new distance and point coordinates associated with it
1183  if ((f_distance_robot_to_edge + f_distance_edge_to_goal) < distance) {
1184  // Saving closest edge point, in camera coordinate system
1185  _VEDGECOORDINATESc->x = VEDGEc.x;
1186  _VEDGECOORDINATESc->y = VEDGEc.y;
1187  _VEDGECOORDINATESc->z = VEDGEc.z;
1188  // Saving disparity at point
1189  disparity_best = disparity;
1190  // Saving smallest distance
1191  distance = (f_distance_robot_to_edge + f_distance_edge_to_goal);
1192  // Saving closest edge point, in image coordinate system
1193  VCLOSESTEDGEi.y = y;
1194  VCLOSESTEDGEi.x = x;
1195 
1196  }
1197 
1198  //printf("x image = %d\n", x);
1199  //printf("y image = %d\n", y);
1200  //printf("x image from c = %d\n", x_from_c);
1201  //printf("y image from c = %d\n", y_from_c);
1202  //printf("d = %d\n", disparity);
1203  //printf("X scene from c = %f\n", scene_point.X);
1204  //printf("Y scene from c = %f\n", scene_point.Y);
1205  //printf("Z scene from c = %f\n", scene_point.Z);
1206  //printf("Closest edge [y,x] = [%d, %d]\n", (int)closest_edge.y, (int)closest_edge.x);
1207  //printf("Robot center coordinates = [%f, %f, %f]\n", VROBOTCENTERc.x, VROBOTCENTERc.y, VROBOTCENTERc.z);
1208  //printf("Edge coordinates = [%f, %f, %f]\n", VEDGEc.x, VEDGEc.y, VEDGEc.z);
1209  //printf("Distance to goal (m) = %f\n", distance);
1210  //printf("Distance to goal2 (m) = %f + %f\n\n", f_distance_robot_to_edge, f_distance_edge_to_goal);
1211  }
1212  }
1213  }
1214 
1215  if (confidence == max_confidence) {
1216  ((uint8_t *) img_edges->buf)[indx1d_a(VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, img_edges)] = 255;
1217  printf("Viable closest edge found: [%d, %d] (disparity = %d)\n", VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, disparity_best);
1218 
1219  printf("At distance: %f\n", distance);
1220  confidence = 0;
1221  return 1;
1222  } else {
1223  printf("No viable edge found\n");
1224  confidence = 0;
1225  return 0;
1226  }
1227 }
1228 
1229 
1230 
1231 // Function 18b - Function to find the "best" edge (the edge giving the closest ideal pathway from robot to edge to goal) - Using depth image
1232 // Returns a 3d vector to the best "edge", and 1 if any edge is found or 0 if no edge is found.
1234  struct FloatVect3
1235  *_VEDGECOORDINATESc, // Declared vector of coordinates of "best" edge detected in camera coordinate system
1236  struct FloatVect3 *VTARGETc, // Declared vector of coordinates of goal in camera coordinate system
1237  struct image_t *img_edges, // Image obtained from the external Sobel edge detection function = sobel_OCV
1238  struct image_t *img_depth, // Image holding depth values (cm) obtained from the disparity image
1239  struct crop_t *_edge_search_area, // This structure holds information about the window in which edges are searched in
1240  uint16_t threshold, // Below this depth (cm) edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away)
1241  int16_t max_confidence // This is the max confidence that edges were found - Works like a threshold
1242 )
1243 {
1244  // Declarations
1245  struct FloatVect3
1246  VEDGEc; // A vector to save the point of an eligible detected edge point in the camera coordinate system.
1247  struct FloatVect3 VROBOTCENTERc; // A vector to hold camera center coordinates in the camera coordinate system.
1248  struct point_t
1249  VCLOSESTEDGEi; // A vector to save the point of the "best" eligible detected edge point in the image coordinate system.
1250  float f_distance_edge_to_goal; // Saves distance from edge to goal
1251  float f_distance_robot_to_edge; // Saves distance from robot to edge
1252  float distance; // This stores the total distance (robot to edge plus edge to goal). It is initialized with 255, as basically any edge found will be closer than that and will replace the 255 meters
1253  uint8_t edge_value; // Variable to store the intensity value of a pixel in the img_edge
1254  uint16_t depth; // Variable to store the depth (in cm) of a pixel in the img_disparity
1255  uint16_t depth_best; // Variable to store depth (cm) level of associated best edge found (for debugging)
1256  int16_t confidence; // Confidence = number of edge pixels found
1257  int32_t indx; // Variable to store 1d index calculated from 2d index
1258 
1259  // Initializations
1260  VROBOTCENTERc.x = 0.0; VROBOTCENTERc.y = 0.0; VROBOTCENTERc.z = 0.0;
1261  VCLOSESTEDGEi.x = 0; VCLOSESTEDGEi.y = 0;
1262  distance = 255;
1263  confidence = 0;
1264  depth_best = 0;
1265 
1266 
1267  // Loop to calculate position (in image) of point closest to hypothetical target - Start
1268  for (uint16_t y = _edge_search_area->y; y < (_edge_search_area->y + _edge_search_area->h);
1269  y++) { //for (uint32_t y = _edge_search_area.y; y < (_edge_search_area.y + _edge_search_area.h); y++)
1270  for (uint16_t x = _edge_search_area->x; x < (_edge_search_area->x + _edge_search_area->w); x++) {
1271  indx = indx1d_a(y, x, img_edges); // We convert the 2d index [x,y] into a 1d index
1272  edge_value = ((uint8_t *) img_edges->buf)[indx]; // We save the intensity of the current point
1273  depth = ((uint16_t *) img_depth->buf)[indx];
1274 
1275  // Two conditions must be met for an edge to be considered a viable route for the drone:
1276  // 1) The current coordinate (x, y) must coincide with an edge pixel
1277  // (as all non-edge pixels have been set to 0) - (edge_value != 0)
1278  // 2) The depth of the current coordinate (x, y) must be below a certain threshold. This simulates the vision cone - (depth < threshold)
1279  if ((edge_value != 0) && (depth < threshold)) {
1280  // We increase the confidence for every edge found
1281  confidence++;
1282  Bound(confidence, 0, max_confidence);
1283 
1284  // We determine the offset from the principal point
1285  int32_t y_from_c = y - c_img_cropped.y; // NOTE. The variable "c_img_cropped" is a global variable
1286  int32_t x_from_c = x - c_img_cropped.x; // NOTE. The variable "c_img_cropped" is a global variable
1287  // We derive the 3d scene point using the depth from the depth image (img_depth). Note depth is in cm, but function takes m. Thus, we convert
1288  Vi_to_Vc_depth(&VEDGEc, y_from_c, x_from_c, (depth / 100.00),
1289  f); // NOTE. The variable "f" is a global variable
1290 
1291  // Calculating Euclidean distance (N2) - Edge to goal
1292  f_distance_edge_to_goal = float_vect3_norm_two_points(VTARGETc, &VEDGEc);
1293  // Calculating Euclidean distance (N2) - robot to edge
1294  f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGEc, &VROBOTCENTERc);
1295 
1296 
1297  // If current distance (using distance vector) is smaller than the previous minimum distance
1298  // measure then save new distance and point coordinates associated with it
1299  if ((f_distance_robot_to_edge + f_distance_edge_to_goal) < distance) {
1300  // Saving closest edge point, in camera coordinate system
1301  _VEDGECOORDINATESc->x = VEDGEc.x;
1302  _VEDGECOORDINATESc->y = VEDGEc.y;
1303  _VEDGECOORDINATESc->z = VEDGEc.z;
1304  // Saving depth at point
1305  depth_best = depth;
1306  // Saving smallest distance
1307  distance = (f_distance_robot_to_edge + f_distance_edge_to_goal);
1308  // Saving closest edge point, in image coordinate system
1309  VCLOSESTEDGEi.y = y;
1310  VCLOSESTEDGEi.x = x;
1311 
1312  }
1313 
1314  //printf("x image = %d\n", x);
1315  //printf("y image = %d\n", y);
1316  //printf("x image from c = %d\n", x_from_c);
1317  //printf("y image from c = %d\n", y_from_c);
1318  //printf("d = %d\n", disparity);
1319  //printf("X scene from c = %f\n", scene_point.X);
1320  //printf("Y scene from c = %f\n", scene_point.Y);
1321  //printf("Z scene from c = %f\n", scene_point.Z);
1322  //printf("Closest edge [y,x] = [%d, %d]\n", (int)closest_edge.y, (int)closest_edge.x);
1323  //printf("Robot center coordinates = [%f, %f, %f]\n", VROBOTCENTERc.x, VROBOTCENTERc.y, VROBOTCENTERc.z);
1324  //printf("Edge coordinates = [%f, %f, %f]\n", VEDGEc.x, VEDGEc.y, VEDGEc.z);
1325  //printf("Distance to goal (m) = %f\n", distance);
1326  //printf("Distance to goal2 (m) = %f + %f\n\n", f_distance_robot_to_edge, f_distance_edge_to_goal);
1327  }
1328  }
1329  }
1330 
1331  if (confidence == max_confidence) {
1332  ((uint8_t *) img_edges->buf)[indx1d_a(VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, img_edges)] = 255;
1333  printf("Viable closest edge found: [%d, %d] (depth = %f)\n", VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, (depth_best / 100.00));
1334 
1335  printf("At distance: %f\n", distance);
1336  confidence = 0;
1337  return 1;
1338  } else {
1339  printf("No viable edge found\n");
1340  confidence = 0;
1341  return 0;
1342  }
1343 }
1344 
1345 
1346 
1347 // Function 19 - Function to determine if setpoint was reached (it was reached if distance is below a set threshold)
1348 uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold)
1349 {
1350  if (float_vect3_norm_two_points(VGOAL, VCURRENTPOSITION) < threshold) {return 1;}
1351  else {return 0;}
1352 }
1353 
1354 
1355 // Function 20 - Function to obtain the Euclidean distance (absolute difference) between two angles (in radians)
1356 float float_norm_two_angles(float target_angle, float current_angle)
1357 {
1358  float target_current_diff;
1359  target_current_diff = target_angle - current_angle;
1360  //printf("Target angle = %f\n", target_angle);
1361  //printf("Current angle = %f\n", current_angle);
1362  //printf("difference in function = %f\n", sqrt(pow(target_current_diff, 2)));
1363  return sqrt(pow(target_current_diff, 2));
1364 }
1365 
1366 // Function 21 - Function to determine if set heading was reached (it was reached if the Euclidean distance is below a set threshold)
1367 uint8_t is_heading_reached(float target_angle, float current_angle, float threshold)
1368 {
1369  if (float_norm_two_angles(target_angle, current_angle) < threshold) {return 1;}
1370  else {return 0;}
1371 }
1372 
1373 // Function 22
1375  struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold_setpoint,
1376  float target_angle, float current_angle, float threshold_angle)
1377 {
1378  if ((float_vect3_norm_two_points(VGOAL, VCURRENTPOSITION) < threshold_setpoint)
1379  && (float_norm_two_angles(target_angle, current_angle) < threshold_angle)) {return 1;}
1380  else {return 0;}
1381 }
1382 
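// Hedged usage sketch (illustrative): the reached-checks are typically fed the current NED
// position and heading from the state interface. The goal coordinates and the 0.25 m / 0.1 rad
// thresholds below are assumptions for illustration only.
/*
struct NedCoor_f *pos_example = stateGetPositionNed_f();
struct FloatVect3 VPOS_example = {pos_example->x, pos_example->y, pos_example->z};
struct FloatVect3 VGOAL_example = {2.0f, 1.0f, -1.5f};  // assumed goal in the world NED frame (m)
uint8_t reached_example = are_setpoint_and_angle_reached(&VGOAL_example, &VPOS_example, 0.25f,
                          heading_towards_setpoint_WNED(&VGOAL_example),
                          stateGetNedToBodyEulers_f()->psi, 0.1f);
*/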
1383 
1384 // Function 23 - Function to convert a disparity image (8bit pixels, or 16bit fixed-point numbers unaltered from the OpenCV output) into 16bit depth values (cm)
1385 void disp_to_depth_img(struct image_t *img_input, struct image_t *img16bit_output)
1386 {
1387  //int n = 89;
1388  float disparity = 1;
1389 
1390  //printf("C location %d = %d\n", n, ((int16_t*)img16bit_input->buf)[n]);
1391 
1392 
1393  // Converting disparity values into depth (cm)
1394  for (int32_t i = 0; i < (img_input->h * img_input->w); i++) {
1395 
1396  if (img_input->type == IMAGE_GRAYSCALE) {
1397  disparity = disp_to_depth(((uint8_t *)img_input->buf)[i], b, f);
1398  } else if (img_input->type == IMAGE_INT16) {
1399  disparity = disp_to_depth_16bit(((uint16_t *)img_input->buf)[i], b, f);
1400  } else {
1401  printf("ERROR: function does not support image type %d. Breaking out of function.", img_input->type);
1402  }
1403 
1404 
1405  /*
1406  if(i == n)
1407  {
1408  printf("Depth in meters at %d = %f\n", n, disparity);
1409  }*/
1410 
1411  disparity = disparity * 100;
1412 
1413  /*if(i == n)
1414  {
1415  printf("Depth in cm at %d = %f\n", n, disparity);
1416  }*/
1417 
1418 
1419  ((uint16_t *)img16bit_output->buf)[i] = round(disparity);
1420  }
1421  //printf("Depth in cm at %d = %d\n", n, ((int16_t*)img16bit_output->buf)[n]);
1422 }
1423 
1424 // Function 24 - Function that encapsulates all of the background processes. Originally this was in the periodic function,
1425 // but since it is used in the direct and the guidance navigation modes of the state machine, I made it a callable function
1426 // for convenience
1427 void background_processes(uint8_t _save_images_flag)
1428 {
1429  //Background processes
1430  // 1. Converting left and right image to 8bit grayscale for further processing
1431  image_to_grayscale(&img_left, &img_left_int8); // Converting left image from UYVY to gray scale for saving function
1432  image_to_grayscale(&img_right, &img_right_int8); // Converting right image from UYVY to gray scale for saving function
1433 
1434  // 2. Deriving disparity map from block matching (left image is reference image)
1436  1);// Creating cropped disparity map image
1437  // For report: creating image for saving 1
1438  if (_save_images_flag) {save_image_HM(&img_disparity_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img1_post_SBM.bmp", heat_map_type);}
1439 
1440  /*
1441  // Optional thresholding of disparity map
1442  uint8_t thresh = 1;
1443  for (int32_t i = 0; i < (img_disparity_int8_cropped.h*img_disparity_int8_cropped.w); i++)
1444  {
1445  uint8_t disparity = ((uint8_t*)img_disparity_int8_cropped)[i];
1446  if(disparity < thresh)
1447  {
1448  ((uint8_t*)img_disparity_int8_cropped)[i] = 0; // if below the disparity threshold, assume the object is infinitely far away
1449  }
1450  }*/
1451 
1452  // 3. Morphological operations 1
1453  // Needed to smooth object boundaries and to remove noise
1455  // For report: creating image for saving 2
1456  if (_save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img2_post_opening_8bit.bmp", heat_map_type);}
1457 
1459  // For report: creating image for saving 3
1460  if (_save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img3_post_closing_8bit.bmp", heat_map_type);}
1461 
1463  // For report: creating image for saving 4
1464  if (_save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img4_post_dilation_8bit.bmp", heat_map_type);}
1465 
1466  // 4. Depth image
1468  // For report: creating image for saving 4
1469  if (_save_images_flag) {save_image_HM(&img_depth_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img5_post_depth_16bit.bmp", heat_map_type);}
1470 
1471  // 5. Sobel edge detection
1473  // For report: creating image for saving 5
1474  if (_save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img6_post_sobel_8bit.bmp");}
1475 }
1476 
1477 
1478 // Function 25 - Function that encapsulates all of the background processes. Originally this was in the periodic function,
1479 // but since it is used in both the direct-control and the autonomous-guided modes of the state machine, I made it a callable function
1480 // for convenience
1481 void background_processes_16bit(uint8_t _save_images_flag)
1482 {
1483  //Background processes
1484  // 1. Converting left and right image to 8bit grayscale for further processing
1485  image_to_grayscale(&img_left, &img_left_int8); // Converting left image from UYVY to gray scale for saving function
1486  image_to_grayscale(&img_right, &img_right_int8); // Converting right image from UYVY to gray scale for saving function
1487 
1488  // 2. Deriving disparity map from block matching (left image is reference image)
1490  1);// Creating cropped disparity map image
1491  // For report: creating image for saving 1
1492  if (_save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img1_post_SBM_16bit.bmp", heat_map_type);}
1493 
1494  //printf("maximum_intensity = %d\n", maximum_intensity(&img_disparity_int16_cropped));
1495 
1496  /*
1497  // Optional thresholding of disparity map
1498  uint8_t thresh = 1;
1499  for (int32_t i = 0; i < (img_disparity_int8_cropped.h*img_disparity_int8_cropped.w); i++)
1500  {
1501  uint8_t disparity = ((uint8_t *)img_disparity_int8_cropped.buf)[i];
1502  if(disparity < thresh)
1503  {
1504  ((uint8_t *)img_disparity_int8_cropped.buf)[i] = 0; // if below the threshold, assume the object is infinitely far away
1505  }
1506  }*/
1507 
1508  // 3. Morphological operations 1
1509  // Needed to smooth object boundaries and to remove noise
1510 
1512  // For report: creating image for saving 3
1513  if (_save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img2_post_closing_16bit.bmp", heat_map_type);}
1514 
1515 
1517  // For report: creating image for saving 2
1518  if (_save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img3_post_opening_16bit.bmp", heat_map_type);}
1519 
1520 
1521 
1523  // For report: creating image for saving 4
1524  if (_save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img4_post_dilation_16bit.bmp", heat_map_type);}
1525 
1526 
1527  // 4. Depth image
1529  // For report: creating image for saving 4
1530  if (_save_images_flag) {save_image_HM(&img_depth_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img5_post_depth_16bit.bmp", heat_map_type);}
1531 
1532 
1533  // 5. Sobel edge detection
1535  // For report: creating image for saving 5
1536  if (_save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img6_post_sobel_8bit.bmp");}
1537 
1538  // 6. Morphological operations 2
1539  // This is needed so that, when the edges are used as a filter (i.e. depth values are
1540  // read only at edge pixels), the underlying depth values are those of the foreground
1541  // and not the background (see the illustrative sketch after this function)
1543 }
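/* Illustrative sketch (an assumption added for clarity, not part of wedgebug.c): a grayscale
 * erosion is a sliding-window minimum. In a depth image the foreground has the SMALLER depth
 * values, so eroding spreads foreground depth over neighbouring pixels; depth values sampled
 * underneath thin detected edges then belong to the foreground object rather than the
 * background, which is the effect "Morphological operations 2" above relies on. The kernel
 * size and the row-major uint16_t layout below are assumptions for illustration only. */
static void erode_min_uint16_sketch(const uint16_t *in, uint16_t *out,
                                    int w, int h, int k /* odd kernel size, e.g. 11 */)
{
  int r = k / 2;
  for (int y = 0; y < h; y++) {
    for (int x = 0; x < w; x++) {
      uint16_t m = UINT16_MAX; // running minimum over the k x k window
      for (int dy = -r; dy <= r; dy++) {
        for (int dx = -r; dx <= r; dx++) {
          int yy = y + dy, xx = x + dx;
          if (yy < 0 || yy >= h || xx < 0 || xx >= w) { continue; } // ignore out-of-bounds pixels
          uint16_t v = in[yy * w + xx];
          if (v < m) { m = v; }
        }
      }
      out[y * w + x] = m; // the nearest (smallest) depth in the neighbourhood wins
    }
  }
}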
1544 
1545 
1546 
1547 
1548 
1549 
1550 /*
1551  * a: Coordinate system a (i.e. the coordinate frame a)
1552  * b: Coordinate system b (i.e. the coordinate frame b)
1553  * w: World coordinate system (i.e. the world coordinate frame = x is depth, y is left and right, and z is altitude)
1554  * r: Robot coordinate system (i.e. the robot coordinate frame = x is depth, y is left and right, and z is altitude)
1555  * c: Camera coordinate system (i.e. the camera coordinate frame = x is left and right, y is altitude and z is depth)
1556  * i: Image coordinate system (i.e. the image (plane) coordinate frame)
1557  * k: Kernel coordinate system (i.e. the kernel coordinate frame)
1558  *
1559  * Va: Vector coordinates, in the coordinate system a (i.e. a point in the coordinate system a)
1560  * Vb: Vector coordinates, in the coordinate system b (i.e. a point in the coordinate system b)
1561  * Vwned: Vector coordinates, in the world coordinate system (i.e. a point in the world coordinate system) -NED
1562  * Vwenu: Vector coordinates, in the world coordinate system two (i.e. a point in the world coordinate system two) - ENU
1563  * Vr: Vector coordinates, in the robot coordinate system (i.e. a point in the robot coordinate system)
1564  * Vc: Vector coordinates, in the camera coordinate system (i.e. a point in the camera coordinate system)
1565  * Vi: Vector coordinates, in the image coordinate system (i.e. a point in the image [plane] coordinates system)
1566  * Vk: Vector coordinates, in the kernel coordinate system (i.e. a point in the kernel coordinates system)
1567  *
1568  * R: Rotation matrix
1569  *
1570  * Rba: Rotation matrix of coordinate system a expressed in the coordinate system b
1571  * Rrw: Rotation matrix of world coordinate system expressed in the robot coordinate system
1572  * Rcr: Rotation matrix of robot coordinate system expressed in the camera coordinate system
1573  *
1574  * VRw: Vector coordinates of robot in the world coordinate system
1575  * VCr: Vector coordinates of camera in the robot coordinate system
1576  * VOa: Vector coordinates of the object (has its own object frame) in the coordinate system a
1577  */
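/* Illustrative sketch of the convention above (an assumption, not a verbatim copy of this
 * module's Va_to_Vb): a point Va expressed in frame a is mapped into frame b by removing the
 * offset VOa of frame b's origin (expressed in a) and rotating with Rba, i.e.
 * Vb = Rba * (Va - VOa). With the pprz algebra helpers this could be written as: */
static void frame_a_to_b_sketch(struct FloatVect3 *Vb, struct FloatVect3 *Va,
                                struct FloatRMat *Rba, struct FloatVect3 *VOa)
{
  struct FloatVect3 tmp;
  VECT3_DIFF(tmp, *Va, *VOa);       // tmp = Va - VOa: shift into frame b's origin
  float_rmat_vmult(Vb, Rba, &tmp);  // Vb = Rba * tmp: rotate onto frame b's axes
}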
1578 
1579 
1580 
1581 
1582 
1583 // New section: Init and periodic functions ----------------------------------------------------------------------------------------------------------------
1584 void wedgebug_init()
1585 {
1586  //printf("Wedgebug init function was called\n");
1587 
1588 
1589  // Images creation process:
1590  // Creation of images
1591  // Creating structure to hold dimensions of normal camera images
1592  img_dims.w = WEDGEBUG_CAMERA_LEFT_WIDTH; img_dims.h = WEDGEBUG_CAMERA_LEFT_HEIGHT;
1593 
1594 
1595  image_create(&img_left, img_dims.w, img_dims.h, IMAGE_YUV422); // To store left camera image
1596  image_create(&img_right, img_dims.w, img_dims.h, IMAGE_YUV422);// To store right camera image
1597  image_create(&img_left_int8, img_dims.w, img_dims.h, IMAGE_GRAYSCALE); // To store gray scale version of left image
1598  image_create(&img_right_int8, img_dims.w, img_dims.h, IMAGE_GRAYSCALE); // To store gray scale version of right image
1599 
1600 
1601  // Creation of images - Cropped:
1602  // Calculating cropped image details (x, y, width and height)
1604 
1605  // Creating structure to hold dimensions of cropped camera images
1607 
1608  printf("img_cropped_info [w, h, x, y] = [%d, %d, %d, %d]\n", img_cropped_info.w, img_cropped_info.h, img_cropped_info.x,
1610 
1611  // Creating empty images - cropped - 8bit
1613  IMAGE_GRAYSCALE);// To store cropped depth - 8 bit
1615  IMAGE_GRAYSCALE);// To store cropped depth - 8 bit
1617  IMAGE_GRAYSCALE); // To store intermediate image data from processing - 8 bit
1619  IMAGE_GRAYSCALE); // To store edges image data from processing - 8 bit
1620 
1621  // Creating empty images - cropped - 16bit
1623  IMAGE_INT16);// To store cropped disparity - 16 bit
1625  IMAGE_INT16);// To store cropped depth - 16 bit
1627  IMAGE_INT16);// To store cropped middle image - 16 bit
1628 
1629 
1630 
1631 
1632 
1633  // Creation of kernels:
1634  // Creating structure to hold dimensions of kernels
1635  K_median_w = 43;
1636  K_median_h = 43;
1637 
1639  // Creating empty kernel:
1642 
1643  printf("median_kernel16bit [buf_size, h, w, type] = [%d, %d, %d, %d]\n", median_kernel16bit.buf_size,
1645 
1646 
1647 
1648 
1649  // Adding callback functions
1650  cv_add_to_device(&WEDGEBUG_CAMERA_LEFT, copy_left_img_func, WEDGEBUG_CAMERA_LEFT_FPS, 0);
1651  cv_add_to_device(&WEDGEBUG_CAMERA_RIGHT, copy_right_img_func, WEDGEBUG_CAMERA_RIGHT_FPS, 1);
1652 
1653 
1654  //Initialization of constant rotation matrices and transition vectors for frame to frame transformations
1655  // 1) Rotation matrix and transition vector to transform from world ENU frame to world NED frame
1656  Rwnedwenu.m[0] = 0; Rwnedwenu.m[1] = 1; Rwnedwenu.m[2] = 0;
1657  Rwnedwenu.m[3] = 1; Rwnedwenu.m[4] = 0; Rwnedwenu.m[5] = 0;
1658  Rwnedwenu.m[6] = 0; Rwnedwenu.m[7] = 0; Rwnedwenu.m[8] = -1;
1659  VNEDwenu.x = 0;
1660  VNEDwenu.y = 0;
1661  VNEDwenu.z = 0;
1662  // 2) Rotation matrix and transition vector to transform from robot frame to camera frame
1663  Rcr.m[0] = 0; Rcr.m[1] = 1; Rcr.m[2] = 0;
1664  Rcr.m[3] = 0; Rcr.m[4] = 0; Rcr.m[5] = 1;
1665  Rcr.m[6] = 1; Rcr.m[7] = 0; Rcr.m[8] = 0;
1666  VCr.x = 0;// i.e. the camera is x meters away from the robot center
1667  VCr.y = 0;
1668  VCr.z = 0;
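  /* Worked example (illustrative): with the Rwnedwenu above, an ENU point
   * (E, N, U) = (1.0, 2.0, 3.0) maps to the NED point (N, E, -U) = (2.0, 1.0, -3.0),
   * i.e. the matrix simply swaps x and y and negates z. Likewise, Rcr maps a robot vector
   * (x = depth, y = left/right, z = altitude) onto camera axes as (x_c, y_c, z_c) = (y_r, z_r, x_r),
   * matching the frame definitions listed further above. */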
1669 
1670  // Initializing goal vector in the world coordinate system (set in ENU, then converted to NED)
1671  VGOALwenu.x = WaypointX(WP_GOAL);
1672  VGOALwenu.y = WaypointY(WP_GOAL);
1673  VGOALwenu.z = WaypointAlt(WP_GOAL);
1675 
1676 
1677  // Initializing start vector in the world coordinate system (set in ENU, then converted to NED)
1678  VSTARTwenu.x = WaypointX(WP_START);
1679  VSTARTwenu.y = WaypointY(WP_START);
1680  VSTARTwenu.z = WaypointAlt(WP_START);
1682 
1683 
1684  // Calculating principal points of normal image and cropped image
1685  c_img.y = img_dims.h / 2;
1686  c_img.x = img_dims.w / 2;
1688  &img_cropped_info); // Calculates principal points for cropped image, considering the original dimensions
1689 
1690 
1691  // Initializing structuring element sizes
1692  SE_opening_OCV = 13; // SE size for the opening operation
1693  SE_closing_OCV = 13; // SE size for the closing operation
1695  51;//25;//51;//301;// SE size for the first dilation operation (decides where edges are detected; increase to increase the drone safety zone. NOTE: this functionality should be replaced with C-space expansion)
1697  11; // SE size for the second dilation operation (see state 3 "WEDGEBUG_START" and state 5 "POSITION_EDGE")
1698  SE_sobel_OCV = 5; // SE size for the sobel operation, to detect edges
1699  SE_erosion_OCV =
1700  11; // SE size for the erosion operation (see state 3 "WEDGEBUG_START" and state 5 "POSITION_EDGE"; it is needed to "drag" the depth of the foreground objects over the edges detected)
1701 
1702  // Setting thresholds - non-calculated
1704  700;
1705  threshold_depth_of_edges = 770;
1707  5000; // Edges with a gradient magnitude above this value are detected and given the value 127; otherwise they are given the value zero.
1708  threshold_distance_to_goal = 0.25; // Below this threshold (m), the goal is considered reached
1710  1.0;//Below this threshold (m), the goal is considered reached in DIRECT_CONTROL mode
1711  threshold_distance_to_angle = 0.0004; // Below this threshold (radians), the angle/heading is considered reached
1712 
1713  // Setting thresholds - calculated
1714  //threshold_median_disparity = depth_to_disp((threshold_median_depth / 100.00), b, f); //8// Above this median disparity, an obstacle is considered to block the way. >60 = close than 35cm
1715  //threshold_median_depth = (uint16_t) disp_to_depth(threshold_median_disparity, b, f) * 100;
1716  //threshold_disparity_of_edges = depth_to_disp((threshold_depth_of_edges / 100.00), b, f); //5// Above this underlying disparity value, edges are considered eligible for detection
1717 
1718  // Initializing confidence parameters
1719  obstacle_confidence = 0; // This is the confidence that an obstacle was spotted
1720  free_path_confidence = 0; // This is the confidence that no obstacle was spotted
1721  position_confidence = 0; // This is the confidence that the desired position was reached
1722  heading_confidence = 0; // This is the confidence that the desired heading is reached
1723  //edge_found_micro_confidence = 0; // This is the confidence that an edge was found
1724  edge_found_macro_confidence = 0; // This is the confidence that an edge was found
1725  no_edge_found_confidence = 0; // This is the confidence that no edge was found
1726  max_obstacle_confidence = 10; // This is the max confidence that an obstacle was spotted
1727  max_free_path_confidence = 5; // This is the max confidence that an obstacle was not spotted
1728  max_position_confidence = 30; // This is the max confidence that a specific position was reached
1729  max_heading_confidence = 5; // This is the max confidence that a specific heading was reached
1730  max_edge_found_micro_confidence = 1;//50 // This is the max confidence that edges were found
1732  1; // This is the max confidence that edges were found (can be higher if angular change is slower or the speed of the periodic function is increased)
1733  max_no_edge_found_confidence = 10; // This is the max confidence that no edges were found
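  /* Illustrative sketch of the confidence/hysteresis pattern used throughout the state
   * machine below (the exact condition and counters differ per state; this is not a quote):
   *
   *   if (<condition observed this cycle>) { obstacle_confidence++; }
   *   else                                 { obstacle_confidence = 0; }
   *   if (obstacle_confidence >= max_obstacle_confidence) { is_obstacle_detected_flag = 1; obstacle_confidence = 0; }
   *
   * A flag therefore only flips after the same observation has been made for several
   * consecutive cycles of the periodic function, which filters out single-frame noise. */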
1734 
1735  // Initializing boolean flags
1736  is_start_reached_flag = 0; // Set to 1 if start position is reached, 0 otherwise
1737  is_setpoint_reached_flag = 0; // Set to 1 if setpoint is reached, 0 otherwise
1738  is_obstacle_detected_flag = 0; // Set to 1 if obstacle is detected, 0 otherwise
1739  is_path_free_flag = 0; // Set to 1 if no obstacle is detected, 0 otherwise
1740  is_heading_reached_flag = 0; // Set to 1 if heading is reached, 0 otherwise
1741  is_edge_found_macro_flag = 0; // Set to 1 if best edge (according to macro confidence) was found, 0 otherwise
1742  is_edge_found_micro_flag = 0; // Set to 1 if best edge (according to micro confidence) was found, 0 otherwise
1743  is_no_edge_found_flag = 0; // Set to 1 if no edge was identified, 0 otherwise
1744  is_state_changed_flag = 0; // Set to 1 if state was changed, 0 otherwise
1746  initial_heading.is_left_reached_flag = 0; // The scan has not reached the left maximum angle yet
1747  initial_heading.is_right_reached_flag = 0;// The scan has not reached the right maximum angle yet
1748 
1751 
1752  save_images_flag = 1; // For report: Flag to indicate if images should be saved
1753 
1754 
1755 
1756  // Initializing area over which edges are searched in
1757 
1758  // p3DWedgeBug:
1759  edge_search_area.y = 0;
1761  edge_search_area.x = 0;
1763 
1764 // // p2DWedgeBug:
1765 // edge_search_area.y = 216/2;
1766 // edge_search_area.h = 1;
1767 // edge_search_area.x = 0;
1768 // edge_search_area.w = img_disparity_int8_cropped.w;
1769 
1770 
1771  // Initializing Edge scan structure
1772  initial_heading.initiated = 0; // 0 = it can be overwritten
1773 
1774 
1775 
1776  // Initializing debugging options
1777 
1778  allow_state_change_MOVE_TO_GOAL = 1; // Allows state change from within state "MOVE_TO_GOAL"
1779  allow_state_change_POSITION_GOAL = 1; // Allows state change from within state "POSITION_GOAL"
1780  allow_state_change_WEDGEBUG_START = 1; // Allows state change from within state "WEDGEBUG_START"
1781  allow_state_change_MOVE_TO_EDGE = 1; // Allows state change from within state "MOVE_TO_EDGE"
1782  allow_state_change_POSITION_EDGE = 1; // Allows state change from within state "POSITION_EDGE"
1783  allow_state_change_EDGE_SCAN = 1; // Allows state change from within state "EDGE_SCAN"
1784 
1785  // Other initializations
1786  previous_state = 0; // Variable for state machine memory
1787  previous_mode = 0;
1789  VSTARTwned.x; // Initializing a vector to hold the current position, which is needed for calculating the distance traveled
1793  0; // Clock to measure total time (clock cycles) it took for the robot to fly from start to goal
1794  clock_total_time_current = 0; // Clock to hold time measured at start of the current cycle of the periodic function
1795  clock_total_time_previous = 0; // Clock to hold time measured at start of the previous cycle of the periodic function
1796  distance_traveled = 0; // Variable to hold the distance traveled of the robot (since start and up to the goal)
1797  number_of_states = NUMBER_OF_STATES; // Variable to save the total number of states used in the finite state machine
1799  99999; // Variable to hold distance from robot to edge to goal (used in the EDGE_SCAN (6) state) - initialized with an unreasonably high number
1801  clock_FSM = 0;
1802  heat_map_type = 2; // Heat map used when saving image
1803 
1804  /*
1805  * Heat maps:
1806  * 0 = COLORMAP_AUTUMN
1807  * 1 = COLORMAP_BONE
1808  * 2 = COLORMAP_JET
1809  * 3 = COLORMAP_WINTER
1810  * 4 = COLORMAP_RAINBOW
1811  * 5 = COLORMAP_OCEAN
1812  * 6 = COLORMAP_SUMMER
1813  * 7 = COLORMAP_SPRING
1814  * 8 = COLORMAP_COOL
1815  * 9 = COLORMAP_HSV
1816  * 10 = COLORMAP_PINK
1817  * 11 = COLORMAP_HOT
1818  */
1819 
1820  /*
1821  enum navigation_state {
1822  MOVE_TO_GOAL = 1,
1823  POSITION_GOAL = 2,
1824  WEDGEBUG_START = 3,
1825  MOVE_TO_EDGE = 4,
1826  POSITION_EDGE = 5,
1827  EDGE_SCAN = 6
1828 
1829  };*/
1830 
1831 }
1832 
1833 
1834 
1835 
1836 
1837 void wedgebug_periodic()
1838 {
1839  // your periodic code here.
1840  // freq = 15.0 Hz
1841  //printf("Wedgebug periodic function was called\n");
1842 
1843 
1844  /*
1845  enum control_mode_state {
1846  DIRECT_CONTROL = 1,
1847  AUTONOMOUS_GUIDED = 2,
1848  AUTONOMOUS_NAV = 3
1849  };
1850  */
1851 
1852  // Setting control mode state (needed as a switch-case statement cannot use an OR statement so that
1853  // AP_MODE_ATTITUDE_DIRECT and AP_MODE_ATTITUDE_Z_HOLD lead to the same outcome. Hence I use my
1854  // own states here)
1855  switch (autopilot_get_mode()) {
1858  case (AP_MODE_GUIDED): {current_mode = AUTONOMOUS_GUIDED;} break;
1859  case (AP_MODE_NAV): {current_mode = AUTONOMOUS_NAV;} break;
1860  default: {printf("Unsupported control mode");} break;
1861  }
1862 
1863  printf("threshold_median_depth (m) = %f\n", threshold_median_depth / 100.00);
1864 
1865  // Debugging - setting default state
1866  //set_state(MOVE_TO_GOAL ,1);
1867  //printf("Current control mode %d\n", current_mode);
1868  //printf("Current state %d\n", current_state);
1869 
1870 
1871 
1872  // Cycle-dependent initializations
1873  //Initialization of dynamic rotation matrices and transition vectors for frame to frame transformations
1874  // Rotation matrix and transition vector to transform from world ned frame to robot frame
1879 
1880  // Initialization of robot heading
1882 
1883  // Initialization of robot location/orientation-dependent variables
1884  // Converting goal world NED coordinates into Camera coordinates
1886  Va_to_Vb(&VGOALc, &VGOALr, &Rcr, &VCr);
1887 
1888 
1889  // Checking if the mode was changed; if yes, then all flags are reset and the is_mode_changed_flag
1890  // is set to 1 for this cycle only. Else, the is_mode_changed_flag is set to 0 again.
1891 
1892  if (current_mode != previous_mode) {
1893  // Setting flag signifying that the state was changed
1895 
1896  } else if (is_mode_changed_flag == 1) {
1898  }
1899 
1900  if (is_mode_changed_flag == 1) {printf("Mode was changed!!!!!!!!!!!!!!!!!!!1\n");}
1901  printf("is_mode_changed_flag = %d\n", is_mode_changed_flag);
1902 
1904 
1905 
1906 
1907 
1908  /*
1909  enum control_mode_state {
1910  DIRECT_CONTROL = 1,
1911  AUTONOMOUS_GUIDED = 2,
1912  AUTONOMOUS_NAV = 3
1913  };
1914  */
1915 
1916  /*
1917  enum navigation_state {
1918  MOVE_TO_GOAL = 1,
1919  POSITION_GOAL = 2,
1920  WEDGEBUG_START = 3,
1921  MOVE_TO_EDGE = 4,
1922  POSITION_EDGE = 5,
1923  EDGE_SCAN = 6
1924 
1925  };*/
1926  // Finite state machine - Only runs in guided mode
1927  //if ((autopilot_get_mode() == AP_MODE_GUIDED)) // If AP_MODE_GUIDED block - Start
1928 
1929  // Switch-case statement for current_mode
1930  switch (current_mode) { // Control mode state machine - Start
1931  // State 1 ################################################################################################################################################################### State 1: DIRECT_CONTROL
1932  case DIRECT_CONTROL: {
1933  printf("DIRECT_CONTROL = %d\n", DIRECT_CONTROL);
1934 
1935  // If the mode was just started, then we initialize the FSM with MOVE_TO_GOAL (1)
1936  if (is_mode_changed_flag) {
1937  set_state(MOVE_TO_GOAL, 1);
1938  }
1939 
1940  // We do not care about the height of the drone when measuring distance to goal in the DIRECT_CONTROL mode
1941  // So we create this pseudo 2d vector where the z coordinates are the same as the goal. This vector is used
1942  // to check if robot is close to goal
1943  struct FloatVect3 VR2dwned = {.x = VRwned.x, .y = VRwned.y, .z = VGOALwned.z};
1944 
1945 
1946  // Background processes - Includes image processing for use
1947  // Turn on for debugging
1948  //background_processes(save_images_flag);
1950 
1951 
1952  switch (current_state) { // Finite state machine - Start
1953  // State 1 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 1: MOVE_TO_GOAL
1954  case MOVE_TO_GOAL: {
1955  printf("MOVE_TO_GOAL = %d\n", MOVE_TO_GOAL);
1956  //median_disparity_in_front = median_disparity_to_point(&c_img_cropped, &img_disparity_int8_cropped, &median_kernel);
1957  //In case disparity is 0 (infinite distance or error) we set it to one disparity
1958  // above the threshold, as the likelihood that the object is too close is large (as opposed to it being infinitely far away)
1959  //printf("median_disparity_in_front = %d\n", median_disparity_in_front);
1960  //printf("median_depth_in_front = %f\n", disp_to_depth(median_disparity_in_front, b, f));
1961  //float depth = disp_to_depth(median_disparity_in_front, b, f); // median_depth_in_front / 100.00
1962 
1963 
1965  float depth = median_depth_in_front / 100.00;
1966  printf("median_depth_in_front = %f\n", depth);
1967  printf("depth to goal = %f\n", VGOALc.z);
1968 
1969 
1970  if ((median_depth_in_front < threshold_median_depth) && (depth < VGOALc.z)) {
1971  printf("Obstacle is in front of goal\n");
1972  } else {
1973  printf("The path to the goal is free!\n");
1974  }
1975 
1976 
1980  &VGOALc,//target_point,
1986 
1987 
1988 // // c. Checking if edges are located
1989 // // Running function to detect and save edge
1990 // is_edge_found_micro_flag =
1991 // find_best_edge_coordinates(
1992 // &VEDGECOORDINATESc,
1993 // &VGOALc,//target_point,
1994 // &img_edges_int8_cropped,
1995 // &img_middle_int8_cropped,
1996 // &edge_search_area,
1997 // threshold_disparity_of_edges,
1998 // max_edge_found_micro_confidence);
1999 //
2000 
2001 
2002 
2003  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2004 
2005 
2007  printf("Goal is reached\n");
2009  } else {
2010  // If this is the first cycle of this mode, then
2011  if (is_mode_changed_flag) {
2012  clock_total_time_current = clock();
2013  }
2014 
2015  // ############ Metric 2 - Distance traveled (total)
2020 
2021  // If the Goal is reached, set is_setpoint_reached_flag to 1 and record time
2025 
2026  }
2027  }
2028  } break;
2029 
2030 
2031  // State 2 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 2: POSITION_GOAL
2032  case POSITION_GOAL: {
2033  printf("POSITION_GOAL = %d\n", POSITION_GOAL);
2034  printf("Total time to reach goal = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2035  printf("Total distance_traveled = %f\n", distance_traveled);
2036 
2037  } break;
2038 
2039 
2040  // State 0 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 0
2041  default: {
2042  printf("default = %d\n", 0);
2043  }
2044  } // Finite state machine - End
2045 
2046 
2047  printf("Time elapsed since start = %f\n", (((double)clock()) - ((double)clock_total_time_current)) / CLOCKS_PER_SEC);
2048  printf("distance_traveled = %f\n", distance_traveled);
2049 
2050  } break; // DIRECT_CONTROL - End
2051 
2052 
2053  // State 2 ################################################################################################################################################################### State 2: AUTONOMOUS_GUIDED
2054  case AUTONOMOUS_GUIDED: {
2055  printf("AUTONOMOUS_GUIDED = %d\n", AUTONOMOUS_GUIDED);
2056 
2057  // Checking if the state was changed; if yes, then all flags are reset and the is_state_changed_flag
2058  // is set to 1 for this cycle only. Else, the is_state_changed_flag is set to 0 again.
2059  if (current_state != previous_state) {
2060  // Setting flag signifying that the state was changed
2062  // Reset flags
2063  is_start_reached_flag = 0; // Set to 1 if start position is reached, 0 otherwise
2064  is_setpoint_reached_flag = 0; // Set to 1 if setpoint is reached, 0 otherwise
2065  is_obstacle_detected_flag = 0; // Set to 1 if obstacle is detected, 0 otherwise
2066  is_path_free_flag = 0; // Set to 1 if no obstacle is detected, 0 otherwise
2067  is_heading_reached_flag = 0; // Set to 1 if heading is reached, 0 otherwise
2068  is_edge_found_macro_flag = 0; // Set to 1 if best edge (according to macro confidence) was found, 0 otherwise
2069  is_edge_found_micro_flag = 0; // Set to 1 if best edge (according to micro confidence) was found, 0 otherwise
2070  is_no_edge_found_flag = 0; // Set to 1 if no edge was identified, 0 otherwise
2071  initial_heading.initiated = 0; // 0 = it can be overwritten
2072  initial_heading.is_left_reached_flag = 0;// The scan has not reached the left maximum angle yet
2073  initial_heading.is_right_reached_flag = 0;// The scan has not reached the right maximum angle yet
2074  distance_robot_edge_goal = 99999; //
2075  } else if (is_state_changed_flag != 0) {
2077  }
2078 
2079  // Background processes only happen if the current state is not POSITION_GOAL
2080  if (current_state != POSITION_GOAL) {
2081  // ############ Metric 1 - Recording current time
2082  clock_total_time_current = clock();
2083  //printf("clock_total_time_current = %f\n", (double)clock_total_time_current);
2084  // In case we are in the position start state and the state has changed, initialize clock_total_time_previous
2086  printf("Metric 1 was started\n");
2087  //printf("clock_total_time_current set = %f\n", (double)clock_total_time_current);
2089  }
2090  //Else check time difference to previous cycle and add to clock_total_time
2091  else {
2094  //printf("clock_total_time_previous = %f\n", (double)clock_total_time_previous);
2095  }
2096 
2097  // ############ Metric 2 - Distance traveled (total)
2099 
2103 
2104  // ############ Metric 3 - Runtime average of background processes (see below) - Start:
2105  clock_t time; // Creating variable to hold time (number of cycles)
2106  time = clock(); // Saving current time, way below it is used to calculate time spent in a cycle
2107  counter_cycles++; // Counting how many times a state was activated (needed for average calculation)
2108 
2109  // Background processes - Includes image processing for use
2110  //background_processes(save_images_flag);
2112 
2113  // ############ Metric 3 - Runtime average of background processes (see below) - End:
2115 
2116  // ############ Metric 4 - Runtime average per state - Start:
2117  clock_FSM = clock(); // Saving current time, way below it is used to calculate time spent in a cycle
2118  counter_state[current_state]++; // Counting how many times a state was activated (needed for average calculation). This is done here as state may change in FSM
2119  }
2120 
2121  // Initializing previous_state variable for next cycle
2122  // This happens here and not above as the metric above depends on the previous state
2124 
2125  switch (current_state) { // Finite state machine - Start
2126  // State 1 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 1: MOVE_TO_GOAL
2127  case MOVE_TO_GOAL: {
2128  printf("MOVE_TO_GOAL = %d\n", MOVE_TO_GOAL);
2129  // 1. Moving robot towards the goal
2130  // Checking if goal is reached, if not continue to move
2132  printf("Goal is reached\n");
2134 
2135  }
2136  // Else, move to goal and check confidence that goal has been reached.
2137  else {
2138  // Sets setpoint to goal position and orientates drones towards the goal as well
2140 
2141 
2142  // If start appears to be reached increase confidence
2146  }
2147 
2148  // If the position_confidence is high enough, set is_start_reached_flag to 1 and reset position_confidence
2151  position_confidence = 0;
2152  }
2153  }
2154 
2155 
2156  // 2. Check if obstacle is in way, when one is detected change state (dependent on negative flag from 1)
2157  // If the setpoint has not been reached, keep looking for obstacles
2158  if (!is_setpoint_reached_flag) {
2159  // Checking if obstacle is in way, if not continue checking for it
2161  printf("Object detected!!!!!!!!\n");
2162  // 1. Setting setpoint to current location
2165  }
2166  // Else, check confidence that obstacle is there
2167  else { // This happens continuously, as long as the state is active and the goal has not been reached. It stops when the flag has been set below.
2168  // Calculate median depth in front
2170  float depth = median_depth_in_front / 100.00;
2171  printf("median_depth_in_front = %f\n", depth);
2172  printf("depth to goal = %f\n", VGOALc.z);
2173 
2174 
2175  // If obstacle appears to be detected AND its in front of goal point, increase obstacle_confidence
2177  && (depth < VGOALc.z)) { // NOTE. The first logical statement is in centimeters and the second in meters
2178  printf("Increasing confidence\n");
2181  printf("obstacle_confidence = %d\n", obstacle_confidence);
2182  printf("max_obstacle_confidence = %d\n", max_obstacle_confidence);
2183  } else {
2184  obstacle_confidence = 0;
2185  }
2186  // If the obstacle_confidence is high enough, set is_obstacle_detected_flag to 1 and reset obstacle_confidence
2189  obstacle_confidence = 0;
2190  // Setting holding point so that when state changes to wedgebug the drone stays at one spot, when looking for edges
2194  }
2195  }
2196  }
2197 
2198  printf("position_confidence = %d\n", position_confidence);
2199  printf("obstacle_confidence = %d\n", obstacle_confidence);
2200  } break;
2201 
2202 
2203  // State 2 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 2: POSITION_GOAL
2204  case POSITION_GOAL: {
2205  printf("POSITION_GOAL = %d\n", POSITION_GOAL);
2206  // Since the drone is at the goal we will switch back to the NAV mode
2207  //autopilot_mode_auto2 = AP_MODE_NAV;
2208  //autopilot_static_set_mode(AP_MODE_NAV);
2209 
2210  printf("Total time to reach goal = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2211  printf("Total distance_traveled = %f\n", distance_traveled);
2212  printf("Average runtime of background processes = %f\n",
2213  (clock_background_processes / CLOCKS_PER_SEC / counter_cycles));
2214 
2215  for (uint8_t i = 0; i < number_of_states; i++) {
2216  printf("Average runtime of state %d = %f\n", i, (time_state[i] / CLOCKS_PER_SEC / counter_state[i]));
2217  }
2218  } break;
2219 
2220 
2221  // State 3 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 3: WEDGEBUG_START
2222  case WEDGEBUG_START: {
2223  printf("WEDGEBUG_START = %d\n", WEDGEBUG_START);
2224  /* Details:
2225  * 1) The robot checks if the holding point was reached, at which it first detected that an obstacle was in its way
2226  * 2) If that holding point is reached, the robot then scans for any edges.
2227  *
2228  */
2229 
2230  // 1. Move robot towards holding point
2231  // Checking if holding point is reached, if not continue to move
2233  printf("Holding point is reached\n");
2234  }
2235  // Else, move to holding point and check confidence that holding point has been reached.
2236  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below.
2237 
2238  // Sets setpoint to goal position and orientates drones towards the goal as well
2240  heading_towards_waypoint(WP_GOAL));
2241 
2242  // If holding point appears to be reached increase confidence
2246  }
2247 
2248  // If the position_confidence is high enough, set is_setpoint_reached_flag to 1 and reset position_confidence
2251  position_confidence = 0;
2252  }
2253  }
2254 
2255 
2256  // 2. Scan for edges and see if any exist or not, then change state (dependent on flag from 1).
2258  // a. Stopping drone movement (a second time, just to make sure)
2260 
2261  // b. Checking if edges are located
2262  // Running function to detect and save edge
2266  &VGOALc,//target_point,
2272 
2273 
2274  //printf("was_edge_found?: %d\n", was_edge_found);
2275 
2276 
2277  // d. In this if-else statement we check if an edge was detected. If no edge is detected (by find_best_edge_coordinates) no_edge_found_confidence is
2278  // increased, whereas if an edge is found the edge_found_macro_confidence is increased. If one of the confidences reaches the
2279  // maximum the respective flag is turned on.
2280  // If an edge was found
2283  }
2284  // Else if no edge is detected
2285  else if (is_no_edge_found_flag) {
2286  printf("Edge not found!!!!!!!!!!!!!!!!!\n");
2288  }
2289  // Else, check if micro edge has been detected (by find_best_edge_coordinates) or not and increase respective confidences.
2290  // Also, check if macro confidence of having detected an edge is reached or whether maximum confidence of having no edges
2291  // detected is reached. If any are reached, respective flags are set
2292  else {
2293  // This if statement increase the confidence
2294  // If edge is detected in find_best_edge_coordinates, increase edge_found_macro_confidence
2298  }
2299  // If no edge is detected in find_best_edge_coordinates, increase no_edge_found_confidence
2300  else {
2303  }
2304 
2305  // If the edge_found_macro_confidence is high enough, set is_edge_found_macro_flag to 1 and reset edge_found_macro_confidence and no_edge_found_confidence
2310 
2311  // Since we are sure that a suitable edge has been found, the camera coordinates are transformed to
2312  // world coordinates and saved in a global variable, for use in the subsequent state to make the drone move
2313  // towards this point
2317  //printf("Optimal edge found\n");
2318  //printf("Camera coordinates of edge: [%f, %f, %f]\n", VEDGECOORDINATESc.x, VEDGECOORDINATESc.y, VEDGECOORDINATESc.z);
2319  // Saving best edge in global variable. // Note. VPBESTEDGECOORDINATESwned is used in the MOVE_TO_EDGE (4) and POSITION_EDGE (5) states
2323 
2324  // Making snapshot of image with edge coordinates highlighted. Comment out if not needed
2325  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2326 
2327  }
2328  // If the no_edge_found_confidence is high enough, set is_no_edge_found_flag to 1 and reset edge_found_macro_confidence and no_edge_found_confidence
2333  }
2334  }
2335  printf("Edge found confidence = %d\n", edge_found_macro_confidence);
2336  printf("No Edge found confidence = %d\n", no_edge_found_confidence);
2337  }
2338  } break;
2339 
2340 
2341  // State 4 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 4: MOVE_TO_EDGE
2342  case MOVE_TO_EDGE: {
2343  printf("MOVE_TO_EDGE = %d\n", MOVE_TO_EDGE);
2344 
2345  //printf("Camera coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESc.x, VEDGECOORDINATESc.y, VEDGECOORDINATESc.z);
2346  //printf("Robot coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESr.x, VEDGECOORDINATESr.y, VEDGECOORDINATESr.z);
2347  printf("World NED coordinates of edge setpoint = [%f, %f, %f]\n", VPBESTEDGECOORDINATESwned.x,
2349  //printf("World ENU coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESwenu.x, VEDGECOORDINATESwenu.y, VEDGECOORDINATESwenu.z);
2350 
2351 
2352  // 1. Orientate robot towards object edge detected
2353  // If correct angle is reached, nothing happens
2355  printf("Heading is reached\n");
2356  }
2357  // Else, adjust heading and check confidence that heading is reached
2358  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below
2359  // Set desired heading
2361  &VPBESTEDGECOORDINATESwned));// heading_towards_waypoint(WP_GOAL));
2362 
2363  // If heading appears to be reached increase confidence
2368  }
2369 
2370  // If the heading_confidence is high enough, set is_heading_reached_flag to 1 and reset heading_confidence
2373  heading_confidence = 0;
2374  }
2375  }
2376 
2377 
2378  // 2.Move robot to edge detected (dependent on flag from 1)
2379  // If the robot faces the right direction, go to the object edge coordinates (NEW) detected
2381  // If edge setpoint is reached, stay at it
2383  printf("Edge is reached\n");
2385  heading_towards_waypoint(WP_GOAL));
2386  }
2387  // Else, move to edge setpoint and check confidence that edge setpoint is reached
2388  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below.
2389 
2390  // Sets setpoint to edge position and orientates robot towards it as well
2393 
2394  // If edge appears to be reached increase confidence
2396  threshold_distance_to_goal)) { // && !is_setpoint_reached_flag)
2399  }
2400 
2401  // If the position_confidence is high enough, set is_setpoint_reached_flag to 1 and reset position_confidence
2404  position_confidence = 0;
2405  }
2406  }
2407  }
2408 
2409 
2410 
2411  // 3. Set next state (dependent on 1 and 2)
2412  // If the robot faces the correct angle AND the edge is then reached, the state is changed to "MOVE_TO_GOAL"
2414  printf("Position and Heading are reached\n");
2416 
2417  }
2418  printf("position_confidence = %d\n", position_confidence);
2419  printf("heading_confidence = %d\n", heading_confidence);
2420  } break;
2421 
2422 
2423  // State 5 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 5: POSITION_EDGE
2424  case POSITION_EDGE: {
2425  printf("POSITION_EDGE = %d\n", POSITION_EDGE);
2426 
2427  // This ensures that the robot stays on the edge at all times
2430 
2431  // 1. Orientates robot towards the final goal
2432  // If robot faces goal, robot stays on current edge
2434  printf("Heading is reached\n");
2436  }
2437  // Else, adjust heading and check confidence that heading is reached
2438  else { // This happens continuously, as long as the state is active. It stops when the flag has been set.
2439  // Set desired heading
2441 
2442  // If heading appears to be reached increase confidence
2446  }
2447 
2448  // If the heading_confidence is high enough, set is_heading_reached_flag to 1 and reset heading_confidence
2451  heading_confidence = 0;
2452  }
2453  }
2454 
2455 
2456  // 2. Check if object is blocking the path or not and set next state (dependent on flag from 1)
2457  // If the robot faces the right direction, check if obstacle exists or not and change state
2459  // In this if-else statement we check if an obstacle is blocking the path or not. If no obstacle is blocking the path the free_path_confidence is
2460  // increased, whereas if an obstacle is blocking the path the obstacle_confidence is increased. If one of the confidences reaches the
2461  // maximum the respective flag is turned on. These statements only execute if the robot faces the goal (see above)
2462  // If obstacle is in way
2464  printf("Object detected!!!!!!!!\n");
2465  // Setting new holding point for WEDGEBUG_START state
2470  }
2471  // Else if the path is clear
2472  else if (is_path_free_flag) {
2473  printf("Path is free\n");
2475  }
2476  // Else, check for obstacles and free path and check confidence that an obstacle is there or that the path is free
2477  else {
2478  // Calculate median depth in front
2480  &median_kernel16bit); // Median depth in centimeters (cm)
2481  printf("Median_depth_in_front (m) = %f\n", (median_depth_in_front / 100.00)); // Median depth in meters (m)
2482  printf("Depth to goal = %f\n", VGOALc.z); // Depth in meters (m)
2483 
2484  // This if-else statement increase the confidence
2485  // If obstacle appears to be detected AND its in front of goal point, increase obstacle_confidence
2487  && ((median_depth_in_front / 100.00) <
2488  VGOALc.z)) { // NOTE. The first logical statement is in centimeters and the second in meters
2492 
2493  }
2494  // If obstacle appears not detected, increase free_path_confidence
2495  else {
2498  obstacle_confidence = 0;
2499  }
2500  // If the obstacle_confidence is high enough, set is_obstacle_detected_flag to 1 and reset obstacle_confidence and free_path_confidence
2503  obstacle_confidence = 0;
2505  }
2506  // If the free_path_confidence is high enough, set is_path_free_flag to 1 and reset free_path_confidence and obstacle_confidence
2508  is_path_free_flag = 1;
2510  obstacle_confidence = 0;
2511  }
2512  }
2513 
2514  }
2515  printf("Obstacle confidence = %d\n", obstacle_confidence);
2516  printf("Free path confidence = %d\n", free_path_confidence);
2517  } break;
2518 
2519 
2520  // State 6 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 6: EDGE_SCAN
2521  case EDGE_SCAN: {
2522  printf("EDGE_SCAN = %d\n", EDGE_SCAN);
2523 
2524  // 1. The robot is tasked to stay at the holding point for the entire duration of this state
2525  // Making drone hover, so that it does not drift from its current position
2527 
2528  // 2. Checking if edges are located
2529  // Running function to detect and save edge
2533  &VGOALc,//target_point,
2539 
2540 
2541  // 3. The identified best edge is compared to the previously identified best edge, and replaced if the total distance (robot-edge-goal)
2542  // has decreased (dependent on flag from 2)
2543  // If edge was found
2545  // First we convert edge camera coordinates into world coordinates (ned and enu)
2546  //VEDGECOORDINATESc.z = VEDGECOORDINATESc.z - 0.25;// add this to make sure drone does not exactly stop on the edge point identified (it might crash otherwise)
2550 
2551  // Second, we increase confidence that an edge was spotted
2553 
2554  // Third, we calculate the distance of the edge currently spotted, if distance is smaller
2555  // than previous distance VPBESTEDGECOORDINATESwned is updated with the coordinates
2556  // Note. VPBESTEDGECOORDINATESwned is used in the MOVE_TO_EDGE (4) and POSITION_EDGE (5) states
2557  // Calculating Euclidean distance (N2) - Edge to goal
2558  float f_distance_edge_to_goal = float_vect3_norm_two_points(&VGOALwned, &VEDGECOORDINATESwned);
2559  // Calculating Euclidean distance (N2) - robot to edge
2560  float f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGECOORDINATESwned, &VRwned);
2561  float distance_total = f_distance_robot_to_edge + f_distance_edge_to_goal;
2562  printf("distance_total =%f\n", distance_total);
2563  if (distance_robot_edge_goal > distance_total) {
2564  distance_robot_edge_goal = distance_total;
2568  }
2569  }
2570 
2571  printf("Current minimum distance =%f\n", distance_robot_edge_goal);
2572  printf("is_edge_found_micro_flag = %d\n", is_edge_found_micro_flag);
2573 
2574 
2575  // 4. Initializing current heading parameters (if they have not been initialized before)
2576  if (initial_heading.initiated == 0) {
2579  2); // this way, when the center of the drone is facing left, its FOV does not exceed the maximum angle to the left
2582  2); // this way, when the center of the drone is facing right, its FOV does not exceed the maximum angle to the right
2585  }
2586 
2587 
2588  // 5. The drone turns left to find any edges
2589  // Code for looking left
2590  // If left heading has been reached (i.e. if the flag is activated)
2592  printf("Left heading is/was reached\n");
2593  }
2594  // Else, adjust angle to face more left and check confidence that left heading has been reached
2595  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below
2596  // Set heading to maximum left heading
2598 
2599  // If heading appears to be reached increase confidence
2603  }
2604  // If the heading_confidence is high enough, set initial_heading.is_left_reached_flag to 1 and reset heading_confidence
2607  heading_confidence = 0;
2608  }
2609  }
2610 
2611 
2612 
2613  // 6. The drone turns right to find any edges (dependent on flag from 5)
2614  // Code for looking right - Only runs if robot has previously looked left (initial_heading.heading_max_left = 1)
2616  // If right heading has been reached (i.e. if the flag is activated)
2618  printf("Right heading is/was reached\n");
2619  }
2620  // Else, adjust angle to face more right and check confidence that right heading has been reached
2621  else {
2622  // Set heading to maximum right heading
2624 
2625  // If heading appears to be reached increase confidence
2629  }
2630  // If the heading_confidence is high enough, set initial_heading.is_right_reached_flag to 1 and resets heading_confidence
2633  heading_confidence = 0;
2634  }
2635  }
2636  }
2637 
2638 
2639  // 7. Check if confidence of having found a suitable edge is large enough or not and set next state (dependent on flags from 5 and 6)
2642  printf("Edge has been found\n");
2644  } else if (is_no_edge_found_flag) {
2645  printf("Minimum has been encountered\n");
2646  } else {
2647 
2648  // If edge_found_macro_confidence is >= max_edge_found_macro_confidence then set is_edge_found_macro_flag
2649  // and reset edge_found_macro_confidence
2653  }
2654  // Else, set is_no_edge_found_flag and reset edge_found_macro_confidence
2655  else {
2658  }
2659 
2660  }
2661  }
2662 
2663  printf("heading_confidence = %d\n", heading_confidence);
2664  printf("edge_found_confidence = %d\n", edge_found_macro_confidence);
2665  } break;
2666 
2667  // State 0 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 0
2668  default: {
2669  printf("default = %d\n", 0);
2670  }
2671  break;
2672 
2673  } // Finite state machine - End
2674 
2675 
2676 
2677  // ############ Metric 4 - Runtime average per state - End:
2678  clock_FSM = clock() - clock_FSM; // Calculating time it took for the FSM to finish running
2679  time_state[current_state] += ((double)clock_FSM); // Adding calculated time to total time of current state
2680 
2681  printf("Time elapsed since start = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2682  printf("distance_traveled = %f\n", distance_traveled);
2683  } break; // AUTONOMOUS_GUIDED - End
2684 
2685 
2686  // State 3 ################################################################################################################################################################### State 3: AUTONOMOUS_NAV
2687  case AUTONOMOUS_NAV: {
2688  printf("AUTONOMOUS_NAV = %d\n", AUTONOMOUS_NAV);
2689  // Background processes - Includes image processing for use
2691 
2692 
2694  float depth = median_depth_in_front / 100.00;
2695  printf("median_depth_in_front = %f\n", depth);
2696  printf("depth to goal = %f\n", VGOALc.z);
2697 
2698 
2699  if ((median_depth_in_front < threshold_median_depth) && (depth < VGOALc.z)) {
2700  printf("Obstacle is in front of goal\n");
2701  } else {
2702  printf("The path to the goal is free!\n");
2703  }
2704 
2705 
2709  &VGOALc,//target_point,
2715 
2716 
2717 // // c. Checking if edges are located
2718 // // Running function to detect and save edge
2719 // is_edge_found_micro_flag =
2720 // find_best_edge_coordinates(
2721 // &VEDGECOORDINATESc,
2722 // &VGOALc,//target_point,
2723 // &img_edges_int8_cropped,
2724 // &img_middle_int8_cropped,
2725 // &edge_search_area,
2726 // threshold_disparity_of_edges,
2727 // max_edge_found_micro_confidence);
2728 //
2729 
2730 
2731 
2732  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2733 
2734 
2735 
2736  } break; // AUTONOMOUS_NAV - End
2737 
2738 
2739 
2740  // State 0 ################################################################################################################################################################### State 0
2741  default:
2742  {printf("Unsupported control mode");} break;
2743 
2744  } // Control mode state machine - End
2745 
2746 
2747 
2748 
2749  /*
2750  // printing flags
2751  printf("is_start_reached_flag = %d\n", is_start_reached_flag);
2752  printf("is_setpoint_reached_flag = %d\n", is_setpoint_reached_flag);
2753  printf("is_obstacle_detected_flag = %d\n", is_obstacle_detected_flag);
2754  printf("is_path_free_flag = %d\n", is_path_free_flag);
2755  printf("is_heading_reached_flag = %d\n", is_heading_reached_flag);
2756  printf("is_state_changed_flag = %d\n", is_state_changed_flag);
2757  printf("initial_heading.initiated = %d\n", initial_heading.initiated);
2758  printf("initial_heading.is_left_reached_flag = %d\n", initial_heading.is_left_reached_flag);
2759  printf("initial_heading.is_right_reached_flag = %d\n", initial_heading.is_right_reached_flag);
2760 
2761  */
2762  printf("\n\n");
2763 
2764 
2765 
2766  save_image_gray(&img_left_int8, "/home/dureade/Documents/paparazzi_images/img_left_int8.bmp");
2767  save_image_gray(&img_right_int8, "/home/dureade/Documents/paparazzi_images/img_right_int8.bmp");
2768  save_image_HM(&img_disparity_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_disparity_int8_cropped.bmp",
2769  heat_map_type);
2770  //save_image_gray(&img_left_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_left_int8_cropped.bmp");
2771  save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_intermediate_int8_cropped.bmp",
2772  heat_map_type);
2773  save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped.bmp");
2774 
2775 
2776 
2777  /*
2778  // Size of variables
2779  printf("img_left = %d\n", img_left.buf_size);
2780  printf("img_right = %d\n", img_right.buf_size);
2781  printf("img_left_int8 = %d\n", img_left_int8.buf_size);
2782  printf("img_left_int8_cropped = %d\n", img_left_int8_cropped.buf_size);
2783  printf("img_right_int8 = %d\n", img_right_int8.buf_size);
2784  printf("img_disparity_int8_cropped = %d\n", img_disparity_int8_cropped.buf_size);
2785  printf("img_edges_int8_cropped = %d\n", img_edges_int8_cropped.buf_size);
2786  printf("median_kernel = %d\n", median_kernel.buf_size);
2787  printf("SE_opening_OCV = %lu\n", sizeof(SE_opening_OCV));
2788  printf("VSTARTwenu.x = %lu\n", sizeof(VSTARTwenu.x));
2789  printf("current_state = %lu\n", sizeof(current_state));
2790  printf("current_mode = %lu\n", sizeof(current_mode));
2791  */
2792 
2793 
2794 
2795 
2796 
2797 
2798 
2799 
2800  /*
2801  * Heat maps:
2802  * 0 = COLORMAP_AUTUMN
2803  * 1 = COLORMAP_BONE
2804  * 2 = COLORMAP_JET
2805  * 3 = COLORMAP_WINTER
2806  * 4 = COLORMAP_RAINBOW
2807  * 5 = COLORMAP_OCEAN
2808  * 6 = COLORMAP_SUMMER
2809  * 7 = COLORMAP_SPRING
2810  * 8 = COLORMAP_COOL
2811  * 9 = COLORMAP_HSV
2812  * 10 = COLORMAP_PINK
2813  * 11 = COLORMAP_HOT
2814  */
2815 
2816 
2817 }
2818 
2819 
img_right
struct image_t img_right
Image obtained from left camera (UYVY format)
Definition: wedgebug.c:75
show_image_data
void show_image_data(struct image_t *img)
Definition: wedgebug.c:370
kernel_C1
Definition: wedgebug.h:42
uint16_t
unsigned short uint16_t
Definition: types.h:16
is_obstacle_detected_flag
uint8_t is_obstacle_detected_flag
Set to 1 if setpoint is reached, 0 otherwise.
Definition: wedgebug.c:172
crop_t::h
uint16_t h
height of the cropped area
Definition: image.h:96
img_left_int8
struct image_t img_left_int8
Image obtained from right camera (UYVY format)
Definition: wedgebug.c:76
heading_towards_setpoint_WNED
float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned)
image_create
void image_create(struct image_t *img, uint16_t width, uint16_t height, enum image_type type)
Create a new image.
Definition: image.c:43
ES_angles::heading_initial
float heading_initial
Definition: wedgebug.c:232
show_image_entry
void show_image_entry(struct image_t *img, int entry_position, const char *img_name)
Definition: wedgebug.c:381
stateGetPositionNed_f
static struct NedCoor_f * stateGetPositionNed_f(void)
Get position in local NED coordinates (float).
Definition: state.h:710
VEDGECOORDINATESc
struct FloatVect3 VEDGECOORDINATESc
Declared vector of coordinates of goal in camera coordinate system.
Definition: wedgebug.c:125
image_t::type
enum image_type type
The image type.
Definition: image.h:45
are_setpoint_and_angle_reached
uint8_t are_setpoint_and_angle_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold_setpoint, float target_angle, float current_angle, float threshold_angle)
sobel_OCV
int sobel_OCV(struct image_t *img_input, const struct image_t *img_output, const int kernel_size, const int thr)
Definition: wedgebug_opencv.cpp:380
median_depth_in_front
uint16_t median_depth_in_front
Variable to hold the median disparity in front of the drone. Needed to see if obstacle is there.
Definition: wedgebug.c:262
Vw_to_Vc
void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw, struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
FLOAT_ANGLE_NORMALIZE
#define FLOAT_ANGLE_NORMALIZE(_a)
Definition: pprz_algebra_float.h:99
AP_MODE_ATTITUDE_DIRECT
#define AP_MODE_ATTITUDE_DIRECT
Definition: autopilot_static.h:39
img_size_t::w
uint16_t w
The width.
Definition: image.h:87
c_img
struct point_t c_img
Definition: wedgebug.c:183
max_edge_search_angle
float max_edge_search_angle
Variable for storing the heading of the drone (psi in radians)
Definition: wedgebug.c:259
img_disparity_int16_cropped
struct image_t img_disparity_int16_cropped
Image obtained from the external sobel edge detection function = sobel_OCV.
Definition: wedgebug.c:84
img_middle_int8_cropped
struct image_t img_middle_int8_cropped
Image holding depth values (cm) obtained from the disparity image.
Definition: wedgebug.c:81
background_processes
void background_processes(uint8_t save_images_flag)
K_median_h
uint16_t K_median_h
Width of kernel for the median kernel.
Definition: wedgebug.c:113
VEDGECOORDINATESwned
struct FloatVect3 VEDGECOORDINATESwned
Declared vector of coordinates of "best" edge detected in robot coordinate system.
Definition: wedgebug.c:129
previous_state
uint8_t previous_state
Definition: wedgebug.c:253
median_kernel
struct kernel_C1 median_kernel
Dimensions of the kernel that detect median disparity in front of drone (for obstacle detection)
Definition: wedgebug.c:102
kernel_C1::buf_size
uint32_t buf_size
Size of values of weight buffer and values buffer.
Definition: wedgebug.h:46
dispfixed_to_disp
float dispfixed_to_disp(const int16_t d)
kernel_C1::buf_weights
void * buf_weights
Kernel weight buffer.
Definition: wedgebug.h:47
NedCoor_f::z
float z
in meters
Definition: pprz_geodetic_float.h:66
b
float b
Definition: wedgebug.c:202
copy_right_img_func
static struct image_t * copy_right_img_func(struct image_t *img, uint8_t camera_id)
allow_state_change_POSITION_GOAL
uint8_t allow_state_change_POSITION_GOAL
Definition: wedgebug.c:272
POSITION_GOAL
@ POSITION_GOAL
Definition: wedgebug.c:211
indx1d_b
int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *img_dims)
is_path_free_flag
uint8_t is_path_free_flag
Set to 1 if obstacle is detected, 0 otherwise.
Definition: wedgebug.c:173
save_image_gray
int save_image_gray(struct image_t *img, char *myString)
Definition: wedgebug_opencv.cpp:69
threshold_median_disparity
uint8_t threshold_median_disparity
Above this median disparity, an obstacle is considered to block the way (i.e. the blocking obstacle n...
Definition: wedgebug.c:143
img_size_t::h
uint16_t h
The height.
Definition: image.h:88
WaypointY
#define WaypointY(_wp)
Definition: common_nav.h:46
cv_add_to_device
struct video_listener * cv_add_to_device(struct video_config_t *device, cv_function func, uint16_t fps, uint8_t id)
Definition: cv.c:46
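A minimal sketch of how a stereo module hooks frame callbacks into the vision pipeline with cv_add_to_device; the camera device macros WEDGEBUG_CAMERA_LEFT/RIGHT and the callback bodies are assumptions, only the registration call and FPS defines are documented here:

#include "modules/computer_vision/cv.h"

// Placeholder callbacks: a real module would copy/convert the incoming frame here.
static struct image_t *left_cb(struct image_t *img, uint8_t camera_id)  { (void)camera_id; return img; }
static struct image_t *right_cb(struct image_t *img, uint8_t camera_id) { (void)camera_id; return img; }

void register_stereo_listeners_example(void)
{
  // An FPS of 0 lets the listener run at the camera frame rate; the last argument is the listener id.
  cv_add_to_device(&WEDGEBUG_CAMERA_LEFT,  left_cb,  WEDGEBUG_CAMERA_LEFT_FPS,  0);
  cv_add_to_device(&WEDGEBUG_CAMERA_RIGHT, right_cb, WEDGEBUG_CAMERA_RIGHT_FPS, 1);
}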
AUTONOMOUS_GUIDED
@ AUTONOMOUS_GUIDED
Definition: wedgebug.c:223
wedgebug_periodic
void wedgebug_periodic(void)
max_edge_found_macro_confidence
int16_t max_edge_found_macro_confidence
This is the max confidence that edges (macro-see above) were found.
Definition: wedgebug.c:166
current_mode
enum control_mode_state current_mode
Definition: wedgebug.c:226
MOVE_TO_GOAL
@ MOVE_TO_GOAL
Definition: wedgebug.c:210
stateGetPositionEnu_f
static struct EnuCoor_f * stateGetPositionEnu_f(void)
Get position in local ENU coordinates (float).
Definition: state.h:719
UYVYs_interlacing_V
void UYVYs_interlacing_V(struct image_t *YY, struct image_t *left, struct image_t *right)
clock_total_time
clock_t clock_total_time
Definition: wedgebug.c:246
kernel_C1::buf_values
void * buf_values
Kernel value buffer. These are the values underneath the kernel.
Definition: wedgebug.h:48
crop_t::w
uint16_t w
Width of the cropped area.
Definition: image.h:95
WaypointX
#define WaypointX(_wp)
Definition: common_nav.h:45
stateGetNedToBodyEulers_f
static struct FloatEulers * stateGetNedToBodyEulers_f(void)
Get vehicle body attitude euler angles (float).
Definition: state.h:1143
max_no_edge_found_confidence
int16_t max_no_edge_found_confidence
This is the max confidence that no edges were found.
Definition: wedgebug.c:167
ES_angles::is_left_reached_flag
uint8_t is_left_reached_flag
Definition: wedgebug.c:236
VGOALr
struct FloatVect3 VGOALr
Declared vector of coordinates of goal in robot coordinate system.
Definition: wedgebug.c:123
uint32_t
unsigned long uint32_t
Definition: types.h:18
Va_to_Vb
void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa)
VECT3_DIFF
#define VECT3_DIFF(_c, _a, _b)
Definition: pprz_algebra.h:182
Rwnedwenu
struct FloatRMat Rwnedwenu
Definition: wedgebug.c:192
dilation_OCV
int dilation_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
Definition: wedgebug_opencv.cpp:330
threshold_distance_to_goal_direct
float threshold_distance_to_goal_direct
Below this distance (in meters) it is considered that the robot has reached the goal,...
Definition: wedgebug.c:147
VGOALwenu
struct FloatVect3 VGOALwenu
Declared vector of coordinates of goal in ENU world coordinate system.
Definition: wedgebug.c:121
image_t::w
uint16_t w
Image width.
Definition: image.h:46
max_obstacle_confidence
int16_t max_obstacle_confidence
This is the max confidence that an obstacle was spotted.
Definition: wedgebug.c:161
is_total_timer_on_flag
uint8_t is_total_timer_on_flag
Definition: wedgebug.c:279
AP_MODE_ATTITUDE_Z_HOLD
#define AP_MODE_ATTITUDE_Z_HOLD
Definition: autopilot_static.h:44
indx1d_a
int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img)
allow_state_change_WEDGEBUG_START
uint8_t allow_state_change_WEDGEBUG_START
Definition: wedgebug.c:273
block_size_disparities
int block_size_disparities
Block size used for the block matching (SBM) function.
Definition: wedgebug.c:256
is_heading_reached_flag
uint8_t is_heading_reached_flag
Set to 1 if heading is reached, 0 otherwise.
Definition: wedgebug.c:174
no_edge_found_confidence
int16_t no_edge_found_confidence
This is the confidence that no edge was found.
Definition: wedgebug.c:160
principal_points
void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *img_cropped_info)
threshold_depth_of_edges
uint16_t threshold_depth_of_edges
Below this depth (m) edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far awa...
Definition: wedgebug.c:150
VCr
struct FloatVect3 VCr
Definition: wedgebug.c:199
img_right_int8
struct image_t img_right_int8
Image obtained from right camera, converted into 8bit gray image.
Definition: wedgebug.c:78
pprz_algebra_float.h
Paparazzi floating point algebra.
guidance_h_set_guided_pos
bool guidance_h_set_guided_pos(float x, float y)
Set horizontal position setpoint in GUIDED mode.
Definition: guidance_h.c:709
ES_angles::heading_max_right
float heading_max_right
Definition: wedgebug.c:234
copy_left_img_func
static struct image_t * copy_left_img_func(struct image_t *img, uint8_t camera_id)
img_disparity_int8_cropped
struct image_t img_disparity_int8_cropped
Image obtained after simple block matching.
Definition: wedgebug.c:79
SE_closing_OCV
int SE_closing_OCV
SE size for the closing operation.
Definition: wedgebug.c:107
WEDGEBUG_START
@ WEDGEBUG_START
Definition: wedgebug.c:212
autopilot_guided_goto_ned
bool autopilot_guided_goto_ned(float x, float y, float z, float heading)
Set position and heading setpoints in GUIDED mode.
Definition: autopilot_guided.c:35
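A minimal sketch of commanding a NED position and heading setpoint in GUIDED mode; the numeric setpoint values are placeholders, the calls themselves are the documented API:

#include "firmwares/rotorcraft/autopilot_guided.h"
#include "autopilot.h"

static void goto_goal_example(void)
{
  if (autopilot_get_mode() == AP_MODE_GUIDED) {
    // NED frame: x north, y east, z down (so -1.5 is 1.5 m above the reference), heading in radians.
    autopilot_guided_goto_ned(2.0f, 1.0f, -1.5f, 0.0f);
  }
}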
IMAGE_GRAYSCALE
@ IMAGE_GRAYSCALE
Grayscale image with only the Y part (uint8 per pixel)
Definition: image.h:37
Vi_to_Vc
void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t d, const float b, const uint16_t f)
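The Vi_to_Vc-style helpers rest on the standard pinhole/stereo back-projection; the function below is an illustrative sketch (not the module's implementation), assuming the pixel coordinates are already expressed relative to the principal point:

#include <stdint.h>
#include "math/pprz_algebra_float.h"

// Illustrative back-projection: disparity d [px], baseline b [m], focal length f [px].
// Z = b * f / d, then X and Y are scaled from the (principal-point-centred) pixel coordinates.
static void image_to_camera_example(struct FloatVect3 *Vc, float x_px, float y_px,
                                    uint8_t d, float b, uint16_t f)
{
  float Z = b * (float)f / (float)d;  // depth along the optical axis (d must be non-zero)
  Vc->x = x_px * Z / (float)f;
  Vc->y = y_px * Z / (float)f;
  Vc->z = Z;
}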
kernel_C1::type
enum image_type type
Definition: wedgebug.h:43
navigation_state
navigation_state
Definition: wedgebug.c:209
allow_state_change_MOVE_TO_EDGE
uint8_t allow_state_change_MOVE_TO_EDGE
Definition: wedgebug.c:274
initial_heading
struct ES_angles initial_heading
Definition: wedgebug.c:239
heat_map_type
int heat_map_type
Definition: wedgebug.c:266
image_t::buf_size
uint32_t buf_size
The buffer size.
Definition: image.h:53
max_position_confidence
int16_t max_position_confidence
This is the max confidence that a specific position was reached.
Definition: wedgebug.c:163
is_heading_reached
uint8_t is_heading_reached(float target_angle, float current_angle, float threshold)
median_depth_to_point
uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
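A minimal usage sketch (the wrapper name is hypothetical, and which image buffer holds the depth data at a given pipeline stage is an assumption): sample the median depth in a small window around the cropped principal point, using the kernel and images declared in this file.

// Returns the median value of the 16-bit depth image (cm) in a window
// centred on the cropped principal point.
static uint16_t median_depth_in_front_example(void)
{
  return median_depth_to_point(&c_img_cropped, &img_depth_int16_cropped, &median_kernel16bit);
}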
thresholding_img
void thresholding_img(struct image_t *img, uint8_t threshold)
VECT2_DIFF
#define VECT2_DIFF(_c, _a, _b)
Definition: pprz_algebra.h:92
crop_t
Definition: image.h:92
disp_to_depth_img
void disp_to_depth_img(struct image_t *img8bit_input, struct image_t *img16bit_output)
image_to_grayscale
void image_to_grayscale(struct image_t *input, struct image_t *output)
Convert an image to grayscale.
Definition: image.c:131
image_t::h
uint16_t h
Image height.
Definition: image.h:47
Vi_to_Vc_depth
void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const float depth, const uint16_t f)
img_left_int8_cropped
struct image_t img_left_int8_cropped
Image obtained from left camera, converted into 8bit gray image.
Definition: wedgebug.c:77
AP_MODE_GUIDED
#define AP_MODE_GUIDED
Definition: autopilot_static.h:54
wedgebug_opencv.h
FloatVect2
Definition: pprz_algebra_float.h:49
FloatVect3
Definition: pprz_algebra_float.h:54
float_rmat_transp_vmult
void float_rmat_transp_vmult(struct FloatVect3 *vb, struct FloatRMat *m_b2a, struct FloatVect3 *va)
rotate 3D vector by transposed rotation matrix.
Definition: pprz_algebra_float.c:120
obstacle_confidence
int16_t obstacle_confidence
This is the confidence that an obstacle was spotted.
Definition: wedgebug.c:153
float_rmat_vmult
void float_rmat_vmult(struct FloatVect3 *vb, struct FloatRMat *m_a2b, struct FloatVect3 *va)
rotate 3D vector by rotation matrix.
Definition: pprz_algebra_float.c:110
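A minimal sketch of rotating a vector between the NED world frame and the body frame with these two helpers, assuming the state-interface convention that stateGetNedToBodyRMat_f() returns the NED-to-body rotation:

#include "state.h"
#include "math/pprz_algebra_float.h"

static void rotate_example(void)
{
  struct FloatVect3 v_ned = {1.0f, 0.0f, 0.0f};   // a vector expressed in NED
  struct FloatVect3 v_body, v_ned_again;
  struct FloatRMat *R_ned2body = stateGetNedToBodyRMat_f();

  float_rmat_vmult(&v_body, R_ned2body, &v_ned);               // v_body = R * v_ned
  float_rmat_transp_vmult(&v_ned_again, R_ned2body, &v_body);  // v_ned_again = R^T * v_body (back to NED)
  (void)v_ned_again;
}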
threshold_distance_to_goal_manual
float threshold_distance_to_goal_manual
Definition: wedgebug.c:280
VDISTANCEPOSITIONwned
struct FloatVect3 VDISTANCEPOSITIONwned
Declared vector of coordinates of "best" edge detected in ENU world coordinate system.
Definition: wedgebug.c:133
VEDGECOORDINATESwenu
struct FloatVect3 VEDGECOORDINATESwenu
Declared vector of coordinates of "best" edge detected in NED world coordinate system.
Definition: wedgebug.c:131
AUTONOMOUS_NAV
@ AUTONOMOUS_NAV
Definition: wedgebug.c:222
IMAGE_INT16
@ IMAGE_INT16
An image to hold disparity image data from openCV (int16 per pixel)
Definition: image.h:40
autopilot_guided.h
VGOALwned
struct FloatVect3 VGOALwned
Declared vector of coordinates of goal in NED world coordinate system.
Definition: wedgebug.c:122
pprz_geodetic_float.h
Paparazzi floating point math for geodetic calculations.
wedgebug.h
threshold_disparity_of_edges
uint8_t threshold_disparity_of_edges
Above this disparity edges are eligible for WedgeBug algorithm (i.e. edges cannot be very far away)
Definition: wedgebug.c:144
AP_MODE_NAV
#define AP_MODE_NAV
Definition: autopilot_static.h:48
cv.h
UYVYs_interlacing_H
void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right)
float_vect3_norm
static float float_vect3_norm(struct FloatVect3 *v)
Definition: pprz_algebra_float.h:171
wedgebug_init
void wedgebug_init(void)
median_disparity_in_front
uint8_t median_disparity_in_front
Variable to hold the median disparity in front of the drone. Needed to see if obstacle is there.
Definition: wedgebug.c:261
getMedian
uint8_t getMedian(uint8_t *a, uint32_t n)
VHOLDINGPOINTwned
struct FloatVect3 VHOLDINGPOINTwned
Declared a vector to hold the position of a holding point (often used to make sure drone stays still...
Definition: wedgebug.c:135
SE_dilation_OCV_2
int SE_dilation_OCV_2
SE size for the second dilation operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE" )
Definition: wedgebug.c:109
background_processes_16bit
void background_processes_16bit(uint8_t save_images_flag)
kernel_median_dims
struct img_size_t kernel_median_dims
Dimensions of the kernel that detect median disparity in front of drone (for obstacle detection)
Definition: wedgebug.c:98
free_path_confidence
int16_t free_path_confidence
This is the confidence that no obstacle was spotted.
Definition: wedgebug.c:154
is_edge_found_micro_flag
uint8_t is_edge_found_micro_flag
Set to 1 if best edge (according to micro confidence) was found, 0 otherwise.
Definition: wedgebug.c:176
find_best_edge_coordinates
uint8_t find_best_edge_coordinates(struct FloatVect3 *VEDGECOORDINATESc, struct FloatVect3 *VTARGETc, struct image_t *img_edges, struct image_t *img_disparity, struct crop_t *edge_search_area, uint8_t threshold, int16_t max_confidence)
SBM_OCV
int SBM_OCV(struct image_t *img_disp, const struct image_t *img_left, const struct image_t *img_right, const int ndisparities, const int SADWindowSize, const bool cropped)
Definition: wedgebug_opencv.cpp:168
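A sketch of the disparity/edge pipeline these OpenCV wrappers suggest (block matching, morphological clean-up, then Sobel edge detection); which buffers are used at each step, and in what order, is an assumption rather than the module's exact sequence:

static void stereo_pipeline_example(struct image_t *left8, struct image_t *right8)
{
  // 1. Block matching on the 8-bit left/right pair -> disparity image (cropped to the valid region).
  SBM_OCV(&img_disparity_int8_cropped, left8, right8, N_disparities, block_size_disparities, true);

  // 2. Morphological opening to remove speckle from the disparity image.
  opening_OCV(&img_disparity_int8_cropped, &img_middle_int8_cropped, SE_opening_OCV, 1);

  // 3. Sobel edge detection on the cleaned image.
  sobel_OCV(&img_middle_int8_cropped, &img_edges_int8_cropped, SE_sobel_OCV, threshold_edge_magnitude);
}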
c_img_cropped
struct point_t c_img_cropped
Principal point of cropped camera images.
Definition: wedgebug.c:184
threshold_median_depth
uint16_t threshold_median_depth
Below this median depth, an obstacle is considered to block the way (i.e. the blocking obstacle n...
Definition: wedgebug.c:149
DIRECT_CONTROL
@ DIRECT_CONTROL
Definition: wedgebug.c:221
heading_confidence
int16_t heading_confidence
This is the confidence that the desired heading is reached.
Definition: wedgebug.c:156
SE_opening_OCV
int SE_opening_OCV
SE size for the opening operation.
Definition: wedgebug.c:106
int16_t
signed short int16_t
Definition: types.h:17
indx1d_c
int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width)
is_no_edge_found_flag
uint8_t is_no_edge_found_flag
Set to 1 if no edge was identified, 0 otherwise.
Definition: wedgebug.c:177
uint8_t
unsigned char uint8_t
Definition: types.h:14
min_disparity
int min_disparity
Definition: wedgebug.c:257
POSITION_EDGE
@ POSITION_EDGE
Definition: wedgebug.c:214
threshold_edge_magnitude
int threshold_edge_magnitude
Edges with a magnitude above this value are detected. Above this value, edges are given the value 127...
Definition: wedgebug.c:142
previous_mode
uint8_t previous_mode
Variable that saves previous mode to control the drone, for some memory.
Definition: wedgebug.c:254
image_copy
void image_copy(struct image_t *input, struct image_t *output)
Copy an image from input to output. This will only work if the formats are the same.
Definition: image.c:89
max_free_path_confidence
int16_t max_free_path_confidence
This is the max confidence that an obstacle was not spotted.
Definition: wedgebug.c:162
IMAGE_JPEG
@ IMAGE_JPEG
A JPEG encoded image (not per pixel encoded)
Definition: image.h:38
SE_dilation_OCV_1
int SE_dilation_OCV_1
SE size for the first dilation operation.
Definition: wedgebug.c:108
img_dims
struct img_size_t img_dims
Dimensions of images captured by the camera (left and right have same dimension)
Definition: wedgebug.c:95
edge_found_macro_confidence
int16_t edge_found_macro_confidence
This is the confidence that an edge was found - outside of the find_best_edge_coordinates function.
Definition: wedgebug.c:159
float_norm_two_angles
float float_norm_two_angles(float target_angle, float current_angle)
current_state
enum navigation_state current_state
Definition: wedgebug.c:217
image.h
autopilot.h
kernel_C1::w
uint16_t w
Definition: wedgebug.h:44
Vi_to_Vc16bit
void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t d, const float b, const uint16_t f)
control_mode_state
control_mode_state
Definition: wedgebug.c:220
guidance_h_set_guided_heading
bool guidance_h_set_guided_heading(float heading)
Set heading setpoint in GUIDED mode.
Definition: guidance_h.c:720
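A minimal sketch of how the horizontal, vertical and heading guided setpoints can be split, e.g. to hold position while sweeping the heading during an edge scan; the wrapper name and arguments are placeholders, the three calls are the documented API:

static void hold_and_turn_example(struct FloatVect3 *hold_ned, float new_heading)
{
  guidance_h_set_guided_pos(hold_ned->x, hold_ned->y);  // keep the current horizontal position
  guidance_v_set_guided_z(hold_ned->z);                 // keep the current altitude (z is down in NED)
  guidance_h_set_guided_heading(new_heading);           // rotate towards the new heading
}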
getMedian16bit
uint16_t getMedian16bit(uint16_t *a, uint32_t n)
edge_search_area
struct crop_t edge_search_area
This structure holds information about the window in which edges are searched in.
Definition: wedgebug.c:187
find_best_edge_coordinates2
uint8_t find_best_edge_coordinates2(struct FloatVect3 *VEDGECOORDINATESc, struct FloatVect3 *VTARGETc, struct image_t *img_edges, struct image_t *img_depth, struct crop_t *edge_search_area, uint16_t threshold, int16_t max_confidence)
float_vect3_norm_two_points
float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2)
f
uint16_t f
Camera focal length, in pixels.
Definition: wedgebug.c:204
kernel_create
void kernel_create(struct kernel_C1 *kernel, uint16_t width, uint16_t height, enum image_type type)
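A minimal sketch of the kernel_C1 lifecycle implied by kernel_create/kernel_free, using the median-kernel dimensions declared in this file:

static void kernel_lifecycle_example(void)
{
  struct kernel_C1 k;
  kernel_create(&k, K_median_w, K_median_h, IMAGE_INT16);  // allocates buf_weights and buf_values
  // ... use the kernel, e.g. with median_depth_to_point(...) ...
  kernel_free(&k);                                         // release the buffers again
}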
stateGetNedToBodyRMat_f
static struct FloatRMat * stateGetNedToBodyRMat_f(void)
Get vehicle body attitude rotation matrix (float).
Definition: state.h:1137
FloatVect3::y
float y
Definition: pprz_algebra_float.h:56
point_t::x
uint32_t x
The x coordinate of the point.
Definition: image.h:59
NedCoor_f::y
float y
in meters
Definition: pprz_geodetic_float.h:65
Rcr
struct FloatRMat Rcr
Definition: wedgebug.c:198
depth_to_disp
uint8_t depth_to_disp(const float depth, const float b, const uint16_t f)
threshold_distance_to_goal
float threshold_distance_to_goal
Below this distance (in meters) it is considered that the robot has reached the goal.
Definition: wedgebug.c:145
is_edge_found_macro_flag
uint8_t is_edge_found_macro_flag
Set to 1 if best edge (according to macro confidence) was found, 0 otherwise.
Definition: wedgebug.c:175
post_disparity_crop_rect
void post_disparity_crop_rect(struct crop_t *_img_cropped_info, struct img_size_t *_original_img_dims, const int disp_n, const int block_size)
img_middle_int16_cropped
struct image_t img_middle_int16_cropped
Definition: wedgebug.c:85
guidance_v_set_guided_z
bool guidance_v_set_guided_z(float z)
Set z setpoint in GUIDED mode.
Definition: guidance_v.c:551
is_state_changed_flag
uint8_t is_state_changed_flag
Set to 1 if state was changed, 0 otherwise.
Definition: wedgebug.c:178
img_size_t
Definition: image.h:86
max_heading_confidence
int16_t max_heading_confidence
This is the max confidence that a specific heading was reached.
Definition: wedgebug.c:164
VNEDwenu
struct FloatVect3 VNEDwenu
Definition: wedgebug.c:193
WaypointAlt
#define WaypointAlt(_wp)
waypoint altitude in m above MSL
Definition: common_nav.h:48
VPBESTEDGECOORDINATESwned
struct FloatVect3 VPBESTEDGECOORDINATESwned
Declared vector of coordinates of previous "best" edge detected in NED world coordinate system.
Definition: wedgebug.c:137
N_disparities
int N_disparities
Number of disparity levels (0-this number)
Definition: wedgebug.c:255
heading_towards_waypoint
float heading_towards_waypoint(uint8_t wp)
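An illustrative (not necessarily the module's) computation of the heading towards a flight-plan waypoint, built from the ENU position getter and the WaypointX/WaypointY macros listed here; the NED heading convention is assumed to be atan2(east offset, north offset):

#include <math.h>

static float heading_towards_waypoint_example(uint8_t wp)
{
  struct EnuCoor_f *pos = stateGetPositionEnu_f();
  // ENU: x = east, y = north; heading (psi, about the down axis) = atan2(d_east, d_north).
  return atan2f(WaypointX(wp) - pos->x, WaypointY(wp) - pos->y);
}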
IMAGE_YUV422
@ IMAGE_YUV422
UYVY format (uint16 per pixel)
Definition: image.h:36
FloatVect3::x
float x
Definition: pprz_algebra_float.h:55
Vc_to_Vw
void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw, struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
counter_state
double counter_state[NUMBER_OF_STATES]
Definition: wedgebug.c:244
kernel_C1::h
uint16_t h
Kernel height.
Definition: wedgebug.h:45
WEDGEBUG_CAMERA_LEFT_FPS
#define WEDGEBUG_CAMERA_LEFT_FPS
Definition: wedgebug.c:65
int32_t
signed long int32_t
Definition: types.h:19
allow_state_change_EDGE_SCAN
uint8_t allow_state_change_EDGE_SCAN
Definition: wedgebug.c:276
opening_OCV
int opening_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
Definition: wedgebug_opencv.cpp:277
SE_erosion_OCV
int SE_erosion_OCV
SE size for the erosion operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE",...
Definition: wedgebug.c:110
position_confidence
int16_t position_confidence
This is the confidence that the desired position was reached.
Definition: wedgebug.c:155
point_t::y
uint32_t y
The y coordinate of the point.
Definition: image.h:60
EDGE_SCAN
@ EDGE_SCAN
Definition: wedgebug.c:215
get_img_type
const char * get_img_type(enum image_type img_type)
Definition: wedgebug.c:357
K_median_w
uint16_t K_median_w
Width of kernel for the median kernel.
Definition: wedgebug.c:112
clock_total_time_previous
clock_t clock_total_time_previous
Definition: wedgebug.c:248
guidance_h_hover_enter
void guidance_h_hover_enter(void)
Definition: guidance_h.c:542
crop_t::y
uint16_t y
Start position y (vertical)
Definition: image.h:94
img_cropped_info
struct crop_t img_cropped_info
Definition: wedgebug.c:92
clock_FSM
clock_t clock_FSM
Definition: wedgebug.c:250
save_images_flag
uint8_t save_images_flag
Definition: wedgebug.c:180
img_left
struct image_t img_left
Definition: wedgebug.c:74
img_cropped_dims
struct img_size_t img_cropped_dims
Dimension of image after it has been cropped to remove pixel error due to block matching limitations.
Definition: wedgebug.c:96
ES_angles
Definition: wedgebug.c:231
image_type
image_type
Definition: image.h:35
allow_state_change_POSITION_EDGE
uint8_t allow_state_change_POSITION_EDGE
Definition: wedgebug.c:275
erosion_OCV
int erosion_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
Definition: wedgebug_opencv.cpp:355
median_disparity_to_point
uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
time_state
double time_state[NUMBER_OF_STATES]
Definition: wedgebug.c:243
kernel_free
void kernel_free(struct kernel_C1 *kernel)
allow_state_change_MOVE_TO_GOAL
uint8_t allow_state_change_MOVE_TO_GOAL
Definition: wedgebug.c:271
MOVE_TO_EDGE
@ MOVE_TO_EDGE
Definition: wedgebug.c:213
VSTARTwned
struct FloatVect3 VSTARTwned
Declared vector of coordinates of start position in NED world coordinate system.
Definition: wedgebug.c:119
clock_background_processes
clock_t clock_background_processes
Definition: wedgebug.c:249
float_vect_copy
static void float_vect_copy(float *a, const float *b, const int n)
a = b
Definition: pprz_algebra_float.h:546
disp_to_depth_16bit
float disp_to_depth_16bit(const int16_t d, const float b, const uint16_t f)
clock_total_time_current
clock_t clock_total_time_current
Definition: wedgebug.c:247
FloatRMat
rotation matrix
Definition: pprz_algebra_float.h:77
Vb_to_Va
void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa)
SE_sobel_OCV
int SE_sobel_OCV
SE size for the Sobel operation, to detect edges.
Definition: wedgebug.c:111
FloatVect3::z
float z
Definition: pprz_algebra_float.h:57
is_setpoint_reached
uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold)
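A sketch of the kind of check this declaration describes: the setpoint counts as reached once the Euclidean distance between the goal and the current position drops below the threshold. The function below is illustrative and built only from the vector helpers listed on this page:

static uint8_t setpoint_reached_example(struct FloatVect3 *goal, struct FloatVect3 *pos, float threshold)
{
  struct FloatVect3 d;
  VECT3_DIFF(d, *goal, *pos);                 // component-wise difference goal - pos
  return (float_vect3_norm(&d) < threshold);  // 1 if within the threshold, 0 otherwise
}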
pprz_algebra.h
Paparazzi generic algebra macros.
max_edge_found_micro_confidence
int16_t max_edge_found_micro_confidence
This is the max confidence that edges (micro-see above) were found.
Definition: wedgebug.c:165
VSTARTwenu
struct FloatVect3 VSTARTwenu
Declared vector of coordinates of start position in ENU world coordinate system.
Definition: wedgebug.c:117
NedCoor_f::x
float x
in meters
Definition: pprz_geodetic_float.h:64
Rrwned
struct FloatRMat Rrwned
Definition: wedgebug.c:195
image_t::buf
void * buf
Image buffer (depending on the image_type)
Definition: image.h:54
is_setpoint_reached_flag
uint8_t is_setpoint_reached_flag
Definition: wedgebug.c:171
distance_traveled
float distance_traveled
Definition: wedgebug.c:263
WEDGEBUG_CAMERA_RIGHT_FPS
#define WEDGEBUG_CAMERA_RIGHT_FPS
Definition: wedgebug.c:62
IMAGE_GRADIENT
@ IMAGE_GRADIENT
An image gradient (int16 per pixel)
Definition: image.h:39
autopilot_get_mode
uint8_t autopilot_get_mode(void)
get autopilot mode
Definition: autopilot.c:184
ES_angles::heading_max_left
float heading_max_left
Definition: wedgebug.c:233
state.h
disp_to_depth
float disp_to_depth(const uint8_t d, const float b, const uint16_t f)
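The disparity-to-depth helpers rest on the standard stereo relation depth = b * f / d (baseline in meters, focal length in pixels, disparity in pixels); an illustrative guard-checked version, not the module's exact implementation:

static float disp_to_depth_example(uint8_t d, float b, uint16_t f)
{
  if (d == 0) { return -1.0f; }        // zero disparity would mean an infinitely distant point
  return b * (float)f / (float)d;      // depth in meters along the optical axis
}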
number_of_states
uint8_t number_of_states
Definition: wedgebug.c:264
FloatEulers::psi
float psi
in radians
Definition: pprz_algebra_float.h:87
save_image_HM
int save_image_HM(struct image_t *img, char *myString, int const heatmap)
Definition: wedgebug_opencv.cpp:116
distance_robot_edge_goal
float distance_robot_edge_goal
Variable to hold distance from robot to edge to goal (used in EDGE_SCAN (9) state)
Definition: wedgebug.c:265
median_kernel16bit
struct kernel_C1 median_kernel16bit
Definition: wedgebug.c:103
VGOALc
struct FloatVect3 VGOALc
Declared vector of coordinates of goal in camera coordinate system.
Definition: wedgebug.c:124
FloatRMat::m
float m[3 *3]
Definition: pprz_algebra_float.h:78
ES_angles::is_right_reached_flag
uint8_t is_right_reached_flag
Definition: wedgebug.c:237
point_t
Definition: image.h:58
crop_t::x
uint16_t x
Start position x (horizontal)
Definition: image.h:93
threshold_distance_to_angle
float threshold_distance_to_angle
Below this distance (in radians) it is considered that the robot has reached the target angle.
Definition: wedgebug.c:146
img_depth_int16_cropped
struct image_t img_depth_int16_cropped
Image holding depth values (cm) obtained from the disparity image.
Definition: wedgebug.c:80
VEDGECOORDINATESr
struct FloatVect3 VEDGECOORDINATESr
Declared vector of coordinates of "best" edge detected in camera coordinate system.
Definition: wedgebug.c:127
maximum_intensity
uint32_t maximum_intensity(struct image_t *img)
VRwned
struct FloatVect3 VRwned
Definition: wedgebug.c:196
image_t
Definition: image.h:44
set_state
void set_state(uint8_t _state, uint8_t change_allowed)
ES_angles::initiated
uint8_t initiated
Definition: wedgebug.c:235
img_edges_int8_cropped
struct image_t img_edges_int8_cropped
Image obtained from the external sobel edge detection function = sobel_OCV.
Definition: wedgebug.c:82
show_rotation_matrix
void show_rotation_matrix(struct FloatRMat *R)
Definition: wedgebug.c:387
closing_OCV
int closing_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
Definition: wedgebug_opencv.cpp:304
is_start_reached_flag
uint8_t is_start_reached_flag
Definition: wedgebug.c:170
is_mode_changed_flag
uint8_t is_mode_changed_flag
Set to 1 if control mode of drone is changed, 0 otherwise.
Definition: wedgebug.c:179
heading
float heading
Variable for storing the heading of the drone (psi in radians)
Definition: wedgebug.c:258
counter_cycles
double counter_cycles
Definition: wedgebug.c:245