Paparazzi UAS v5.17_devel-24-g2ae834f
wedgebug.c
1 /*
2  * Copyright (C) Ralph Rudi schmidt <ralph.r.schmidt@outlook.com>
3 
4  *
5  * This file is part of paparazzi
6  *
7  * paparazzi is free software; you can redistribute it and/or modify
8  * it under the terms of the GNU General Public License as published by
9  * the Free Software Foundation; either version 2, or (at your option)
10  * any later version.
11  *
12  * paparazzi is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15  * GNU General Public License for more details.
16  *
17  * You should have received a copy of the GNU General Public License
18  * along with paparazzi; see the file COPYING. If not, see
19  * <http://www.gnu.org/licenses/>.
20 
21  */
33 /*
34  *Note. Most variables are initialized in the initialization function. Variables
35  *marked with an "!" were taken into account when calculating the total memory consumption
36  */
37 
38 
39 // New section: Importing headers ----------------------------------------------------------------------------------------------------------------
40 
41 #include <stdio.h>
44 #include "modules/computer_vision/cv.h" // Required for the "cv_add_to_device" function
45 #include "modules/computer_vision/lib/vision/image.h"// For image-related structures
46 #include "pthread.h"
47 #include <stdint.h> // Needed for common types like uint8_t
48 #include "state.h"
49 #include "math/pprz_algebra_float.h"// Needed for vector operations, Euclidean distance, FloatVect3 and FloatRMat
50 #include "math/pprz_algebra.h"// Needed for vector operations (simple subtraction)
51 #include "math/pprz_geodetic_float.h"// Needed for NedCoor_f
52 #include "generated/flight_plan.h" // Needed for WP (waypoint) functions and WP IDs (as defined in "ralphthesis2020_stereo_cyberzoo.xml")
53 #include "firmwares/rotorcraft/autopilot_guided.h" // Needed for guidance functions such as "autopilot_guided_goto_ned" and "guidance_h_set_guided_heading"
54 #include <math.h> // needed for basic math functions
55 #include "autopilot.h" // Needed to set states (GUIDED vs NAV)
56 #include <time.h> // Needed to measure time
57 
58 
59 // New section --------------------------------------------------------------------------------------------------------------------------------------
60 // Defines
61 #ifndef WEDGEBUG_CAMERA_RIGHT_FPS
62 #define WEDGEBUG_CAMERA_RIGHT_FPS 0 //< Default FPS (zero means run at camera fps)
63 #endif
64 #ifndef WEDGEBUG_CAMERA_LEFT_FPS
65 #define WEDGEBUG_CAMERA_LEFT_FPS 0 //< Default FPS (zero means run at camera fps)
66 #endif
67 
68 
69 
70 // New section ----------------------------------------------------------------------------------------------------------------
71 // Define global variables
72 
73 // Declaring images
74 struct image_t img_left;
77 struct image_t img_left_int8_cropped; // Image obtained from left camera, converted into 8bit gray image
81 struct image_t img_middle_int8_cropped; // Image obtained after processing (morphological operations) of previous image
83 
86 
87 // New section: Global variables - Declarations ----------------------------------------------------------------------------------------------------------------
88 
89 
90 
91 // Declaring crop_t structure for information about the cropped image (after BM)
93 
94 // Declaring dimensions of images and kernels used
96 struct img_size_t
98 struct img_size_t
100 
101 // Declaring empty kernel for obtaining median
104 
105 // Declaring structuring element sizes
114 
115 
116 // Declaring vectors to hold global 3d points
118  VSTARTwenu;
120  VSTARTwned;
139 
140 
141 // Declaring thresholds
148 
151 
152 // Declaring confidence parameters
157 
158 //int16_t edge_found_micro_confidence; //! This is the confidence that an edge was found - inside of the find_best_edge_coordinates function
168 
169 // Declaring boolean flags
170 uint8_t is_start_reached_flag; // Set to 1 if start position is reached, 0 otherwise.
180 uint8_t save_images_flag; // For report: Flag to indicate if images should be saved
181 
182 // Declaring principal points
183 struct point_t c_img;
185 
186 // Declaring edge search area
188 
189 
190 // Declaring rotation matrices and transition vectors for frame to frame transformations
191 // 1) Rotation matrix and transition vector to transform from world ENU frame to world NED frame
194 // 2) Rotation matrix and transition vector to transform from world ned frame to robot frame
197 // 3) Rotation matrix and transition vector to transform from robot frame to camera frame
198 struct FloatRMat Rcr;
199 struct FloatVect3 VCr;
200 
201 // Declaration and initialization of camera parameters
202 float b = WEDGEBUG_CAMERA_BASELINE /
203  1000.00;
204 uint16_t f = WEDGEBUG_CAMERA_FOCAL_LENGTH;
205 
206 
207 
208 // Define new structures + enums
216 };
217 enum navigation_state current_state ;// Default state is 0 i.e. nothing
218 
219 
224 };
225 
226 enum control_mode_state current_mode;// Default state is 0 i.e. nothing
227 
228 
229 
230 // This is a structure to save angles that are needed for the "Edge Scan" state
231 struct ES_angles {
232  float heading_initial; // This is the initial heading of the robot when the "Edge Scan" state is entered
233  float heading_max_left; // This is the maximum left turn angle (determines how far to the left the robot can look when searching for edges)
234  float heading_max_right;// This is the maximum right turn angle (determines how far to the right the robot can look when searching for edges)
235  uint8_t initiated; // This is a flag that can be set to check whether the structure is allowed to be overwritten (0=allowed, 1=forbidden)
236  uint8_t is_left_reached_flag; // This is a flag to check whether the left was scanned for an edge already
237  uint8_t is_right_reached_flag; // This is a flag to check whether the right was scanned for an edge already
238 };
240 
241 
242 // Declaring variables for time measurement
243 double time_state[NUMBER_OF_STATES]; // Double array for saving total time (clock cycles) spent in the states (position 0 = state 0 and so on)
244 double counter_state[NUMBER_OF_STATES]; // A counter to measure the total cycles that each state in the FSM (within the periodic function) went through
245 double counter_cycles; // A counter to measure the total cycles that the periodic function went through
246 clock_t clock_total_time; // Clock to measure the total time (clock cycles) it took for the robot to fly from start to goal
247 clock_t clock_total_time_current; // Clock to hold time measured at start of the current cycle of the periodic function
248 clock_t clock_total_time_previous; // Clock to hold time measured at start of the previous cycle of the periodic function
250 clock_t clock_FSM;
251 
252 // Other declarations
255 int N_disparities = 64;
257 int min_disparity = 0;//
258 float heading;
259 float max_edge_search_angle = M_PI /
260  2;
263 float distance_traveled; // Variable to hold the distance traveled by the robot (from the start up to the goal)
264 uint8_t number_of_states; // Variable to save the total number of states used in the finite state machine
267 
268 
269 // For debugging purposes. Allows state changes in simulation if set to 1; if set to 0, state changes are not allowed. Useful if you want to execute only one state repeatedly
270 
271 uint8_t allow_state_change_MOVE_TO_GOAL; // From within state "MOVE_TO_GOAL"
272 uint8_t allow_state_change_POSITION_GOAL; // From within state "POSITION_GOAL"
273 uint8_t allow_state_change_WEDGEBUG_START; // From within state "WEDGEBUG_START"
274 uint8_t allow_state_change_MOVE_TO_EDGE; // From within state "MOVE_TO_EDGE"
275 uint8_t allow_state_change_POSITION_EDGE; // From within state "POSITION_EDGE"
276 uint8_t allow_state_change_EDGE_SCAN; // From within state "EDGE_SCAN"
277 
278 
281 
282 
283 // New section: Functions - Declaration ----------------------------------------------------------------------------------------------------------------
284 
285 // Supporting
286 const char *get_img_type(enum image_type img_type); // Function 1: Displays image type
287 void show_image_data(struct image_t *img); // Function 2: Displays image data
288 void show_image_entry(struct image_t *img, int entry_position,
289  const char *img_name); // Function 3: Displays pixel value of image
290 void show_rotation_matrix(struct FloatRMat *R);
291 
292 // External
293 void post_disparity_crop_rect(struct crop_t *img_cropped_info, struct img_size_t *original_img_dims, const int disp_n,
294  const int block_size);
295 void set_state(uint8_t state, uint8_t change_allowed);
296 void kernel_create(struct kernel_C1 *kernel, uint16_t width, uint16_t height, enum image_type type);
297 void kernel_free(struct kernel_C1 *kernel);
299 
300 //Core
301 static struct image_t *copy_left_img_func(struct image_t
302  *img); // Function 1: Copies left image into a buffer (buf_left)
303 static struct image_t *copy_right_img_func(struct image_t
304  *img); // Function 2: Copies right image into a buffer (buf_right)
305 void UYVYs_interlacing_V(struct image_t *YY, struct image_t *left,
306  struct image_t *right); // Function 3: Copies gray pixel values of left and right UYVY images into merged YY image
307 void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right);
308 
309 uint32_t maximum_intensity(struct image_t *img);
310 void thresholding_img(struct image_t *img, uint8_t threshold);
311 void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *img_cropped_info);
312 float disp_to_depth(const uint8_t d, const float b, const uint16_t f);
313 uint8_t depth_to_disp(const float depth, const float b, const uint16_t f);
314 void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t d,
315  const float b, const uint16_t f);
316 int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img);
317 int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *img_dims);
318 int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width);
319 
320 void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa);
321 void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa);
322 void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
323  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose);
324 void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
325  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose);
326 
327 float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2);
329 float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned);
330 uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median);
331 
333  struct image_t *img_edges, struct image_t *img_disparity, struct crop_t *edge_search_area, uint8_t threshold,
334  int16_t max_confidence);
335 uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold);
336 
337 float float_norm_two_angles(float target_angle, float current_angle);
338 uint8_t is_heading_reached(float target_angle, float current_angle, float threshold);
339 uint8_t are_setpoint_and_angle_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION,
340  float threshold_setpoint, float target_angle, float current_angle, float threshold_angle);
341 void disp_to_depth_img(struct image_t *img8bit_input, struct image_t *img16bit_output);
343 
345 float dispfixed_to_disp(const int16_t d);
346 float disp_to_depth_16bit(const int16_t d, const float b, const uint16_t f);
347 void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x,
348  const float depth /*depth in m*/, const uint16_t f);
349 void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t d,
350  const float b, const uint16_t f);
351 uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median);
353  struct image_t *img_edges, struct image_t *img_depth, struct crop_t *edge_search_area, uint16_t threshold,
354  int16_t max_confidence);
356 
357 
358 
359 
360 // New section: Functions - Definition ----------------------------------------------------------------------------------------------------------------
361 
362 // Supporting:
363 
364 // Function 1
365 const char *get_img_type(enum image_type img_type)
366 {
367  switch (img_type) {
368  case IMAGE_YUV422: return "IMAGE_YUV422";
369  case IMAGE_GRAYSCALE: return "IMAGE_GRAYSCALE";
370  case IMAGE_JPEG: return "IMAGE_JPEG";
371  case IMAGE_GRADIENT: return "IMAGE_GRADIENT";
372  default: return "Image type not found";
373  }
374 }
375 
376 
377 // Function 2
378 void show_image_data(struct image_t *img)
379 {
380  printf("Image-Type: %s\n", get_img_type(img->type));
381  printf("Image-Width: %d\n", img->w);
382  printf("Image-Height: %d\n", img->h);
383  printf("Image-Buf_Size: %d\n", img->buf_size);
384  printf("Image-Buf_Memory_Occupied: %lu\n", sizeof(img->buf));
385 }
386 
387 
388 // Function 3
389 void show_image_entry(struct image_t *img, int entry_position, const char *img_name)
390 {
391  printf("Pixel %d value - %s: %d\n", entry_position, img_name, ((uint8_t *)img->buf)[entry_position]);
392 }
393 
394 // Function 4
395 void show_rotation_matrix(struct FloatRMat *R)
396 {
397  printf("[[%f, %f, %f]\n", R->m[0], R->m[1], R->m[2]);
398  printf(" [%f, %f, %f]\n", R->m[3], R->m[4], R->m[5]);
399  printf(" [%f, %f, %f]]\n", R->m[6], R->m[7], R->m[8]);
400 
401 }
402 
403 
404 
405 // External:
406 
407 // Function 1 - Returns the upper left coordinates of a square (x and y coordinates) and the offset in terms of width and height,
408 // given the number of disparity levels and the block size used by the block matching algorithm. This is needed to crop an image
409 void post_disparity_crop_rect(struct crop_t *img_cropped_info, struct img_size_t *original_img_dims, const int disp_n,
410  const int block_size)
411 {
412 
413  uint16_t block_size_black = block_size / 2;
414  uint16_t left_black = disp_n + block_size_black;
415 
416 
417  img_cropped_info->y = block_size_black;
418  img_cropped_info->h = original_img_dims->h - block_size_black;
419  img_cropped_info->h = img_cropped_info->h - img_cropped_info->y;
420 
421  img_cropped_info->x = left_black - 1;
422  img_cropped_info->w = original_img_dims->w - block_size_black;
423  img_cropped_info->w = img_cropped_info->w - img_cropped_info->x;
424 }
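/*
 * Worked example (illustrative numbers, not taken from the original source):
 * for original_img_dims = 240 x 240, disp_n = 64 and block_size = 25 we get
 * block_size_black = 12 and left_black = 76, so the crop becomes
 * y = 12, h = 240 - 12 - 12 = 216, x = 76 - 1 = 75, w = 240 - 12 - 75 = 153.
 */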
425 
426 
427 // Function 2 - Sets finite state machine state (useful for the flight path blocks)
428 void set_state(uint8_t state, uint8_t change_allowed)
429 {
430  if (change_allowed == 1) {current_state = state;}
431 }
432 
433 
434 // Function 3 - Creates empty 8bit kernel
435 void kernel_create(struct kernel_C1 *kernel, uint16_t width, uint16_t height, enum image_type type)
436 {
437 
438  // Set the variables
439  kernel->type = type;
440  kernel->h = height;
441  kernel->w = width;
442 
443 
444  // Depending on the type the size differs
445  if (type == IMAGE_YUV422) {
446  kernel->buf_size = sizeof(uint8_t) * 2 * width * height;
447  } else if (type == IMAGE_JPEG) {
448  kernel->buf_size = sizeof(uint8_t) * 2 * width * height; // At maximum quality this is enough
449  } else if (type == IMAGE_GRADIENT) {
450  kernel->buf_size = sizeof(int16_t) * width * height;
451  } else if (type == IMAGE_INT16) {
452  kernel->buf_size = sizeof(int16_t) * width * height;
453  } else {
454  kernel->buf_size = sizeof(uint8_t) * width * height;
455  }
456 
457  kernel->buf_weights = malloc(kernel->buf_size);
458  kernel->buf_values = malloc(kernel->buf_size);
459 }
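/*
 * Minimal usage sketch (illustrative; a 5x5 grayscale kernel is assumed here,
 * the actual kernel sizes are configured elsewhere in this module):
 *
 *   struct kernel_C1 k;
 *   kernel_create(&k, 5, 5, IMAGE_GRAYSCALE); // allocates 5*5 bytes for values and for weights
 *   // ... fill/read k.buf_values and k.buf_weights ...
 *   kernel_free(&k);                          // releases both buffers (see Function 4 below)
 */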
460 
461 
462 // Function 4 - Deletes 8bit kernel
463 void kernel_free(struct kernel_C1 *kernel)
464 {
465  if (kernel->buf_weights != NULL) {
466  free(kernel->buf_weights);
467  kernel->buf_weights = NULL;
468  }
469  if (kernel->buf_values != NULL) {
470  free(kernel->buf_values);
471  kernel->buf_values = NULL;
472  }
473 }
474 
475 
476 // Function 5 - Calculates the median of an 8bit array (e.g. an image buffer)
478 {
479  // Allocate an array of the same size and sort it.
480  uint32_t i, j;
481 
482  uint8_t dpSorted[n];
483  for (i = 0; i < n; ++i) {
484  dpSorted[i] = a[i];
485  }
486  for (i = n - 1; i > 0; --i) {
487  for (j = 0; j < i; ++j) {
488  if (dpSorted[j] > dpSorted[j + 1]) {
489  uint8_t dTemp = dpSorted[j];
490  dpSorted[j] = dpSorted[j + 1];
491  dpSorted[j + 1] = dTemp;
492  }
493  }
494  }
495 
496  // Middle or average of middle values in the sorted array.
497  uint8_t dMedian = 0;
498  if ((n % 2) == 0) {
499  dMedian = (dpSorted[n / 2] + dpSorted[(n / 2) - 1]) / 2.0;
500  } else {
501  dMedian = dpSorted[n / 2];
502  }
503  return dMedian;
504 }
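/*
 * Worked example (illustrative): for a = {9, 3, 7, 1} and n = 4 the sorted copy
 * is {1, 3, 7, 9}; n is even, so the median is (3 + 7) / 2 = 5. For an odd
 * length, e.g. {1, 3, 7, 9, 9}, the middle element 7 is returned directly.
 */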
505 
506 
507 // Function 6 - Calculates the median of a 16bit array (e.g. an image buffer)
509 {
510  // Allocate an array of the same size and sort it.
511  uint32_t i, j;
512 
513  uint16_t dpSorted[n];
514  for (i = 0; i < n; ++i) {
515  dpSorted[i] = a[i];
516  }
517  for (i = n - 1; i > 0; --i) {
518  for (j = 0; j < i; ++j) {
519  if (dpSorted[j] > dpSorted[j + 1]) {
520  uint16_t dTemp = dpSorted[j];
521  dpSorted[j] = dpSorted[j + 1];
522  dpSorted[j + 1] = dTemp;
523  }
524  }
525  }
526 
527  // Middle or average of middle values in the sorted array.
528  uint16_t dMedian = 0;
529  if ((n % 2) == 0) {
530  dMedian = (dpSorted[n / 2] + dpSorted[(n / 2) - 1]) / 2.0;
531  } else {
532  dMedian = dpSorted[n / 2];
533  }
534  return dMedian;
535 }
536 
537 
538 
539 
540 // Core:
541 
542 // Function 1
543 static struct image_t *copy_left_img_func(struct image_t *img)
544 {
545  image_copy(img, &img_left);
546  //show_image_data(img);
547  //show_image_entry(&img_left, 10, "img_left");
548  return img;
549 }
550 
551 
552 // Function 2
553 static struct image_t *copy_right_img_func(struct image_t *img)
554 {
555  image_copy(img, &img_right);
556  //show_image_data(img);
557  //show_image_entry(&img_right, 10, "img_right");
558  return img;
559 }
560 
561 
562 // Function 3
563 void UYVYs_interlacing_V(struct image_t *merged, struct image_t *left, struct image_t *right)
564 {
565  // Error messages
566  if (left->w != right->w || left->h != right->h) {
567  printf("The dimensions of the left and right image do not match!");
568  return;
569  }
570  if ((merged->w * merged->h) != (2 * right->w) * right->h) {
571  printf("The dimensions of the empty image template for merger are not sufficient to merge gray left and right pixel values.");
572  return;
573  }
574 
575  uint8_t *UYVY_left = left->buf;
576  uint8_t *UYVY_right = right->buf;
577  uint8_t *YY = merged->buf;
578  uint32_t loop_length = left->w * right->h;
579 
580  // Incrementing pointers to get to first gray value of the UYVY images
581  UYVY_left++;
582  UYVY_right++;
583 
584 
585  for (uint32_t i = 0; i < loop_length; i++) {
586  *YY = *UYVY_left; // Copies left gray pixel (Y) to the merged image YY, in first position
587  YY++; // Moving to second position of merged image YY
588  *YY = *UYVY_right; // Copies right gray pixel (Y) to the merged image YY, in second position
589  YY++; // Moving to the next position, in preparation to copy left gray pixel (Y) to the merged image YY
590  UYVY_left += 2; // Moving pointer to next gray pixel (Y), in the left image
591  UYVY_right += 2; // Moving pointer to next gray pixel (Y), in the right image
592  /*
593  * Note: Since the loop length is based on the UYVY image, the size of the data should be (2 x w) x h.
594  * This is also the same size as for the new merged YY image.
595  * Thus incrementing the pointer for UYVY_left and right, in each iteration, does not lead to problems (same for YY image)
596  */
597  }
598 }
599 
600 
601 // Function 4
602 void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right)
603 {
604  // Error messages
605  if (left->w != right->w || left->h != right->h) {
606  printf("The dimensions of the left and right image do not match!");
607  return;
608  }
609  if ((merged->w * merged->h) != (2 * right->w) * right->h) {
610  printf("The dimensions of the empty image template for merger are not sufficient to merge gray left and right pixel values.");
611  return;
612  }
613 
614  uint8_t *UYVY_left = left->buf;
615  uint8_t *UYVY_right = right->buf;
616  uint8_t *YY1 = merged->buf; // points to first row for pixels of left image
617  uint8_t *YY2 = YY1 + merged->w; // points to second row for pixels of right image
618 
619  // Incrementing pointers to get to first gray value of the UYVY images
620  UYVY_left++;
621  UYVY_right++;
622 
623  for (uint32_t i = 0; i < left->h; i++) {
624  //printf("Loop 1: %d\n", i);
625  for (uint32_t j = 0; j < left->w; j++) {
626  //printf("Loop 1: %d\n", j);
627  *YY1 = *UYVY_left;
628  *YY2 = *UYVY_right;
629  YY1++;
630  YY2++;
631  UYVY_left += 2;
632  UYVY_right += 2;
633  }
634  YY1 += merged->w; // Jumping pointer to second next row (i.e. over row with pixels from right image)
635  YY2 += merged->w; // Jumping pointer to second next row (i.e. over row with pixels from left image)
636  }
637 }
638 
639 
640 // Function 5 - Returns the maximum intensity value in an 8bit (IMAGE_GRAYSCALE) or 16bit (IMAGE_INT16) image
641 uint32_t maximum_intensity(struct image_t *img)
642 {
643  if (img->type == IMAGE_GRAYSCALE) {
644  uint32_t max = 0;
645  for (uint32_t i = 0; i < img->buf_size; i++) {
646  uint8_t *intensity = &((uint8_t *)img->buf)[i];
647 
648  if (*intensity > max) {
649  max = *intensity;
650  }
651  }
652  return max;
653  } else if (img->type == IMAGE_INT16) {
654  uint32_t max = 0;
655  for (uint32_t i = 0; i < (img->w * img->h); i++) {
656  uint16_t *intensity = &((uint16_t *)img->buf)[i];
657 
658  if (*intensity > max) {
659  max = *intensity;
660  printf("%d\n", *intensity);
661  printf("location = %d\n", i);
662  }
663  }
664  return max;
665  } else {
666  printf("ERROR: function does not support image type %d. Breaking out of function.", img->type);
667  return 1;
668  }
669 
670 
671 }
672 
673 
674 // Function 6 - Thresholds 8bit images: all values >= threshold are set to 127 and all other values to 0
675 void thresholding_img(struct image_t *img, uint8_t threshold)
676 {
677  for (uint32_t i = 0; i < img->buf_size; i++) {
678  uint8_t *intensity = &((uint8_t *)img->buf)[i];
679 
680  if (*intensity >= threshold) {
681  *intensity = 127;
682  } else {
683  *intensity = 0;
684  }
685  }
686 }
687 
688 
689 // Function 7 - Calculates principal point coordinates for a cropped image, based on the x
690 // and y coordinates of the cropped area (upper left-hand side: crop_y and crop_x).
691 void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *img_cropped_info)
692 {
693  c_output->y = c_old_input->y - img_cropped_info->y;
694  c_output->x = c_old_input->x - img_cropped_info->x;
695 }
696 
697 
698 // Function 8a - Converts disparity to depth using focal length (in pixels) and baseline distance (in meters)
699 float disp_to_depth(const uint8_t d, const float b, const uint16_t f)
700 {
701  return b * f / d;
702 }
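/*
 * Worked example (illustrative numbers): with a baseline b = 0.06 m, a focal
 * length f = 300 px and a disparity d = 9 px, the returned depth is
 * 0.06 * 300 / 9 = 2.0 m. Note that d = 0 is not handled here; callers such as
 * Vi_to_Vc() below check for a zero disparity before calling this function.
 */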
703 
704 
705 // Function 8c - Converts 16bit fixed-point disparity (pixels * 16) into disparity (pixels)
706 float dispfixed_to_disp(const int16_t d)
707 {
708  return (d / 16.00);
709 }
710 
711 
712 // Function 8d - Converts 16bit fixed-point disparity (pixels * 16) into depth (meters) using focal length (in pixels) and baseline distance (in meters)
713 float disp_to_depth_16bit(const int16_t d, const float b, const uint16_t f)
714 {
715  return b * f / dispfixed_to_disp(d);
716 }
717 
718 
719 // Function 8b - Converts depth to disparity using focal length (in pixels) and baseline distance (in meters)
720 uint8_t depth_to_disp(const float depth, const float b, const uint16_t f)
721 {
722  return round(b * f / depth);
723 }
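/*
 * Worked example (illustrative): depth_to_disp(2.0, 0.06, 300) returns
 * round(0.06 * 300 / 2.0) = 9 px, i.e. the inverse of the example given for
 * disp_to_depth() above.
 */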
724 
725 
726 
727 
728 // Function 9a - Calculates 3d points in a scene based on the 2d coordinates of the point in the
729 // image plane and the depth. d is in pixels, b is in meters and f is in pixels
730 void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t d,
731  const float b, const uint16_t f)
732 {
733  // Calculating Z
734  // In case the disparity is 0, Z will be very small to avoid detection by the algorithm that
735  // calculates the closest edge point to the target goal
736  //printf("y=%d\n", image_point_y);
737  //printf("x=%d\n", image_point_x);
738  //printf("d=%d\n", d);
739 
740 
741  if (d == 0) {
742  scene_point->z = 0.0001;
743  } else {
744  scene_point->z = disp_to_depth(d, b, f);
745  }
746 
747 
748  //printf("Z=%f\n", scene_point->Z);
749 
750  // Calculating Y
751  scene_point->y = image_point_y * scene_point -> z / f;
752 
753  // Calculating X
754  scene_point->x = image_point_x * scene_point -> z / f;
755  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
756  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
757  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
758 }
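/*
 * Worked example (illustrative numbers): with d = 9 px, b = 0.06 m and
 * f = 300 px, z = 2.0 m (see disp_to_depth above). For an image point 30 px
 * below and 60 px to the right of the principal point (image_point_y = 30,
 * image_point_x = 60) the scene point in the camera frame becomes
 * y = 30 * 2.0 / 300 = 0.2 m and x = 60 * 2.0 / 300 = 0.4 m.
 */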
759 
760 
761 // Function 9b - Calculates 3d points in a scene based on the 2d coordinates of the point in the
762 // image plane and the depth. The depth is in meters and f is in pixels
763 void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x,
764  const float depth /*depth in m*/, const uint16_t f)
765 {
766  // Calculating Z
767  scene_point->z = depth;
768 
769  // Calculating Y
770  scene_point->y = image_point_y * scene_point -> z / f;
771 
772  // Calculating X
773  scene_point->x = image_point_x * scene_point -> z / f;
774  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
775  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
776  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
777 }
778 
779 
780 // Function 9c - Calculates 3d points in a scene based on the 2d coordinates of the point in the
781 // image plane and the depth. d is in fixed-point pixels (pixels * 16), b is in meters and f is in pixels
782 void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t d,
783  const float b, const uint16_t f)
784 {
785  // Calculating Z
786  // In case the disparity is 0, Z will be very small to avoid detection by the algorithm that
787  // calculates the closest edge point to the target goal
788  //printf("y=%d\n", image_point_y);
789  //printf("x=%d\n", image_point_x);
790  //printf("d=%d\n", d);
791 
792 
793  if (d == 0) {
794  scene_point->z = 0.0001;
795  } else {
796  scene_point->z = disp_to_depth_16bit(d, b, f);
797  }
798 
799 
800  //printf("Z=%f\n", scene_point->Z);
801 
802  // Calculating Y
803  scene_point->y = image_point_y * scene_point -> z / f;
804 
805  // Calculating X
806  scene_point->x = image_point_x * scene_point -> z / f;
807  //printf("Y (y=%d) = %f\n", image_point->y, scene_point->Y);
808  //printf("X (x=%d) = %f\n", image_point->x, scene_point->X);
809  //printf("Z (d=%d) = %f\n", d, scene_point->Z);
810 
811 }
812 
813 
814 // Function 10a - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using struct image_t for dimensions
815 int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img)
816 {
817  if (x >= (img->w) || x < 0) {
818  printf("Error: index x=%d is out of bounds for axis 0 with size %d. Returning -1\n", x, img->w);
819  return -1;
820  } else if (y >= (img->h) || y < 0) {
821  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, img->h);
822  return -1;
823  } else {
824  return x + img->w * y;
825  }
826 }
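/*
 * Worked example (illustrative): for an image that is 240 px wide, the pixel
 * at y = 2, x = 5 maps to index 5 + 240 * 2 = 485 of the 1d buffer (row-major
 * order); coordinates outside the image return -1 instead.
 */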
827 
828 
829 // Function 10b - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using struct img_size_t for dimensions
830 int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *img_dims)
831 {
832  if (x >= (img_dims->w) || x < 0) {
833  printf("Error: index x=%d is out of bounds for axis 0 with size %d. Returning -1\n", x, img_dims->w);
834  return -1;
835  } else if (y >= (img_dims->h) || y < 0) {
836  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, img_dims->h);
837  return -1;
838  } else {
839  return x + img_dims->w * y;
840  }
841 }
842 
843 // Function 10c - Converts 2d coordinates into 1d coordinates (for 1d arrays) - using two uint16_t values for dimensions
844 int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width)
845 {
846  if (x >= (img_width) || x < 0) {
847  printf("Error: index x=%d is out of bounds for axis 0 with size %d. Returning -1\n", x, img_width);
848  return -1;
849  } else if (y >= (img_height) || y < 0) {
850  printf("Error: index y=%d is out of bounds for axis 1 with size %d. Returning -1\n", y, img_height);
851  return -1;
852  } else {
853  return x + img_width * y;
854  }
855 }
856 
857 
858 // Function 11 - Function to convert point in coordinate system a to a point in the coordinate system b
859 void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa)
860 {
861 
862  struct FloatVect3 Va_temp;
863  Va_temp.x = Va->x;
864  Va_temp.y = Va->y;
865  Va_temp.z = Va->z;
866 
867 
868  // The following translates world vector coordinates into the agent coordinate system
869  Va_temp.x = Va->x - VOa->x;
870  Va_temp.y = Va->y - VOa->y;
871  Va_temp.z = Va->z - VOa->z;
872 
873  // In case the axes of the world coordinate system (w) and the agent coordinate system (a) do not
874  // coincide, they are adjusted with the rotation matrix R
875  float_rmat_vmult(Vb, Rba, &Va_temp);
876 }
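/*
 * In matrix form this computes Vb = Rba * (Va - VOa): the point is first
 * expressed relative to the origin of frame b (VOa, given in frame a
 * coordinates) and then rotated into frame b. Vb_to_Va() below applies the
 * inverse, Va = Rba^T * Vb + VOa, so converting a point back and forth
 * recovers the original coordinates.
 */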
877 
878 
879 // Function 12 - Function to convert point in coordinate system b back to a point in the coordinate system a
880 void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa)
881 {
882  // In case the axes of the agent coordinate system (a) and the world coordinate system (w) do not
883  // coincide, they are adjusted with the inverse rotation matrix R
884  float_rmat_transp_vmult(Va, Rba, Vb);
885 
886  // The following translates agent vector coordinates into the world coordinate system
887  Va->x = Va->x + VOa->x;
888  Va->y = Va->y + VOa->y;
889  Va->z = Va->z + VOa->z;
890 }
891 
892 
893 // Function 13 - Function wrapper to convert a point in the world coordinate system to a point in the camera coordinate system
894 void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
895  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
896 {
897  struct FloatVect3 Vr;
898 
899  // Print log only if enabled
900  if (verbose != 0) {
901  // Rotation matrix - World coordinate system expressed in the robot coordinate system
902  printf("Rrw\n");
903  printf("%f, %f, %f,\n", Rrw->m[0], Rrw->m[1], Rrw->m[2]);
904  printf("%f, %f, %f,\n", Rrw->m[3], Rrw->m[4], Rrw->m[5]);
905  printf("%f, %f, %f\n\n", Rrw->m[6], Rrw->m[7], Rrw->m[8]);
906 
907  // Vector coordinates - Robot in the world frame system
908  printf("VRw (drone location)\n");
909  printf(" %f\n %f\n %f\n\n", VRw->x, VRw->y, VRw->z);
910 
911  // World coordinates
912  printf("Vw\n");
913  printf(" %f\n %f\n %f\n\n", Vw->x, Vw->y, Vw->z);
914  }
915 
916 
917  // Robot coordinates from world coordinates
918  Va_to_Vb(&Vr, Vw, Rrw, VRw);
919 
920  // Print log only if enabled
921  if (verbose != 0) {
922  // Robot coordinates
923  printf("Vr\n");
924  printf(" %f\n %f\n %f\n\n", Vr.x, Vr.y, Vr.z);
925 
926  // Rotation matrix - Robot coordinate system expressed in the camera coordinate system
927  printf("Rcr\n");
928  printf("%f, %f, %f,\n", Rcr->m[0], Rcr->m[1], Rcr->m[2]);
929  printf("%f, %f, %f,\n", Rcr->m[3], Rcr->m[4], Rcr->m[5]);
930  printf("%f, %f, %f\n\n", Rcr->m[6], Rcr->m[7], Rcr->m[8]);
931 
932  // Vector coordinates - Camera in the robot frame system
933  printf("VCr (camera location)\n");
934  printf(" %f\n %f\n %f\n\n", VCr->x, VCr->y, VCr->z);
935  }
936 
937  // Camera coordinates from robot coordinates
938  Va_to_Vb(Vc, &Vr, Rcr, VCr);
939 
940  // Print log only if enabled
941  if (verbose != 0) {
942  // Camera coordinates
943  printf("Vc\n");
944  printf(" %f\n %f\n %f\n\n", Vc->x, Vc->y, Vc->z);
945  }
946 }
947 
948 
949 // Function 14 - Function wrapper to convert a point in the camera coordinate system back to a point in the world coordinate system
950 void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw,
951  struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
952 {
953  struct FloatVect3 Vr;
954 
955  // Agent coordinates from camera coordinates
956  Vb_to_Va(&Vr, Vc, Rcr, VCr);
957 
958  // Print log only if enabled
959  if (verbose != 0) {
960  // Back to robot coordinates
961  printf("Vr - back calculated\n");
962  printf(" %f\n %f\n %f\n\n", Vr.x, Vr.y, Vr.z);
963  }
964 
965 
966  // World coordinates from robot coordinates
967  Vb_to_Va(Vw, &Vr, Rrw, VRw);
968 
969  // Print log only if enabled
970  if (verbose != 0) {
971  // Back to world coordinates
972  printf("Vw - back calculated\n");
973  printf(" %f\n %f\n %f\n\n", Vw->x, Vw->y, Vw->z);
974  }
975 }
976 
977 // Function 15 - Function to obtain the Euclidean distance (in meters) between two 3d points
978 float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2)
979 {
980  struct FloatVect3 V_V1V2_diff;
981  VECT3_DIFF(V_V1V2_diff, *V1, *V2);
982  return float_vect3_norm(&V_V1V2_diff);
983 }
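/*
 * Worked example (illustrative): for V1 = (1, 2, 2) and V2 = (0, 0, 0) the
 * difference vector is (1, 2, 2) and the returned distance is
 * sqrt(1 + 4 + 4) = 3.0 m.
 */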
984 
985 // Function 16a - Function to calculate the heading from the robot towards a specific waypoint
987 {
988  struct FloatVect2 VWPwt = {WaypointX(wp), WaypointY(wp)};
989  struct FloatVect2 difference;
990  float angle;
991 
992  VECT2_DIFF(difference, VWPwt, *stateGetPositionEnu_f());
993  angle = atan2f(difference.x, difference.y);
994  return angle;
995 }
996 
997 
998 // Function 16b - Function to calculate the heading from the robot towards a specific setpoint
999 float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned)
1000 {
1001  struct FloatVect2 difference;
1002  float angle;
1003 
1004  VECT2_DIFF(difference, *VSETPOINTwned, *stateGetPositionNed_f());
1005  angle = atan2f(difference.y, difference.x);
1006  return angle;
1007 }
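/*
 * Worked example (illustrative): in the world NED frame x points north and y
 * points east, so for a setpoint 1 m north and 1 m east of the drone the
 * difference is (1, 1, .) and the returned heading is atan2f(1, 1) = pi/4
 * (45 degrees, i.e. towards the north-east).
 */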
1008 
1009 
1010 // Function 17a - Function to calculate median disparity to a point (Vi) in an image (img), using a kernel structure (kernel_median)
1011 uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
1012 {
1013  // Creating start and stop coordinates in the image coordinate system, based on kernel size
1014  uint8_t VSTARTi_y = Vi->y - (kernel_median->h / 2);
1015  uint8_t VSTARTi_x = Vi->x - (kernel_median->w / 2);
1016  uint8_t VSTOPi_y = Vi->y + (kernel_median->h / 2);
1017  uint8_t VSTOPi_x = Vi->x + (kernel_median->w / 2);
1018 
1019  // In case the upper bounds of the kernel are outside of the image area
1020  // (lower bound not checked; note that the unsigned coordinate types wrap around below 0):
1021  if (VSTOPi_y > img->h) {
1022  VSTOPi_y = (img->h - 1);
1023  }
1024  if (VSTOPi_x > img->w) {
1025  VSTOPi_x = (img->w - 1);
1026  }
1027 
1028 
1029 
1030  // Declaring kernel coordinates
1031  uint16_t Vk_y;
1032  uint16_t Vk_x;
1033 
1034  // Declaring 1d indices (for result of transforming 2d coordinates into 1d coordinate)
1035  int32_t index_img;
1036  int32_t index_kernel;
1037 
1038  // Declaring variable to store median in
1039  uint8_t median;
1040 
1041 
1042  // Here we get the median value of the block around the point Vi, using a kernel structure
1043  for (uint8_t Vi_y = VSTARTi_y; Vi_y < (VSTOPi_y + 1); Vi_y++) {
1044  for (uint8_t Vi_x = VSTARTi_x; Vi_x < (VSTOPi_x + 1); Vi_x++) {
1045  // Calculating kernel coordinates
1046  Vk_y = Vi_y - VSTARTi_y;
1047  Vk_x = Vi_x - VSTARTi_x;
1048 
1049  // Converting 2d indices to 1d indices
1050  index_img = indx1d_a(Vi_y, Vi_x, img);
1051  index_kernel = indx1d_c(Vk_y, Vk_x, median_kernel.h, median_kernel.w);
1052 
1053 
1054  // Saving disparity values of image underneath the kernel, into the kernel buffer
1055  ((uint8_t *) median_kernel.buf_values)[index_kernel] = ((uint8_t *) img->buf)[index_img];
1056  }
1057  }
1058 
1059  // Calculating median disparity value of the values recorded by the kernel
1061 
1062  return median;
1063 }
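/*
 * Minimal usage sketch (illustrative; "img" stands for any 8bit disparity
 * image and the global median_kernel is assumed to have been created with
 * kernel_create() beforehand):
 *
 *   struct point_t p = {.x = img.w / 2, .y = img.h / 2};            // image centre
 *   uint8_t d = median_disparity_to_point(&p, &img, &median_kernel);
 *   // d is the median disparity of the block under the kernel, which is more
 *   // robust to single-pixel noise than reading one disparity value directly.
 */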
1064 
1065 
1066 // Function 17b - Function to calculate median depth (cm) to a point (Vi) in a 16bit image (img), using a kernel structure (kernel_median)
1067 uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
1068 {
1069  // Creating start and stop coordinates in the image coordinate system, based on kernel size
1070  uint16_t VSTARTi_y = Vi->y - (kernel_median->h / 2);
1071  uint16_t VSTARTi_x = Vi->x - (kernel_median->w / 2);
1072  uint16_t VSTOPi_y = Vi->y + (kernel_median->h / 2);
1073  uint16_t VSTOPi_x = Vi->x + (kernel_median->w / 2);
1074 
1075  // In case the upper bounds of the kernel are outside of the image area
1076  // (lower bound not checked; note that the unsigned coordinate types wrap around below 0):
1077  if (VSTOPi_y > img->h) {
1078  VSTOPi_y = (img->h - 1);
1079  }
1080  if (VSTOPi_x > img->w) {
1081  VSTOPi_x = (img->w - 1);
1082  }
1083 
1084 
1085 
1086  // Declaring kernel coordinates
1087  uint16_t Vk_y;
1088  uint16_t Vk_x;
1089 
1090  // Declaring 1d indices (for result of transforming 2d coordinates into 1d coordinate)
1091  int32_t index_img;
1092  int32_t index_kernel;
1093 
1094  // Declaring variable to store median in
1095  uint16_t median;
1096 
1097 
1098  // Here we get the median value of the block around the point Vi, using a kernel structure
1099  for (uint16_t Vi_y = VSTARTi_y; Vi_y < (VSTOPi_y + 1); Vi_y++) {
1100  for (uint16_t Vi_x = VSTARTi_x; Vi_x < (VSTOPi_x + 1); Vi_x++) {
1101  // Calculating kernel coordinates
1102  Vk_y = Vi_y - VSTARTi_y;
1103  Vk_x = Vi_x - VSTARTi_x;
1104 
1105  // Converting 2d indices to 1d indices
1106  index_img = indx1d_a(Vi_y, Vi_x, img);
1107  index_kernel = indx1d_c(Vk_y, Vk_x, median_kernel.h, median_kernel.w);
1108 
1109 
1110  // Saving depth values of the image underneath the kernel, into the kernel buffer
1111  ((uint16_t *) median_kernel.buf_values)[index_kernel] = ((uint16_t *) img->buf)[index_img];
1112 
1113  }
1114  }
1115 
1116  // Calculating median depth value of the values recorded by the kernel
1118 
1119  return median;
1120 }
1121 
1122 
1123 
1124 
1125 // Function 18a - Function to find the "best" edge (the one giving the shortest robot -> edge -> goal pathway) - Using disparity image
1126 // Returns the 3d vector to the best edge (via VEDGECOORDINATESc) and 1 if a viable edge is found, 0 otherwise.
1128  struct FloatVect3
1129  *VEDGECOORDINATESc, // Declared vector of coordinates of "best" edge detected in camera coordinate system
1130  struct FloatVect3 *VTARGETc, // Declared vector of coordinates of goal in camera coordinate system
1131  struct image_t *img_edges, // Image obtained from the external Sobel edge detection function = sobel_OCV
1132  struct image_t *img_disparity, // Image obtained after simple block matching
1133  struct crop_t *edge_search_area, // This structure holds information about the window in which edges are searched in
1134  uint8_t threshold, // Above this disparity (pixels) edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away)
1135  int16_t max_confidence // This is the max confidence that edges were found - Works like a threshold
1136 )
1137 {
1138  // Declarations
1139  struct FloatVect3
1140  VEDGEc; // A vector to save the point of an eligible detected edge point in the camera coordinate system.
1141  struct FloatVect3 VROBOTCENTERc; // A vector to hold camera center coordinates in the camera coordinate system.
1142  struct point_t
1143  VCLOSESTEDGEi; // A vector to save the point of the "best" eligible detected edge point in the image coordinate system.
1144  float f_distance_edge_to_goal; // Saves distance from edge to goal
1145  float f_distance_robot_to_edge; // Saves distance from robot to edge
1146  float distance; // This stores the distance robot -> edge -> goal. It is initialized with 255, as basically any edge found will be closer than that and will replace the 255 meters
1147  uint8_t edge_value; // Variable to store the intensity value of a pixel in the img_edge
1148  uint8_t disparity; // variable to store the disparity level of a pixel in the img_disparity
1149  uint8_t disparity_best; // Variable to store disparity level of associated best edge found (for debugging)
1150  int16_t confidence; // Confidence = number of edge pixels found
1151  int32_t indx; // Variable to store 1d index calculated from 2d index
1152 
1153  // Initializations
1154  VROBOTCENTERc.x = 0.0; VROBOTCENTERc.y = 0.0; VROBOTCENTERc.z = 0.0;
1155  VCLOSESTEDGEi.x = 0; VCLOSESTEDGEi.y = 0;
1156  distance = 255;
1157  confidence = 0;
1158  disparity_best = 0;
1159 
1160 
1161  // Loop to calculate position (in image) of the point closest to the hypothetical target - Start
1162  for (uint16_t y = edge_search_area->y; y < (edge_search_area->y + edge_search_area->h);
1163  y++) { //for (uint32_t y = edge_search_area.y; y < (edge_search_area.y + edge_search_area.h); y++)
1164  for (uint16_t x = edge_search_area->x; x < (edge_search_area->x + edge_search_area->w); x++) {
1165  indx = indx1d_a(y, x, img_edges); // We convert the 2d index [x,y] into a 1d index
1166  edge_value = ((uint8_t *) img_edges->buf)[indx]; // We save the intensity of the current point
1167  disparity = ((uint8_t *) img_disparity->buf)[indx]; // We save the disparity of the current point
1168 
1169  // Two conditions must be met for an edge to be considered a viable route for the drone:
1170  // 1) The current coordinate (x,y) must coincide with an edge pixel
1171  // (as all non-edge pixels have been set to 0) - (edge_value != 0)
1172  // 2) The disparity of the current coordinate (x, y) must be above a certain threshold. This simulates the vision cone - (disparity > threshold)
1173  if ((edge_value != 0) && (disparity > threshold)) {
1174  // We increase the confidence for every edge found
1175  confidence++;
1176  Bound(confidence, 0, max_confidence);
1177 
1178  // We determine the offset from the principal point
1179  int32_t y_from_c = y - c_img_cropped.y; // NOTE. The variable "c_img_cropped" is a global variable
1180  int32_t x_from_c = x - c_img_cropped.x; // NOTE. The variable "c_img_cropped" is a global variable
1181  // We derive the 3d scene point using the disparity saved earlier
1182  Vi_to_Vc(&VEDGEc, y_from_c, x_from_c, disparity, b, f); // NOTE. The variables "b" and "f" are global variables
1183  // Calculating Euclidean distance (N2) - Edge to goal
1184  f_distance_edge_to_goal = float_vect3_norm_two_points(VTARGETc, &VEDGEc);
1185  // Calculating Euclidean distance (N2) - robot to edge
1186  f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGEc, &VROBOTCENTERc);
1187 
1188 
1189  // If current distance (using distance vector) is smaller than the previous minimum distance
1190  // measure then save new distance and point coordinates associated with it
1191  if ((f_distance_robot_to_edge + f_distance_edge_to_goal) < distance) {
1192  // Saving closest edge point, in camera coordinate system
1193  VEDGECOORDINATESc->x = VEDGEc.x;
1194  VEDGECOORDINATESc->y = VEDGEc.y;
1195  VEDGECOORDINATESc->z = VEDGEc.z;
1196  // Saving disparity at point
1197  disparity_best = disparity;
1198  // Saving smallest distance
1199  distance = (f_distance_robot_to_edge + f_distance_edge_to_goal);
1200  // Saving closest edge point, in image coordinate system
1201  VCLOSESTEDGEi.y = y;
1202  VCLOSESTEDGEi.x = x;
1203 
1204  }
1205 
1206  //printf("x image = %d\n", x);
1207  //printf("y image = %d\n", y);
1208  //printf("x image from c = %d\n", x_from_c);
1209  //printf("y image from c = %d\n", y_from_c);
1210  //printf("d = %d\n", disparity);
1211  //printf("X scene from c = %f\n", scene_point.X);
1212  //printf("Y scene from c = %f\n", scene_point.Y);
1213  //printf("Z scene from c = %f\n", scene_point.Z);
1214  //printf("Closest edge [y,x] = [%d, %d]\n", (int)closest_edge.y, (int)closest_edge.x);
1215  //printf("Robot center coordinates = [%f, %f, %f]\n", VROBOTCENTERc.x, VROBOTCENTERc.y, VROBOTCENTERc.z);
1216  //printf("Edge coordinates = [%f, %f, %f]\n", VEDGEc.x, VEDGEc.y, VEDGEc.z);
1217  //printf("Distance to goal (m) = %f\n", distance);
1218  //printf("Distance to goal2 (m) = %f + %f\n\n", f_distance_robot_to_edge, f_distance_edge_to_goal);
1219  }
1220  }
1221  }
1222 
1223  if (confidence == max_confidence) {
1224  ((uint8_t *) img_edges->buf)[indx1d_a(VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, img_edges)] = 255;
1225  printf("Viable closest edge found: [%d, %d] (disparity = %d)\n", VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, disparity_best);
1226 
1227  printf("At distance: %f\n", distance);
1228  confidence = 0;
1229  return 1;
1230  } else {
1231  printf("No viable edge found\n");
1232  confidence = 0;
1233  return 0;
1234  }
1235 }
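/*
 * In short: every edge pixel that passes the disparity threshold is projected
 * into the camera frame and scored with
 *   cost = ||VEDGEc - VROBOTCENTERc|| + ||VEDGEc - VTARGETc||,
 * i.e. the length of the detour robot -> edge -> goal; the edge with the
 * smallest cost is kept. The confidence counter requires that at least
 * max_confidence eligible edge pixels were seen before the result is trusted.
 */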
1236 
1237 
1238 
1239 // Function 18b - Function to find the "best" edge (the one giving the shortest robot -> edge -> goal pathway) - Using depth image
1240 // Returns the 3d vector to the best edge (via VEDGECOORDINATESc) and 1 if a viable edge is found, 0 otherwise.
1242  struct FloatVect3
1243  *VEDGECOORDINATESc, // Declared vector of coordinates of "best" edge detected in camera coordinate system
1244  struct FloatVect3 *VTARGETc, // Declared vector of coordinates of goal in camera coordinate system
1245  struct image_t *img_edges, // Image obtained from the external Sobel edge detection function = sobel_OCV
1246  struct image_t *img_depth, // Image holding depth values (cm) obtained from the disparity image
1247  struct crop_t *edge_search_area, // This structure holds information about the window in which edges are searched in
1248  uint16_t threshold, // Below this depth (cm) edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away)
1249  int16_t max_confidence // This is the max confidence that edges were found - Works like a threshold
1250 )
1251 {
1252  // Declarations
1253  struct FloatVect3
1254  VEDGEc; // A vector to save the point of an eligible detected edge point in the camera coordinate system.
1255  struct FloatVect3 VROBOTCENTERc; // A vector to hold camera center coordinates in the camera coordinate system.
1256  struct point_t
1257  VCLOSESTEDGEi; // A vector to save the point of the "best" eligible detected edge point in the image coordinate system.
1258  float f_distance_edge_to_goal; // Saves distance from edge to goal
1259  float f_distance_robot_to_edge; // Saves distance from robot to edge
1260  float distance; // This stores the distance robot -> edge -> goal. It is initialized with 255, as basically any edge found will be closer than that and will replace the 255 meters
1261  uint8_t edge_value; // Variable to store the intensity value of a pixel in the img_edge
1262  uint16_t depth; // Variable to store the depth (in cm) of a pixel in the img_disparity
1263  uint16_t depth_best; // Variable to store depth (cm) level of associated best edge found (for debugging)
1264  int16_t confidence; // Confidence = number of edge pixels found
1265  int32_t indx; // Variable to store 1d index calculated from 2d index
1266 
1267  // Initializations
1268  VROBOTCENTERc.x = 0.0; VROBOTCENTERc.y = 0.0; VROBOTCENTERc.z = 0.0;
1269  VCLOSESTEDGEi.x = 0; VCLOSESTEDGEi.y = 0;
1270  distance = 255;
1271  confidence = 0;
1272  depth_best = 0;
1273 
1274 
1275  // Loop to calculate position (in image) of the point closest to the hypothetical target - Start
1276  for (uint16_t y = edge_search_area->y; y < (edge_search_area->y + edge_search_area->h);
1277  y++) { //for (uint32_t y = edge_search_area.y; y < (edge_search_area.y + edge_search_area.h); y++)
1278  for (uint16_t x = edge_search_area->x; x < (edge_search_area->x + edge_search_area->w); x++) {
1279  indx = indx1d_a(y, x, img_edges); // We convert the 2d index [x,y] into a 1d index
1280  edge_value = ((uint8_t *) img_edges->buf)[indx]; // We save the intensity of the current point
1281  depth = ((uint16_t *) img_depth->buf)[indx];
1282 
1283  // Two conditions must be met for an edge to be considered a viable route for the drone:
1284  // 1) The current coordinate (x,y) must coincide with an edge pixel
1285  // (as all non-edge pixels have been set to 0) - (edge_value != 0)
1286  // 2) The depth of the current coordinate (x, y) must be below a certain threshold. This simulates the vision cone - (depth < threshold)
1287  if ((edge_value != 0) && (depth < threshold)) {
1288  // We increase the confidence for every edge found
1289  confidence++;
1290  Bound(confidence, 0, max_confidence);
1291 
1292  // We determine the offset from the principal point
1293  int32_t y_from_c = y - c_img_cropped.y; // NOTE. The variable "c_img_cropped" is a global variable
1294  int32_t x_from_c = x - c_img_cropped.x; // NOTE. The variable "c_img_cropped" is a global variable
1295  // We derive the 3d scene point using the depth from the depth image (img_depth). Note depth is in cm, but function takes m. Thus, we convert
1296  Vi_to_Vc_depth(&VEDGEc, y_from_c, x_from_c, (depth / 100.00),
1297  f); // NOTE. The variable "f" is a global variable
1298 
1299  // Calculating Euclidean distance (N2) - Edge to goal
1300  f_distance_edge_to_goal = float_vect3_norm_two_points(VTARGETc, &VEDGEc);
1301  // Calculating Euclidean distance (N2) - robot to edge
1302  f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGEc, &VROBOTCENTERc);
1303 
1304 
1305  // If current distance (using distance vector) is smaller than the previous minimum distance
1306  // measure then save new distance and point coordinates associated with it
1307  if ((f_distance_robot_to_edge + f_distance_edge_to_goal) < distance) {
1308  // Saving closest edge point, in camera coordinate system
1309  VEDGECOORDINATESc->x = VEDGEc.x;
1310  VEDGECOORDINATESc->y = VEDGEc.y;
1311  VEDGECOORDINATESc->z = VEDGEc.z;
1312  // Saving depth at point
1313  depth_best = depth;
1314  // Saving smallest distance
1315  distance = (f_distance_robot_to_edge + f_distance_edge_to_goal);
1316  // Saving closest edge point, in image coordinate system
1317  VCLOSESTEDGEi.y = y;
1318  VCLOSESTEDGEi.x = x;
1319 
1320  }
1321 
1322  //printf("x image = %d\n", x);
1323  //printf("y image = %d\n", y);
1324  //printf("x image from c = %d\n", x_from_c);
1325  //printf("y image from c = %d\n", y_from_c);
1326  //printf("d = %d\n", disparity);
1327  //printf("X scene from c = %f\n", scene_point.X);
1328  //printf("Y scene from c = %f\n", scene_point.Y);
1329  //printf("Z scene from c = %f\n", scene_point.Z);
1330  //printf("Closest edge [y,x] = [%d, %d]\n", (int)closest_edge.y, (int)closest_edge.x);
1331  //printf("Robot center coordinates = [%f, %f, %f]\n", VROBOTCENTERc.x, VROBOTCENTERc.y, VROBOTCENTERc.z);
1332  //printf("Edge coordinates = [%f, %f, %f]\n", VEDGEc.x, VEDGEc.y, VEDGEc.z);
1333  //printf("Distance to goal (m) = %f\n", distance);
1334  //printf("Distance to goal2 (m) = %f + %f\n\n", f_distance_robot_to_edge, f_distance_edge_to_goal);
1335  }
1336  }
1337  }
1338 
1339  if (confidence == max_confidence) {
1340  ((uint8_t *) img_edges->buf)[indx1d_a(VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, img_edges)] = 255;
1341  printf("Viable closest edge found: [%d, %d] (depth = %f)\n", VCLOSESTEDGEi.y, VCLOSESTEDGEi.x, (depth_best / 100.00));
1342 
1343  printf("At distance: %f\n", distance);
1344  confidence = 0;
1345  return 1;
1346  } else {
1347  printf("No viable edge found\n");
1348  confidence = 0;
1349  return 0;
1350  }
1351 }
1352 
1353 
1354 
1355 // Function 19 - Function to determine if setpoint was reached (it was reached if distance is below a set threshold)
1356 uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold)
1357 {
1358  if (float_vect3_norm_two_points(VGOAL, VCURRENTPOSITION) < threshold) {return 1;}
1359  else {return 0;}
1360 }
1361 
1362 
1363 // Function 20 - Function to obtain the Euclidean distance between two angles (in radians)
1364 float float_norm_two_angles(float target_angle, float current_angle)
1365 {
1366  float target_current_diff;
1367  target_current_diff = target_angle - current_angle;
1368  //printf("Target angle = %f\n", target_angle);
1369  //printf("Current angle = %f\n", current_angle);
1370  //printf("difference in function = %f\n", sqrt(pow(target_current_diff, 2)));
1371  return sqrt(pow(target_current_diff, 2));
1372 }
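/*
 * Worked example (illustrative): for target_angle = 1.0 rad and
 * current_angle = -0.5 rad the function returns sqrt((1.5)^2) = 1.5 rad,
 * which is simply the absolute difference between the two angles. Note that
 * the difference is not wrapped to [-pi, pi] here.
 */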
1373 
1374 // Function 21 - Function to determine if set heading was reached (it was reached if the Euclidean distance is below a set threshold)
1375 uint8_t is_heading_reached(float target_angle, float current_angle, float threshold)
1376 {
1377  if (float_norm_two_angles(target_angle, current_angle) < threshold) {return 1;}
1378  else {return 0;}
1379 }
1380 
1381 // Function 22 - Function to determine if both the setpoint and the target heading were reached
1382 uint8_t are_setpoint_and_angle_reached(
1383  struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold_setpoint,
1384  float target_angle, float current_angle, float threshold_angle)
1385 {
1386  if ((float_vect3_norm_two_points(VGOAL, VCURRENTPOSITION) < threshold_setpoint)
1387  && (float_norm_two_angles(target_angle, current_angle) < threshold_angle)) {return 1;}
1388  else {return 0;}
1389 }
1390 
1391 
1392 // Function 23 - Function to convert a disparity image (8bit pixels, or 16bit fixed-point values as output by OpenCV) into 16bit depth values (cm)
1393 void disp_to_depth_img(struct image_t *img_input, struct image_t *img16bit_output)
1394 {
1395  //int n = 89;
1396  float disparity = 1;
1397 
1398  //printf("C location %d = %d\n", n, ((int16_t*)img16bit_input->buf)[n]);
1399 
1400 
1401  // Converting disparity values into depth (cm)
1402  for (int32_t i = 0; i < (img_input->h * img_input->w); i++) {
1403 
1404  if (img_input->type == IMAGE_GRAYSCALE) {
1405  disparity = disp_to_depth(((uint8_t *)img_input->buf)[i], b, f);
1406  } else if (img_input->type == IMAGE_INT16) {
1407  disparity = disp_to_depth_16bit(((uint16_t *)img_input->buf)[i], b, f);
1408  } else {
1409  printf("ERROR: function does not support image type %d. Breaking out of function.", img_input->type);
1410  }
1411 
1412 
1413  /*
1414  if(i == n)
1415  {
1416  printf("Depth in meters at %d = %f\n", n, disparity);
1417  }*/
1418 
1419  disparity = disparity * 100;
1420 
1421  /*if(i == n)
1422  {
1423  printf("Depth in cm at %d = %f\n", n, disparity);
1424  }*/
1425 
1426 
1427  ((uint16_t *)img16bit_output->buf)[i] = round(disparity);
1428  }
1429  //printf("Depth in cm at %d = %d\n", n, ((int16_t*)img16bit_output->buf)[n]);
1430 }
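/*
 * Worked example (illustrative numbers): an 8bit disparity of 9 px with
 * b = 0.06 m and f = 300 px gives a depth of 2.0 m, which is stored as
 * round(200) = 200 cm in the 16bit output image; a 16bit fixed-point input of
 * 144 (i.e. 144 / 16 = 9 px) yields the same result.
 */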
1431 
1432 // Function 24 - Function that encapsulates all of the background processes. Originally this was in the periodic function,
1433 // but since it is used in the direct and the guidance navigation modes of the state machine, I made it a callable function
1434 // for convenience
1436 {
1437  //Background processes
1438  // 1. Converting left and right image to 8bit grayscale for further processing
1439  image_to_grayscale(&img_left, &img_left_int8); // Converting left image from UYVY to gray scale for saving function
1440  image_to_grayscale(&img_right, &img_right_int8); // Converting right image from UYVY to gray scale for saving function
1441 
1442  // 2. Deriving disparity map from block matching (left image is reference image)
1444  1);// Creating cropped disparity map image
1445  // For report: creating image for saving 1
1446  if (save_images_flag) {save_image_HM(&img_disparity_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img1_post_SBM.bmp", heat_map_type);}
1447 
1448  /*
1449  // Optional thresholding of disparity map
1450  uint8_t thresh = 1;
1451  for (int32_t i = 0; i < (img_disparity_int8_cropped.h*img_disparity_int8_cropped.w); i++)
1452  {
1453  uint8_t disparity = ((uint8_t*)img_disparity_int8_cropped)[i];
1454  if(disparity < thresh)
1455  {
1456  ((uint8_t*)img_disparity_int8_cropped)[i] = 0; // if below disparity assume object is indefinately away
1457  }
1458  }*/
1459 
1460  // 3. Morphological operations 1
1461  // Needed to smooth object boundaries and to remove noise
1463  // For report: creating image for saving 2
1464  if (save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img2_post_opening_8bit.bmp", heat_map_type);}
1465 
1467  // For report: creating image for saving 3
1468  if (save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img3_post_closing_8bit.bmp", heat_map_type);}
1469 
1471  // For report: creating image for saving 4
1472  if (save_images_flag) {save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img4_post_dilation_8bit.bmp", heat_map_type);}
1473 
1474  // 4. Depth image
1476  // For report: creating image for saving 4
1477  if (save_images_flag) {save_image_HM(&img_depth_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img5_post_depth_16bit.bmp", heat_map_type);}
1478 
1479  // 5. Sobel edge detection
1481  // For report: creating image for saving 5
1482  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b_img6_post_sobel_8bit.bmp");}
1483 }
1484 
1485 
1486 // Function 25 - Function that encapsulates all of the background processes (16bit disparity pipeline). Originally this was in the periodic function,
1487 // but since it is used in the direct and the guidance navigation modes of the state machine, I made it a callable function
1488 // for convenience
1490 {
1491  //Background processes
1492  // 1. Converting left and right image to 8bit grayscale for further processing
1493  image_to_grayscale(&img_left, &img_left_int8); // Converting left image from UYVY to gray scale for saving function
1494  image_to_grayscale(&img_right, &img_right_int8); // Converting right image from UYVY to gray scale for saving function
1495 
1496  // 2. Deriving disparity map from block matching (left image is reference image)
1498  1);// Creating cropped disparity map image
1499  // For report: creating image for saving 1
1500  if (save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img1_post_SBM_16bit.bmp", heat_map_type);}
1501 
1502  //printf("maximum_intensity = %d\n", maximum_intensity(&img_disparity_int16_cropped));
1503 
1504  /*
1505  // Optional thresholding of disparity map
1506  uint8_t thresh = 1;
1507  for (int32_t i = 0; i < (img_disparity_int8_cropped.h*img_disparity_int8_cropped.w); i++)
1508  {
1509  uint8_t disparity = ((uint8_t*)img_disparity_int8_cropped.buf)[i];
1510  if(disparity < thresh)
1511  {
1512  ((uint8_t*)img_disparity_int8_cropped.buf)[i] = 0; // if below the threshold, assume the object is indefinitely far away
1513  }
1514  }*/
1515 
1516  // 3. Morphological operations 1
1517  // Needed to smooth object boundaries and to remove noise
1518 
1520  // For report: creating image for saving 3
1521  if (save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img2_post_closing_16bit.bmp", heat_map_type);}
1522 
1523 
1525  // For report: creating image for saving 2
1526  if (save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img3_post_opening_16bit.bmp", heat_map_type);}
1527 
1528 
1529 
1531  // For report: creating image for saving 4
1532  if (save_images_flag) {save_image_HM(&img_disparity_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img4_post_dilation_16bit.bmp", heat_map_type);}
1533 
1534 
1535  // 4. Depth image
1537  // For report: creating image for saving 4
1538  if (save_images_flag) {save_image_HM(&img_depth_int16_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img5_post_depth_16bit.bmp", heat_map_type);}
1539 
1540 
1541  // 5. Sobel edge detection
1543  // For report: creating image for saving 5
1544  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/for_report/b2_img6_post_sobel_8bit.bmp");}
1545 
1546  // 6. Morphological operations 2
1547  // This is needed so that when using the edges as filters (to work on depth values
1548  // only found on edges) the underlying depth values are those of the foreground
1549  // and not the background
1551 }
1552 
1553 
1554 
1555 
1556 
1557 
1558 /*
1559  * a: Coordinate system a (i.e. the coordinate frame a)
1560  * b: Coordinate system b (i.e. the coordinate frame b)
1561  * w: World coordinate system (i.e. the world coordinate frame; x is depth, y is left/right, z is altitude)
1562  * r: Robot coordinate system (i.e. the robot coordinate frame; x is depth, y is left/right, z is altitude)
1563  * c: Camera coordinate system (i.e. the camera coordinate frame; x is left/right, y is altitude, z is depth)
1564  * i: Image coordinate system (i.e. the image (plane) coordinate frame)
1565  * k: Kernel coordinate system (i.e. the kernel coordinate frame)
1566  *
1567  * Va: Vector coordinates, in the coordinate system a (i.e. a point in the coordinate system a)
1568  * Vb: Vector coordinates, in the coordinate system b (i.e. a point in the coordinate system b)
1569  * Vwned: Vector coordinates, in the world coordinate system (i.e. a point in the world coordinate system) -NED
1570  * Vwenu: Vector coordinates, in the world coordinate system two (i.e. a point in the world coordinate system two) - ENU
1571  * Vr: Vector coordinates, in the robot coordinate system (i.e. a point in the robot coordinate system)
1572  * Vc: Vector coordinates, in the camera coordinate system (i.e. a point in the camera coordinate system)
1573  * Vi: Vector coordinates, in the image coordinate system (i.e. a point in the image [plane] coordinates system)
1574  * Vk: Vector coordinates, in the kernel coordinate system (i.e. a point in the kernel coordinates system)
1575  *
1576  * R: Rotation matrix
1577  *
1578  * Rba: Rotation matrix of coordinate system a expressed in the coordinate system b
1579  * Rrw: Rotation matrix of world coordinate system expressed in the robot coordinate system
1580  * Rcr: Rotation matrix of robot coordinate system expressed in the camera coordinate system
1581  *
1582  * VRw: Vector coordinates of robot in the world coordinate system
1583  * VCr: Vector coordinates of camera in the robot coordinate system
1584  * VOa: Vector coordinates of the object (has its own object frame) in the coordinate system a
1585  */
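/*
 * Notation example (an illustrative sketch, not part of the original file): with one common
 * convention, a point Va expressed in frame a is expressed in frame b as Vb = Rba * Va + Tba,
 * where Tba is the origin of frame a given in frame b. For the ENU/NED world frames set up in
 * wedgebug_init() below (VNEDwenu is the zero vector), this reduces to the axis swap
 *
 *   Vwned = Rwnedwenu * Vwenu = (Vwenu.y, Vwenu.x, -Vwenu.z)
 */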
1586 
1587 
1588 
1589 
1590 
1591 // New section: Init and periodic functions ----------------------------------------------------------------------------------------------------------------
1593 {
1594  //printf("Wedgebug init function was called\n");
1595 
1596 
1597  // Images creation process:
1598  // Creation of images
1599  // Creating structure to hold dimensions of normal camera images
1600  img_dims.w = WEDGEBUG_CAMERA_LEFT_WIDTH; img_dims.h = WEDGEBUG_CAMERA_LEFT_HEIGHT;
1601 
1602 
1603  image_create(&img_left, img_dims.w, img_dims.h, IMAGE_YUV422); // To store left camera image
1604  image_create(&img_right, img_dims.w, img_dims.h, IMAGE_YUV422);// To store right camera image
1605  image_create(&img_left_int8, img_dims.w, img_dims.h, IMAGE_GRAYSCALE); // To store gray scale version of left image
1606  image_create(&img_right_int8, img_dims.w, img_dims.h, IMAGE_GRAYSCALE); // To store gray scale version of right image
1607 
1608 
1609  // Creation of images - Cropped:
1610  // Calculating cropped image details (x, y, width and height)
1612 
1613  // Creating structure to hold dimensions of cropped camera images
1615 
1616  printf("img_cropped_info [w, h, x, y] = [%d, %d, %d, %d]\n", img_cropped_info.w, img_cropped_info.h, img_cropped_info.x,
1618 
1619  // Creating empty images - cropped - 8bit
1621  IMAGE_GRAYSCALE);// To store cropped depth - 8 bit
1623  IMAGE_GRAYSCALE);// To store cropped depth - 8 bit
1625  IMAGE_GRAYSCALE); // To store intermediate image data from processing - 8 bit
1627  IMAGE_GRAYSCALE); // To store edges image data from processing - 8 bit
1628 
1629  // Creating empty images - cropped - 16bit
1631  IMAGE_INT16);// To store cropped disparity - 16 bit
1633  IMAGE_INT16);// To store cropped depth - 16 bit
1635  IMAGE_INT16);// To store cropped middle image - 16 bit
1636 
1637 
1638 
1639 
1640 
1641  // Creation of kernels:
1642  // Creating structure to hold dimensions of kernels
1643  K_median_w = 43;
1644  K_median_h = 43;
1645 
1647  // Creating empty kernel:
1650 
1651  printf("median_kernel16bit [buf_size, h, w, type] = [%d, %d, %d, %d]\n", median_kernel16bit.buf_size,
1653 
1654 
1655 
1656 
1657  // Adding callback functions
1660 
1661 
1662  //Initialization of constant rotation matrices and transition vectors for frame to frame transformations
1663  // 1) Rotation matrix and transition vector to transform from world ENU frame to world NED frame
1664  Rwnedwenu.m[0] = 0; Rwnedwenu.m[1] = 1; Rwnedwenu.m[2] = 0;
1665  Rwnedwenu.m[3] = 1; Rwnedwenu.m[4] = 0; Rwnedwenu.m[5] = 0;
1666  Rwnedwenu.m[6] = 0; Rwnedwenu.m[7] = 0; Rwnedwenu.m[8] = -1;
1667  VNEDwenu.x = 0;
1668  VNEDwenu.y = 0;
1669  VNEDwenu.z = 0;
1670  // 2) Rotation matrix and transition vector to transform from robot frame to camera frame
1671  Rcr.m[0] = 0; Rcr.m[1] = 1; Rcr.m[2] = 0;
1672  Rcr.m[3] = 0; Rcr.m[4] = 0; Rcr.m[5] = 1;
1673  Rcr.m[6] = 1; Rcr.m[7] = 0; Rcr.m[8] = 0;
1674  VCr.x = 0;// i.e. the camera is x meters away from the robot center
1675  VCr.y = 0;
1676  VCr.z = 0;
1677 
1678  // Initializing goal vector in the NED world coordinate system
1679  VGOALwenu.x = WaypointX(WP_GOAL);
1680  VGOALwenu.y = WaypointY(WP_GOAL);
1681  VGOALwenu.z = WaypointAlt(WP_GOAL);
1683 
1684 
1685  // Initializing start vector in the NED world coordinate system
1686  VSTARTwenu.x = WaypointX(WP_START);
1687  VSTARTwenu.y = WaypointY(WP_START);
1688  VSTARTwenu.z = WaypointAlt(WP_START);
1690 
1691 
1692  // Calculating principal points of normal image and cropped image
1693  c_img.y = img_dims.h / 2;
1694  c_img.x = img_dims.w / 2;
1696  &img_cropped_info); // Calculates principal points for cropped image, considering the original dimensions
1697 
1698 
1699  // Initializing structuring element sizes
1700  SE_opening_OCV = 13; // SE size for the opening operation
1701  SE_closing_OCV = 13; // SE size for the closing operation
1703  51;//25;//51;//301;// SE size for the first dilation operation (decides where edges are detected; increase to enlarge the drone safety zone. NOTE: this functionality should be replaced with C-space expansion)
1705  11; // SE size for the second dilation operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE")
1706  SE_sobel_OCV = 5; // SE size for the sobel operation, to detect edges
1707  SE_erosion_OCV =
1708  11; // SE size for the erosion operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE", its needed to "drag" the depth of the foreground objects over the edges detected)
1709 
1710  // Setting thresholds - non-calculated
1712  700;
1713  threshold_depth_of_edges = 770;
1715  5000; // Edges with a magnitude above this value are detected and given the value 127; all others are set to zero.
1716  threshold_distance_to_goal = 0.25; // Below this threshold (m), the goal is considered reached
1718  1.0;//Below this threshold (m), the goal is considered reached in DIRECT_CONTROL mode
1719  threshold_distance_to_angle = 0.0004; // Below this threshold (radians), the angle/heading is considered reached
1720 
1721  // Setting thresholds - calculated
1722  //threshold_median_disparity = depth_to_disp((threshold_median_depth / 100.00), b, f); //8// Above this median disparity, an obstacle is considered to block the way. >60 = close than 35cm
1723  //threshold_median_depth = (uint16_t) disp_to_depth(threshold_median_disparity, b, f) * 100;
1724  //threshold_disparity_of_edges = depth_to_disp((threshold_depth_of_edges / 100.00), b, f); //5// Above this underlying disparity value, edges are considered eligible for detection
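 /*
  * Relation behind these commented-out calculated thresholds (a sketch, assuming the same
  * pinhole stereo model as disp_to_depth()/depth_to_disp()):
  *
  *   disparity [px] = (b * f) / depth [m]
  *
  * e.g. a 7.00 m median-depth threshold would correspond to depth_to_disp(7.00, b, f).
  * Closer obstacles produce larger disparities, which is why the obstacle checks below test
  * whether the measured median depth falls below threshold_median_depth.
  */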
1725 
1726  // Initializing confidence parameters
1727  obstacle_confidence = 0; // This is the confidence that an obstacle was spotted
1728  free_path_confidence = 0; // This is the confidence that no obstacle was spotted
1729  position_confidence = 0; // This is the confidence that the desired position was reached
1730  heading_confidence = 0; // This is the confidence that the desired heading is reached
1731  //edge_found_micro_confidence = 0; // This is the confidence that an edge was found
1732  edge_found_macro_confidence = 0; // This is the confidence that an edge was found
1733  no_edge_found_confidence = 0; // This is the confidence that no edge was found
1734  max_obstacle_confidence = 10; // This is the max confidence that an obstacle was spotted
1735  max_free_path_confidence = 5; // This is the max confidence that an obstacle was not spotted
1736  max_position_confidence = 30; // This is the max confidence that a specific position was reached
1737  max_heading_confidence = 5; // This is the max confidence that a specific heading was reached
1738  max_edge_found_micro_confidence = 1;//50 // This is the max confidence that edges were found
1740  1; // This is the max confidence that edges were found (can be higher if the angular change is slower or the speed of the periodic function is increased)
1741  max_no_edge_found_confidence = 10; // This is the max confidence that no edges were found
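 /*
  * All of the confidence counters above follow the same debouncing pattern (illustrative
  * sketch only; the concrete checks live in the state machine in the periodic function):
  *
  *   if (measurement_supports_event) { confidence++; } else { confidence = 0; }
  *   if (confidence >= max_confidence) {
  *     event_flag = 1;   // e.g. is_obstacle_detected_flag
  *     confidence = 0;   // reset, so the next event must be re-confirmed
  *   }
  *
  * This filters out single-frame noise in the depth and edge measurements.
  */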
1742 
1743  // Initializing boolean flags
1744  is_start_reached_flag = 0; // Set to 1 if start position is reached, 0 otherwise
1745  is_setpoint_reached_flag = 0; // Set to 1 if setpoint is reached, 0 otherwise
1746  is_obstacle_detected_flag = 0; // Set to 1 if obstacle is detected, 0 otherwise
1747  is_path_free_flag = 0; // Set to 1 if no obstacle is detected, 0 otherwise
1748  is_heading_reached_flag = 0; // Set to 1 if heading is reached, 0 otherwise
1749  is_edge_found_macro_flag = 0; // Set to 1 if best edge (according to macro confidence) was found, 0 otherwise
1750  is_edge_found_micro_flag = 0; // Set to 1 if best edge (according to micro confidence) was found, 0 otherwise
1751  is_no_edge_found_flag = 0; // Set to 1 if no edge was identified, 0 otherwise
1752  is_state_changed_flag = 0; // Set to 1 if state was changed, 0 otherwise
1754  initial_heading.is_left_reached_flag = 0; // The scan has not reached the left maximum angle yet
1755  initial_heading.is_right_reached_flag = 0;// The scan has not reached the right maximum angle yet
1756 
1759 
1760  save_images_flag = 1; // For report: Flag to indicate if images should be saved
1761 
1762 
1763 
1764  // Initializing area over which edges are searched in
1765 
1766  // p3DWedgeBug:
1767  edge_search_area.y = 0;
1769  edge_search_area.x = 0;
1771 
1772 // // p2DWedgeBug:
1773 // edge_search_area.y = 216/2;
1774 // edge_search_area.h = 1;
1775 // edge_search_area.x = 0;
1776 // edge_search_area.w = img_disparity_int8_cropped.w;
1777 
1778 
1779  // Initializing Edge scan structure
1780  initial_heading.initiated = 0; // 0 = it can be overwritten
1781 
1782 
1783 
1784  // Initializing debugging options
1785 
1786  allow_state_change_MOVE_TO_GOAL = 1; // Allows state change from within state "MOVE_TO_GOAL"
1787  allow_state_change_POSITION_GOAL = 1; // Allows state change from within state "POSITION_GOAL"
1788  allow_state_change_WEDGEBUG_START = 1; // Allows state change from within state "WEDGEBUG_START"
1789  allow_state_change_MOVE_TO_EDGE = 1; // Allows state change from within state "MOVE_TO_EDGE"
1790  allow_state_change_POSITION_EDGE = 1; // Allows state change from within state "POSITION_EDGE"
1791  allow_state_change_EDGE_SCAN = 1; // Allows state change from within state "EDGE_SCAN"
1792 
1793  // Other initializations
1794  previous_state = 0; // Variable for state machine memory
1795  previous_mode = 0;
1797  VSTARTwned.x; // Initializing a vector to hold the current position, which is needed for calculating the distance traveled
1801  0; // Clock to measure total time (clock cycles) it took for the robot to fly from start to goal
1802  clock_total_time_current = 0; // Clock to hold time measured at start of the current cycle of the periodic function
1803  clock_total_time_previous = 0; // Clock to hold time measured at start of the previous cycle of the periodic function
1804  distance_traveled = 0; // Variable to hold the distance traveled of the robot (since start and up to the goal)
1805  number_of_states = NUMBER_OF_STATES; // Variable to save the total number of states used in the finite state machine
1807  99999; // Variable to hold distance from robot to edge to goal (used in EDGE_SCAN (9) state) - initialized with unreasonably high number
1809  clock_FSM = 0;
1810  heat_map_type = 2; // Heat map used when saving image
1811 
1812  /*
1813  * Heat maps:
1814  * 0 = COLORMAP_AUTUMN
1815  * 1 = COLORMAP_BONE
1816  * 2 = COLORMAP_JET
1817  * 3 = COLORMAP_WINTER
1818  * 4 = COLORMAP_RAINBOW
1819  * 5 = COLORMAP_OCEAN
1820  * 6 = COLORMAP_SUMMER
1821  * 7 = COLORMAP_SPRING
1822  * 8 = COLORMAP_COOL
1823  * 9 = COLORMAP_HSV
1824  * 10 = COLORMAP_PINK
1825  * 11 = COLORMAP_HOT
1826  */
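 /*
  * Note (assumption): these indices appear to mirror OpenCV's cv::ColormapTypes, so
  * save_image_HM() presumably maps heat_map_type = 2 to cv::COLORMAP_JET via
  * cv::applyColorMap() when writing the report images to disk.
  */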
1827 
1828  /*
1829  enum navigation_state {
1830  MOVE_TO_GOAL = 1,
1831  POSITION_GOAL = 2,
1832  WEDGEBUG_START = 3,
1833  MOVE_TO_EDGE = 4,
1834  POSITION_EDGE = 5,
1835  EDGE_SCAN = 6
1836 
1837  };*/
1838 
1839 }
1840 
1841 
1842 
1843 
1844 
1846 {
1847  // your periodic code here.
1848  // freq = 15.0 Hz
1849  //printf("Wedgebug periodic function was called\n");
1850 
1851 
1852  /*
1853  enum control_mode_state {
1854  DIRECT_CONTROL = 1,
1855  AUTONOMOUS_GUIDED = 2,
1856  AUTONOMOUS_NAV = 3
1857  };
1858  */
1859 
1860  // Setting control mode state (needed as a switch-case statement cannot use an OR condition, so that
1861  // AP_MODE_ATTITUDE_DIRECT and AP_MODE_ATTITUDE_Z_HOLD lead to the same outcome). Hence I use my
1862  // own states here
1863  switch (autopilot_get_mode()) {
1866  case (AP_MODE_GUIDED): {current_mode = AUTONOMOUS_GUIDED;} break;
1867  case (AP_MODE_NAV): {current_mode = AUTONOMOUS_NAV;} break;
1868  default: {printf("Unsupported control mode");} break;
1869  }
1870 
1871  printf("threshold_median_depth (m) = %f\n", threshold_median_depth / 100.00);
1872 
1873  // Debugging - setting default state
1874  //set_state(MOVE_TO_GOAL ,1);
1875  //printf("Current control mode %d\n", current_mode);
1876  //printf("Current state %d\n", current_state);
1877 
1878 
1879 
1880  // Cycle-dependent initializations
1881  //Initialization of dynamic rotation matrices and transition vectors for frame to frame transformations
1882  // Rotation matrix and transition vector to transform from world ned frame to robot frame
1887 
1888  // Initialization of robot heading
1890 
1891  // Initialization of robot location/orientation-dependent variables
1892  // Converting goal world NED coordinates into Camera coordinates
1894  Va_to_Vb(&VGOALc, &VGOALr, &Rcr, &VCr);
1895 
1896 
1897  // Checking if the mode was changed; if yes, the is_mode_changed_flag
1898  // is set to 1 for this cycle only. Else, the is_mode_changed_flag is set to 0 again.
1899 
1900  if (current_mode != previous_mode) {
1901  // Setting flag signifying that the state was changed
1903 
1904  } else if (is_mode_changed_flag == 1) {
1906  }
1907 
1908  if (is_mode_changed_flag == 1) {printf("Mode was changed!\n");}
1909  printf("is_mode_changed_flag = %d\n", is_mode_changed_flag);
1910 
1912 
1913 
1914 
1915 
1916  /*
1917  enum control_mode_state {
1918  DIRECT_CONTROL = 1,
1919  AUTONOMOUS_GUIDED = 2,
1920  AUTONOMOUS_NAV = 3
1921  };
1922  */
1923 
1924  /*
1925  enum navigation_state {
1926  MOVE_TO_GOAL = 1,
1927  POSITION_GOAL = 2,
1928  WEDGEBUG_START = 3,
1929  MOVE_TO_EDGE = 4,
1930  POSITION_EDGE = 5,
1931  EDGE_SCAN = 6
1932 
1933  };*/
1934  // Finite state machine - Only runs in guided mode
1935  //if ((autopilot_get_mode() == AP_MODE_GUIDED)) // If AP_MODE_GUIDED block - Start
1936 
1937  // Switch-case statement for current_mode
1938  switch (current_mode) { // Control mode state machine - Start
1939  // State 1 ################################################################################################################################################################### State 1: DIRECT_CONTROL
1940  case DIRECT_CONTROL: {
1941  printf("DIRECT_CONTROL = %d\n", DIRECT_CONTROL);
1942 
1943  // If the mode was just started, then we initialize the FSM with MOVE_TO_GOAL (1)
1944  if (is_mode_changed_flag) {
1945  set_state(MOVE_TO_GOAL, 1);
1946  }
1947 
1948  // We do not care about the height of the drone when measuring distance to goal in the DIRECT_CONTROL mode
1949  // So we create this pseudo 2d vector where the z coordinates are the same as the goal. This vector is used
1950  // to check if robot is close to goal
1951  struct FloatVect3 VR2dwned = {.x = VRwned.x, .y = VRwned.y, .z = VGOALwned.z};
1952 
1953 
1954  // Background processes - Includes image processing for use
1955  // Turn on for debugging
1956  //background_processes(save_images_flag);
1958 
1959 
1960  switch (current_state) { // Finite state machine - Start
1961  // State 1 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 1: MOVE_TO_GOAL
1962  case MOVE_TO_GOAL: {
1963  printf("MOVE_TO_GOAL = %d\n", MOVE_TO_GOAL);
1964  //median_disparity_in_front = median_disparity_to_point(&c_img_cropped, &img_disparity_int8_cropped, &median_kernel);
1965  //In case disparity is 0 (infinite distance or error) we set it to one disparity
1966  // above the threshold, as the likelihood that the object is too close is large (as opposed to it being infinitely far away)
1967  //printf("median_disparity_in_front = %d\n", median_disparity_in_front);
1968  //printf("median_depth_in_front = %f\n", disp_to_depth(median_disparity_in_front, b, f));
1969  //float depth = disp_to_depth(median_disparity_in_front, b, f); // median_depth_in_front / 100.00
1970 
1971 
1973  float depth = median_depth_in_front / 100.00;
1974  printf("median_depth_in_front = %f\n", depth);
1975  printf("depth to goal = %f\n", VGOALc.z);
1976 
1977 
1978  if ((median_depth_in_front < threshold_median_depth) && (depth < VGOALc.z)) {
1979  printf("Obstacle is in front of goal\n");
1980  } else {
1981  printf("The path to the goal is free!\n");
1982  }
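 /*
  * Obstacle test used here and in the guided states below (sketch of the reasoning):
  *   median_depth_in_front < threshold_median_depth  -> something is closer than the threshold (cm)
  *   depth < VGOALc.z                                 -> ...and closer than the goal itself (m),
  * so only obstacles that actually lie between the robot and the goal are reported.
  */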
1983 
1984 
1988  &VGOALc,//target_point,
1994 
1995 
1996 // // c. Checking if edges are located
1997 // // Running function to detect and save edge
1998 // is_edge_found_micro_flag =
1999 // find_best_edge_coordinates(
2000 // &VEDGECOORDINATESc,
2001 // &VGOALc,//target_point,
2002 // &img_edges_int8_cropped,
2003 // &img_middle_int8_cropped,
2004 // &edge_search_area,
2005 // threshold_disparity_of_edges,
2006 // max_edge_found_micro_confidence);
2007 //
2008 
2009 
2010 
2011  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2012 
2013 
2015  printf("Goal is reached\n");
2017  } else {
2018  // If this is the first cycle of this mode, then start the total-time clock
2019  if (is_mode_changed_flag) {
2020  clock_total_time_current = clock();
2021  }
2022 
2023  // ############ Metric 2 - Distance traveled (total)
2028 
2029  // If the Goal is reached, set is_setpoint_reached_flag to 1 and record time
2033 
2034  }
2035  }
2036  } break;
2037 
2038 
2039  // State 2 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 2: POSITION_GOAL
2040  case POSITION_GOAL: {
2041  printf("POSITION_GOAL = %d\n", POSITION_GOAL);
2042  printf("Total time to reach goal = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2043  printf("Total distance_traveled = %f\n", distance_traveled);
2044 
2045  } break;
2046 
2047 
2048  // State 0 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 0
2049  default: {
2050  printf("default = %d\n", 0);
2051  }
2052  } // Finite state machine - End
2053 
2054 
2055  printf("Time elapsed since start = %f\n", (((double)clock()) - ((double)clock_total_time_current)) / CLOCKS_PER_SEC);
2056  printf("distance_traveled = %f\n", distance_traveled);
2057 
2058  } break; // DIRECT_CONTROL - End
2059 
2060 
2061  // State 2 ################################################################################################################################################################### State 2: AUTONOMOUS_GUIDED
2062  case AUTONOMOUS_GUIDED: {
2063  printf("AUTONOMOUS_GUIDED = %d\n", AUTONOMOUS_GUIDED);
2064 
2065  // Checking if the state was changed; if yes, all flags are reset and the is_state_changed_flag
2066  // is set to 1 for this cycle only. Else, the is_state_changed_flag is set to 0 again.
2067  if (current_state != previous_state) {
2068  // Setting flag signifying that the state was changed
2070  // Reset flags
2071  is_start_reached_flag = 0; // Set to 1 if start position is reached, 0 otherwise
2072  is_setpoint_reached_flag = 0; // Set to 1 if setpoint is reached, 0 otherwise
2073  is_obstacle_detected_flag = 0; // Set to 1 if obstacle is detected, 0 otherwise
2074  is_path_free_flag = 0; // Set to 1 if no obstacle is detected, 0 otherwise
2075  is_heading_reached_flag = 0; // Set to 1 if heading is reached, 0 otherwise
2076  is_edge_found_macro_flag = 0; // Set to 1 if best edge (according to macro confidence) was found, 0 otherwise
2077  is_edge_found_micro_flag = 0; // Set to 1 if best edge (according to micro confidence) was found, 0 otherwise
2078  is_no_edge_found_flag = 0; // Set to 1 if no edge was identified, 0 otherwise
2079  initial_heading.initiated = 0; // 0 = it can be overwritten
2080  initial_heading.is_left_reached_flag = 0;// The scan has not reached the left maximum angle yet
2081  initial_heading.is_right_reached_flag = 0;// The scan has not reached the right maximum angle yet
2082  distance_robot_edge_goal = 99999; //
2083  } else if (is_state_changed_flag != 0) {
2085  }
2086 
2087  // Background processes only run if the current state is not POSITION_GOAL
2088  if (current_state != POSITION_GOAL) {
2089  // ############ Metric 1 - Recording current time
2090  clock_total_time_current = clock();
2091  //printf("clock_total_time_current = %f\n", (double)clock_total_time_current);
2092  // In case we are in the position start state and the state has changed, initialize clock_total_time_previous
2094  printf("Metric 1 was started\n");
2095  //printf("clock_total_time_current set = %f\n", (double)clock_total_time_current);
2097  }
2098  //Else check time difference to previous cycle and add to clock_total_time
2099  else {
2102  //printf("clock_total_time_previous = %f\n", (double)clock_total_time_previous);
2103  }
2104 
2105  // ############ Metric 2 - Distance traveled (total)
2107 
2111 
2112  // ############ Metric 3 - Runtime average of background processes (see below) - Start:
2113  clock_t time; // Creating variable to hold time (number of cycles)
2114  time = clock(); // Saving current time, way below it is used to calculate time spent in a cycle
2115  counter_cycles++; // Counting how many times a state was activated (needed for average calculation)
2116 
2117  // Background processes - Includes image processing for use
2118  //background_processes(save_images_flag);
2120 
2121  // ############ Metric 3 - Runtime average of background processes (see below) - End:
2123 
2124  // ############ Metric 4 - Runtime average per state - Start:
2125  clock_FSM = clock(); // Saving current time, way below it is used to calculate time spent in a cycle
2126  counter_state[current_state]++; // Counting how many times a state was activated (needed for average calculation). This is done here as state may change in FSM
2127  }
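 /*
  * Metrics 3 and 4 accumulate raw clock() ticks; the averages printed in POSITION_GOAL are
  * then obtained as (sketch, matching the printf statements further below):
  *
  *   average_background [s] = clock_background_processes / CLOCKS_PER_SEC / counter_cycles;
  *   average_state_i    [s] = time_state[i] / CLOCKS_PER_SEC / counter_state[i];
  */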
2128 
2129  // Initializing previous_state variable for next cycle
2130  // This happens here and not above as the metric above depends on the previous state
2132 
2133  switch (current_state) { // Finite state machine - Start
2134  // State 1 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 1: MOVE_TO_GOAL
2135  case MOVE_TO_GOAL: {
2136  printf("MOVE_TO_GOAL = %d\n", MOVE_TO_GOAL);
2137  // 1. Moving robot towards the goal
2138  // Checking if goal is reached, if not continue to move
2140  printf("Goal is reached\n");
2142 
2143  }
2144  // Else, move to goal and check confidence that goal has been reached.
2145  else {
2146  // Sets setpoint to goal position and orientates the drone towards the goal as well
2148 
2149 
2150  // If start appears to be reached increase confidence
2154  }
2155 
2156  // If the position_confidence is high enough, set is_start_reached_flag to 1 and reset position_confidence
2159  position_confidence = 0;
2160  }
2161  }
2162 
2163 
2164  // 2. Check if an obstacle is in the way; when one is detected, change state (dependent on negative flag from 1)
2165  // If the setpoint has not been reached, keep looking for obstacles
2166  if (!is_setpoint_reached_flag) {
2167  // Checking if an obstacle is in the way; if not, continue checking for it
2169  printf("Object detected!!!!!!!!\n");
2170  // 1. Setting setpoint to current location
2173  }
2174  // Else, check confidence that obstacle is there
2175  else { // This happens continuously, as long as the state is active and the goal has not been reached. It stops when the flag has been set below.
2176  // Calculate median depth in front
2178  float depth = median_depth_in_front / 100.00;
2179  printf("median_depth_in_front = %f\n", depth);
2180  printf("depth to goal = %f\n", VGOALc.z);
2181 
2182 
2183  // If an obstacle appears to be detected AND it is in front of the goal point, increase obstacle_confidence
2185  && (depth < VGOALc.z)) { // NOTE. The first logical statement is in centimeters and the second in meters
2186  printf("Increasing confidence\n");
2189  printf("obstacle_confidence = %d\n", obstacle_confidence);
2190  printf("max_obstacle_confidence = %d\n", max_obstacle_confidence);
2191  } else {
2192  obstacle_confidence = 0;
2193  }
2194  // If the obstacle_confidence is high enough, set is_obstacle_detected_flag to 1 and reset obstacle_confidence
2197  obstacle_confidence = 0;
2198  // Setting holding point so that when state changes to wedgebug the drone stays at one spot, when looking for edges
2202  }
2203  }
2204  }
2205 
2206  printf("position_confidence = %d\n", position_confidence);
2207  printf("obstacle_confidence = %d\n", obstacle_confidence);
2208  } break;
2209 
2210 
2211  // State 2 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 2: POSITION_GOAL
2212  case POSITION_GOAL: {
2213  printf("POSITION_GOAL = %d\n", POSITION_GOAL);
2214  // Since the drone is at the goal we will switch back to the NAV mode
2215  //autopilot_mode_auto2 = AP_MODE_NAV;
2216  //autopilot_static_set_mode(AP_MODE_NAV);
2217 
2218  printf("Total time to reach goal = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2219  printf("Total distance_traveled = %f\n", distance_traveled);
2220  printf("Average runtime of background processes = %f\n",
2221  (clock_background_processes / CLOCKS_PER_SEC / counter_cycles));
2222 
2223  for (uint8_t i = 0; i < number_of_states; i++) {
2224  printf("Average runtime of state %d = %f\n", i, (time_state[i] / CLOCKS_PER_SEC / counter_state[i]));
2225  }
2226  } break;
2227 
2228 
2229  // State 3 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 3: WEDGEBUG_START
2230  case WEDGEBUG_START: {
2231  printf("WEDGEBUG_START = %d\n", WEDGEBUG_START);
2232  /* Details:
2233  * 1) The robot checks if the holding point was reached, at which it first detected that an obstacle was in its way
2234  * 2) If that holding point is reached, the robot then scans for any edges.
2235  *
2236  */
2237 
2238  // 1. Move robot towards holding point
2239  // Checking if holding point is reached, if not continue to move
2241  printf("Holding point is reached\n");
2242  }
2243  // Else, move to holding point and check confidence that holding point has been reached.
2244  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below.
2245 
2246  // Sets setpoint to the holding point and orientates the drone towards the goal
2248  heading_towards_waypoint(WP_GOAL));
2249 
2250  // If holding point appears to be reached increase confidence
2254  }
2255 
2256  // If the position_confidence is high enough, set is_setpoint_reached_flag to 1 and reset position_confidence
2259  position_confidence = 0;
2260  }
2261  }
2262 
2263 
2264  // 2. Scan for edges and see if any exist or not, then change state (dependent on flag from 1).
2266  // a. Stopping drone movement (a second time, just to make sure)
2268 
2269  // b. Checking if edges are located
2270  // Running function to detect and save edge
2274  &VGOALc,//target_point,
2280 
2281 
2282  //printf("was_edge_found?: %d\n", was_edge_found);
2283 
2284 
2285  // d. In this if-else statement we check if an edge was detected. If no edge is detected (by find_best_edge_coordinates) no_edge_found_confidence is
2286  // increased, whereas if an edge is found the edge_found_macro_confidence is increased. If one of the confidences reaches the
2287  // maximum the respective flag is turned on.
2288  // If an edge has already been found
2291  }
2292  // Else if no edge is detected
2293  else if (is_no_edge_found_flag) {
2294  printf("Edge not found!!!!!!!!!!!!!!!!!\n");
2296  }
2297  // Else, check if micro edge has been detected (by find_best_edge_coordinates) or not and increase respective confidences.
2298  // Also, check if macro confidence of having detected an edge is reached or whether maximum confidence of having no edges
2299  // detected is reached. If any are reached, respective flags are set
2300  else {
2301  // This if statement increases the confidence
2302  // If edge is detected in find_best_edge_coordinates, increase edge_found_macro_confidence
2306  }
2307  // If no edge is detected in find_best_edge_coordinates, increase no_edge_found_confidence
2308  else {
2311  }
2312 
2313  // If the edge_found_macro_confidence is high enough, set is_edge_found_macro_flag to 1 and reset edge_found_macro_confidence and no_edge_found_confidence
2318 
2319  // Since we are sure that a suitable edge has been found, the camera coordinates are transformed to
2320  // world coordinates and saved in a global variable, for use in the subsequent state to make the drone move
2321  // towards this point
2325  //printf("Optimal edge found\n");
2326  //printf("Camera coordinates of edge: [%f, %f, %f]\n", VEDGECOORDINATESc.x, VEDGECOORDINATESc.y, VEDGECOORDINATESc.z);
2327  // Saving best edge in global variable. // Note. VPBESTEDGECOORDINATESwned is used in the MOVE_TO_EDGE (7) and POSITION_EDGE (8) states
2331 
2332  // Making snapshot of image with edge coordinates highlighted. Comment out if not needed
2333  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2334 
2335  }
2336  // If the no_edge_found_confidence is high enough, set is_no_edge_found_flag to 1 and reset edge_found_macro_confidence and no_edge_found_confidence
2341  }
2342  }
2343  printf("Edge found confidence = %d\n", edge_found_macro_confidence);
2344  printf("No Edge found confidence = %d\n", no_edge_found_confidence);
2345  }
2346  } break;
2347 
2348 
2349  // State 4 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 4: MOVE_TO_EDGE
2350  case MOVE_TO_EDGE: {
2351  printf("MOVE_TO_EDGE = %d\n", MOVE_TO_EDGE);
2352 
2353  //printf("Camera coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESc.x, VEDGECOORDINATESc.y, VEDGECOORDINATESc.z);
2354  //printf("Robot coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESr.x, VEDGECOORDINATESr.y, VEDGECOORDINATESr.z);
2355  printf("World NED coordinates of edge setpoint = [%f, %f, %f]\n", VPBESTEDGECOORDINATESwned.x,
2357  //printf("World ENU coordinates of edge setpoint = [%f, %f, %f]\n", VEDGECOORDINATESwenu.x, VEDGECOORDINATESwenu.y, VEDGECOORDINATESwenu.z);
2358 
2359 
2360  // 1. Orientate robot towards object edge detected
2361  // If correct angle is reached, nothing happens
2363  printf("Heading is reached\n");
2364  }
2365  // Else, adjust heading and check confidence that heading is reached
2366  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below
2367  // Set desired heading
2369  &VPBESTEDGECOORDINATESwned));// heading_towards_waypoint(WP_GOAL));
2370 
2371  // If heading appears to be reached increase confidence
2376  }
2377 
2378  // If the heading_confidence is high enough, set is_heading_reached_flag to 1 and reset heading_confidence
2381  heading_confidence = 0;
2382  }
2383  }
2384 
2385 
2386  // 2.Move robot to edge detected (dependent on flag from 1)
2387  // If the robot faces the right direction, go to the object edge coordinates (NEW) detected
2389  // If edge setpoint is reached, stay at it
2391  printf("Edge is reached\n");
2393  heading_towards_waypoint(WP_GOAL));
2394  }
2395  // Else, move to edge setpoint and check confidence that edge setpoint is reached
2396  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below.
2397 
2398  // Sets setpoint to edge position and orientates robot towards it as well
2401 
2402  // If edge appears to be reached increase confidence
2404  threshold_distance_to_goal)) { // && !is_setpoint_reached_flag)
2407  }
2408 
2409  // If the position_confidence is high enough, set is_setpoint_reached_flag to 1 and reset position_confidence
2412  position_confidence = 0;
2413  }
2414  }
2415  }
2416 
2417 
2418 
2419  // 3. Set next state (dependent on 1 and 2)
2420  // If the robot faces the correct angle AND the edge is then reached, the state is changed to "MOVE_TO_GOAL"
2422  printf("Position and Heading are reached\n");
2424 
2425  }
2426  printf("position_confidence = %d\n", position_confidence);
2427  printf("heading_confidence = %d\n", heading_confidence);
2428  } break;
2429 
2430 
2431  // State 5 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 5: POSITION_EDGE
2432  case POSITION_EDGE: {
2433  printf("POSITION_EDGE = %d\n", POSITION_EDGE);
2434 
2435  // This ensures that the robot stays on the edge at all times
2438 
2439  // 1. Orientates robot towards the final goal
2440  // If robot faces goal, robot stays on current edge
2442  printf("Heading is reached\n");
2444  }
2445  // Else, adjust heading and check confidence that heading is reached
2446  else { // This happens continuously, as long as the state is active. It stops when the flag has been set.
2447  // Set desired heading
2449 
2450  // If heading appears to be reached increase confidence
2454  }
2455 
2456  // If the heading_confidence is high enough, set is_heading_reached_flag to 1 and reset heading_confidence
2459  heading_confidence = 0;
2460  }
2461  }
2462 
2463 
2464  // 2. Check if object is blocking the path or not and set next state (dependent on flag from 1)
2465  // If the robot faces the right direction, check if obstacle exists or not and change state
2467  // In this if-else statement we check if an obstacle is blocking the path or not. If no obstacle is blocking the path the free_path_confidence is
2468  // increased, whereas if an obstacle is blocking the path the obstacle_confidence is increased. If one of the confidences reaches the
2469  // maximum the respective flag is turned on. These statements only execute if the robot faces the goal (see above)
2470  // If obstacle is in way
2472  printf("Object detected!!!!!!!!\n");
2473  // Setting new holding point for WEDGEBUG_START state
2478  }
2479  // Else if the path is clear
2480  else if (is_path_free_flag) {
2481  printf("Path is free\n");
2483  }
2484  // Else, check for obstacles and free path and check confidence that an obstacle is there or that the path is free
2485  else {
2486  // Calculate median depth in front
2488  &median_kernel16bit); // Median depth in centimeters (cm)
2489  printf("Median_depth_in_front (m) = %f\n", (median_depth_in_front / 100.00)); // Median depth in meters (m)
2490  printf("Depth to goal = %f\n", VGOALc.z); // Depth in meters (m)
2491 
2492  // This if-else statement increases the confidence
2493  // If an obstacle appears to be detected AND it is in front of the goal point, increase obstacle_confidence
2495  && ((median_depth_in_front / 100.00) <
2496  VGOALc.z)) { // NOTE. The first logical statement is in centimeters and the second in meters
2500 
2501  }
2502  // If obstacle appears not detected, increase free_path_confidence
2503  else {
2506  obstacle_confidence = 0;
2507  }
2508  // If the obstacle_confidence is high enough, set is_obstacle_detected_flag to 1 and reset obstacle_confidence and free_path_confidence
2511  obstacle_confidence = 0;
2513  }
2514  // If the free_path_confidence is high enough, set is_path_free_flag to 1 and reset free_path_confidence and obstacle_confidence
2516  is_path_free_flag = 1;
2518  obstacle_confidence = 0;
2519  }
2520  }
2521 
2522  }
2523  printf("Obstacle confidence = %d\n", obstacle_confidence);
2524  printf("Free path confidence = %d\n", free_path_confidence);
2525  } break;
2526 
2527 
2528  // State 6 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 6: EDGE_SCAN
2529  case EDGE_SCAN: {
2530  printf("EDGE_SCAN = %d\n", EDGE_SCAN);
2531 
2532  // 1. The robot is tasked to stay at the holding point for the entire duration of this state
2533  // Making drone hover, so that it does not drift from its current position
2535 
2536  // 2. Checking if edges are located
2537  // Running function to detect and save edge
2541  &VGOALc,//target_point,
2547 
2548 
2549  // 3. The identified best edge is compared to the previously identified best edge, and replaced if the total distance (robot-edge-goal)
2550  // has decreased (dependent on flag from 2)
2551  // If edge was found
2553  // First we convert edge camera coordinates into world coordinates (ned and enu)
2554  //VEDGECOORDINATESc.z = VEDGECOORDINATESc.z - 0.25;// add this to make sure drone does not exactly stop on the edge point identified (it might crash otherwise)
2558 
2559  // Second, we increase confidence that an edge was spotted
2561 
2562  // Third, we calculate the total distance via the edge currently spotted; if this distance is smaller
2563  // than the previous distance, VPBESTEDGECOORDINATESwned is updated with the coordinates
2564  // Note. VPBESTEDGECOORDINATESwned is used in the MOVE_TO_EDGE (7) and POSITION_EDGE (8) states
2565  // Calculating Euclidean distance (N2) - Edge to goal
2566  float f_distance_edge_to_goal = float_vect3_norm_two_points(&VGOALwned, &VEDGECOORDINATESwned);
2567  // Calculating Euclidean distance (N2) - robot to edge
2568  float f_distance_robot_to_edge = float_vect3_norm_two_points(&VEDGECOORDINATESwned, &VRwned);
2569  float distance_total = f_distance_robot_to_edge + f_distance_edge_to_goal;
2570  printf("distance_total =%f\n", distance_total);
2571  if (distance_robot_edge_goal > distance_total) {
2572  distance_robot_edge_goal = distance_total;
2576  }
2577  }
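 /*
  * Selection criterion used above (sketch): a candidate edge E is kept only if the detour
  *
  *   d(R,E) + d(E,G) = float_vect3_norm_two_points(&VEDGECOORDINATESwned, &VRwned)
  *                   + float_vect3_norm_two_points(&VGOALwned, &VEDGECOORDINATESwned)
  *
  * drops below the best total seen so far (distance_robot_edge_goal), i.e.
  * VPBESTEDGECOORDINATESwned always holds the edge with the shortest robot-edge-goal path.
  */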
2578 
2579  printf("Current minimum distance =%f\n", distance_robot_edge_goal);
2580  printf("is_edge_found_micro_flag = %d\n", is_edge_found_micro_flag);
2581 
2582 
2583  // 4. Initializing current heading parameters (if they have not been initialized before)
2584  if (initial_heading.initiated == 0) {
2587  2); // this way, when the center of the drone is facing left, its FOV does not exceed the maximum angle to the left
2590  2); // this way, when the center of the drone is facing right, its FOV does not exceed the maximum angle to the right
2593  }
2594 
2595 
2596  // 5. The drone turns left to find any edges
2597  // Code for looking left
2598  // If left heading has been reached (i.e. if the flag is activated)
2600  printf("Left heading is/was reached\n");
2601  }
2602  // Else, adjust angle to face more left and check confidence that left heading has been reached
2603  else { // This happens continuously, as long as the state is active. It stops when the flag has been set below
2604  // Set heading to maximum left heading
2606 
2607  // If heading appears to be reached increase confidence
2611  }
2612  // If the heading_confidence is high enough, set initial_heading.is_left_reached_flag to 1 and reset heading_confidence
2615  heading_confidence = 0;
2616  }
2617  }
2618 
2619 
2620 
2621  // 6. The drone turns right to find any edges (dependent on flag from 5)
2622  // Code for looking right - Only runs if robot has previously looked left (initial_heading.heading_max_left = 1)
2624  // If right heading has been reached (i.e. if the flag is activated)
2626  printf("Right heading is/was reached\n");
2627  }
2628  // Else, adjust angle to face more right and check confidence that right heading has been reached
2629  else {
2630  // Set heading to maximum right heading
2632 
2633  // If heading appears to be reached increase confidence
2637  }
2638  // If the heading_confidence is high enough, set initial_heading.is_right_reached_flag to 1 and resets heading_confidence
2641  heading_confidence = 0;
2642  }
2643  }
2644  }
2645 
2646 
2647  // 7. Check if confidence of having found a suitable edge is large enough or not and set next state (dependent on flags from 5 and 6)
2650  printf("Edge has been found\n");
2652  } else if (is_no_edge_found_flag) {
2653  printf("Minimum has been encountered\n");
2654  } else {
2655 
2656  // If edge_found_macro_confidence is >= max_edge_found_macro_confidence then set is_edge_found_macro_flag
2657  // and reset edge_found_macro_confidence
2661  }
2662  // Else, set is_no_edge_found_flag and reset edge_found_macro_confidence
2663  else {
2666  }
2667 
2668  }
2669  }
2670 
2671  printf("heading_confidence = %d\n", heading_confidence);
2672  printf("edge_found_confidence = %d\n", edge_found_macro_confidence);
2673  } break;
2674 
2675  // State 0 --------------------------------------------------------------------------------------------------------------------------------------------------------------- State 0
2676  default: {
2677  printf("default = %d\n", 0);
2678  }
2679  break;
2680 
2681  } // Finite state machine - End
2682 
2683 
2684 
2685  // ############ Metric 4 - Runtime average per state - End:
2686  clock_FSM = clock() - clock_FSM; // Calculating time it took for the FSM to finish running
2687  time_state[current_state] += ((double)clock_FSM); // Adding calculated time to total time of current state
2688 
2689  printf("Time elapsed since start = %f\n", ((double)clock_total_time) / CLOCKS_PER_SEC);
2690  printf("distance_traveled = %f\n", distance_traveled);
2691  } break; // AUTONOMOUS_GUIDED - End
2692 
2693 
2694  // State 3 ################################################################################################################################################################### State 3: AUTONOMOUS_NAV
2695  case AUTONOMOUS_NAV: {
2696  printf("AUTONOMOUS_NAV = %d\n", AUTONOMOUS_NAV);
2697  // Background processes - Includes image processing for use
2699 
2700 
2702  float depth = median_depth_in_front / 100.00;
2703  printf("median_depth_in_front = %f\n", depth);
2704  printf("depth to goal = %f\n", VGOALc.z);
2705 
2706 
2707  if ((median_depth_in_front < threshold_median_depth) && (depth < VGOALc.z)) {
2708  printf("Obstacle is in front of goal\n");
2709  } else {
2710  printf("The path to the goal is free!\n");
2711  }
2712 
2713 
2717  &VGOALc,//target_point,
2723 
2724 
2725 // // c. Checking if edges are located
2726 // // Running function to detect and save edge
2727 // is_edge_found_micro_flag =
2728 // find_best_edge_coordinates(
2729 // &VEDGECOORDINATESc,
2730 // &VGOALc,//target_point,
2731 // &img_edges_int8_cropped,
2732 // &img_middle_int8_cropped,
2733 // &edge_search_area,
2734 // threshold_disparity_of_edges,
2735 // max_edge_found_micro_confidence);
2736 //
2737 
2738 
2739 
2740  if (save_images_flag) {save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped_marked.bmp");}
2741 
2742 
2743 
2744  } break; // AUTONOMOUS_NAV - End
2745 
2746 
2747 
2748  // State 0 ################################################################################################################################################################### State 0
2749  default:
2750  {printf("Unsupported control mode");} break;
2751 
2752  } // Control mode state machine - End
2753 
2754 
2755 
2756 
2757  /*
2758  // printing flags
2759  printf("is_start_reached_flag = %d\n", is_start_reached_flag);
2760  printf("is_setpoint_reached_flag = %d\n", is_setpoint_reached_flag);
2761  printf("is_obstacle_detected_flag = %d\n", is_obstacle_detected_flag);
2762  printf("is_path_free_flag = %d\n", is_path_free_flag);
2763  printf("is_heading_reached_flag = %d\n", is_heading_reached_flag);
2764  printf("is_state_changed_flag = %d\n", is_state_changed_flag);
2765  printf("initial_heading.initiated = %d\n", initial_heading.initiated);
2766  printf("initial_heading.is_left_reached_flag = %d\n", initial_heading.is_left_reached_flag);
2767  printf("initial_heading.is_right_reached_flag = %d\n", initial_heading.is_right_reached_flag);
2768 
2769  */
2770  printf("\n\n");
2771 
2772 
2773 
2774  save_image_gray(&img_left_int8, "/home/dureade/Documents/paparazzi_images/img_left_int8.bmp");
2775  save_image_gray(&img_right_int8, "/home/dureade/Documents/paparazzi_images/img_right_int8.bmp");
2776  save_image_HM(&img_disparity_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_disparity_int8_cropped.bmp",
2777  heat_map_type);
2778  //save_image_gray(&img_left_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_left_int8_cropped.bmp");
2779  save_image_HM(&img_middle_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_intermediate_int8_cropped.bmp",
2780  heat_map_type);
2781  save_image_gray(&img_edges_int8_cropped, "/home/dureade/Documents/paparazzi_images/img_edges_int8_cropped.bmp");
2782 
2783 
2784 
2785  /*
2786  // Size of variables
2787  printf("img_left = %d\n", img_left.buf_size);
2788  printf("img_right = %d\n", img_right.buf_size);
2789  printf("img_left_int8 = %d\n", img_left_int8.buf_size);
2790  printf("img_left_int8_cropped = %d\n", img_left_int8_cropped.buf_size);
2791  printf("img_right_int8 = %d\n", img_right_int8.buf_size);
2792  printf("img_disparity_int8_cropped = %d\n", img_disparity_int8_cropped.buf_size);
2793  printf("img_edges_int8_cropped = %d\n", img_edges_int8_cropped.buf_size);
2794  printf("median_kernel = %d\n", median_kernel.buf_size);
2795  printf("SE_opening_OCV = %lu\n", sizeof(SE_opening_OCV));
2796  printf("VSTARTwenu.x = %lu\n", sizeof(VSTARTwenu.x));
2797  printf("current_state = %lu\n", sizeof(current_state));
2798  printf("current_mode = %lu\n", sizeof(current_mode));
2799  */
2800 
2801 
2802 
2803 
2804 
2805 
2806 
2807 
2808  /*
2809  * Heat maps:
2810  * 0 = COLORMAP_AUTUMN
2811  * 1 = COLORMAP_BONE
2812  * 2 = COLORMAP_JET
2813  * 3 = COLORMAP_WINTER
2814  * 4 = COLORMAP_RAINBOW
2815  * 5 = COLORMAP_OCEAN
2816  * 6 = COLORMAP_SUMMER
2817  * 7 = COLORMAP_SPRING
2818  * 8 = COLORMAP_COOL
2819  * 9 = COLORMAP_HSV
2820  * 10 = COLORMAP_PINK
2821  * 11 = COLORMAP_HOT
2822  */
2823 
2824 
2825 }
2826 
2827 
clock_t clock_total_time
Definition: wedgebug.c:246
bool guidance_v_set_guided_z(float z)
Set z setpoint in GUIDED mode.
Definition: guidance_v.c:551
#define FLOAT_ANGLE_NORMALIZE(_a)
uint16_t getMedian16bit(uint16_t *a, uint32_t n)
Definition: wedgebug.c:508
unsigned short uint16_t
Definition: types.h:16
uint32_t buf_size
Size of values of weight buffer and values buffer.
Definition: wedgebug.h:46
struct FloatVect3 VGOALwenu
Declared vector of coordinates of start position in NED world coordinate system.
Definition: wedgebug.c:121
int closing_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
float float_vect3_norm_two_points(struct FloatVect3 *V1, struct FloatVect3 *V2)
Definition: wedgebug.c:978
struct image_t img_left_int8_cropped
Image obtained from left camera, converted into 8bit gray image.
Definition: wedgebug.c:77
Definition: image.h:92
uint32_t buf_size
The buffer size.
Definition: image.h:53
#define WaypointAlt(_wp)
waypoint altitude in m above MSL
Definition: common_nav.h:48
float y
in meters
float disp_to_depth(const uint8_t d, const float b, const uint16_t f)
Definition: wedgebug.c:699
image_type
Definition: image.h:35
struct image_t img_depth_int16_cropped
Image obtained after simple block matching.
Definition: wedgebug.c:80
struct img_size_t img_cropped_dims
Dimensions of images captured by the camera (left and right have same dimension)
Definition: wedgebug.c:96
int SE_closing_OCV
SE size for the opening operation.
Definition: wedgebug.c:107
struct FloatVect3 VEDGECOORDINATESr
Declared vector of coordinates of "best" edge detected in camera coordinate system.
Definition: wedgebug.c:127
int save_image_gray(struct image_t *img, char *myString)
void UYVYs_interlacing_V(struct image_t *YY, struct image_t *left, struct image_t *right)
Definition: wedgebug.c:563
uint16_t h
The height.
Definition: image.h:88
float heading
Definition: wedgebug.c:258
uint16_t h
height of the cropped area
Definition: image.h:96
static struct FloatEulers * stateGetNedToBodyEulers_f(void)
Get vehicle body attitude euler angles (float).
Definition: state.h:1143
int16_t free_path_confidence
This is the confidence that an obstacle was spotted.
Definition: wedgebug.c:154
uint8_t is_left_reached_flag
Definition: wedgebug.c:236
int heat_map_type
Variable to hold distance from robot to edge to goal (used in EDGE_SCAN (9) state) ...
Definition: wedgebug.c:266
void set_state(uint8_t state, uint8_t change_allowed)
Definition: wedgebug.c:428
struct img_size_t kernel_median_dims
Dimension of image after it has been cropped to remove pixel error due to block matching limitations...
Definition: wedgebug.c:98
int block_size_disparities
Number of disparity levels (0-this number)
Definition: wedgebug.c:256
void UYVYs_interlacing_H(struct image_t *merged, struct image_t *left, struct image_t *right)
Definition: wedgebug.c:602
double time_state[NUMBER_OF_STATES]
Definition: wedgebug.c:243
struct FloatVect3 VRwned
Definition: wedgebug.c:196
uint8_t threshold_median_disparity
Edges with a magnitude above this value are detected. Above this value, edges are given the value 127...
Definition: wedgebug.c:143
enum image_type type
Definition: wedgebug.h:43
#define VECT3_DIFF(_c, _a, _b)
Definition: pprz_algebra.h:182
#define WEDGEBUG_CAMERA_LEFT_FPS
Definition: wedgebug.c:65
uint8_t is_total_timer_on_flag
Definition: wedgebug.c:279
float heading_initial
Definition: wedgebug.c:232
uint16_t K_median_h
Width of kernel for the median kernel.
Definition: wedgebug.c:113
enum navigation_state current_state
Definition: wedgebug.c:217
void image_create(struct image_t *img, uint16_t width, uint16_t height, enum image_type type)
Create a new image.
Definition: image.c:43
Definition: image.h:44
uint16_t h
Kernel height.
Definition: wedgebug.h:45
bool guidance_h_set_guided_pos(float x, float y)
Set horizontal position setpoint in GUIDED mode.
Definition: guidance_h.c:709
uint16_t w
Definition: wedgebug.h:44
float psi
in radians
struct FloatRMat Rwnedwenu
This structure holds information about the window in which edges are searched in. ...
Definition: wedgebug.c:192
double counter_state[NUMBER_OF_STATES]
Definition: wedgebug.c:244
#define VECT2_DIFF(_c, _a, _b)
Definition: pprz_algebra.h:92
Autopilot guided mode interface.
void image_copy(struct image_t *input, struct image_t *output)
Copy an image from inut to output This will only work if the formats are the same.
Definition: image.c:85
int16_t max_edge_found_micro_confidence
This is the max confidence that a specific heading was reached.
Definition: wedgebug.c:165
int16_t max_heading_confidence
This is the max confidence that a specific heading was reached.
Definition: wedgebug.c:164
void Vi_to_Vc(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint8_t d, const float b, const uint16_t f)
Definition: wedgebug.c:730
int16_t max_no_edge_found_confidence
This is the max confidence that no edges were found.
Definition: wedgebug.c:167
struct image_t img_right
Image obtained from right camera (UYVY format)
Definition: wedgebug.c:75
uint16_t y
Start position y (vertical)
Definition: image.h:94
int N_disparities
Number of disparity levels (0 to this number)
Definition: wedgebug.c:255
uint8_t find_best_edge_coordinates(struct FloatVect3 *VEDGECOORDINATESc, struct FloatVect3 *VTARGETc, struct image_t *img_edges, struct image_t *img_disparity, struct crop_t *edge_search_area, uint8_t threshold, int16_t max_confidence)
Definition: wedgebug.c:1127
uint16_t x
Start position x (horizontal)
Definition: image.h:93
int16_t obstacle_confidence
This is the confidence that an obstacle was spotted.
Definition: wedgebug.c:153
uint32_t x
The x coordinate of the point.
Definition: image.h:59
float threshold_distance_to_angle
Below this difference (in radians) it is considered that the robot has reached the target angle.
Definition: wedgebug.c:146
uint8_t save_images_flag
Set to 1 if images should be saved, 0 otherwise.
Definition: wedgebug.c:180
int16_t max_free_path_confidence
This is the max confidence that an obstacle was not spotted.
Definition: wedgebug.c:162
int16_t max_obstacle_confidence
This is the max confidence that an obstacle was spotted.
Definition: wedgebug.c:161
static struct EnuCoor_f * stateGetPositionEnu_f(void)
Get position in local ENU coordinates (float).
Definition: state.h:719
void background_processes(uint8_t save_images_flag)
Definition: wedgebug.c:1435
int SE_opening_OCV
SE size for the opening operation.
Definition: wedgebug.c:106
uint8_t is_edge_found_macro_flag
Set to 1 if the best edge (according to macro confidence) was found, 0 otherwise.
Definition: wedgebug.c:175
const char * get_img_type(enum image_type img_type)
Definition: wedgebug.c:365
struct FloatVect3 VGOALr
Declared vector of coordinates of goal in robot coordinate system.
Definition: wedgebug.c:123
uint8_t is_state_changed_flag
Set to 1 if the state was changed, 0 otherwise.
Definition: wedgebug.c:178
float heading_max_right
Definition: wedgebug.c:234
uint8_t is_right_reached_flag
Definition: wedgebug.c:237
void guidance_h_hover_enter(void)
Definition: guidance_h.c:542
clock_t clock_total_time_previous
Definition: wedgebug.c:248
float threshold_distance_to_goal_manual
Definition: wedgebug.c:280
struct video_listener * cv_add_to_device(struct video_config_t *device, cv_function func, uint16_t fps)
Definition: cv.c:46
uint8_t previous_mode
Variable that saves the previous mode used to control the drone, for some memory.
Definition: wedgebug.c:254
uint8_t is_path_free_flag
Set to 1 if no obstacle is detected (path is free), 0 otherwise.
Definition: wedgebug.c:173
struct image_t img_left_int8
Image obtained from the left camera, converted into an 8-bit gray image.
Definition: wedgebug.c:76
struct crop_t edge_search_area
This structure holds information about the window in which edges are searched.
Definition: wedgebug.c:187
float float_norm_two_angles(float target_angle, float current_angle)
Definition: wedgebug.c:1364
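A sketch of the kind of angle-difference norm such a helper computes: wrap the difference of two headings into (-pi, pi] and return its magnitude. This illustrates the idea only; the exact wedgebug.c implementation may differ:
#include <math.h>
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

static float example_norm_two_angles(float target_angle, float current_angle)
{
  float diff = fmodf(target_angle - current_angle, 2.0f * (float)M_PI);
  if (diff < -(float)M_PI) { diff += 2.0f * (float)M_PI; }
  if (diff >  (float)M_PI) { diff -= 2.0f * (float)M_PI; }
  return fabsf(diff);   /* 0 when the headings coincide, pi when opposite */
}

/* A heading-reached test (cf. is_heading_reached) then reduces to:
 * example_norm_two_angles(target, current) < threshold. */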
uint8_t is_heading_reached_flag
Set to 1 if the heading is reached, 0 otherwise.
Definition: wedgebug.c:174
float z
in meters
void kernel_free(struct kernel_C1 *kernel)
Definition: wedgebug.c:463
uint8_t is_edge_found_micro_flag
Set to 1 if the best edge (according to micro confidence) was found, 0 otherwise.
Definition: wedgebug.c:176
static struct FloatRMat * stateGetNedToBodyRMat_f(void)
Get vehicle body attitude rotation matrix (float).
Definition: state.h:1137
uint32_t maximum_intensity(struct image_t *img)
Definition: wedgebug.c:641
uint16_t w
Width of the cropped area.
Definition: image.h:95
navigation_state
States of the navigation state machine (e.g. MOVE_TO_GOAL, POSITION_GOAL, WEDGEBUG_START, MOVE_TO_EDGE, POSITION_EDGE, EDGE_SCAN).
Definition: wedgebug.c:209
static float float_vect3_norm(struct FloatVect3 *v)
uint8_t median_disparity_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
Definition: wedgebug.c:1011
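The point of taking a median disparity around a pixel is robustness against single bad stereo matches. A self-contained sketch; the window handling, buffer layout and function names are illustrative assumptions:
#include <stdint.h>
#include <stdlib.h>

static int example_cmp_u8(const void *a, const void *b)
{
  return (int)(*(const uint8_t *)a) - (int)(*(const uint8_t *)b);
}

/* Median of the kw x kh window centred on (px, py) in a row-major 8-bit image.
 * Assumes the window lies fully inside the image. */
static uint8_t example_median_disparity(const uint8_t *disp, uint16_t img_w,
                                        uint16_t px, uint16_t py,
                                        uint16_t kw, uint16_t kh)
{
  uint8_t window[kw * kh];
  uint32_t n = 0;
  for (uint16_t dy = 0; dy < kh; dy++) {
    for (uint16_t dx = 0; dx < kw; dx++) {
      uint16_t y = py - kh / 2 + dy;
      uint16_t x = px - kw / 2 + dx;
      window[n++] = disp[y * img_w + x];   /* row-major indexing, cf. indx1d_c() */
    }
  }
  qsort(window, n, sizeof(uint8_t), example_cmp_u8);
  return window[n / 2];                     /* middle element = median, cf. getMedian() */
}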
Paparazzi floating point math for geodetic calculations.
int SE_dilation_OCV_2
SE size for the second dilation operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE").
Definition: wedgebug.c:109
void Vi_to_Vc16bit(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const uint16_t d, const float b, const uint16_t f)
Definition: wedgebug.c:782
uint8_t is_start_reached_flag
Set to 1 if the start position is reached, 0 otherwise.
Definition: wedgebug.c:170
int dilation_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
int16_t edge_found_macro_confidence
This is the confidence that an edge was found (outside of the find_best_edge_coordinates function).
Definition: wedgebug.c:159
uint8_t allow_state_change_WEDGEBUG_START
Definition: wedgebug.c:273
struct img_size_t img_dims
Dimensions of images captured by the camera (left and right have the same dimensions)
Definition: wedgebug.c:95
void show_image_entry(struct image_t *img, int entry_position, const char *img_name)
Definition: wedgebug.c:389
Image helper functions like resizing, color filter, converters...
int16_t no_edge_found_confidence
This is the confidence that no edge was found.
Definition: wedgebug.c:160
void float_rmat_vmult(struct FloatVect3 *vb, struct FloatRMat *m_a2b, struct FloatVect3 *va)
rotate 3D vector by rotation matrix.
struct point_t c_img_cropped
Principal point of the cropped camera images.
Definition: wedgebug.c:184
void float_rmat_transp_vmult(struct FloatVect3 *vb, struct FloatRMat *m_b2a, struct FloatVect3 *va)
rotate 3D vector by transposed rotation matrix.
struct FloatVect3 VHOLDINGPOINTwned
Declared a vector to hold the position of a holding point (often used to make sure the drone stays still).
Definition: wedgebug.c:135
uint8_t is_heading_reached(float target_angle, float current_angle, float threshold)
Definition: wedgebug.c:1375
enum control_mode_state current_mode
Definition: wedgebug.c:226
uint8_t getMedian(uint8_t *a, uint32_t n)
Definition: wedgebug.c:477
Paparazzi floating point algebra.
#define WaypointX(_wp)
Definition: common_nav.h:45
An image to hold disparity image data from openCV (int16 per pixel)
Definition: image.h:40
Paparazzi generic algebra macros.
uint8_t allow_state_change_POSITION_GOAL
Definition: wedgebug.c:272
uint16_t threshold_median_depth
Below this median depth, an obstacle is considered to block the way.
Definition: wedgebug.c:149
void thresholding_img(struct image_t *img, uint8_t threshold)
Definition: wedgebug.c:675
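A sketch of in-place thresholding of an 8-bit buffer. The output values (127 for pixels above the threshold, 0 otherwise) follow the convention mentioned for the edge images, but should be treated as an assumption:
#include <stdint.h>

static void example_threshold_u8(uint8_t *buf, uint32_t size, uint8_t threshold)
{
  for (uint32_t i = 0; i < size; i++) {
    buf[i] = (buf[i] > threshold) ? 127 : 0;   /* binarize the image in place */
  }
}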
void disp_to_depth_img(struct image_t *img8bit_input, struct image_t *img16bit_output)
Definition: wedgebug.c:1393
struct crop_t img_cropped_info
Definition: wedgebug.c:92
uint16_t w
Image width.
Definition: image.h:46
struct point_t c_img
Principal point of normal camera images.
Definition: wedgebug.c:183
float threshold_distance_to_goal
Below this distance (in meters) it is considered that the robot has reached the goal.
Definition: wedgebug.c:145
uint8_t is_no_edge_found_flag
Set to 1 if no edge was identified, 0 otherwise.
Definition: wedgebug.c:177
unsigned long uint32_t
Definition: types.h:18
#define AP_MODE_GUIDED
void principal_points(struct point_t *c_output, const struct point_t *c_old_input, struct crop_t *img_cropped_info)
Definition: wedgebug.c:691
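Why the principal point has to be recomputed after cropping: the optical centre keeps its pixel position in the full image, so in the cropped image its coordinates are simply shifted by the crop offset. A sketch mirroring the parameters above (the example_ name is hypothetical):
#include "modules/computer_vision/lib/vision/image.h"

static void example_principal_points(struct point_t *c_cropped,
                                     const struct point_t *c_full,
                                     const struct crop_t *crop)
{
  c_cropped->x = c_full->x - crop->x;   /* shift by the horizontal crop offset */
  c_cropped->y = c_full->y - crop->y;   /* shift by the vertical crop offset */
}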
Computer vision framework for onboard processing.
uint16_t h
Image height.
Definition: image.h:47
signed short int16_t
Definition: types.h:17
double counter_cycles
Definition: wedgebug.c:245
struct image_t img_right_int8
Definition: wedgebug.c:78
void * buf_weights
Kernel weight buffer.
Definition: wedgebug.h:47
control_mode_state
Definition: wedgebug.c:220
void * buf
Image buffer (depending on the image_type)
Definition: image.h:54
uint8_t find_best_edge_coordinates2(struct FloatVect3 *VEDGECOORDINATESc, struct FloatVect3 *VTARGETc, struct image_t *img_edges, struct image_t *img_depth, struct crop_t *edge_search_area, uint16_t threshold, int16_t max_confidence)
Definition: wedgebug.c:1241
float heading_towards_waypoint(uint8_t wp)
Definition: wedgebug.c:986
uint8_t threshold_disparity_of_edges
Above this disparity, edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away).
Definition: wedgebug.c:144
clock_t clock_FSM
Definition: wedgebug.c:250
void Vb_to_Va(struct FloatVect3 *Va, struct FloatVect3 *Vb, struct FloatRMat *Rba, struct FloatVect3 *VOa)
Definition: wedgebug.c:880
int32_t indx1d_c(const int32_t y, const int32_t x, const uint16_t img_height, const uint16_t img_width)
Definition: wedgebug.c:844
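The indx1d_* helpers map a 2D pixel coordinate to an offset into the flat image buffer. A sketch of the row-major mapping, with an illustrative bounds check:
#include <stdint.h>

static int32_t example_indx1d(int32_t y, int32_t x, uint16_t img_h, uint16_t img_w)
{
  if (x < 0 || x >= img_w || y < 0 || y >= img_h) {
    return -1;                    /* coordinate lies outside of the image */
  }
  return y * (int32_t)img_w + x;  /* offset of pixel (x, y) in a row-major buffer */
}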
uint8_t is_obstacle_detected_flag
Set to 1 if an obstacle is detected, 0 otherwise.
Definition: wedgebug.c:172
uint16_t f
Camera focal length, in pixels.
Definition: wedgebug.c:204
void wedgebug_periodic()
Definition: wedgebug.c:1845
float b
Camera baseline, in meters (i.e. horizontal distance between the two cameras of the stereo setup)
Definition: wedgebug.c:202
int threshold_edge_magnitude
Edges with a magnitude above this value are detected; such edges are given the value 127.
Definition: wedgebug.c:142
struct image_t img_disparity_int8_cropped
Image obtained after simple block matching (the disparity image).
Definition: wedgebug.c:79
void show_image_data(struct image_t *img)
Definition: wedgebug.c:378
#define WEDGEBUG_CAMERA_RIGHT_FPS
Definition: wedgebug.c:62
float disp_to_depth_16bit(const int16_t d, const float b, const uint16_t f)
Definition: wedgebug.c:713
clock_t clock_total_time_current
Definition: wedgebug.c:247
uint32_t y
The y coordinate of the point.
Definition: image.h:60
uint8_t number_of_states
Definition: wedgebug.c:264
void image_to_grayscale(struct image_t *input, struct image_t *output)
Convert an image to grayscale.
Definition: image.c:127
#define WaypointY(_wp)
Definition: common_nav.h:46
Definition: image.h:58
signed long int32_t
Definition: types.h:19
void * buf_values
Kernel value buffer. These are the values underneath the kernel.
Definition: wedgebug.h:48
struct ES_angles initial_heading
Definition: wedgebug.c:239
float m[3 * 3]
int16_t max_edge_found_macro_confidence
This is the max confidence that edges (macro, see above) were found.
Definition: wedgebug.c:166
clock_t clock_background_processes
Definition: wedgebug.c:249
int SBM_OCV(struct image_t *img_disp, const struct image_t *img_left, const struct image_t *img_right, const int ndisparities, const int SADWindowSize, const bool cropped)
Core autopilot interface common to all firmwares.
void Vi_to_Vc_depth(struct FloatVect3 *scene_point, int32_t image_point_y, int32_t image_point_x, const float depth, const uint16_t f)
Definition: wedgebug.c:763
uint8_t allow_state_change_EDGE_SCAN
Definition: wedgebug.c:276
struct FloatVect3 VGOALc
Declared vector of coordinates of goal in camera coordinate system.
Definition: wedgebug.c:124
static struct image_t * copy_left_img_func(struct image_t *img)
Definition: wedgebug.c:543
#define AP_MODE_NAV
unsigned char uint8_t
Definition: types.h:14
struct FloatVect3 VCr
Definition: wedgebug.c:199
uint8_t allow_state_change_MOVE_TO_GOAL
Definition: wedgebug.c:271
struct FloatRMat Rcr
Definition: wedgebug.c:198
uint8_t depth_to_disp(const float depth, const float b, const uint16_t f)
Definition: wedgebug.c:720
API to get/set the generic vehicle states.
float heading_max_left
Definition: wedgebug.c:233
int32_t indx1d_a(const int32_t y, const int32_t x, const struct image_t *img)
Definition: wedgebug.c:815
float max_edge_search_angle
The maximum angle (in radians) to the left and right of the drone within which edges can be detected.
Definition: wedgebug.c:259
struct image_t img_left
Image obtained from left camera (UYVY format)
Definition: wedgebug.c:74
uint8_t is_setpoint_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold)
Definition: wedgebug.c:1356
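A sketch of a setpoint-reached test: the Euclidean distance between the goal and the current position must drop below a threshold in meters. Illustrative only; the example_ name is hypothetical:
#include <math.h>
#include <stdint.h>
#include "math/pprz_algebra_float.h"

static uint8_t example_is_setpoint_reached(struct FloatVect3 *goal,
                                           struct FloatVect3 *pos,
                                           float threshold)
{
  float dx = goal->x - pos->x;
  float dy = goal->y - pos->y;
  float dz = goal->z - pos->z;
  return sqrtf(dx * dx + dy * dy + dz * dz) < threshold;  /* 1 = reached */
}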
int SE_sobel_OCV
SE size for the Sobel operation, to detect edges.
Definition: wedgebug.c:111
int32_t indx1d_b(const int32_t y, const int32_t x, const struct img_size_t *img_dims)
Definition: wedgebug.c:830
void kernel_create(struct kernel_C1 *kernel, uint16_t width, uint16_t height, enum image_type type)
Definition: wedgebug.c:435
struct FloatVect3 VSTARTwenu
Declared vector of coordinates of start position in ENU world coordinate system.
Definition: wedgebug.c:117
uint8_t median_disparity_in_front
Variable to hold the median disparity in front of the drone. Needed to see if an obstacle is there.
Definition: wedgebug.c:261
int min_disparity
Minimum disparity level used for the block matching (SBM) function.
Definition: wedgebug.c:257
struct FloatRMat Rrwned
Definition: wedgebug.c:195
int16_t position_confidence
This is the confidence that the desired position was reached.
Definition: wedgebug.c:155
struct FloatVect3 VEDGECOORDINATESc
Declared vector of coordinates of "best" edge detected in camera coordinate system.
Definition: wedgebug.c:125
struct FloatVect3 VEDGECOORDINATESwned
Declared vector of coordinates of "best" edge detected in robot coordinate system.
Definition: wedgebug.c:129
uint8_t previous_state
Variable that saves the previous state the state machine was in, for some memory.
Definition: wedgebug.c:253
uint8_t is_setpoint_reached_flag
Set to 1 if setpoint is reached, 0 otherwise.
Definition: wedgebug.c:171
static void float_vect_copy(float *a, const float *b, const int n)
a = b
uint8_t allow_state_change_POSITION_EDGE
Definition: wedgebug.c:275
struct image_t img_disparity_int16_cropped
Image holding 16-bit disparity values obtained from block matching (cropped).
Definition: wedgebug.c:84
float heading_towards_setpoint_WNED(struct FloatVect3 *VSETPOINTwned)
Definition: wedgebug.c:999
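A sketch of computing the heading (yaw) towards a setpoint expressed in the NED world frame, using the state getters listed on this page; the sign and wrapping conventions are assumptions:
#include <math.h>
#include "state.h"
#include "math/pprz_algebra_float.h"

static float example_heading_towards_ned(struct FloatVect3 *setpoint_ned)
{
  struct NedCoor_f *pos = stateGetPositionNed_f();   /* current NED position (m) */
  float d_north = setpoint_ned->x - pos->x;
  float d_east  = setpoint_ned->y - pos->y;
  return atan2f(d_east, d_north);                    /* yaw in radians, in (-pi, pi] */
}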
uint16_t median_depth_to_point(struct point_t *Vi, struct image_t *img, struct kernel_C1 *kernel_median)
Definition: wedgebug.c:1067
UYVY format (uint16 per pixel)
Definition: image.h:36
rotation matrix
static struct image_t * copy_right_img_func(struct image_t *img)
Definition: wedgebug.c:553
struct FloatVect3 VNEDwenu
Definition: wedgebug.c:193
void show_rotation_matrix(struct FloatRMat *R)
Definition: wedgebug.c:395
float dispfixed_to_disp(const int16_t d)
Definition: wedgebug.c:706
int save_image_HM(struct image_t *img, char *myString, int const heatmap)
float threshold_distance_to_goal_direct
Below this distance (in meters) it is considered that the robot has reached the goal, in DIRECT_CONTROL mode.
Definition: wedgebug.c:147
struct FloatVect3 VEDGECOORDINATESwenu
Declared vector of coordinates of "best" edge detected in NED world coordinate system.
Definition: wedgebug.c:131
float distance_robot_edge_goal
Variable to hold the distance from robot to edge to goal (used in the EDGE_SCAN (9) state)
Definition: wedgebug.c:265
Grayscale image with only the Y part (uint8 per pixel)
Definition: image.h:37
struct kernel_C1 median_kernel16bit
Definition: wedgebug.c:103
bool guidance_h_set_guided_heading(float heading)
Set heading setpoint in GUIDED mode.
Definition: guidance_h.c:720
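A usage sketch combining the guided-mode setters listed on this page. The coordinates and heading are example values only, and guarding the calls with a mode check mirrors the usual pattern for GUIDED-only commands:
#include "autopilot.h"
#include "firmwares/rotorcraft/autopilot_guided.h"

static void example_goto_goal(void)
{
  if (autopilot_get_mode() == AP_MODE_GUIDED) {
    /* x (north), y (east), z (down) in meters, heading in radians */
    autopilot_guided_goto_ned(2.0f, 0.0f, -1.5f, 0.0f);

    /* or adjust only the heading while the position setpoint is kept */
    guidance_h_set_guided_heading(0.5f);
  }
}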
#define AP_MODE_ATTITUDE_DIRECT
void post_disparity_crop_rect(struct crop_t *img_cropped_info, struct img_size_t *original_img_dims, const int disp_n, const int block_size)
Definition: wedgebug.c:409
uint8_t are_setpoint_and_angle_reached(struct FloatVect3 *VGOAL, struct FloatVect3 *VCURRENTPOSITION, float threshold_setpoint, float target_angle, float current_angle, float threshold_angle)
Definition: wedgebug.c:1382
int16_t max_position_confidence
This is the max confidence that a specific position was reached.
Definition: wedgebug.c:163
uint16_t median_depth_in_front
Variable to hold the median depth in front of the drone. Needed to see if an obstacle is there.
Definition: wedgebug.c:262
void Va_to_Vb(struct FloatVect3 *Vb, struct FloatVect3 *Va, struct FloatRMat *Rba, struct FloatVect3 *VOa)
Definition: wedgebug.c:859
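A generic change-of-frame sketch in the spirit of Va_to_Vb()/Vb_to_Va(): express the point relative to the frame origin, then rotate. Which rotation direction and which origin the real functions expect is an assumption here; see their definitions for the authoritative convention:
#include "math/pprz_algebra.h"
#include "math/pprz_algebra_float.h"

static void example_a_to_b(struct FloatVect3 *Vb, struct FloatVect3 *Va,
                           struct FloatRMat *Rba, struct FloatVect3 *VOa)
{
  struct FloatVect3 tmp;
  VECT3_DIFF(tmp, *Va, *VOa);        /* point relative to the target frame's origin */
  float_rmat_vmult(Vb, Rba, &tmp);   /* rotate into the target frame */
}

/* The opposite direction (cf. Vb_to_Va) rotates back with the transposed matrix,
 * e.g. via float_rmat_transp_vmult(), and then adds the origin offset again. */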
An image gradient (int16 per pixel)
Definition: image.h:39
int sobel_OCV(struct image_t *img_input, const struct image_t *img_output, const int kernel_size, const int thr)
int opening_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
uint16_t K_median_w
Width of the median kernel.
Definition: wedgebug.c:112
uint16_t threshold_depth_of_edges
Below this depth (m), edges are eligible for the WedgeBug algorithm (i.e. edges cannot be very far away).
Definition: wedgebug.c:150
struct kernel_C1 median_kernel
Kernel used to measure the median disparity in front of the drone.
Definition: wedgebug.c:102
void background_processes_16bit(uint8_t save_images_flag)
Definition: wedgebug.c:1489
uint8_t initiated
Definition: wedgebug.c:235
uint16_t w
The width.
Definition: image.h:87
static struct NedCoor_f * stateGetPositionNed_f(void)
Get position in local NED coordinates (float).
Definition: state.h:710
struct FloatVect3 VGOALwned
Declared vector of coordinates of goal in NED world coordinate system.
Definition: wedgebug.c:122
#define AP_MODE_ATTITUDE_Z_HOLD
struct FloatVect3 VSTARTwned
Declared vector of coordinates of start position in NED world coordinate system.
Definition: wedgebug.c:119
void Vw_to_Vc(struct FloatVect3 *Vc, struct FloatVect3 *Vw, struct FloatRMat *Rrw, struct FloatVect3 *VRw, struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
Definition: wedgebug.c:894
void Vc_to_Vw(struct FloatVect3 *Vw, struct FloatVect3 *Vc, struct FloatRMat *Rrw, struct FloatVect3 *VRw, struct FloatRMat *Rcr, struct FloatVect3 *VCr, const uint8_t verbose)
Definition: wedgebug.c:950
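A sketch of the chained transform suggested by the Vw_to_Vc() parameter list: a world point is first expressed in the robot frame (rotation Rrw, robot position VRw), then in the camera frame (rotation Rcr, camera offset VCr). The exact conventions are assumptions; only the chaining is the point:
#include "math/pprz_algebra.h"
#include "math/pprz_algebra_float.h"

static void example_w_to_c(struct FloatVect3 *Vc, struct FloatVect3 *Vw,
                           struct FloatRMat *Rrw, struct FloatVect3 *VRw,
                           struct FloatRMat *Rcr, struct FloatVect3 *VCr)
{
  struct FloatVect3 tmp, Vr;
  VECT3_DIFF(tmp, *Vw, *VRw);         /* world point relative to the robot origin */
  float_rmat_vmult(&Vr, Rrw, &tmp);   /* world frame -> robot frame */
  VECT3_DIFF(tmp, Vr, *VCr);          /* robot point relative to the camera origin */
  float_rmat_vmult(Vc, Rcr, &tmp);    /* robot frame -> camera frame */
}

/* Vc_to_Vw() walks the same chain in reverse, using the transposed rotations. */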
int erosion_OCV(struct image_t *img_input, const struct image_t *img_output, const int SE_size, const int iteration)
uint8_t is_mode_changed_flag
Set to 1 if the control mode of the drone was changed, 0 otherwise.
Definition: wedgebug.c:179
int SE_erosion_OCV
SE size for the erosion operation (see state 3 "WEDGEBUG_START" and state 6 "POSITION_EDGE"; it is needed to "drag" the depth of the foreground objects over the detected edges).
Definition: wedgebug.c:110
bool autopilot_guided_goto_ned(float x, float y, float z, float heading)
Set position and heading setpoints in GUIDED mode.
struct FloatVect3 VDISTANCEPOSITIONwned
Declared vector of coordinates of "best" edge detected in ENU world coordinate system.
Definition: wedgebug.c:133
struct image_t img_edges_int8_cropped
Image obtained from the external Sobel edge detection function (sobel_OCV).
Definition: wedgebug.c:82
struct FloatVect3 VPBESTEDGECOORDINATESwned
Declared vector of coordinates of previous "best" edge detected in NED world coordinate system.
Definition: wedgebug.c:137
void wedgebug_init()
Definition: wedgebug.c:1592
float distance_traveled
Variable to hold the distance traveled by the drone.
Definition: wedgebug.c:263
struct State state
Definition: state.c:36
int SE_dilation_OCV_1
SE size for the first dilation operation.
Definition: wedgebug.c:108
uint8_t allow_state_change_MOVE_TO_EDGE
Definition: wedgebug.c:274
int16_t heading_confidence
This is the confidence that the desired heading is reached.
Definition: wedgebug.c:156
uint8_t autopilot_get_mode(void)
get autopilot mode
Definition: autopilot.c:184
float x
in meters
A JPEG encoded image (not per pixel encoded)
Definition: image.h:38
enum image_type type
The image type.
Definition: image.h:45
struct image_t img_middle_int16_cropped
Definition: wedgebug.c:85
struct image_t img_middle_int8_cropped
Image obtained after processing (morphological operations) of the previous image.
Definition: wedgebug.c:81